diff --git a/.eslintignore b/.eslintignore
index 298aa7585765f..29d959598349e 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,2 +1,5 @@
 .eslintrc.js
 jest.config.ts
+node_modules
+rust
+livestream
diff --git a/.eslintrc.js b/.eslintrc.js
index fa36af54fc983..f1af109bde41a 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -12,7 +12,7 @@ const globals = {
 }
 
 module.exports = {
-    ignorePatterns: ['node_modules', 'plugin-server'],
+    ignorePatterns: ['node_modules', 'plugin-server', 'rust', 'livestream'],
     env,
     settings: {
         react: {
diff --git a/.github/workflows/alert-on-failed-automerge.yml b/.github/workflows/alert-on-failed-automerge.yml
index af9fde9675ed4..31582b50d11c1 100644
--- a/.github/workflows/alert-on-failed-automerge.yml
+++ b/.github/workflows/alert-on-failed-automerge.yml
@@ -9,7 +9,7 @@ on:
 jobs:
     notify_on_failure:
         if: ${{ github.event.check_suite.conclusion == 'failure' }}
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         steps:
             - name: Checkout code
diff --git a/.github/workflows/automerge.yml b/.github/workflows/automerge.yml
index 20a3dbf314649..4b2a41b91384f 100644
--- a/.github/workflows/automerge.yml
+++ b/.github/workflows/automerge.yml
@@ -23,7 +23,7 @@ on:
 jobs:
     automerge:
         name: Automerge if requested
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         env:
             IS_POSTHOG_BOT_AVAILABLE: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN != '' }}
         steps:
diff --git a/.github/workflows/browserslist-update.yml b/.github/workflows/browserslist-update.yml
new file mode 100644
index 0000000000000..ff27f8d6745f2
--- /dev/null
+++ b/.github/workflows/browserslist-update.yml
@@ -0,0 +1,40 @@
+name: Update Browserslist database
+
+on:
+    schedule:
+        - cron: '0 12 * * MON'
+    workflow_dispatch:
+
+permissions:
+    contents: write
+    pull-requests: write
+
+jobs:
+    update-browserslist-database:
+        runs-on: ubuntu-latest
+        steps:
+            - name: Checkout repository
+              uses: actions/checkout@v3
+              with:
+                  fetch-depth: 0
+
+            - name: Configure git
+              run: |
+                  git config --global user.email "action@github.com"
+                  git config --global user.name "Browserslist Update Action"
+
+            - name: Install pnpm
+              uses: pnpm/action-setup@v4
+
+            - name: Set up Node.js
+              uses: actions/setup-node@v4
+              with:
+                  node-version: 18.12.1
+
+            - name: Update Browserslist database and create PR if applies
+              uses: c2corg/browserslist-update-action@v2
+              with:
+                  github_token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} # This token has permission to open PRs
+                  commit_message: 'build: update Browserslist db'
+                  title: 'build: update Browserslist db'
+                  labels: 'dependencies, automerge'
diff --git a/.github/workflows/ci-backend-update-test-timing.yml b/.github/workflows/ci-backend-update-test-timing.yml
index 39a1993119701..eb1c36329ce6e 100644
--- a/.github/workflows/ci-backend-update-test-timing.yml
+++ b/.github/workflows/ci-backend-update-test-timing.yml
@@ -19,7 +19,7 @@ env:
 jobs:
     django:
         name: Run Django tests and save test durations
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         steps:
            - uses: actions/checkout@v3
diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml
index 943b6611006d7..410d4deb18461 100644
--- a/.github/workflows/ci-backend.yml
+++ b/.github/workflows/ci-backend.yml
@@ -43,7 +43,7 @@ jobs:
     # Job to decide if we should run backend ci
     # See https://github.com/dorny/paths-filter#conditional-execution for more details
     changes:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
        timeout-minutes: 5
        name: Determine need to run backend checks
        # Set job outputs to values from filter step
@@ -94,7 +94,7 @@
 
         timeout-minutes: 30
         name: Python code quality checks
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         steps:
             # If this run wasn't initiated by the bot (meaning: snapshot update) and we've determined
@@ -153,7 +153,7 @@
 
         timeout-minutes: 10
         name: Validate Django and CH migrations
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         steps:
             - uses: actions/checkout@v3
@@ -228,7 +228,7 @@
 
         timeout-minutes: 30
         name: Django tests – ${{ matrix.segment }} (persons-on-events ${{ matrix.person-on-events && 'on' || 'off' }}), Py ${{ matrix.python-version }}, ${{ matrix.clickhouse-server-image }} (${{matrix.group}}/${{ matrix.concurrency }})
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         strategy:
             fail-fast: false
@@ -328,7 +328,7 @@ matrix:
             clickhouse-server-image: ['clickhouse/clickhouse-server:24.8.7.41']
         if: needs.changes.outputs.backend == 'true'
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         steps:
             - name: 'Checkout repo'
              uses: actions/checkout@v3
@@ -382,7 +382,7 @@
 
     calculate-running-time:
         name: Calculate running time
        needs: [django, async-migrations]
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
        if: # Run on pull requests to PostHog/posthog + on PostHog/posthog outside of PRs - but never on forks
            needs.changes.outputs.backend == 'true' && (
diff --git a/.github/workflows/ci-e2e.yml b/.github/workflows/ci-e2e.yml
index 97deeffe6bd2a..4ec28981e5f1d 100644
--- a/.github/workflows/ci-e2e.yml
+++ b/.github/workflows/ci-e2e.yml
@@ -14,7 +14,7 @@ concurrency:
 
 jobs:
     changes:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 5
         name: Determine need to run E2E checks
         # Set job outputs to values from filter step
@@ -53,7 +53,7 @@ jobs:
     chunks:
         needs: changes
         name: Cypress preparation
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 5
         outputs:
             chunks: ${{ steps.chunk.outputs.chunks }}
@@ -67,7 +67,7 @@
 
     container:
         name: Build and cache container image
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 60
         needs: [changes]
         permissions:
@@ -91,7 +91,7 @@
 
     cypress:
         name: Cypress E2E tests (${{ strategy.job-index }})
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 60
         needs: [chunks, changes, container]
         permissions:
@@ -279,7 +279,7 @@
 
     calculate-running-time:
         name: Calculate running time
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         needs: [cypress]
         if: needs.changes.outputs.shouldTriggerCypress == 'true' &&
             github.event.pull_request.head.repo.full_name == 'PostHog/posthog'
diff --git a/.github/workflows/ci-frontend.yml b/.github/workflows/ci-frontend.yml
index d466e0d3cdb82..f59c7e8eef790 100644
--- a/.github/workflows/ci-frontend.yml
+++ b/.github/workflows/ci-frontend.yml
@@ -16,7 +16,7 @@ jobs:
     # we skip each step individually, so they are still reported as success
     # because many of them are required for CI checks to be green
     changes:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 5
         name: Determine need to run frontend checks
         outputs:
@@ -54,7 +54,7 @@ jobs:
         name: Code quality checks
         needs: changes
         # kea typegen and typescript:check need some more oomph
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         steps:
             # we need at least one thing to run to make sure we include everything for required jobs
             - uses: actions/checkout@v3
@@ -121,7 +121,7 @@
                   minimum-change-threshold: 1000
 
     jest:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         needs: changes
         name: Jest test (${{ matrix.segment }} - ${{ matrix.chunk }})
 
diff --git a/.github/workflows/ci-hobby.yml b/.github/workflows/ci-hobby.yml
index 0025e656c8204..73d29cbdad746 100644
--- a/.github/workflows/ci-hobby.yml
+++ b/.github/workflows/ci-hobby.yml
@@ -21,7 +21,7 @@ concurrency:
 
 jobs:
     changes:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         # this is a slow one
         timeout-minutes: 30
         name: Setup DO Hobby Instance and test
diff --git a/.github/workflows/ci-hog.yml b/.github/workflows/ci-hog.yml
index 7fee499a801f1..ea51f70721f5c 100644
--- a/.github/workflows/ci-hog.yml
+++ b/.github/workflows/ci-hog.yml
@@ -20,7 +20,7 @@ jobs:
     # Job to decide if we should run backend ci
     # See https://github.com/dorny/paths-filter#conditional-execution for more details
     changes:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 5
         name: Determine need to run Hog checks
         # Set job outputs to values from filter step
@@ -48,25 +48,23 @@ jobs:
     hog-tests:
         needs: changes
         timeout-minutes: 30
-        name: Hog tests
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
+        if: needs.changes.outputs.hog == 'true'
         steps:
             # If this run wasn't initiated by the bot (meaning: snapshot update) and we've determined
             # there are backend changes, cancel previous runs
             - uses: n1hility/cancel-previous-runs@v3
-              if: github.actor != 'posthog-bot' && needs.changes.outputs.hog == 'true'
+              if: github.actor != 'posthog-bot'
               with:
                   token: ${{ secrets.GITHUB_TOKEN }}
 
             - uses: actions/checkout@v3
-              if: needs.changes.outputs.hog == 'true'
               with:
                   fetch-depth: 1
 
             - name: Set up Python
-              if: needs.changes.outputs.hog == 'true'
              uses: actions/setup-python@v5
              with:
                  python-version: 3.11.9
@@ -76,31 +74,25 @@
 
             # uv is a fast pip alternative: https://github.com/astral-sh/uv/
             - run: pip install uv
-              if: needs.changes.outputs.hog == 'true'
 
             - name: Install SAML (python3-saml) dependencies
-              if: needs.changes.outputs.hog == 'true'
               run: |
                   sudo apt-get update
                   sudo apt-get install libxml2-dev libxmlsec1 libxmlsec1-dev libxmlsec1-openssl
 
             - name: Install Python dependencies
-              if: needs.changes.outputs.hog == 'true'
               run: |
                   uv pip install --system -r requirements.txt -r requirements-dev.txt
 
             - name: Install pnpm
-              if: needs.changes.outputs.hog == 'true'
               uses: pnpm/action-setup@v4
 
             - name: Set up Node.js
-              if: needs.changes.outputs.hog == 'true'
               uses: actions/setup-node@v4
               with:
-                  node-version: 18.12.1
+                  node-version: 18
 
             - name: Check if ANTLR definitions are up to date
-              if: needs.changes.outputs.hog == 'true'
               run: |
                   cd ..
                  sudo apt-get install default-jre
@@ -123,27 +115,175 @@ jobs:
                  ANTLR_VERSION: '4.13.2'
 
             - name: Check if STL bytecode is up to date
-              if: needs.changes.outputs.hog == 'true'
               run: |
                   python -m hogvm.stl.compile
                   git diff --exit-code
 
             - name: Run HogVM Python tests
-              if: needs.changes.outputs.hog == 'true'
               run: |
                   pytest hogvm
 
             - name: Run HogVM TypeScript tests
-              if: needs.changes.outputs.hog == 'true'
               run: |
                   cd hogvm/typescript
                   pnpm install --frozen-lockfile
                   pnpm run test
 
             - name: Run Hog tests
-              if: needs.changes.outputs.hog == 'true'
               run: |
                   cd hogvm/typescript
                   pnpm run build
                   cd ../
                   ./test.sh && git diff --exit-code
+
+    check-package-version:
+        name: Check HogVM TypeScript package version and detect an update
+        needs: hog-tests
+        if: needs.hog-tests.result == 'success' && needs.changes.outputs.hog == 'true'
+        runs-on: ubuntu-24.04
+        outputs:
+            committed-version: ${{ steps.check-package-version.outputs.committed-version }}
+            published-version: ${{ steps.check-package-version.outputs.published-version }}
+            is-new-version: ${{ steps.check-package-version.outputs.is-new-version }}
+        steps:
+            - name: Checkout the repository
+              uses: actions/checkout@v2
+            - name: Check package version and detect an update
+              id: check-package-version
+              uses: PostHog/check-package-version@v2
+              with:
+                  path: hogvm/typescript
+
+    release-hogvm:
+        name: Release new HogVM TypeScript version
+        runs-on: ubuntu-24.04
+        needs: check-package-version
+        if: needs.changes.outputs.hog == 'true' && needs.check-package-version.outputs.is-new-version == 'true'
+        env:
+            COMMITTED_VERSION: ${{ needs.check-package-version.outputs.committed-version }}
+            PUBLISHED_VERSION: ${{ needs.check-package-version.outputs.published-version }}
+        steps:
+            - name: Checkout the repository
+              uses: actions/checkout@v4
+              with:
+                  fetch-depth: 1
+                  token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
+            - name: Set up Python
+              uses: actions/setup-python@v5
+              with:
+                  python-version: 3.11.9
+                  cache: 'pip'
+                  cache-dependency-path: '**/requirements*.txt'
+                  token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
+            - run: pip install uv
+            - name: Install SAML (python3-saml) dependencies
+              run: |
+                  sudo apt-get update
+                  sudo apt-get install libxml2-dev libxmlsec1 libxmlsec1-dev libxmlsec1-openssl
+            - name: Install Python dependencies
+              run: |
+                  uv pip install --system -r requirements.txt -r requirements-dev.txt
+            - name: Install pnpm
+              uses: pnpm/action-setup@v4
+            - name: Set up Node 18
+              uses: actions/setup-node@v4
+              with:
+                  node-version: 18
+                  registry-url: https://registry.npmjs.org
+            - name: Install package.json dependencies
+              run: cd hogvm/typescript && pnpm install
+            - name: Publish the package in the npm registry
+              run: cd hogvm/typescript && npm publish --access public
+              env:
+                  NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+            - name: Sleep 60 seconds to allow npm to update the package
+              run: sleep 60
+
+    update-versions:
+        name: Update versions in package.json
+        runs-on: ubuntu-24.04
+        needs: release-hogvm
+        if: always() # This ensures the job runs regardless of the result of release-hogvm
+        steps:
+            - name: Checkout the repository
+              uses: actions/checkout@v4
+              with:
+                  fetch-depth: 1
+                  repository: ${{ github.event.pull_request.head.repo.full_name }}
+                  ref: ${{ github.event.pull_request.head.ref }}
+                  token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
+
+            - name: Install pnpm
+              uses: pnpm/action-setup@v4
+            - name: Set up Node 18
+              uses: actions/setup-node@v4
+              with:
+                  node-version: 18
+                  registry-url: https://registry.npmjs.org
+
+            - name: Check for version mismatches
+              id: check-mismatch
+              run: |
+                  # Extract committed version
+                  HOGVM_VERSION=$(jq -r '.version' hogvm/typescript/package.json)
+
+                  # Compare dependencies in package.json
+                  MAIN_VERSION=$(jq -r '.dependencies."@posthog/hogvm"' package.json | tr -d '^')
+                  PLUGIN_VERSION=$(jq -r '.dependencies."@posthog/hogvm"' plugin-server/package.json | tr -d '^')
+
+                  echo "HOGVM_VERSION=$HOGVM_VERSION"
+                  echo "MAIN_VERSION=$MAIN_VERSION"
+                  echo "PLUGIN_VERSION=$PLUGIN_VERSION"
+
+                  # Set output if mismatches exist
+                  if [[ "$HOGVM_VERSION" != "$MAIN_VERSION" || "$HOGVM_VERSION" != "$PLUGIN_VERSION" ]]; then
+                      echo "mismatch=true" >> "$GITHUB_ENV"
+                  else
+                      echo "mismatch=false" >> "$GITHUB_ENV"
+                  fi
+
+            - name: Update package.json versions
+              if: env.mismatch == 'true'
+              run: |
+                  VERSION=$(jq ".version" hogvm/typescript/package.json -r)
+
+                  retry_pnpm_install() {
+                      local retries=0
+                      local max_retries=20 # 10 minutes total
+                      local delay=30
+
+                      while [[ $retries -lt $max_retries ]]; do
+                          echo "Attempting pnpm install (retry $((retries+1))/$max_retries)..."
+                          pnpm install --no-frozen-lockfile && break
+                          echo "Install failed. Retrying in $delay seconds..."
+                          sleep $delay
+                          retries=$((retries + 1))
+                      done
+
+                      if [[ $retries -eq $max_retries ]]; then
+                          echo "pnpm install failed after $max_retries attempts."
+                          exit 1
+                      fi
+                  }
+
+                  # Update main package.json
+                  mv package.json package.old.json
+                  jq --indent 4 '.dependencies."@posthog/hogvm" = "^'$VERSION'"' package.old.json > package.json
+                  rm package.old.json
+                  retry_pnpm_install
+
+                  # Update plugin-server/package.json
+                  cd plugin-server
+                  mv package.json package.old.json
+                  jq --indent 4 '.dependencies."@posthog/hogvm" = "^'$VERSION'"' package.old.json > package.json
+                  rm package.old.json
+                  retry_pnpm_install
+
+            - name: Commit updated package.json files
+              if: env.mismatch == 'true'
+              uses: EndBug/add-and-commit@v9
+              with:
+                  add: '["package.json", "pnpm-lock.yaml", "plugin-server/package.json", "plugin-server/pnpm-lock.yaml", "hogvm/typescript/package.json"]'
+                  message: 'Update @posthog/hogvm version in package.json'
+                  default_author: github_actions
+                  github_token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
diff --git a/.github/workflows/ci-plugin-server.yml b/.github/workflows/ci-plugin-server.yml
index ecd13c789ddf8..30ca845cd89b6 100644
--- a/.github/workflows/ci-plugin-server.yml
+++ b/.github/workflows/ci-plugin-server.yml
@@ -24,7 +24,7 @@ jobs:
     # Job to decide if we should run plugin server ci
     # See https://github.com/dorny/paths-filter#conditional-execution for more details
     changes:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 5
         name: Determine need to run plugin server checks
         outputs:
@@ -53,7 +53,7 @@ jobs:
         name: Code quality
         needs: changes
         if: needs.changes.outputs.plugin-server == 'true'
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         defaults:
             run:
                 working-directory: 'plugin-server'
@@ -81,7 +81,7 @@ jobs:
     tests:
         name: Plugin Server Tests (${{matrix.shard}})
         needs: changes
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         strategy:
             fail-fast: false
@@ -193,7 +193,7 @@ jobs:
     functional-tests:
         name: Functional tests
         needs: changes
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         env:
            REDIS_URL: 'redis://localhost'
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index a914e2a95225e..56f1ed3bf0330 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -31,7 +31,7 @@ jobs:
         # - https://gh.io/supported-runners-and-hardware-resources
        # - https://gh.io/using-larger-runners (GitHub.com only)
        # Consider using larger runners or machines with greater resources for possible analysis time improvements.
-        runs-on: 'ubuntu-latest'
+        runs-on: 'ubuntu-24.04'
         timeout-minutes: 15
         permissions:
             # required for all workflows
diff --git a/.github/workflows/codespaces.yml b/.github/workflows/codespaces.yml
index b725c032ddacd..06f796951dbe2 100644
--- a/.github/workflows/codespaces.yml
+++ b/.github/workflows/codespaces.yml
@@ -21,7 +21,7 @@ on:
 jobs:
     build:
         name: Build Codespaces image
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         # Build on master and PRs with the label 'codespaces-build' only
         if: ${{ github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'codespaces-build') }}
diff --git a/.github/workflows/container-images-cd.yml b/.github/workflows/container-images-cd.yml
index e7fe1a1608b73..b393214ec72da 100644
--- a/.github/workflows/container-images-cd.yml
+++ b/.github/workflows/container-images-cd.yml
@@ -22,7 +22,7 @@ jobs:
     posthog_build:
         name: Build and push PostHog
         if: github.repository == 'PostHog/posthog'
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         permissions:
             id-token: write # allow issuing OIDC tokens for this workflow run
             contents: read # allow at least reading the repo contents, add other permissions if necessary
diff --git a/.github/workflows/container-images-ci.yml b/.github/workflows/container-images-ci.yml
index c690c11dd2663..7b434a7cb546d 100644
--- a/.github/workflows/container-images-ci.yml
+++ b/.github/workflows/container-images-ci.yml
@@ -13,7 +13,7 @@ concurrency:
 jobs:
     posthog_build:
         name: Build Docker image
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         permissions:
             id-token: write # allow issuing OIDC tokens for this workflow run
             contents: read # allow at least reading the repo contents, add other permissions if necessary
@@ -51,7 +51,7 @@ jobs:
 
     lint:
         name: Lint changed Dockerfiles
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         steps:
             - name: Check out
               uses: actions/checkout@v3
diff --git a/.github/workflows/copy-clickhouse-udfs.yml b/.github/workflows/copy-clickhouse-udfs.yml
index 3dc6fce3ade07..b55d66bc30e8b 100644
--- a/.github/workflows/copy-clickhouse-udfs.yml
+++ b/.github/workflows/copy-clickhouse-udfs.yml
@@ -9,7 +9,7 @@ on:
 
 jobs:
     trigger_udfs_workflow:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         steps:
             - name: Trigger UDFs Workflow
               uses: benc-uk/workflow-dispatch@v1
diff --git a/.github/workflows/foss-sync.yml b/.github/workflows/foss-sync.yml
index 82334bfe89f5a..b8edfe63210b9 100644
--- a/.github/workflows/foss-sync.yml
+++ b/.github/workflows/foss-sync.yml
@@ -10,7 +10,7 @@ jobs:
     repo-sync:
         name: Sync posthog-foss with posthog
         if: github.repository == 'PostHog/posthog'
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         steps:
             - name: Sync repositories 1 to 1 - master branch
               uses: PostHog/git-sync@v3
diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml
index 37c4e75ddb9bc..4fc7344a97216 100644
--- a/.github/workflows/go.yml
+++ b/.github/workflows/go.yml
@@ -7,7 +7,7 @@ on:
 
 jobs:
     test:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         steps:
             - name: Checkout code
diff --git a/.github/workflows/lint-pr.yml b/.github/workflows/lint-pr.yml
index 3efcc6c9523de..2f700829f2921 100644
--- a/.github/workflows/lint-pr.yml
+++ b/.github/workflows/lint-pr.yml
@@ -10,7 +10,7 @@ on:
 jobs:
     lint-pr:
         name: Validate PR title against Conventional Commits
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         steps:
             - uses: amannn/action-semantic-pull-request@v5
               env:
diff --git a/.github/workflows/livestream-docker-image.yml b/.github/workflows/livestream-docker-image.yml
index 7023ee98c03f1..5efde377f5866 100644
--- a/.github/workflows/livestream-docker-image.yml
+++ b/.github/workflows/livestream-docker-image.yml
@@ -10,7 +10,7 @@ on:
 
 jobs:
     build:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         permissions:
             contents: read
 
@@ -59,7 +59,7 @@ jobs:
                   labels: ${{ steps.meta.outputs.labels }}
 
     deploy:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         needs: build
         steps:
             - name: get deployer token
diff --git a/.github/workflows/pr-cleanup.yml b/.github/workflows/pr-cleanup.yml
index 26f46533847f7..0c0586e8ec0b5 100644
--- a/.github/workflows/pr-cleanup.yml
+++ b/.github/workflows/pr-cleanup.yml
@@ -9,7 +9,7 @@ on:
 jobs:
     deploy_preview_cleanup:
         name: Deploy Preview Cleanup
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         if: ${{ contains(github.event.pull_request.labels.*.name, 'deploy') }}
 
         permissions:
diff --git a/.github/workflows/pr-deploy.yml b/.github/workflows/pr-deploy.yml
index 26896c7566091..807b1823dc17b 100644
--- a/.github/workflows/pr-deploy.yml
+++ b/.github/workflows/pr-deploy.yml
@@ -10,7 +10,7 @@ on:
 jobs:
     deploy_preview:
         name: Deploy preview environment
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
 
         permissions:
             id-token: write
diff --git a/.github/workflows/release-hogvm.yml b/.github/workflows/release-hogvm.yml
deleted file mode 100644
index 3d173bcca4591..0000000000000
--- a/.github/workflows/release-hogvm.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-name: 'Publish HogVM package'
-
-on:
-    push:
-        branches:
-            - main
-            - master
-
-jobs:
-    release:
-        name: Publish
-        runs-on: ubuntu-latest
-        defaults:
-            run:
-                working-directory: hogvm/typescript
-        steps:
-            - name: Checkout the repository
-              uses: actions/checkout@v4
-
-            - name: Check package version and detect an update
-              id: check-package-version
-              uses: PostHog/check-package-version@v2
-              with:
-                  path: hogvm/typescript
-
-            - name: Install pnpm
-              uses: pnpm/action-setup@v4
-
-            - name: Set up Node
-              uses: actions/setup-node@v4
-              if: steps.check-package-version.outputs.is-new-version == 'true'
-              with:
-                  node-version: 18.12.1
-                  registry-url: https://registry.npmjs.org
-                  cache: pnpm
-                  cache-dependency-path: hogvm/typescript/pnpm-lock.yaml
-
-            - name: Install dependencies
-              if: steps.check-package-version.outputs.is-new-version == 'true'
-              run: pnpm i --frozen-lockfile
-
-            - name: Build
-              if: steps.check-package-version.outputs.is-new-version == 'true'
-              run: pnpm build
-
-            - name: Publish the package in the npm registry
-              id: publish-package
-              if: steps.check-package-version.outputs.is-new-version == 'true'
-              run: |
-                  pnpm publish --access public --tag latest
-              env:
-                  NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
diff --git a/.github/workflows/rust-docker-build.yml b/.github/workflows/rust-docker-build.yml
index 1535867f572f0..4237599d9b311 100644
--- a/.github/workflows/rust-docker-build.yml
+++ b/.github/workflows/rust-docker-build.yml
@@ -118,7 +118,7 @@ jobs:
 
     deploy:
         name: deploy ${{ matrix.release }}
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         needs: build
         if: github.ref == 'refs/heads/master'
         strategy:
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 0dd62fcd25f7f..29268601c4db4 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -13,7 +13,7 @@ jobs:
     # Job to decide if we should run rust ci
     # See https://github.com/dorny/paths-filter#conditional-execution for more details
     changes:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
        timeout-minutes: 5
        if: github.repository == 'PostHog/posthog'
        name: Determine need to run Rust checks
diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml
index 9f2797cca668a..635e01600286c 100644
--- a/.github/workflows/stale.yaml
+++ b/.github/workflows/stale.yaml
@@ -7,7 +7,7 @@ jobs:
     stale:
         # Only unleash the stale bot on PostHog/posthog, as there's no POSTHOG_BOT_GITHUB_TOKEN token on forks
         if: ${{ github.repository == 'PostHog/posthog' }}
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         steps:
             - uses: actions/stale@v9
               with:
diff --git a/.github/workflows/storybook-chromatic.yml b/.github/workflows/storybook-chromatic.yml
index 809328e296c90..7d5d68e1ac2a3 100644
--- a/.github/workflows/storybook-chromatic.yml
+++ b/.github/workflows/storybook-chromatic.yml
@@ -18,7 +18,7 @@ concurrency:
 jobs:
     storybook-chromatic:
         name: Publish to Chromatic
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         timeout-minutes: 15
         if: github.event.pull_request.head.repo.full_name == github.repository # Don't run on forks
         outputs:
@@ -50,7 +50,7 @@ jobs:
 
    visual-regression:
        name: Visual regression tests
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
        timeout-minutes: 30
        container:
            image: mcr.microsoft.com/playwright:v1.45.0
@@ -211,7 +211,7 @@ jobs:
 
    visual-regression-summary:
        name: Summarize visual regression tests
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
        timeout-minutes: 5
        needs: visual-regression
        if: always() # Run even if visual-regression fails for one (or more) of the browsers
diff --git a/.github/workflows/storybook-deploy.yml b/.github/workflows/storybook-deploy.yml
index 9a04f8e582ced..33520a355a244 100644
--- a/.github/workflows/storybook-deploy.yml
+++ b/.github/workflows/storybook-deploy.yml
@@ -8,7 +8,7 @@ on:
 
 jobs:
     storybook-deployment:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         if: github.repository == 'PostHog/posthog'
         steps:
             - name: Check out PostHog/posthog repo
diff --git a/.github/workflows/vector-docker-build-deploy.yml b/.github/workflows/vector-docker-build-deploy.yml
index a1d97f846f298..d78e2e9b2d089 100644
--- a/.github/workflows/vector-docker-build-deploy.yml
+++ b/.github/workflows/vector-docker-build-deploy.yml
@@ -74,7 +74,7 @@ jobs:
                   platforms: linux/arm64,linux/amd64
 
     deploy:
-        runs-on: ubuntu-latest
+        runs-on: ubuntu-24.04
         needs: build
         if: github.ref == 'refs/heads/master'
         steps:
diff --git a/.gitignore b/.gitignore
index a41dd0980a217..362ec30bce107 100644
--- a/.gitignore
+++ b/.gitignore
@@ -76,3 +76,4 @@ pyrightconfig.json
 .temporal-worker-settings
 temp_test_run_data.json
 .temp-deepeval-cache.json
+.eslintcache
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 3aa83e0bb4e0d..9eb3fe62780f2 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -106,6 +106,30 @@
                 "group": "main"
             }
         },
+        {
+            "name": "Celery Beat",
+            "consoleName": "Celery Beat",
+            "type": "debugpy",
+            "justMyCode": true,
+            "autoReload": {
+                "enable": true,
+                "include": ["posthog/**/*.py"]
+            },
+            "request": "launch",
+            "program": "${workspaceFolder}/manage.py",
+            "args": ["run_autoreload_celery", "--type=beat"],
+            "console": "integratedTerminal",
+            "cwd": "${workspaceFolder}",
+            "env": {
+                "SKIP_ASYNC_MIGRATIONS_SETUP": "1",
+                "DEBUG": "1",
+                "BILLING_SERVICE_URL": "https://billing.dev.posthog.dev",
+                "SKIP_SERVICE_VERSION_REQUIREMENTS": "1"
+            },
+            "presentation": {
+                "group": "main"
+            }
+        },
         {
             "name": "Plugin Server",
             "command": "npm run start:dev",
@@ -200,6 +224,27 @@
            "console": "integratedTerminal",
            "internalConsoleOptions": "neverOpen",
            "runtimeExecutable": "${env:HOME}/.nvm/versions/node/${input:pickVersion}/bin/node"
"${env:HOME}/.nvm/versions/node/${input:pickVersion}/bin/node" + }, + { + "name": "Python Debugger: Django Migrations", + "type": "debugpy", + "request": "launch", + "args": ["migrate", "posthog"], + "django": true, + "env": { + "PYTHONUNBUFFERED": "1", + "DJANGO_SETTINGS_MODULE": "posthog.settings", + "DEBUG": "1", + "CLICKHOUSE_SECURE": "False", + "KAFKA_HOSTS": "localhost", + "DATABASE_URL": "postgres://posthog:posthog@localhost:5432/posthog", + "SKIP_SERVICE_VERSION_REQUIREMENTS": "1", + "PRINT_SQL": "1", + "BILLING_SERVICE_URL": "http://localhost:8100/", + "CLOUD_DEPLOYMENT": "dev" + }, + "autoStartBrowser": false, + "program": "${workspaceFolder}/manage.py" } ], "inputs": [ @@ -212,7 +257,14 @@ "compounds": [ { "name": "PostHog", - "configurations": ["Backend", "Celery Threaded Pool", "Frontend", "Plugin Server", "Temporal Worker"], + "configurations": [ + "Backend", + "Celery Threaded Pool", + "Celery Beat", + "Frontend", + "Plugin Server", + "Temporal Worker" + ], "stopAll": true, "presentation": { "order": 1, diff --git a/bin/mprocs.yaml b/bin/mprocs.yaml new file mode 100644 index 0000000000000..c7831d818d589 --- /dev/null +++ b/bin/mprocs.yaml @@ -0,0 +1,24 @@ +procs: + celery-worker: + shell: 'source ./bin/celery-queues.env && python manage.py run_autoreload_celery --type=worker' + + celery-beat: + shell: 'source ./bin/celery-queues.env && python manage.py run_autoreload_celery --type=beat' + + plugin-server: + shell: './bin/plugin-server' + + backend: + shell: './bin/start-backend' + + frontend: + shell: './bin/start-frontend' + + temporal-worker: + # added a sleep to give the docker stuff time to start + shell: 'sleep 10 && python3 manage.py start_temporal_worker' + + docker-compose: + shell: 'docker compose -f docker-compose.dev.yml up' + stop: + send-keys: [''] diff --git a/bin/start-mprocs b/bin/start-mprocs new file mode 100755 index 0000000000000..ceaddede2140f --- /dev/null +++ b/bin/start-mprocs @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e + +export DEBUG=${DEBUG:-1} +export SKIP_SERVICE_VERSION_REQUIREMENTS=${SKIP_SERVICE_VERSION_REQUIREMENTS:-1} +export BILLING_SERVICE_URL=${BILLING_SERVICE_URL:-https://billing.dev.posthog.dev} +export HOG_HOOK_URL=${HOG_HOOK_URL:-http://localhost:3300/hoghook} + +[ ! -f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=./share/GeoLite2-City.mmdb ) + +exec mprocs --config bin/mprocs.yaml diff --git a/cypress/README.md b/cypress/README.md index 0930a0ad7bcbf..f043f6f0546b4 100644 --- a/cypress/README.md +++ b/cypress/README.md @@ -2,17 +2,13 @@ The Cypress tests run with a PostHog instance that has no feature flags set up. -To test feature flags you can intercept the call to the `decide` endpoint +To test feature flags you can intercept the call to the `decide` endpoint using this helper ```javascript // sometimes the system under test calls `/decide` // and sometimes it calls https://app.posthog.com/decide -cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - // add feature flags here, for e.g. - // 'feature-flag-key': true, - }) - ) -) +setupFeatureFlags({ + // add feature flags here, for e.g. 
+    'feature-flag-key': true,
+})
 ```
diff --git a/cypress/e2e/alerts.cy.ts b/cypress/e2e/alerts.cy.ts
index 82bd6bc10f4fb..91ecad1d24489 100644
--- a/cypress/e2e/alerts.cy.ts
+++ b/cypress/e2e/alerts.cy.ts
@@ -1,15 +1,11 @@
-import { decideResponse } from '../fixtures/api/decide'
 import { createInsight, createInsightWithBreakdown } from '../productAnalytics'
+import { setupFeatureFlags } from '../support/decide'
 
 describe('Alerts', () => {
     beforeEach(() => {
-        cy.intercept('**/decide/*', (req) =>
-            req.reply(
-                decideResponse({
-                    alerts: true,
-                })
-            )
-        )
+        setupFeatureFlags({
+            alerts: true,
+        })
 
         createInsight('insight')
     })
diff --git a/cypress/e2e/dashboard.cy.ts b/cypress/e2e/dashboard.cy.ts
index b5f62097ebee1..cefbb60b6fe69 100644
--- a/cypress/e2e/dashboard.cy.ts
+++ b/cypress/e2e/dashboard.cy.ts
@@ -379,6 +379,8 @@ describe('Dashboard', () => {
         cy.get('[data-attr="date-filter"]').click()
         cy.contains('span', 'Last 14 days').click()
 
+        cy.wait(2000)
+
         // insight meta should be updated to show new date range
         cy.get('h5').contains('Last 14 days').should('exist')
diff --git a/cypress/e2e/experiments.cy.ts b/cypress/e2e/experiments.cy.ts
index 5a7d92c3f49c1..a635cf7841cad 100644
--- a/cypress/e2e/experiments.cy.ts
+++ b/cypress/e2e/experiments.cy.ts
@@ -1,4 +1,4 @@
-import { decideResponse } from '../fixtures/api/decide'
+import { setupFeatureFlags } from '../support/decide'
 
 describe('Experiments', () => {
     let randomNum
@@ -47,13 +47,10 @@ describe('Experiments', () => {
     })
 
     const createExperimentInNewUi = (): void => {
-        cy.intercept('**/decide/*', (req) =>
-            req.reply(
-                decideResponse({
-                    'new-experiments-ui': true,
-                })
-            )
-        )
+        setupFeatureFlags({
+            'new-experiments-ui': true,
+        })
+
         cy.visit('/experiments')
 
         // Name, flag key, description
diff --git a/cypress/e2e/exports.cy.ts b/cypress/e2e/exports.cy.ts
index 7e96b0c56d454..8131a98425602 100644
--- a/cypress/e2e/exports.cy.ts
+++ b/cypress/e2e/exports.cy.ts
@@ -1,18 +1,14 @@
 import { urls } from 'scenes/urls'
 
-import { decideResponse } from '../fixtures/api/decide'
+import { setupFeatureFlags } from '../support/decide'
 
 // NOTE: As the API data is randomly generated, we are only really testing here that the overall output is correct
 // The actual graph is not under test
 describe('Exporting Insights', () => {
     beforeEach(() => {
-        cy.intercept('https://us.i.posthog.com/decide/*', (req) =>
-            req.reply(
-                decideResponse({
-                    'export-dashboard-insights': true,
-                })
-            )
-        )
+        setupFeatureFlags({
+            'export-dashboard-insights': true,
+        })
         cy.visit(urls.insightNew())
         // apply filter
         cy.get('[data-attr$=add-filter-group]').click()
diff --git a/cypress/e2e/featureFlags.cy.ts b/cypress/e2e/featureFlags.cy.ts
index 2dceb97af6b21..df4d740b8ec4b 100644
--- a/cypress/e2e/featureFlags.cy.ts
+++ b/cypress/e2e/featureFlags.cy.ts
@@ -1,10 +1,10 @@
-import { decideResponse } from '../fixtures/api/decide'
+import { setupFeatureFlags } from '../support/decide'
 
 describe('Feature Flags', () => {
     let name
 
     beforeEach(() => {
-        cy.intercept('**/decide/*', (req) => req.reply(decideResponse({})))
+        setupFeatureFlags({})
 
         cy.intercept('/api/projects/*/property_definitions?type=person*', {
             fixture: 'api/feature-flags/property_definition',
diff --git a/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts b/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts
index 85f472a09c97b..a902e861bacd6 100644
--- a/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts
+++ b/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts
@@ -56,6 +56,9 @@ describe('Insights', () => {
     it('can open a new stickiness insight', () => {
         insight.clickTab('STICKINESS')
+        // this test flaps, so check for a parent element, that is present even when failing
+        // in the hope that it slows the test down a little and stops it flapping
+        cy.get('.InsightVizDisplay--type-stickiness').should('exist')
 
         cy.get('.TrendsInsight canvas').should('exist')
     })
 
diff --git a/cypress/e2e/onboarding.cy.ts b/cypress/e2e/onboarding.cy.ts
index b9453689a12aa..3ffd5ccc4bc27 100644
--- a/cypress/e2e/onboarding.cy.ts
+++ b/cypress/e2e/onboarding.cy.ts
@@ -1,16 +1,11 @@
-import { decideResponse } from '../fixtures/api/decide'
+import { setupFeatureFlags } from '../support/decide'
 
 describe('Onboarding', () => {
     beforeEach(() => {
         cy.intercept('/api/billing/', { fixture: 'api/billing/billing-unsubscribed.json' })
-
-        cy.intercept('**/decide/*', (req) =>
-            req.reply(
-                decideResponse({
-                    'product-intro-pages': 'test',
-                })
-            )
-        )
+        setupFeatureFlags({
+            'product-intro-pages': 'test',
+        })
     })
 
     it('Navigate between /products to /onboarding to a product intro page', () => {
diff --git a/cypress/e2e/signup.cy.ts b/cypress/e2e/signup.cy.ts
index 9774236ef81c4..76d7a694d8c50 100644
--- a/cypress/e2e/signup.cy.ts
+++ b/cypress/e2e/signup.cy.ts
@@ -1,4 +1,4 @@
-import { decideResponse } from '../fixtures/api/decide'
+import { setupFeatureFlags } from '../support/decide'
 
 const VALID_PASSWORD = 'hedgE-hog-123%'
 
@@ -171,13 +171,9 @@ describe('Signup', () => {
     })
 
     it('Shows redirect notice if redirecting for maintenance', () => {
-        cy.intercept('**/decide/*', (req) =>
-            req.reply(
-                decideResponse({
-                    'redirect-signups-to-instance': 'us',
-                })
-            )
-        )
+        setupFeatureFlags({
+            'redirect-signups-to-instance': 'us',
+        })
 
         cy.visit('/logout')
         cy.location('pathname').should('include', '/login')
diff --git a/cypress/fixtures/api/decide.js b/cypress/fixtures/api/decide.js
index 102f1211152c1..7c03b11c6dc48 100644
--- a/cypress/fixtures/api/decide.js
+++ b/cypress/fixtures/api/decide.js
@@ -8,6 +8,7 @@ export function decideResponse(featureFlags) {
         },
         isAuthenticated: true,
         supportedCompression: ['gzip', 'gzip-js', 'lz64'],
+        hasFeatureFlags: Object.keys(featureFlags).length > 0,
         featureFlags,
         sessionRecording: {
             endpoint: '/s/',
diff --git a/cypress/support/decide.ts b/cypress/support/decide.ts
new file mode 100644
index 0000000000000..a32e192f74d25
--- /dev/null
+++ b/cypress/support/decide.ts
@@ -0,0 +1,28 @@
+import { decideResponse } from '../fixtures/api/decide'
+
+export const setupFeatureFlags = (overrides: Record<string, any> = {}): void => {
+    // Tricky - the new RemoteConfig endpoint is optimised to not load decide if there are no feature flags in the DB.
+    // We need to intercept both the RemoteConfig and the decide endpoint to ensure that the feature flags are always loaded.
+
+    cy.intercept('**/array/*/config', (req) =>
+        req.reply(
+            decideResponse({
+                ...overrides,
+            })
+        )
+    )
+
+    cy.intercept('**/array/*/config.js', (req) =>
+        req.continue((res) => {
+            res.send(res.body)
+        })
+    )
+
+    cy.intercept('**/decide/*', (req) =>
+        req.reply(
+            decideResponse({
+                ...overrides,
+            })
+        )
+    )
+}
diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts
index fe164bf074b3a..f05a549c9bf30 100644
--- a/cypress/support/e2e.ts
+++ b/cypress/support/e2e.ts
@@ -4,7 +4,7 @@ import 'cypress-axe'
 
 import { urls } from 'scenes/urls'
 
-import { decideResponse } from '../fixtures/api/decide'
+import { setupFeatureFlags } from './decide'
 
 try {
     // eslint-disable-next-line @typescript-eslint/no-var-requires
@@ -86,14 +86,7 @@ beforeEach(() => {
     Cypress.env('POSTHOG_PROPERTY_GITHUB_ACTION_RUN_URL', process.env.GITHUB_ACTION_RUN_URL)
 
     cy.useSubscriptionStatus('subscribed')
-    cy.intercept('**/decide/*', (req) =>
-        req.reply(
-            decideResponse({
-                // Feature flag to be treated as rolled out in E2E tests, e.g.:
-                // 'toolbar-launch-side-action': true,
-            })
-        )
-    )
+    setupFeatureFlags({})
 
     // un-intercepted sometimes this doesn't work and the page gets stuck on the SpinnerOverlay
     cy.intercept(/app.posthog.com\/api\/projects\/@current\/feature_flags\/my_flags.*/, (req) => req.reply([]))
diff --git a/ee/api/billing.py b/ee/api/billing.py
index 7fe7e4942b586..7d717ca740510 100644
--- a/ee/api/billing.py
+++ b/ee/api/billing.py
@@ -62,7 +62,10 @@ def list(self, request: Request, *args: Any, **kwargs: Any) -> Response:
         plan_keys = request.query_params.get("plan_keys", None)
 
         billing_manager = self.get_billing_manager()
-        response = billing_manager.get_billing(org, plan_keys)
+        query = {}
+        if "include_forecasting" in request.query_params:
+            query["include_forecasting"] = request.query_params.get("include_forecasting")
+        response = billing_manager.get_billing(org, plan_keys, query)
 
         return Response(response)
 
diff --git a/ee/api/conversation.py b/ee/api/conversation.py
new file mode 100644
index 0000000000000..70e314b94039f
--- /dev/null
+++ b/ee/api/conversation.py
@@ -0,0 +1,69 @@
+from typing import cast
+
+from django.http import StreamingHttpResponse
+from pydantic import ValidationError
+from rest_framework import serializers
+from rest_framework.renderers import BaseRenderer
+from rest_framework.request import Request
+from rest_framework.viewsets import GenericViewSet
+
+from ee.hogai.assistant import Assistant
+from ee.models.assistant import Conversation
+from posthog.api.routing import TeamAndOrgViewSetMixin
+from posthog.models.user import User
+from posthog.rate_limit import AIBurstRateThrottle, AISustainedRateThrottle
+from posthog.schema import HumanMessage
+
+
+class MessageSerializer(serializers.Serializer):
+    content = serializers.CharField(required=True, max_length=1000)
+    conversation = serializers.UUIDField(required=False)
+
+    def validate(self, data):
+        try:
+            message = HumanMessage(content=data["content"])
+            data["message"] = message
+        except ValidationError:
+            raise serializers.ValidationError("Invalid message content.")
+        return data
+
+
+class ServerSentEventRenderer(BaseRenderer):
+    media_type = "text/event-stream"
+    format = "txt"
+
+    def render(self, data, accepted_media_type=None, renderer_context=None):
+        return data
+
+
+class ConversationViewSet(TeamAndOrgViewSetMixin, GenericViewSet):
+    scope_object = "INTERNAL"
+    serializer_class = MessageSerializer
+    renderer_classes = [ServerSentEventRenderer]
+    queryset = Conversation.objects.all()
+    lookup_url_kwarg = "conversation"
"conversation" + + def safely_get_queryset(self, queryset): + # Only allow access to conversations created by the current user + return queryset.filter(user=self.request.user) + + def get_throttles(self): + return [AIBurstRateThrottle(), AISustainedRateThrottle()] + + def create(self, request: Request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + conversation_id = serializer.validated_data.get("conversation") + if conversation_id: + self.kwargs[self.lookup_url_kwarg] = conversation_id + conversation = self.get_object() + else: + conversation = self.get_queryset().create(user=request.user, team=self.team) + assistant = Assistant( + self.team, + conversation, + serializer.validated_data["message"], + user=cast(User, request.user), + is_new_conversation=not conversation_id, + ) + return StreamingHttpResponse(assistant.stream(), content_type=ServerSentEventRenderer.media_type) diff --git a/ee/api/dashboard_collaborator.py b/ee/api/dashboard_collaborator.py index 1687bf5d83115..4cdd8833983cd 100644 --- a/ee/api/dashboard_collaborator.py +++ b/ee/api/dashboard_collaborator.py @@ -91,7 +91,7 @@ class DashboardCollaboratorViewSet( scope_object = "dashboard" permission_classes = [CanEditDashboardCollaborator] pagination_class = None - queryset = DashboardPrivilege.objects.select_related("dashboard").filter(user__is_active=True) + queryset = DashboardPrivilege.objects.select_related("dashboard", "dashboard__team").filter(user__is_active=True) lookup_field = "user__uuid" serializer_class = DashboardCollaboratorSerializer filter_rewrite_rules = {"project_id": "dashboard__team__project_id"} diff --git a/ee/api/ee_event_definition.py b/ee/api/ee_event_definition.py index 325a845aaa804..2c36958e505c7 100644 --- a/ee/api/ee_event_definition.py +++ b/ee/api/ee_event_definition.py @@ -11,6 +11,10 @@ ) from loginas.utils import is_impersonated_session +from typing import cast +import posthoganalytics +from posthog.event_usage import groups +from posthog.models import User class EnterpriseEventDefinitionSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer): @@ -104,6 +108,23 @@ def update(self, event_definition: EnterpriseEventDefinition, validated_data): detail=Detail(name=str(event_definition.name), changes=changes), ) + verified_old = event_definition.verified + verified_new = validated_data.get("verified", verified_old) + + # If verified status has changed, track it + if "verified" in validated_data and verified_old != verified_new: + user = cast(User, self.context["request"].user) + posthoganalytics.capture( + str(user.distinct_id), + "event verification toggled", + properties={ + "verified": verified_new, + "event_name": event_definition.name, + "is_custom_event": not event_definition.name.startswith("$"), + }, + groups=groups(user.organization), + ) + return super().update(event_definition, validated_data) def to_representation(self, instance): diff --git a/ee/api/hooks.py b/ee/api/hooks.py index 6dd6dfd85e5c3..22d54c4b7bf8e 100644 --- a/ee/api/hooks.py +++ b/ee/api/hooks.py @@ -23,6 +23,7 @@ def create_zapier_hog_function(hook: Hook, serializer_context: dict) -> HogFunct serializer = HogFunctionSerializer( data={ "template_id": template_zapier.id, + "type": "destination", "name": f"Zapier webhook for action {hook.resource_id}", "filters": {"actions": [{"id": str(hook.resource_id), "name": "", "type": "actions", "order": 0}]}, "inputs": { diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index 
index 2875145441d69..d2afa2bc46569 100644
--- a/ee/api/test/test_billing.py
+++ b/ee/api/test/test_billing.py
@@ -840,6 +840,37 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma
 
         assert self.organization.customer_trust_scores == {"recordings": 0, "events": 15, "rows_synced": 0}
 
+    @patch("ee.api.billing.requests.get")
+    def test_billing_with_supported_params(self, mock_get):
+        """Test that the include_forecasting param is passed through to the billing service."""
+
+        def mock_implementation(url: str, headers: Any = None, params: Any = None) -> MagicMock:
+            mock = MagicMock()
+            mock.status_code = 200
+
+            if "api/billing/portal" in url:
+                mock.json.return_value = {"url": "https://billing.stripe.com/p/session/test_1234"}
+            elif "api/billing" in url:
+                mock.json.return_value = create_billing_response(
+                    customer=create_billing_customer(has_active_subscription=True)
+                )
+
+            return mock
+
+        mock_get.side_effect = mock_implementation
+
+        response = self.client.get("/api/billing/?include_forecasting=true")
+        assert response.status_code == 200
+
+        # Verify the billing service was called with the correct query param
+        billing_calls = [
+            call
+            for call in mock_get.call_args_list
+            if "api/billing" in call[0][0] and "api/billing/portal" not in call[0][0]
+        ]
+        assert len(billing_calls) == 1
+        assert billing_calls[0].kwargs["params"] == {"include_forecasting": "true"}
+
 
 class TestPortalBillingAPI(APILicensedTest):
     @patch("ee.api.billing.requests.get")
@@ -932,7 +963,7 @@ def test_deactivate_success(self, mock_get_billing, mock_deactivate_products):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
         mock_deactivate_products.assert_called_once_with(self.organization, "product_1")
-        mock_get_billing.assert_called_once_with(self.organization, None)
+        mock_get_billing.assert_called_once_with(self.organization, None, {})
 
     def test_deactivate_failure(self):
         url = "/api/billing/deactivate"
diff --git a/ee/api/test/test_conversation.py b/ee/api/test/test_conversation.py
new file mode 100644
index 0000000000000..6eb466876dc01
--- /dev/null
+++ b/ee/api/test/test_conversation.py
@@ -0,0 +1,157 @@
+from unittest.mock import patch
+
+from rest_framework import status
+
+from ee.hogai.assistant import Assistant
+from ee.models.assistant import Conversation
+from posthog.models.team.team import Team
+from posthog.models.user import User
+from posthog.test.base import APIBaseTest
+
+
+class TestConversation(APIBaseTest):
+    def setUp(self):
+        super().setUp()
+        self.other_team = Team.objects.create(organization=self.organization, name="other team")
+        self.other_user = User.objects.create_and_join(
+            organization=self.organization,
+            email="other@posthog.com",
+            password="password",
+            first_name="Other",
+        )
+
+    def _get_streaming_content(self, response):
+        return b"".join(response.streaming_content)
+
+    def test_create_conversation(self):
+        with patch.object(Assistant, "_stream", return_value=["test response"]) as stream_mock:
+            response = self.client.post(
+                f"/api/environments/{self.team.id}/conversations/",
+                {"content": "test query"},
+            )
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(self._get_streaming_content(response), b"test response")
+            self.assertEqual(Conversation.objects.count(), 1)
+            conversation: Conversation = Conversation.objects.first()
+            self.assertEqual(conversation.user, self.user)
+            self.assertEqual(conversation.team, self.team)
+            stream_mock.assert_called_once()
+
+    def test_add_message_to_existing_conversation(self):
+        with patch.object(Assistant, "_stream", return_value=["test response"]) as stream_mock:
+            conversation = Conversation.objects.create(user=self.user, team=self.team)
+            response = self.client.post(
+                f"/api/environments/{self.team.id}/conversations/",
+                {
+                    "conversation": str(conversation.id),
+                    "content": "test query",
+                },
+            )
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(self._get_streaming_content(response), b"test response")
+            self.assertEqual(Conversation.objects.count(), 1)
+            stream_mock.assert_called_once()
+
+    def test_cant_access_other_users_conversation(self):
+        conversation = Conversation.objects.create(user=self.other_user, team=self.team)
+
+        self.client.force_login(self.user)
+        response = self.client.post(
+            f"/api/environments/{self.team.id}/conversations/",
+            {"conversation": conversation.id, "content": "test query"},
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_cant_access_other_teams_conversation(self):
+        conversation = Conversation.objects.create(user=self.user, team=self.other_team)
+        response = self.client.post(
+            f"/api/environments/{self.team.id}/conversations/",
+            {"conversation": conversation.id, "content": "test query"},
+        )
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_invalid_message_format(self):
+        response = self.client.post("/api/environments/@current/conversations/")
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+    def test_rate_limit_burst(self):
+        # Create multiple requests to trigger burst rate limit
+        with patch.object(Assistant, "_stream", return_value=["test response"]):
+            for _ in range(11):  # Assuming burst limit is less than this
+                response = self.client.post(
+                    f"/api/environments/{self.team.id}/conversations/",
+                    {"content": "test query"},
+                )
+        self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS)
+
+    def test_empty_content(self):
+        response = self.client.post(
+            f"/api/environments/{self.team.id}/conversations/",
+            {"content": ""},
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+    def test_content_too_long(self):
+        response = self.client.post(
+            f"/api/environments/{self.team.id}/conversations/",
+            {"content": "x" * 1001},  # Very long message
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+    def test_invalid_conversation_id(self):
+        response = self.client.post(
+            f"/api/environments/{self.team.id}/conversations/",
+            {
+                "conversation": "not-a-valid-uuid",
+                "content": "test query",
+            },
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+    def test_nonexistent_conversation(self):
+        response = self.client.post(
+            f"/api/environments/{self.team.id}/conversations/",
+            {
+                "conversation": "12345678-1234-5678-1234-567812345678",
+                "content": "test query",
+            },
+        )
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_deleted_conversation(self):
+        # Create and then delete a conversation
+        conversation = Conversation.objects.create(user=self.user, team=self.team)
+        conversation_id = conversation.id
+        conversation.delete()
+
+        response = self.client.post(
+            f"/api/environments/{self.team.id}/conversations/",
+            {
+                "conversation": str(conversation_id),
+                "content": "test query",
+            },
+        )
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_unauthenticated_request(self):
+        self.client.logout()
+        response = self.client.post(
f"/api/environments/{self.team.id}/conversations/", + {"content": "test query"}, + ) + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + def test_streaming_error_handling(self): + def raise_error(): + yield "some content" + raise Exception("Streaming error") + + with patch.object(Assistant, "_stream", side_effect=raise_error): + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"content": "test query"}, + ) + with self.assertRaises(Exception) as context: + b"".join(response.streaming_content) + self.assertTrue("Streaming error" in str(context.exception)) diff --git a/ee/benchmarks/benchmarks.py b/ee/benchmarks/benchmarks.py index 1784cae10a402..645c04a72f00a 100644 --- a/ee/benchmarks/benchmarks.py +++ b/ee/benchmarks/benchmarks.py @@ -4,9 +4,9 @@ from datetime import timedelta from ee.clickhouse.materialized_columns.analyze import ( backfill_materialized_columns, - get_materialized_columns, materialize, ) +from ee.clickhouse.materialized_columns.columns import MaterializedColumn from ee.clickhouse.queries.stickiness import ClickhouseStickiness from ee.clickhouse.queries.funnels.funnel_correlation import FunnelCorrelation from posthog.queries.funnels import ClickhouseFunnel @@ -771,15 +771,17 @@ def track_person_property_values_materialized(self): def setup(self): for table, properties in MATERIALIZED_PROPERTIES.items(): - existing_materialized_columns = get_materialized_columns(table) - for property in properties: - if (property, "properties") not in existing_materialized_columns: - materialize(table, property) - backfill_materialized_columns( - table, - [(property, "properties")], - backfill_period=timedelta(days=1_000), - ) + columns = [ + materialize(table, property) + for property in ( + set(properties) - {column.details.property_name for column in MaterializedColumn.get_all(table)} + ) + ] + backfill_materialized_columns( + table, + columns, + backfill_period=timedelta(days=1_000), + ) # :TRICKY: Data in benchmark servers has ID=2 team = Team.objects.filter(id=2).first() diff --git a/ee/benchmarks/helpers.py b/ee/benchmarks/helpers.py index 285a1dc97ee9f..8535e6adef47d 100644 --- a/ee/benchmarks/helpers.py +++ b/ee/benchmarks/helpers.py @@ -14,7 +14,7 @@ django.setup() -from posthog.clickhouse.materialized_columns import get_enabled_materialized_columns # noqa: E402 +from ee.clickhouse.materialized_columns.columns import get_enabled_materialized_columns # noqa: E402 from posthog import client # noqa: E402 from posthog.clickhouse.query_tagging import reset_query_tags, tag_queries # noqa: E402 from posthog.models.utils import UUIDT # noqa: E402 diff --git a/ee/billing/billing_manager.py b/ee/billing/billing_manager.py index 5363053487c9e..ea033a4e0a9e0 100644 --- a/ee/billing/billing_manager.py +++ b/ee/billing/billing_manager.py @@ -74,9 +74,14 @@ def __init__(self, license, user: Optional[User] = None): self.license = license or get_cached_instance_license() self.user = user - def get_billing(self, organization: Optional[Organization], plan_keys: Optional[str]) -> dict[str, Any]: + def get_billing( + self, + organization: Optional[Organization], + plan_keys: Optional[str], + query_params: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: if organization and self.license and self.license.is_v2_license: - billing_service_response = self._get_billing(organization) + billing_service_response = self._get_billing(organization, query_params) # Ensure the license and org are updated with the latest info if 
             if billing_service_response.get("license"):
@@ -225,7 +230,7 @@ def update_license_details(self, billing_status: BillingStatus) -> License:
 
         return self.license
 
-    def _get_billing(self, organization: Organization) -> BillingStatus:
+    def _get_billing(self, organization: Organization, query_params: Optional[dict[str, Any]] = None) -> BillingStatus:
         """
         Retrieves billing info and updates local models if necessary
         """
@@ -235,6 +240,7 @@ def _get_billing(self, organization: Organization) -> BillingStatus:
         res = requests.get(
             f"{BILLING_SERVICE_URL}/api/billing",
             headers=self.get_auth_headers(organization),
+            params=query_params,
         )
 
         handle_billing_service_error(res)
diff --git a/ee/clickhouse/materialized_columns/analyze.py b/ee/clickhouse/materialized_columns/analyze.py
index 43a1e83256912..fd1d1d09cb159 100644
--- a/ee/clickhouse/materialized_columns/analyze.py
+++ b/ee/clickhouse/materialized_columns/analyze.py
@@ -8,6 +8,7 @@
 
 from ee.clickhouse.materialized_columns.columns import (
     DEFAULT_TABLE_COLUMN,
+    MaterializedColumn,
     backfill_materialized_columns,
     get_materialized_columns,
     materialize,
@@ -28,6 +29,7 @@
 from posthog.models.property import PropertyName, TableColumn, TableWithProperties
 from posthog.models.property_definition import PropertyDefinition
 from posthog.models.team import Team
+from posthog.settings import CLICKHOUSE_CLUSTER
 
 Suggestion = tuple[TableWithProperties, TableColumn, PropertyName]
 
@@ -129,7 +131,7 @@ def _analyze(since_hours_ago: int, min_query_time: int, team_id: Optional[int] =
             --formatReadableSize(avg(read_bytes)),
             --formatReadableSize(max(read_bytes))
         FROM
-            clusterAllReplicas(posthog, system, query_log)
+            clusterAllReplicas({cluster}, system, query_log)
         WHERE
             query_start_time > now() - toIntervalHour({since})
             and query LIKE '%JSONExtract%'
@@ -157,6 +159,7 @@ def _analyze(since_hours_ago: int, min_query_time: int, team_id: Optional[int] =
             since=since_hours_ago,
             min_query_time=min_query_time,
             team_id_filter=f"and JSONExtractInt(log_comment, 'team_id') = {team_id}" if team_id else "",
+            cluster=CLICKHOUSE_CLUSTER,
         ),
     )
 
@@ -164,30 +167,31 @@ def _analyze(since_hours_ago: int, min_query_time: int, team_id: Optional[int] =
 
 
 def materialize_properties_task(
-    columns_to_materialize: Optional[list[Suggestion]] = None,
+    properties_to_materialize: Optional[list[Suggestion]] = None,
     time_to_analyze_hours: int = MATERIALIZE_COLUMNS_ANALYSIS_PERIOD_HOURS,
     maximum: int = MATERIALIZE_COLUMNS_MAX_AT_ONCE,
     min_query_time: int = MATERIALIZE_COLUMNS_MINIMUM_QUERY_TIME,
     backfill_period_days: int = MATERIALIZE_COLUMNS_BACKFILL_PERIOD_DAYS,
     dry_run: bool = False,
     team_id_to_analyze: Optional[int] = None,
+    is_nullable: bool = False,
 ) -> None:
     """
     Creates materialized columns for event and person properties based off of slow queries
     """
-    if columns_to_materialize is None:
-        columns_to_materialize = _analyze(time_to_analyze_hours, min_query_time, team_id_to_analyze)
+    if properties_to_materialize is None:
+        properties_to_materialize = _analyze(time_to_analyze_hours, min_query_time, team_id_to_analyze)
 
-    columns_by_table: dict[TableWithProperties, list[tuple[TableColumn, PropertyName]]] = defaultdict(list)
-    for table, table_column, property_name in columns_to_materialize:
-        columns_by_table[table].append((table_column, property_name))
+    properties_by_table: dict[TableWithProperties, list[tuple[TableColumn, PropertyName]]] = defaultdict(list)
+    for table, table_column, property_name in properties_to_materialize:
+        properties_by_table[table].append((table_column, property_name))
result: list[Suggestion] = [] - for table, columns in columns_by_table.items(): - existing_materialized_columns = get_materialized_columns(table) - for table_column, property_name in columns: - if (property_name, table_column) not in existing_materialized_columns: + for table, properties in properties_by_table.items(): + existing_materialized_properties = get_materialized_columns(table).keys() + for table_column, property_name in properties: + if (property_name, table_column) not in existing_materialized_properties: result.append((table, table_column, property_name)) if len(result) > 0: @@ -195,18 +199,15 @@ def materialize_properties_task( else: logger.info("Found no columns to materialize.") - properties: dict[TableWithProperties, list[tuple[PropertyName, TableColumn]]] = { - "events": [], - "person": [], - } + materialized_columns: dict[TableWithProperties, list[MaterializedColumn]] = defaultdict(list) for table, table_column, property_name in result[:maximum]: logger.info(f"Materializing column. table={table}, property_name={property_name}") - if not dry_run: - materialize(table, property_name, table_column=table_column) - properties[table].append((property_name, table_column)) + materialized_columns[table].append( + materialize(table, property_name, table_column=table_column, is_nullable=is_nullable) + ) if backfill_period_days > 0 and not dry_run: logger.info(f"Starting backfill for new materialized columns. period_days={backfill_period_days}") - backfill_materialized_columns("events", properties["events"], timedelta(days=backfill_period_days)) - backfill_materialized_columns("person", properties["person"], timedelta(days=backfill_period_days)) + for table, columns in materialized_columns.items(): + backfill_materialized_columns(table, columns, timedelta(days=backfill_period_days)) diff --git a/ee/clickhouse/materialized_columns/columns.py b/ee/clickhouse/materialized_columns/columns.py index c9624bf96bacd..ab051fee55e40 100644 --- a/ee/clickhouse/materialized_columns/columns.py +++ b/ee/clickhouse/materialized_columns/columns.py @@ -1,33 +1,35 @@ from __future__ import annotations +import logging import re -from collections.abc import Callable, Iterator +from collections.abc import Callable, Iterable, Iterator from copy import copy from dataclasses import dataclass, replace from datetime import timedelta -from typing import Any, Literal, NamedTuple, TypeVar, cast +from typing import Any, Literal, TypeVar, cast from clickhouse_driver import Client from django.utils.timezone import now +from posthog.cache_utils import cache_for from posthog.clickhouse.client.connection import default_client from posthog.clickhouse.cluster import ClickhouseCluster, ConnectionInfo, FuturesMap, HostInfo from posthog.clickhouse.kafka_engine import trim_quotes_expr from posthog.clickhouse.materialized_columns import ColumnName, TablesWithMaterializedColumns from posthog.client import sync_execute from posthog.models.event.sql import EVENTS_DATA_TABLE -from posthog.models.instance_setting import get_instance_setting from posthog.models.person.sql import PERSONS_TABLE from posthog.models.property import PropertyName, TableColumn, TableWithProperties from posthog.models.utils import generate_random_short_suffix from posthog.settings import CLICKHOUSE_DATABASE, CLICKHOUSE_PER_TEAM_SETTINGS, TEST + +logger = logging.getLogger(__name__) + T = TypeVar("T") DEFAULT_TABLE_COLUMN: Literal["properties"] = "properties" -TRIM_AND_EXTRACT_PROPERTY = trim_quotes_expr("JSONExtractRaw({table_column}, %(property)s)") - 
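# A standalone sketch of the two expression shapes that replace the removed
# TRIM_AND_EXTRACT_PROPERTY constant: nullable columns use JSONExtract with an
# explicit ClickHouse type, non-nullable ones keep the quote-trimmed
# JSONExtractRaw. trim_quotes_expr is stubbed here to its observed output; the
# real helper is imported from posthog.clickhouse.kafka_engine.

def trim_quotes_expr(expr: str) -> str:
    # Strip one leading and one trailing double quote from the JSON value.
    return f"replaceRegexpAll({expr}, '^\"|\"$', '')"

def expression_and_type(table_column: str, is_nullable: bool) -> tuple[str, str]:
    # Mirrors MaterializedColumn.type and get_expression_and_parameters below.
    if is_nullable:
        return (
            f"JSONExtract({table_column}, %(property_name)s, %(property_type)s)",
            "Nullable(String)",
        )
    return (
        trim_quotes_expr(f"JSONExtractRaw({table_column}, %(property)s)"),
        "String",
    )

assert expression_and_type("properties", is_nullable=True)[1] == "Nullable(String)"
assert expression_and_type("properties", is_nullable=False)[0] == (
    "replaceRegexpAll(JSONExtractRaw(properties, %(property)s), '^\"|\"$', '')"
)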
SHORT_TABLE_COLUMN_NAME = { "properties": "p", "group_properties": "gp", @@ -40,15 +42,36 @@ } -class MaterializedColumn(NamedTuple): +@dataclass +class MaterializedColumn: name: ColumnName details: MaterializedColumnDetails + is_nullable: bool + + @property + def type(self) -> str: + if self.is_nullable: + return "Nullable(String)" + else: + return "String" + + def get_expression_and_parameters(self) -> tuple[str, dict[str, Any]]: + if self.is_nullable: + return ( + f"JSONExtract({self.details.table_column}, %(property_name)s, %(property_type)s)", + {"property_name": self.details.property_name, "property_type": self.type}, + ) + else: + return ( + trim_quotes_expr(f"JSONExtractRaw({self.details.table_column}, %(property)s)"), + {"property": self.details.property_name}, + ) @staticmethod def get_all(table: TablesWithMaterializedColumns) -> Iterator[MaterializedColumn]: rows = sync_execute( """ - SELECT name, comment + SELECT name, comment, type like 'Nullable(%%)' as is_nullable FROM system.columns WHERE database = %(database)s AND table = %(table)s @@ -58,8 +81,8 @@ def get_all(table: TablesWithMaterializedColumns) -> Iterator[MaterializedColumn {"database": CLICKHOUSE_DATABASE, "table": table}, ) - for name, comment in rows: - yield MaterializedColumn(name, MaterializedColumnDetails.from_column_comment(comment)) + for name, comment, is_nullable in rows: + yield MaterializedColumn(name, MaterializedColumnDetails.from_column_comment(comment), is_nullable) @staticmethod def get(table: TablesWithMaterializedColumns, column_name: ColumnName) -> MaterializedColumn: @@ -111,22 +134,24 @@ def from_column_comment(cls, comment: str) -> MaterializedColumnDetails: def get_materialized_columns( table: TablesWithMaterializedColumns, - exclude_disabled_columns: bool = False, -) -> dict[tuple[PropertyName, TableColumn], ColumnName]: - if not get_instance_setting("MATERIALIZED_COLUMNS_ENABLED"): - return {} - +) -> dict[tuple[PropertyName, TableColumn], MaterializedColumn]: return { - (column.details.property_name, column.details.table_column): column.name + (column.details.property_name, column.details.table_column): column for column in MaterializedColumn.get_all(table) - if not (exclude_disabled_columns and column.details.is_disabled) } +@cache_for(timedelta(minutes=15)) +def get_enabled_materialized_columns( + table: TablesWithMaterializedColumns, +) -> dict[tuple[PropertyName, TableColumn], MaterializedColumn]: + return {k: column for k, column in get_materialized_columns(table).items() if not column.details.is_disabled} + + def get_cluster() -> ClickhouseCluster: extra_hosts = [] for host_config in map(copy, CLICKHOUSE_PER_TEAM_SETTINGS.values()): - extra_hosts.append(ConnectionInfo(host_config.pop("host"), host_config.pop("port", None))) + extra_hosts.append(ConnectionInfo(host_config.pop("host"))) assert len(host_config) == 0, f"unexpected values: {host_config!r}" return ClickhouseCluster(default_client(), extra_hosts=extra_hosts) @@ -161,6 +186,10 @@ def map_data_nodes(self, cluster: ClickhouseCluster, fn: Callable[[Client], T]) } +def get_minmax_index_name(column: str) -> str: + return f"minmax_{column}" + + @dataclass class CreateColumnOnDataNodesTask: table: str @@ -169,20 +198,17 @@ class CreateColumnOnDataNodesTask: add_column_comment: bool def execute(self, client: Client) -> None: + expression, parameters = self.column.get_expression_and_parameters() actions = [ - f""" - ADD COLUMN IF NOT EXISTS {self.column.name} VARCHAR - MATERIALIZED 
{TRIM_AND_EXTRACT_PROPERTY.format(table_column=self.column.details.table_column)} - """, + f"ADD COLUMN IF NOT EXISTS {self.column.name} {self.column.type} MATERIALIZED {expression}", ] - parameters = {"property": self.column.details.property_name} if self.add_column_comment: actions.append(f"COMMENT COLUMN {self.column.name} %(comment)s") parameters["comment"] = self.column.details.as_column_comment() if self.create_minmax_index: - index_name = f"minmax_{self.column.name}" + index_name = get_minmax_index_name(self.column.name) actions.append(f"ADD INDEX IF NOT EXISTS {index_name} {self.column.name} TYPE minmax GRANULARITY 1") client.execute( @@ -201,7 +227,7 @@ def execute(self, client: Client) -> None: client.execute( f""" ALTER TABLE {self.table} - ADD COLUMN IF NOT EXISTS {self.column.name} VARCHAR, + ADD COLUMN IF NOT EXISTS {self.column.name} {self.column.type}, COMMENT COLUMN {self.column.name} %(comment)s """, {"comment": self.column.details.as_column_comment()}, @@ -215,10 +241,11 @@ def materialize( column_name: ColumnName | None = None, table_column: TableColumn = DEFAULT_TABLE_COLUMN, create_minmax_index=not TEST, -) -> ColumnName | None: - if (property, table_column) in get_materialized_columns(table): + is_nullable: bool = False, +) -> MaterializedColumn: + if existing_column := get_materialized_columns(table).get((property, table_column)): if TEST: - return None + return existing_column raise ValueError(f"Property already materialized. table={table}, property={property}, column={table_column}") @@ -235,6 +262,7 @@ def materialize( property_name=property, is_disabled=False, ), + is_nullable=is_nullable, ) table_info.map_data_nodes( @@ -255,70 +283,112 @@ def materialize( ).execute ).result() - return column.name + return column @dataclass class UpdateColumnCommentTask: table: str - column: MaterializedColumn + columns: list[MaterializedColumn] def execute(self, client: Client) -> None: + actions = [] + parameters = {} + for i, column in enumerate(self.columns): + parameter_name = f"comment_{i}" + actions.append(f"COMMENT COLUMN {column.name} %({parameter_name})s") + parameters[parameter_name] = column.details.as_column_comment() + client.execute( - f"ALTER TABLE {self.table} COMMENT COLUMN {self.column.name} %(comment)s", - {"comment": self.column.details.as_column_comment()}, + f"ALTER TABLE {self.table} " + ", ".join(actions), + parameters, settings={"alter_sync": 2 if TEST else 1}, ) -def update_column_is_disabled(table: TablesWithMaterializedColumns, column_name: str, is_disabled: bool) -> None: +def update_column_is_disabled( + table: TablesWithMaterializedColumns, column_names: Iterable[str], is_disabled: bool +) -> None: cluster = get_cluster() table_info = tables[table] + columns = [MaterializedColumn.get(table, column_name) for column_name in column_names] + cluster.map_all_hosts( UpdateColumnCommentTask( table_info.read_table, - MaterializedColumn( - name=column_name, - details=replace( - MaterializedColumn.get(table, column_name).details, - is_disabled=is_disabled, - ), - ), + [replace(column, details=replace(column.details, is_disabled=is_disabled)) for column in columns], ).execute ).result() +def check_index_exists(client: Client, table: str, index: str) -> bool: + [(count,)] = client.execute( + """ + SELECT count() + FROM system.data_skipping_indices + WHERE database = currentDatabase() AND table = %(table)s AND name = %(name)s + """, + {"table": table, "name": index}, + ) + assert 1 >= count >= 0 + return bool(count) + + +def check_column_exists(client: 
Client, table: str, column: str) -> bool: + [(count,)] = client.execute( + """ + SELECT count() + FROM system.columns + WHERE database = currentDatabase() AND table = %(table)s AND name = %(name)s + """, + {"table": table, "name": column}, + ) + assert 1 >= count >= 0 + return bool(count) + + @dataclass class DropColumnTask: table: str - column_name: str + column_names: list[str] try_drop_index: bool def execute(self, client: Client) -> None: - # XXX: copy/pasted from create task - if self.try_drop_index: - index_name = f"minmax_{self.column_name}" + actions = [] + + for column_name in self.column_names: + if self.try_drop_index: + index_name = get_minmax_index_name(column_name) + drop_index_action = f"DROP INDEX IF EXISTS {index_name}" + if check_index_exists(client, self.table, index_name): + actions.append(drop_index_action) + else: + logger.info("Skipping %r, nothing to do...", drop_index_action) + + drop_column_action = f"DROP COLUMN IF EXISTS {column_name}" + if check_column_exists(client, self.table, column_name): + actions.append(drop_column_action) + else: + logger.info("Skipping %r, nothing to do...", drop_column_action) + + if actions: client.execute( - f"ALTER TABLE {self.table} DROP INDEX IF EXISTS {index_name}", + f"ALTER TABLE {self.table} " + ", ".join(actions), settings={"alter_sync": 2 if TEST else 1}, ) - client.execute( - f"ALTER TABLE {self.table} DROP COLUMN IF EXISTS {self.column_name}", - settings={"alter_sync": 2 if TEST else 1}, - ) - -def drop_column(table: TablesWithMaterializedColumns, column_name: str) -> None: +def drop_column(table: TablesWithMaterializedColumns, column_names: Iterable[str]) -> None: cluster = get_cluster() table_info = tables[table] + column_names = [*column_names] if isinstance(table_info, ShardedTableInfo): cluster.map_all_hosts( DropColumnTask( table_info.dist_table, - column_name, + column_names, try_drop_index=False, # no indexes on distributed tables ).execute ).result() @@ -327,7 +397,7 @@ def drop_column(table: TablesWithMaterializedColumns, column_name: str) -> None: cluster, DropColumnTask( table_info.data_table, - column_name, + column_names, try_drop_index=True, ).execute, ).result() @@ -345,12 +415,13 @@ def execute(self, client: Client) -> None: # Note that for this to work all inserts should list columns explicitly # Improve this if https://github.com/ClickHouse/ClickHouse/issues/27730 ever gets resolved for column in self.columns: + expression, parameters = column.get_expression_and_parameters() client.execute( f""" ALTER TABLE {self.table} - MODIFY COLUMN {column.name} VARCHAR DEFAULT {TRIM_AND_EXTRACT_PROPERTY.format(table_column=column.details.table_column)} + MODIFY COLUMN {column.name} {column.type} DEFAULT {expression} """, - {"property": column.details.property_name}, + parameters, settings=self.test_settings, ) @@ -373,7 +444,7 @@ def execute(self, client: Client) -> None: def backfill_materialized_columns( table: TableWithProperties, - properties: list[tuple[PropertyName, TableColumn]], + columns: Iterable[MaterializedColumn], backfill_period: timedelta, test_settings=None, ) -> None: @@ -382,25 +453,14 @@ def backfill_materialized_columns( This will require reading and writing a lot of data on clickhouse disk. 
""" - - if len(properties) == 0: - return - cluster = get_cluster() table_info = tables[table] - # TODO: this will eventually need to handle duplicates - materialized_columns = { - (column.details.property_name, column.details.table_column): column - for column in MaterializedColumn.get_all(table) - } - columns = [materialized_columns[property] for property in properties] - table_info.map_data_nodes( cluster, BackfillColumnTask( table_info.data_table, - columns, + [*columns], backfill_period if table == "events" else None, # XXX test_settings, ).execute, @@ -420,10 +480,10 @@ def _materialized_column_name( prefix += f"{SHORT_TABLE_COLUMN_NAME[table_column]}_" property_str = re.sub("[^0-9a-zA-Z$]", "_", property) - existing_materialized_columns = set(get_materialized_columns(table).values()) + existing_materialized_column_names = {column.name for column in get_materialized_columns(table).values()} suffix = "" - while f"{prefix}{property_str}{suffix}" in existing_materialized_columns: + while f"{prefix}{property_str}{suffix}" in existing_materialized_column_names: suffix = "_" + generate_random_short_suffix() return f"{prefix}{property_str}{suffix}" diff --git a/ee/clickhouse/materialized_columns/test/test_analyze.py b/ee/clickhouse/materialized_columns/test/test_analyze.py index 6fdb0fb05cb0e..3b225ab670f92 100644 --- a/ee/clickhouse/materialized_columns/test/test_analyze.py +++ b/ee/clickhouse/materialized_columns/test/test_analyze.py @@ -49,9 +49,9 @@ def test_mat_columns(self, patch_backfill, patch_materialize): materialize_properties_task() patch_materialize.assert_has_calls( [ - call("events", "materialize_me", table_column="properties"), - call("events", "materialize_me2", table_column="properties"), - call("events", "materialize_person_prop", table_column="person_properties"), - call("events", "materialize_me3", table_column="properties"), + call("events", "materialize_me", table_column="properties", is_nullable=False), + call("events", "materialize_me2", table_column="properties", is_nullable=False), + call("events", "materialize_person_prop", table_column="person_properties", is_nullable=False), + call("events", "materialize_me3", table_column="properties", is_nullable=False), ] ) diff --git a/ee/clickhouse/materialized_columns/test/test_columns.py b/ee/clickhouse/materialized_columns/test/test_columns.py index 4cbbef0c4a416..bf0912114359a 100644 --- a/ee/clickhouse/materialized_columns/test/test_columns.py +++ b/ee/clickhouse/materialized_columns/test/test_columns.py @@ -1,5 +1,6 @@ from datetime import timedelta from time import sleep +from collections.abc import Iterable from unittest import TestCase from unittest.mock import patch @@ -10,11 +11,13 @@ MaterializedColumnDetails, backfill_materialized_columns, drop_column, + get_enabled_materialized_columns, get_materialized_columns, materialize, update_column_is_disabled, ) -from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns, get_enabled_materialized_columns +from ee.tasks.materialized_columns import mark_all_materialized +from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns from posthog.client import sync_execute from posthog.conftest import create_clickhouse_tables from posthog.constants import GROUP_TYPES_LIMIT @@ -129,27 +132,20 @@ def test_caching_and_materializing(self): @patch("secrets.choice", return_value="X") def test_materialized_column_naming(self, mock_choice): - materialize("events", "$foO();--sqlinject", create_minmax_index=True) - mock_choice.return_value 
= "Y" - materialize("events", "$foO();ääsqlinject", create_minmax_index=True) - mock_choice.return_value = "Z" - materialize("events", "$foO_____sqlinject", create_minmax_index=True) - materialize("person", "SoMePrOp", create_minmax_index=True) + assert materialize("events", "$foO();--sqlinject", create_minmax_index=True).name == "mat_$foO_____sqlinject" - self.assertDictContainsSubset( - { - ("$foO();--sqlinject", "properties"): "mat_$foO_____sqlinject", - ("$foO();ääsqlinject", "properties"): "mat_$foO_____sqlinject_YYYY", - ("$foO_____sqlinject", "properties"): "mat_$foO_____sqlinject_ZZZZ", - }, - get_materialized_columns("events"), + mock_choice.return_value = "Y" + assert ( + materialize("events", "$foO();ääsqlinject", create_minmax_index=True).name == "mat_$foO_____sqlinject_YYYY" ) - self.assertEqual( - get_materialized_columns("person"), - {("SoMePrOp", "properties"): "pmat_SoMePrOp"}, + mock_choice.return_value = "Z" + assert ( + materialize("events", "$foO_____sqlinject", create_minmax_index=True).name == "mat_$foO_____sqlinject_ZZZZ" ) + assert materialize("person", "SoMePrOp", create_minmax_index=True).name == "pmat_SoMePrOp" + def test_backfilling_data(self): sync_execute("ALTER TABLE events DROP COLUMN IF EXISTS mat_prop") sync_execute("ALTER TABLE events DROP COLUMN IF EXISTS mat_another") @@ -196,8 +192,10 @@ def test_backfilling_data(self): properties={"another": 6}, ) - materialize("events", "prop", create_minmax_index=True) - materialize("events", "another", create_minmax_index=True) + columns = [ + materialize("events", "prop", create_minmax_index=True), + materialize("events", "another", create_minmax_index=True), + ] self.assertEqual(self._count_materialized_rows("mat_prop"), 0) self.assertEqual(self._count_materialized_rows("mat_another"), 0) @@ -205,7 +203,7 @@ def test_backfilling_data(self): with freeze_time("2021-05-10T14:00:01Z"): backfill_materialized_columns( "events", - [("prop", "properties"), ("another", "properties")], + columns, timedelta(days=50), test_settings={"mutations_sync": "0"}, ) @@ -240,21 +238,27 @@ def test_backfilling_data(self): ) def test_column_types(self): - materialize("events", "myprop", create_minmax_index=True) - - expr = "replaceRegexpAll(JSONExtractRaw(properties, 'myprop'), '^\"|\"$', '')" - self.assertEqual(("MATERIALIZED", expr), self._get_column_types("mat_myprop")) + columns = [ + materialize("events", "myprop", create_minmax_index=True), + materialize("events", "myprop_nullable", create_minmax_index=True, is_nullable=True), + ] + + expr_nonnullable = "replaceRegexpAll(JSONExtractRaw(properties, 'myprop'), '^\"|\"$', '')" + expr_nullable = "JSONExtract(properties, 'myprop_nullable', 'Nullable(String)')" + self.assertEqual(("String", "MATERIALIZED", expr_nonnullable), self._get_column_types("mat_myprop")) + self.assertEqual( + ("Nullable(String)", "MATERIALIZED", expr_nullable), self._get_column_types("mat_myprop_nullable") + ) - backfill_materialized_columns("events", [("myprop", "properties")], timedelta(days=50)) - self.assertEqual(("DEFAULT", expr), self._get_column_types("mat_myprop")) + backfill_materialized_columns("events", columns, timedelta(days=50)) + self.assertEqual(("String", "DEFAULT", expr_nonnullable), self._get_column_types("mat_myprop")) + self.assertEqual(("Nullable(String)", "DEFAULT", expr_nullable), self._get_column_types("mat_myprop_nullable")) - try: - from ee.tasks.materialized_columns import mark_all_materialized - except ImportError: - pass - else: - mark_all_materialized() - 
self.assertEqual(("MATERIALIZED", expr), self._get_column_types("mat_myprop")) + mark_all_materialized() + self.assertEqual(("String", "MATERIALIZED", expr_nonnullable), self._get_column_types("mat_myprop")) + self.assertEqual( + ("Nullable(String)", "MATERIALIZED", expr_nullable), self._get_column_types("mat_myprop_nullable") + ) def _count_materialized_rows(self, column): return sync_execute( @@ -284,7 +288,7 @@ def _get_count_of_mutations_running(self) -> int: def _get_column_types(self, column: str): return sync_execute( """ - SELECT default_kind, default_expression + SELECT type, default_kind, default_expression FROM system.columns WHERE database = %(database)s AND table = %(table)s AND name = %(column)s """, @@ -297,42 +301,119 @@ def _get_column_types(self, column: str): def test_lifecycle(self): table: TablesWithMaterializedColumns = "events" - property: PropertyName = "myprop" + property_names = ["foo", "bar"] source_column: TableColumn = "properties" - # create the materialized column - destination_column = materialize(table, property, table_column=source_column, create_minmax_index=True) - assert destination_column is not None - - # ensure it exists everywhere - key = (property, source_column) - assert get_materialized_columns(table)[key] == destination_column - assert MaterializedColumn.get(table, destination_column) == MaterializedColumn( - destination_column, - MaterializedColumnDetails(source_column, property, is_disabled=False), + # create materialized columns + materialized_columns = {} + for property_name in property_names: + materialized_columns[property_name] = materialize( + table, property_name, table_column=source_column, create_minmax_index=True + ).name + + assert set(property_names) == materialized_columns.keys() + + # ensure they exist everywhere + for property_name, destination_column in materialized_columns.items(): + key = (property_name, source_column) + assert get_materialized_columns(table)[key].name == destination_column + assert MaterializedColumn.get(table, destination_column) == MaterializedColumn( + destination_column, + MaterializedColumnDetails(source_column, property_name, is_disabled=False), + is_nullable=False, + ) + + # disable them and ensure updates apply as needed + update_column_is_disabled(table, materialized_columns.values(), is_disabled=True) + for property_name, destination_column in materialized_columns.items(): + key = (property_name, source_column) + assert get_materialized_columns(table)[key].name == destination_column + assert MaterializedColumn.get(table, destination_column) == MaterializedColumn( + destination_column, + MaterializedColumnDetails(source_column, property_name, is_disabled=True), + is_nullable=False, + ) + + # re-enable them and ensure updates apply as needed + update_column_is_disabled(table, materialized_columns.values(), is_disabled=False) + for property_name, destination_column in materialized_columns.items(): + key = (property_name, source_column) + assert get_materialized_columns(table)[key].name == destination_column + assert MaterializedColumn.get(table, destination_column) == MaterializedColumn( + destination_column, + MaterializedColumnDetails(source_column, property_name, is_disabled=False), + is_nullable=False, + ) + + # drop them and ensure updates apply as needed + drop_column(table, materialized_columns.values()) + for property_name, destination_column in materialized_columns.items(): + key = (property_name, source_column) + assert key not in get_materialized_columns(table) + with 
self.assertRaises(ValueError): + MaterializedColumn.get(table, destination_column) + + def _get_latest_mutation_id(self, table: str) -> str: + [(mutation_id,)] = sync_execute( + """ + SELECT max(mutation_id) + FROM system.mutations + WHERE + database = currentDatabase() + AND table = %(table)s + """, + {"table": table}, ) + return mutation_id + + def _get_mutations_since_id(self, table: str, id: str) -> Iterable[str]: + return [ + command + for (command,) in sync_execute( + """ + SELECT command + FROM system.mutations + WHERE + database = currentDatabase() + AND table = %(table)s + AND mutation_id > %(mutation_id)s + ORDER BY mutation_id + """, + {"table": table, "mutation_id": id}, + ) + ] - # disable it and ensure updates apply as needed - update_column_is_disabled(table, destination_column, is_disabled=True) - assert get_materialized_columns(table)[key] == destination_column - assert key not in get_materialized_columns(table, exclude_disabled_columns=True) - assert MaterializedColumn.get(table, destination_column) == MaterializedColumn( - destination_column, - MaterializedColumnDetails(source_column, property, is_disabled=True), + def test_drop_optimized_no_index(self): + table: TablesWithMaterializedColumns = ( + "person" # little bit easier than events because no shard awareness needed ) + property: PropertyName = "myprop" + source_column: TableColumn = "properties" + + destination_column = materialize(table, property, table_column=source_column, create_minmax_index=False) + + latest_mutation_id_before_drop = self._get_latest_mutation_id(table) - # re-enable it and ensure updates apply as needed - update_column_is_disabled(table, destination_column, is_disabled=False) - assert get_materialized_columns(table, exclude_disabled_columns=False)[key] == destination_column - assert get_materialized_columns(table, exclude_disabled_columns=True)[key] == destination_column - assert MaterializedColumn.get(table, destination_column) == MaterializedColumn( - destination_column, - MaterializedColumnDetails(source_column, property, is_disabled=False), + drop_column(table, destination_column.name) + + mutations_ran = self._get_mutations_since_id(table, latest_mutation_id_before_drop) + assert not any("DROP INDEX" in mutation for mutation in mutations_ran) + + def test_drop_optimized_no_column(self): + table: TablesWithMaterializedColumns = ( + "person" # little bit easier than events because no shard awareness needed ) + property: PropertyName = "myprop" + source_column: TableColumn = "properties" - # drop it and ensure updates apply as needed - drop_column(table, destination_column) - assert key not in get_materialized_columns(table, exclude_disabled_columns=False) - assert key not in get_materialized_columns(table, exclude_disabled_columns=True) - with self.assertRaises(ValueError): - MaterializedColumn.get(table, destination_column) + # create the materialized column + destination_column = materialize(table, property, table_column=source_column, create_minmax_index=False) + + sync_execute(f"ALTER TABLE {table} DROP COLUMN {destination_column.name}", settings={"alter_sync": 1}) + + latest_mutation_id_before_drop = self._get_latest_mutation_id(table) + + drop_column(table, destination_column.name) + + mutations_ran = self._get_mutations_since_id(table, latest_mutation_id_before_drop) + assert not any("DROP COLUMN" in mutation for mutation in mutations_ran) diff --git a/ee/clickhouse/models/test/test_cohort.py b/ee/clickhouse/models/test/test_cohort.py index 8af41154c48a5..1600584169a28 100644 --- 
a/ee/clickhouse/models/test/test_cohort.py +++ b/ee/clickhouse/models/test/test_cohort.py @@ -1,4 +1,5 @@ from datetime import datetime, timedelta +from typing import Optional from django.utils import timezone from freezegun import freeze_time @@ -8,12 +9,13 @@ from posthog.models.action import Action from posthog.models.cohort import Cohort from posthog.models.cohort.sql import GET_COHORTPEOPLE_BY_COHORT_ID -from posthog.models.cohort.util import format_filter_query, get_person_ids_by_cohort_id +from posthog.models.cohort.util import format_filter_query from posthog.models.filters import Filter from posthog.models.organization import Organization from posthog.models.person import Person from posthog.models.property.util import parse_prop_grouped_clauses from posthog.models.team import Team +from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query from posthog.queries.util import PersonPropertiesMode from posthog.schema import PersonsOnEventsMode from posthog.test.base import ( @@ -25,6 +27,7 @@ snapshot_clickhouse_insert_cohortpeople_queries, snapshot_clickhouse_queries, ) +from posthog.models.person.sql import GET_LATEST_PERSON_SQL, GET_PERSON_IDS_BY_FILTER def _create_action(**kwargs): @@ -34,12 +37,44 @@ def _create_action(**kwargs): return action +def get_person_ids_by_cohort_id( + team_id: int, + cohort_id: int, + limit: Optional[int] = None, + offset: Optional[int] = None, +): + from posthog.models.property.util import parse_prop_grouped_clauses + + filter = Filter(data={"properties": [{"key": "id", "value": cohort_id, "type": "cohort"}]}) + filter_query, filter_params = parse_prop_grouped_clauses( + team_id=team_id, + property_group=filter.property_groups, + table_name="pdi", + hogql_context=filter.hogql_context, + ) + + results = sync_execute( + GET_PERSON_IDS_BY_FILTER.format( + person_query=GET_LATEST_PERSON_SQL, + distinct_query=filter_query, + query="", + GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id), + offset="OFFSET %(offset)s" if offset else "", + limit="ORDER BY _timestamp ASC LIMIT %(limit)s" if limit else "", + ), + {**filter_params, "team_id": team_id, "offset": offset, "limit": limit}, + ) + + return [str(row[0]) for row in results] + + class TestCohort(ClickhouseTestMixin, BaseTest): - def _get_cohortpeople(self, cohort: Cohort): + def _get_cohortpeople(self, cohort: Cohort, *, team_id: Optional[int] = None): + team_id = team_id or cohort.team_id return sync_execute( GET_COHORTPEOPLE_BY_COHORT_ID, { - "team_id": self.team.pk, + "team_id": team_id, "cohort_id": cohort.pk, "version": cohort.version, }, @@ -452,7 +487,7 @@ def test_cohort_get_person_ids_by_cohort_id(self): name="cohort1", ) - results = get_person_ids_by_cohort_id(self.team, cohort.id) + results = get_person_ids_by_cohort_id(self.team.pk, cohort.id) self.assertEqual(len(results), 2) self.assertIn(str(user1.uuid), results) self.assertIn(str(user3.uuid), results) @@ -468,7 +503,7 @@ def test_insert_by_distinct_id_or_email(self): cohort = Cohort.objects.create(team=self.team, groups=[], is_static=True) cohort.insert_users_by_list(["1", "123"]) cohort = Cohort.objects.get() - results = get_person_ids_by_cohort_id(self.team, cohort.id) + results = get_person_ids_by_cohort_id(self.team.pk, cohort.id) self.assertEqual(len(results), 2) self.assertEqual(cohort.is_calculating, False) @@ -483,12 +518,12 @@ def test_insert_by_distinct_id_or_email(self): #  If we accidentally call calculate_people it shouldn't erase people cohort.calculate_people_ch(pending_version=0) - 
results = get_person_ids_by_cohort_id(self.team, cohort.id) + results = get_person_ids_by_cohort_id(self.team.pk, cohort.id) self.assertEqual(len(results), 3) # if we add people again, don't increase the number of people in cohort cohort.insert_users_by_list(["123"]) - results = get_person_ids_by_cohort_id(self.team, cohort.id) + results = get_person_ids_by_cohort_id(self.team.pk, cohort.id) self.assertEqual(len(results), 3) @snapshot_clickhouse_insert_cohortpeople_queries @@ -1370,3 +1405,45 @@ def test_cohort_versioning(self): # Should have p1 in this cohort even if version is different results = self._get_cohortpeople(cohort1) self.assertEqual(len(results), 1) + + def test_calculate_people_ch_in_multiteam_project(self): + # Create another team in the same project + team2 = Team.objects.create(organization=self.organization, project=self.team.project) + + # Create people in team 1 + _person1_team1 = _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"$some_prop": "else"}, + ) + person2_team1 = _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"$some_prop": "something"}, + ) + # Create people in team 2 with same property + person1_team2 = _create_person( + team_id=team2.pk, + distinct_ids=["person1_team2"], + properties={"$some_prop": "something"}, + ) + _person2_team2 = _create_person( + team_id=team2.pk, + distinct_ids=["person2_team2"], + properties={"$some_prop": "else"}, + ) + # Create cohort in team 2 (but same project as team 1) + shared_cohort = Cohort.objects.create( + team=team2, + groups=[{"properties": [{"key": "$some_prop", "value": "something", "type": "person"}]}], + name="shared cohort", + ) + # Calculate cohort + shared_cohort.calculate_people_ch(pending_version=0) + + # Verify shared_cohort is now calculated for both teams + results_team1 = self._get_cohortpeople(shared_cohort, team_id=self.team.pk) + results_team2 = self._get_cohortpeople(shared_cohort, team_id=team2.pk) + + self.assertCountEqual([r[0] for r in results_team1], [person2_team1.uuid]) + self.assertCountEqual([r[0] for r in results_team2], [person1_team2.uuid]) diff --git a/ee/clickhouse/queries/event_query.py b/ee/clickhouse/queries/event_query.py index 3f7857fd9a970..64f08da69d6bf 100644 --- a/ee/clickhouse/queries/event_query.py +++ b/ee/clickhouse/queries/event_query.py @@ -7,7 +7,6 @@ from posthog.models.filters.path_filter import PathFilter from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter from posthog.models.filters.retention_filter import RetentionFilter -from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.property import PropertyName from posthog.models.team import Team @@ -25,7 +24,6 @@ def __init__( PathFilter, RetentionFilter, StickinessFilter, - SessionRecordingsFilter, PropertiesTimelineFilter, ], team: Team, diff --git a/ee/clickhouse/queries/funnels/funnel_correlation.py b/ee/clickhouse/queries/funnels/funnel_correlation.py index 3e5b69005d689..0b909c84b398e 100644 --- a/ee/clickhouse/queries/funnels/funnel_correlation.py +++ b/ee/clickhouse/queries/funnels/funnel_correlation.py @@ -13,7 +13,7 @@ from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer from ee.clickhouse.queries.groups_join_query import GroupsJoinQuery -from posthog.clickhouse.materialized_columns import get_enabled_materialized_columns +from posthog.clickhouse.materialized_columns 
import get_materialized_column_for_property from posthog.constants import ( AUTOCAPTURE_EVENT, TREND_FILTER_TYPE_ACTIONS, @@ -156,8 +156,6 @@ def properties_to_include(self) -> list[str]: ): # When dealing with properties, make sure funnel response comes with properties # so we don't have to join on persons/groups to get these properties again - mat_event_cols = get_enabled_materialized_columns("events") - for property_name in cast(list, self._filter.correlation_property_names): if self._filter.aggregation_group_type_index is not None: continue # We don't support group properties on events at this time @@ -165,10 +163,11 @@ def properties_to_include(self) -> list[str]: if "$all" == property_name: return [f"person_properties"] - possible_mat_col = mat_event_cols.get((property_name, "person_properties")) - - if possible_mat_col is not None: - props_to_include.append(possible_mat_col) + possible_mat_col = get_materialized_column_for_property( + "events", "person_properties", property_name + ) + if possible_mat_col is not None and not possible_mat_col.is_nullable: + props_to_include.append(possible_mat_col.name) else: props_to_include.append(f"person_properties") diff --git a/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py b/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py index 4617ffde3c2d5..c6954e15eed9b 100644 --- a/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py +++ b/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py @@ -251,6 +251,7 @@ def test_create_funnel_correlation_cohort(self, _insert_cohort_from_insight_filt "funnel_correlation_person_entity": "{'id': 'positively_related', 'type': 'events'}", "funnel_correlation_person_converted": "TrUe", }, + self.team.pk, ) insert_cohort_from_insight_filter(cohort_id, params) diff --git a/ee/clickhouse/queries/related_actors_query.py b/ee/clickhouse/queries/related_actors_query.py index 4b6198c222710..99817998d7119 100644 --- a/ee/clickhouse/queries/related_actors_query.py +++ b/ee/clickhouse/queries/related_actors_query.py @@ -41,7 +41,7 @@ def __init__( def run(self) -> list[SerializedActor]: results: list[SerializedActor] = [] results.extend(self._query_related_people()) - for group_type_mapping in GroupTypeMapping.objects.filter(team_id=self.team.pk): + for group_type_mapping in GroupTypeMapping.objects.filter(project_id=self.team.project_id): results.extend(self._query_related_groups(group_type_mapping.group_type_index)) return results diff --git a/ee/clickhouse/views/experiments.py b/ee/clickhouse/views/experiments.py index 391fc1a6aa5ab..b8444d819bafa 100644 --- a/ee/clickhouse/views/experiments.py +++ b/ee/clickhouse/views/experiments.py @@ -429,8 +429,8 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg {"key": "test", "name": "Test Variant", "rollout_percentage": 50}, ] - filters = { - "groups": [{"properties": properties, "rollout_percentage": 100}], + feature_flag_filters = { + "groups": feature_flag.filters.get("groups", []), "multivariate": {"variants": variants or default_variants}, "aggregation_group_type_index": aggregation_group_type_index, "holdout_groups": holdout_groups, @@ -438,7 +438,7 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg existing_flag_serializer = FeatureFlagSerializer( feature_flag, - data={"filters": filters}, + data={"filters": feature_flag_filters}, partial=True, context=self.context, ) diff --git a/ee/clickhouse/views/groups.py 
b/ee/clickhouse/views/groups.py index 4970a770854a2..be692dc597525 100644 --- a/ee/clickhouse/views/groups.py +++ b/ee/clickhouse/views/groups.py @@ -35,7 +35,9 @@ class GroupsTypesViewSet(TeamAndOrgViewSetMixin, mixins.ListModelMixin, viewsets @action(detail=False, methods=["PATCH"], name="Update group types metadata") def update_metadata(self, request: request.Request, *args, **kwargs): for row in cast(list[dict], request.data): - instance = GroupTypeMapping.objects.get(team=self.team, group_type_index=row["group_type_index"]) + instance = GroupTypeMapping.objects.get( + project_id=self.team.project_id, group_type_index=row["group_type_index"] + ) serializer = self.get_serializer(instance, data=row) serializer.is_valid(raise_exception=True) serializer.save() diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr index a5b7547779592..983cdf00b5aa0 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr @@ -1,17 +1,5 @@ # serializer version: 1 # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results - ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -27,7 +15,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -106,19 +94,86 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + 
min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -134,6 +189,85 @@ OFFSET 0 ''' # --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 
PRECEDING) latest_1 , + if(has([['test'], ['control']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.2 ''' /* user_id:0 request:_snapshot_ */ @@ -213,19 +347,181 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants - ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants + ''' + /* user_id:0 request:_snapshot_ */ + SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) 
step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + 
if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -241,7 +537,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 ''' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -276,13 +572,13 @@ min(latest_1) over (PARTITION by aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , - if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop FROM (SELECT *, if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop FROM (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, if(notEmpty(pdi.distinct_id), 
pdi.person_id, e.person_id) as person_id, if(event = '$pageview', 1, 0) as step_0, if(step_0 = 1, timestamp, null) as latest_0, @@ -320,34 +616,6 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation - ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 - ''' - /* user_id:0 request:_snapshot_ */ - SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, - count(*) as count - FROM events e - WHERE team_id = 99999 - AND event IN ['$pageleave', '$pageview'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.2 ''' /* user_id:0 request:_snapshot_ */ @@ -428,18 +696,6 @@ ''' # --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results - ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -458,7 +714,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -506,7 +762,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -526,7 +782,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -590,19 +846,71 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT 
SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND (((isNull(replaceRegexpAll(JSONExtractRaw(e.properties, 'exclude'), '^"|"$', '')) + OR NOT JSONHas(e.properties, 'exclude'))) + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -619,7 +927,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -665,7 +973,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -683,7 +991,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 ''' /* 
user_id:0 request:_snapshot_ */ SELECT [now()] AS date, @@ -692,19 +1000,16 @@ LIMIT 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT [now()] AS date, + [0] AS total, + '' AS breakdown_value + LIMIT 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -721,7 +1026,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -767,7 +1072,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -785,7 +1090,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.3 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -847,19 +1152,69 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 07:00:00', 'US/Pacific')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 02:10:00', 'US/Pacific')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 
'US/Pacific')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2020-01-01 02:10:00', 'US/Pacific') + AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2020-01-06 07:00:00', 'US/Pacific') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -877,7 +1232,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -924,7 +1279,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -942,6 +1297,68 @@ OFFSET 0 ''' # --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.3 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + 
breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.4 ''' /* user_id:0 request:_snapshot_ */ diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index aa6d791c6d3d0..a4c8bf9f3eb13 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -1753,6 +1753,114 @@ def test_create_draft_experiment_without_filters(self) -> None: self.assertEqual(response.json()["name"], "Test Experiment") self.assertEqual(response.json()["feature_flag_key"], ff_key) + def test_feature_flag_and_experiment_sync(self): + # Create an experiment with control and test variants + response = self.client.post( + f"/api/projects/{self.team.id}/experiments/", + { + "name": "Test Experiment", + "description": "My test experiment", + "feature_flag_key": "experiment-test-flag", + "parameters": { + "feature_flag_variants": [ + {"key": "control", "name": "Control Group", "rollout_percentage": 50}, + {"key": "test", "name": "Test Variant", "rollout_percentage": 50}, + ] + }, + "filters": {"insight": "TRENDS", "events": [{"order": 0, "id": "$pageview"}]}, + }, + ) + + self.assertEqual(response.status_code, 201) + experiment_id = response.json()["id"] + feature_flag_id = response.json()["feature_flag"]["id"] + + # Fetch the FeatureFlag object + feature_flag = FeatureFlag.objects.get(id=feature_flag_id) + + variants = feature_flag.filters["multivariate"]["variants"] + + # Verify that the variants are correctly populated + self.assertEqual(len(variants), 2) + + self.assertEqual(variants[0]["key"], "control") + self.assertEqual(variants[0]["name"], "Control Group") + self.assertEqual(variants[0]["rollout_percentage"], 50) + + self.assertEqual(variants[1]["key"], "test") + self.assertEqual(variants[1]["name"], "Test Variant") + self.assertEqual(variants[1]["rollout_percentage"], 50) + + # Change the rollout percentages and groups of the feature flag + response = self.client.patch( + f"/api/projects/{self.team.id}/feature_flags/{feature_flag_id}", + { + "filters": { + "groups": [ + {"properties": [], "rollout_percentage": 99}, + {"properties": [], "rollout_percentage": 1}, + ], + "payloads": {}, + 
"multivariate": { + "variants": [ + {"key": "control", "rollout_percentage": 10}, + {"key": "test", "rollout_percentage": 90}, + ] + }, + "aggregation_group_type_index": 1, + } + }, + ) + + # Verify that Experiment.parameters.feature_flag_variants reflects the updated FeatureFlag.filters.multivariate.variants + experiment = Experiment.objects.get(id=experiment_id) + self.assertEqual( + experiment.parameters["feature_flag_variants"], + [{"key": "control", "rollout_percentage": 10}, {"key": "test", "rollout_percentage": 90}], + ) + self.assertEqual(experiment.parameters["aggregation_group_type_index"], 1) + + # Update the experiment with an unrelated change + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{experiment_id}", + {"name": "Updated Test Experiment"}, + ) + + # Verify that the feature flag variants and groups remain unchanged + feature_flag = FeatureFlag.objects.get(id=feature_flag_id) + self.assertEqual( + feature_flag.filters["multivariate"]["variants"], + [{"key": "control", "rollout_percentage": 10}, {"key": "test", "rollout_percentage": 90}], + ) + self.assertEqual( + feature_flag.filters["groups"], + [{"properties": [], "rollout_percentage": 99}, {"properties": [], "rollout_percentage": 1}], + ) + + # Test removing aggregation_group_type_index + response = self.client.patch( + f"/api/projects/{self.team.id}/feature_flags/{feature_flag_id}", + { + "filters": { + "groups": [ + {"properties": [], "rollout_percentage": 99}, + {"properties": [], "rollout_percentage": 1}, + ], + "payloads": {}, + "multivariate": { + "variants": [ + {"key": "control", "rollout_percentage": 10}, + {"key": "test", "rollout_percentage": 90}, + ] + }, + } + }, + ) + + # Verify that aggregation_group_type_index is removed from experiment parameters + experiment = Experiment.objects.get(id=experiment_id) + self.assertNotIn("aggregation_group_type_index", experiment.parameters) + class TestExperimentAuxiliaryEndpoints(ClickhouseTestMixin, APILicensedTest): def _generate_experiment(self, start_date="2024-01-01T10:23", extra_parameters=None): diff --git a/ee/clickhouse/views/test/test_clickhouse_path_person.py b/ee/clickhouse/views/test/test_clickhouse_path_person.py index 48fc8a2475c06..597aa6dffc5e2 100644 --- a/ee/clickhouse/views/test/test_clickhouse_path_person.py +++ b/ee/clickhouse/views/test/test_clickhouse_path_person.py @@ -97,6 +97,7 @@ def test_create_paths_cohort(self, _insert_cohort_from_insight_filter): "date_from": "2021-05-01", "date_to": "2021-05-10", }, + self.team.pk, ) insert_cohort_from_insight_filter(cohort_id, params) diff --git a/ee/hogai/assistant.py b/ee/hogai/assistant.py index 77b1c2c050008..3a296ba9ce7d6 100644 --- a/ee/hogai/assistant.py +++ b/ee/hogai/assistant.py @@ -1,9 +1,12 @@ +import json from collections.abc import AsyncGenerator, Generator, Iterator from functools import partial -from typing import Any, Literal, Optional, TypedDict, TypeGuard, Union +from typing import Any, Optional +from uuid import uuid4 from asgiref.sync import sync_to_async from langchain_core.messages import AIMessageChunk +from langchain_core.runnables.config import RunnableConfig from langfuse.callback import CallbackHandler from langgraph.graph.state import CompiledStateGraph from pydantic import BaseModel @@ -17,7 +20,19 @@ from ee.hogai.trends.nodes import ( TrendsGeneratorNode, ) -from ee.hogai.utils import AssistantNodeName, AssistantState, Conversation +from ee.hogai.utils.state import ( + GraphMessageUpdateTuple, + GraphTaskStartedUpdateTuple, + 
GraphValueUpdateTuple, + is_message_update, + is_state_update, + is_task_started_update, + is_value_update, + validate_state_update, + validate_value_update, +) +from ee.hogai.utils.types import AssistantNodeName, AssistantState, PartialAssistantState +from ee.models import Conversation from posthog.event_usage import report_user_action from posthog.models import Team, User from posthog.schema import ( @@ -40,42 +55,6 @@ langfuse_handler = None -def is_value_update(update: list[Any]) -> TypeGuard[tuple[Literal["values"], dict[AssistantNodeName, AssistantState]]]: - """ - Transition between nodes. - """ - return len(update) == 2 and update[0] == "updates" - - -class LangGraphState(TypedDict): - langgraph_node: AssistantNodeName - - -def is_message_update( - update: list[Any], -) -> TypeGuard[tuple[Literal["messages"], tuple[Union[AIMessageChunk, Any], LangGraphState]]]: - """ - Streaming of messages. Returns a partial state. - """ - return len(update) == 2 and update[0] == "messages" - - -def is_state_update(update: list[Any]) -> TypeGuard[tuple[Literal["updates"], AssistantState]]: - """ - Update of the state. - """ - return len(update) == 2 and update[0] == "values" - - -def is_task_started_update( - update: list[Any], -) -> TypeGuard[tuple[Literal["messages"], tuple[Union[AIMessageChunk, Any], LangGraphState]]]: - """ - Streaming of messages. Returns a partial state. - """ - return len(update) == 2 and update[0] == "debug" and update[1]["type"] == "task" - - VISUALIZATION_NODES: dict[AssistantNodeName, type[SchemaGeneratorNode]] = { AssistantNodeName.TRENDS_GENERATOR: TrendsGeneratorNode, AssistantNodeName.FUNNEL_GENERATOR: FunnelGeneratorNode, @@ -87,13 +66,25 @@ class Assistant: _graph: CompiledStateGraph _user: Optional[User] _conversation: Conversation + _latest_message: HumanMessage + _state: Optional[AssistantState] - def __init__(self, team: Team, conversation: Conversation, user: Optional[User] = None): + def __init__( + self, + team: Team, + conversation: Conversation, + new_message: HumanMessage, + user: Optional[User] = None, + is_new_conversation: bool = False, + ): self._team = team self._user = user self._conversation = conversation + self._latest_message = new_message.model_copy(deep=True, update={"id": str(uuid4())}) + self._is_new_conversation = is_new_conversation self._graph = AssistantGraph(team).compile_full_graph() self._chunks = AIMessageChunk(content="") + self._state = None def stream(self): if SERVER_GATEWAY_INTERFACE == "ASGI": @@ -110,15 +101,19 @@ async def _astream(self) -> AsyncGenerator[str, None]: break def _stream(self) -> Generator[str, None, None]: - callbacks = [langfuse_handler] if langfuse_handler else [] + state = self._init_or_update_state() + config = self._get_config() + generator: Iterator[Any] = self._graph.stream( - self._initial_state, - config={"recursion_limit": 24, "callbacks": callbacks}, - stream_mode=["messages", "values", "updates", "debug"], + state, config=config, stream_mode=["messages", "values", "updates", "debug"] ) - # Send a chunk to establish the connection avoiding the worker's timeout. - yield self._serialize_message(AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.ACK)) + # Assign the conversation id to the client. + if self._is_new_conversation: + yield self._serialize_conversation() + + # Send the last message with the initialized id. 
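+        # (self._latest_message was deep-copied in __init__ with a fresh uuid4 id.)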
+ yield self._serialize_message(self._latest_message) try: last_viz_message = None @@ -127,7 +122,15 @@ def _stream(self) -> Generator[str, None, None]: if isinstance(message, VisualizationMessage): last_viz_message = message yield self._serialize_message(message) - self._report_conversation(last_viz_message) + + # Check if the assistant has requested help. + state = self._graph.get_state(config) + if state.next: + yield self._serialize_message( + AssistantMessage(content=state.tasks[0].interrupts[0].value, id=str(uuid4())) + ) + else: + self._report_conversation_state(last_viz_message) except: # This is an unhandled error, so we just stop further generation at this point yield self._serialize_message(FailureMessage()) @@ -135,8 +138,34 @@ def _stream(self) -> Generator[str, None, None]: @property def _initial_state(self) -> AssistantState: - messages = [message.root for message in self._conversation.messages] - return {"messages": messages, "intermediate_steps": None, "plan": None} + return AssistantState(messages=[self._latest_message], start_id=self._latest_message.id) + + def _get_config(self) -> RunnableConfig: + callbacks = [langfuse_handler] if langfuse_handler else [] + config: RunnableConfig = { + "recursion_limit": 24, + "callbacks": callbacks, + "configurable": {"thread_id": self._conversation.id}, + } + return config + + def _init_or_update_state(self): + config = self._get_config() + snapshot = self._graph.get_state(config) + if snapshot.next: + saved_state = validate_state_update(snapshot.values) + self._state = saved_state + if saved_state.intermediate_steps: + intermediate_steps = saved_state.intermediate_steps.copy() + intermediate_steps[-1] = (intermediate_steps[-1][0], self._latest_message.content) + self._graph.update_state( + config, + PartialAssistantState(messages=[self._latest_message], intermediate_steps=intermediate_steps), + ) + return None + initial_state = self._initial_state + self._state = initial_state + return initial_state def _node_to_reasoning_message( self, node_name: AssistantNodeName, input: AssistantState @@ -152,7 +181,7 @@ def _node_to_reasoning_message( ): substeps: list[str] = [] if input: - if intermediate_steps := input.get("intermediate_steps"): + if intermediate_steps := input.intermediate_steps: for action, _ in intermediate_steps: match action.tool: case "retrieve_event_properties": @@ -178,42 +207,65 @@ def _node_to_reasoning_message( return None def _process_update(self, update: Any) -> BaseModel | None: - if is_value_update(update): - _, state_update = update + if is_state_update(update): + _, new_state = update + self._state = validate_state_update(new_state) + elif is_value_update(update) and (new_message := self._process_value_update(update)): + return new_message + elif is_message_update(update) and (new_message := self._process_message_update(update)): + return new_message + elif is_task_started_update(update) and (new_message := self._process_task_started_update(update)): + return new_message + return None - if AssistantNodeName.ROUTER in state_update and "messages" in state_update[AssistantNodeName.ROUTER]: - return state_update[AssistantNodeName.ROUTER]["messages"][0] - elif intersected_nodes := state_update.keys() & VISUALIZATION_NODES.keys(): - # Reset chunks when schema validation fails. 
- self._chunks = AIMessageChunk(content="") + def _process_value_update(self, update: GraphValueUpdateTuple) -> BaseModel | None: + _, maybe_state_update = update + state_update = validate_value_update(maybe_state_update) + + if node_val := state_update.get(AssistantNodeName.ROUTER): + if isinstance(node_val, PartialAssistantState) and node_val.messages: + return node_val.messages[0] + elif intersected_nodes := state_update.keys() & VISUALIZATION_NODES.keys(): + # Reset chunks when schema validation fails. + self._chunks = AIMessageChunk(content="") - node_name = intersected_nodes.pop() - if "messages" in state_update[node_name]: - return state_update[node_name]["messages"][0] - elif state_update[node_name].get("intermediate_steps", []): - return AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.GENERATION_ERROR) - elif AssistantNodeName.SUMMARIZER in state_update: + node_name = intersected_nodes.pop() + node_val = state_update[node_name] + if not isinstance(node_val, PartialAssistantState): + return None + if node_val.messages: + return node_val.messages[0] + elif node_val.intermediate_steps: + return AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.GENERATION_ERROR) + elif node_val := state_update.get(AssistantNodeName.SUMMARIZER): + if isinstance(node_val, PartialAssistantState) and node_val.messages: self._chunks = AIMessageChunk(content="") - return state_update[AssistantNodeName.SUMMARIZER]["messages"][0] - elif is_message_update(update): - langchain_message, langgraph_state = update[1] - if isinstance(langchain_message, AIMessageChunk): - if langgraph_state["langgraph_node"] in VISUALIZATION_NODES.keys(): - self._chunks += langchain_message # type: ignore - parsed_message = VISUALIZATION_NODES[langgraph_state["langgraph_node"]].parse_output( - self._chunks.tool_calls[0]["args"] - ) - if parsed_message: - return VisualizationMessage(answer=parsed_message.query) - elif langgraph_state["langgraph_node"] == AssistantNodeName.SUMMARIZER: - self._chunks += langchain_message # type: ignore - return AssistantMessage(content=self._chunks.content) - elif is_task_started_update(update): - _, task_update = update - node_name = task_update["payload"]["name"] # type: ignore - node_input = task_update["payload"]["input"] # type: ignore - if reasoning_message := self._node_to_reasoning_message(node_name, node_input): - return reasoning_message + return node_val.messages[0] + + return None + + def _process_message_update(self, update: GraphMessageUpdateTuple) -> BaseModel | None: + langchain_message, langgraph_state = update[1] + if isinstance(langchain_message, AIMessageChunk): + if langgraph_state["langgraph_node"] in VISUALIZATION_NODES.keys(): + self._chunks += langchain_message # type: ignore + parsed_message = VISUALIZATION_NODES[langgraph_state["langgraph_node"]].parse_output( + self._chunks.tool_calls[0]["args"] + ) + if parsed_message: + initiator_id = self._state.start_id if self._state is not None else None + return VisualizationMessage(answer=parsed_message.query, initiator=initiator_id) + elif langgraph_state["langgraph_node"] == AssistantNodeName.SUMMARIZER: + self._chunks += langchain_message # type: ignore + return AssistantMessage(content=self._chunks.content) + return None + + def _process_task_started_update(self, update: GraphTaskStartedUpdateTuple) -> BaseModel | None: + _, task_update = update + node_name = task_update["payload"]["name"] # type: ignore + node_input = task_update["payload"]["input"] # type: ignore + if reasoning_message := 
self._node_to_reasoning_message(node_name, node_input): + return reasoning_message return None def _serialize_message(self, message: BaseModel) -> str: @@ -224,9 +276,15 @@ def _serialize_message(self, message: BaseModel) -> str: output += f"event: {AssistantEventType.MESSAGE}\n" return output + f"data: {message.model_dump_json(exclude_none=True)}\n\n" - def _report_conversation(self, message: Optional[VisualizationMessage]): - human_message = self._conversation.messages[-1].root - if self._user and message and isinstance(human_message, HumanMessage): + def _serialize_conversation(self) -> str: + output = f"event: {AssistantEventType.CONVERSATION}\n" + json_conversation = json.dumps({"id": str(self._conversation.id)}) + output += f"data: {json_conversation}\n\n" + return output + + def _report_conversation_state(self, message: Optional[VisualizationMessage]): + human_message = self._latest_message + if self._user and message: report_user_action( self._user, "chat with ai", diff --git a/ee/hogai/django_checkpoint/__init__.py b/ee/hogai/django_checkpoint/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ee/hogai/django_checkpoint/checkpointer.py b/ee/hogai/django_checkpoint/checkpointer.py new file mode 100644 index 0000000000000..78817dca9df76 --- /dev/null +++ b/ee/hogai/django_checkpoint/checkpointer.py @@ -0,0 +1,309 @@ +import json +import random +import threading +from collections.abc import Iterable, Iterator, Sequence +from typing import Any, Optional, cast + +from django.db import transaction +from django.db.models import Q +from langchain_core.runnables import RunnableConfig +from langgraph.checkpoint.base import ( + WRITES_IDX_MAP, + BaseCheckpointSaver, + ChannelVersions, + Checkpoint, + CheckpointMetadata, + CheckpointTuple, + PendingWrite, + get_checkpoint_id, +) +from langgraph.checkpoint.serde.jsonplus import JsonPlusSerializer +from langgraph.checkpoint.serde.types import ChannelProtocol + +from ee.models.assistant import ConversationCheckpoint, ConversationCheckpointBlob, ConversationCheckpointWrite + + +class DjangoCheckpointer(BaseCheckpointSaver[str]): + jsonplus_serde = JsonPlusSerializer() + _lock: threading.Lock + + def __init__(self, *args): + super().__init__(*args) + self._lock = threading.Lock() + + def _load_writes(self, writes: Sequence[ConversationCheckpointWrite]) -> list[PendingWrite]: + return ( + [ + ( + str(checkpoint_write.task_id), + checkpoint_write.channel, + self.serde.loads_typed((checkpoint_write.type, checkpoint_write.blob)), + ) + for checkpoint_write in writes + if checkpoint_write.type is not None and checkpoint_write.blob is not None + ] + if writes + else [] + ) + + def _load_json(self, obj: Any): + return self.jsonplus_serde.loads(self.jsonplus_serde.dumps(obj)) + + def _dump_json(self, obj: Any) -> dict[str, Any]: + serialized_metadata = self.jsonplus_serde.dumps(obj) + # NOTE: we're using JSON serializer (not msgpack), so we need to remove null characters before writing + nulls_removed = serialized_metadata.decode().replace("\\u0000", "") + return json.loads(nulls_removed) + + def _get_checkpoint_qs( + self, + config: Optional[RunnableConfig], + filter: Optional[dict[str, Any]], + before: Optional[RunnableConfig], + ): + query = Q() + + # construct predicate for config filter + if config and "configurable" in config: + thread_id = config["configurable"].get("thread_id") + query &= Q(thread_id=thread_id) + checkpoint_ns = config["configurable"].get("checkpoint_ns") + if checkpoint_ns is not None: + query &= 
Q(checkpoint_ns=checkpoint_ns) + if checkpoint_id := get_checkpoint_id(config): + query &= Q(id=checkpoint_id) + + # construct predicate for metadata filter + if filter: + query &= Q(metadata__contains=filter) + + # construct predicate for `before` + if before is not None: + query &= Q(id__lt=get_checkpoint_id(before)) + + return ConversationCheckpoint.objects.filter(query).order_by("-id") + + def _get_checkpoint_channel_values( + self, checkpoint: ConversationCheckpoint + ) -> Iterable[ConversationCheckpointBlob]: + if not checkpoint.checkpoint: + return [] + loaded_checkpoint = self._load_json(checkpoint.checkpoint) + if "channel_versions" not in loaded_checkpoint: + return [] + query = Q() + for channel, version in loaded_checkpoint["channel_versions"].items(): + query |= Q(channel=channel, version=version) + return checkpoint.blobs.filter(query) + + def list( + self, + config: Optional[RunnableConfig], + *, + filter: Optional[dict[str, Any]] = None, + before: Optional[RunnableConfig] = None, + limit: Optional[int] = None, + ) -> Iterator[CheckpointTuple]: + """List checkpoints from the database. + + This method retrieves a list of checkpoint tuples from the Postgres database based + on the provided config. The checkpoints are ordered by checkpoint ID in descending order (newest first). + + Args: + config (RunnableConfig): The config to use for listing the checkpoints. + filter (Optional[Dict[str, Any]]): Additional filtering criteria for metadata. Defaults to None. + before (Optional[RunnableConfig]): If provided, only checkpoints before the specified checkpoint ID are returned. Defaults to None. + limit (Optional[int]): The maximum number of checkpoints to return. Defaults to None. + + Yields: + Iterator[CheckpointTuple]: An iterator of checkpoint tuples. + """ + qs = self._get_checkpoint_qs(config, filter, before) + if limit: + qs = qs[:limit] + + for checkpoint in qs: + channel_values = self._get_checkpoint_channel_values(checkpoint) + loaded_checkpoint: Checkpoint = self._load_json(checkpoint.checkpoint) + + checkpoint_dict: Checkpoint = { + **loaded_checkpoint, + "pending_sends": [ + self.serde.loads_typed((checkpoint_write.type, checkpoint_write.blob)) + for checkpoint_write in checkpoint.pending_sends + ], + "channel_values": { + checkpoint_blob.channel: self.serde.loads_typed((checkpoint_blob.type, checkpoint_blob.blob)) + for checkpoint_blob in channel_values + if checkpoint_blob.type is not None + and checkpoint_blob.type != "empty" + and checkpoint_blob.blob is not None + }, + } + + yield CheckpointTuple( + { + "configurable": { + "thread_id": checkpoint.thread_id, + "checkpoint_ns": checkpoint.checkpoint_ns, + "checkpoint_id": checkpoint.id, + } + }, + checkpoint_dict, + self._load_json(checkpoint.metadata), + ( + { + "configurable": { + "thread_id": checkpoint.thread_id, + "checkpoint_ns": checkpoint.checkpoint_ns, + "checkpoint_id": checkpoint.parent_checkpoint_id, + } + } + if checkpoint.parent_checkpoint + else None + ), + self._load_writes(checkpoint.pending_writes), + ) + + def get_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]: + """Get a checkpoint tuple from the database. + + This method retrieves a checkpoint tuple from the Postgres database based on the + provided config. If the config contains a "checkpoint_id" key, the checkpoint with + the matching thread ID and timestamp is retrieved. Otherwise, the latest checkpoint + for the given thread ID is retrieved. + + Args: + config (RunnableConfig): The config to use for retrieving the checkpoint. 
+
+        Returns:
+            Optional[CheckpointTuple]: The retrieved checkpoint tuple, or None if no matching checkpoint was found.
+        """
+        return next(self.list(config), None)
+
+    def put(
+        self,
+        config: RunnableConfig,
+        checkpoint: Checkpoint,
+        metadata: CheckpointMetadata,
+        new_versions: ChannelVersions,
+    ) -> RunnableConfig:
+        """Save a checkpoint to the database.
+
+        This method saves a checkpoint to the Postgres database. The checkpoint is associated
+        with the provided config and its parent config (if any).
+
+        Args:
+            config (RunnableConfig): The config to associate with the checkpoint.
+            checkpoint (Checkpoint): The checkpoint to save.
+            metadata (CheckpointMetadata): Additional metadata to save with the checkpoint.
+            new_versions (ChannelVersions): New channel versions as of this write.
+
+        Returns:
+            RunnableConfig: Updated configuration after storing the checkpoint.
+        """
+        configurable = config["configurable"]
+        thread_id: str = configurable["thread_id"]
+        checkpoint_id = get_checkpoint_id(config)
+        checkpoint_ns: str | None = configurable.get("checkpoint_ns") or ""
+
+        checkpoint_copy = cast(dict[str, Any], checkpoint.copy())
+        channel_values = checkpoint_copy.pop("channel_values", {})
+
+        next_config: RunnableConfig = {
+            "configurable": {
+                "thread_id": thread_id,
+                "checkpoint_ns": checkpoint_ns,
+                "checkpoint_id": checkpoint["id"],
+            }
+        }
+
+        with self._lock, transaction.atomic():
+            updated_checkpoint, _ = ConversationCheckpoint.objects.update_or_create(
+                id=checkpoint["id"],
+                thread_id=thread_id,
+                checkpoint_ns=checkpoint_ns,
+                defaults={
+                    "parent_checkpoint_id": checkpoint_id,
+                    "checkpoint": self._dump_json({**checkpoint_copy, "pending_sends": []}),
+                    "metadata": self._dump_json(metadata),
+                },
+            )
+
+            blobs = []
+            for channel, version in new_versions.items():
+                type, blob = (
+                    self.serde.dumps_typed(channel_values[channel]) if channel in channel_values else ("empty", None)
+                )
+                blobs.append(
+                    ConversationCheckpointBlob(
+                        checkpoint=updated_checkpoint,
+                        channel=channel,
+                        version=str(version),
+                        type=type,
+                        blob=blob,
+                    )
+                )
+
+            ConversationCheckpointBlob.objects.bulk_create(blobs, ignore_conflicts=True)
+        return next_config
+
+    def put_writes(
+        self,
+        config: RunnableConfig,
+        writes: Sequence[tuple[str, Any]],
+        task_id: str,
+    ) -> None:
+        """Store intermediate writes linked to a checkpoint.
+
+        This method saves intermediate writes associated with a checkpoint to the Postgres database.
+
+        Args:
+            config (RunnableConfig): Configuration of the related checkpoint.
+            writes (Sequence[tuple[str, Any]]): List of writes to store.
+            task_id (str): Identifier for the task creating the writes.
+        """
+        configurable = config["configurable"]
+        thread_id: str = configurable["thread_id"]
+        checkpoint_id = get_checkpoint_id(config)
+        checkpoint_ns: str | None = configurable.get("checkpoint_ns") or ""
+
+        with self._lock, transaction.atomic():
+            # `put_writes` and `put` may be called concurrently, with no guaranteed order,
+            # so we need to ensure the checkpoint row exists before creating writes.
+            # threading.Lock prevents races on the same checkpoint within a single pod.
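+            # get_or_create inserts a bare checkpoint row if `put` hasn't run yet; when `put`
+            # does run, its update_or_create fills in the checkpoint payload and metadata.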
+ checkpoint, _ = ConversationCheckpoint.objects.get_or_create( + id=checkpoint_id, thread_id=thread_id, checkpoint_ns=checkpoint_ns + ) + + writes_to_create = [] + for idx, (channel, value) in enumerate(writes): + type, blob = self.serde.dumps_typed(value) + writes_to_create.append( + ConversationCheckpointWrite( + checkpoint=checkpoint, + task_id=task_id, + idx=idx, + channel=channel, + type=type, + blob=blob, + ) + ) + + ConversationCheckpointWrite.objects.bulk_create( + writes_to_create, + update_conflicts=all(w[0] in WRITES_IDX_MAP for w in writes), + unique_fields=["checkpoint", "task_id", "idx"], + update_fields=["channel", "type", "blob"], + ) + + def get_next_version(self, current: Optional[str | int], channel: ChannelProtocol) -> str: + if current is None: + current_v = 0 + elif isinstance(current, int): + current_v = current + else: + current_v = int(current.split(".")[0]) + next_v = current_v + 1 + next_h = random.random() + return f"{next_v:032}.{next_h:016}" diff --git a/ee/hogai/django_checkpoint/test/test_checkpointer.py b/ee/hogai/django_checkpoint/test/test_checkpointer.py new file mode 100644 index 0000000000000..2f8fd7f4a60ed --- /dev/null +++ b/ee/hogai/django_checkpoint/test/test_checkpointer.py @@ -0,0 +1,274 @@ +# type: ignore + +from typing import Any, TypedDict + +from langchain_core.runnables import RunnableConfig +from langgraph.checkpoint.base import ( + Checkpoint, + CheckpointMetadata, + create_checkpoint, + empty_checkpoint, +) +from langgraph.checkpoint.base.id import uuid6 +from langgraph.errors import NodeInterrupt +from langgraph.graph import END, START +from langgraph.graph.state import CompiledStateGraph, StateGraph + +from ee.hogai.django_checkpoint.checkpointer import DjangoCheckpointer +from ee.models.assistant import ( + Conversation, + ConversationCheckpoint, + ConversationCheckpointBlob, + ConversationCheckpointWrite, +) +from posthog.test.base import NonAtomicBaseTest + + +class TestDjangoCheckpointer(NonAtomicBaseTest): + CLASS_DATA_LEVEL_SETUP = False + + def _build_graph(self, checkpointer: DjangoCheckpointer): + class State(TypedDict): + val: int + + graph = StateGraph(State) + + def handle_node1(state: State) -> State: + if state["val"] == 1: + raise NodeInterrupt("test") + return {"val": state["val"] + 1} + + graph.add_node("node1", handle_node1) + graph.add_node("node2", lambda state: state) + + graph.add_edge(START, "node1") + graph.add_edge("node1", "node2") + graph.add_edge("node2", END) + + return graph.compile(checkpointer=checkpointer) + + def test_saver(self): + thread1 = Conversation.objects.create(user=self.user, team=self.team) + thread2 = Conversation.objects.create(user=self.user, team=self.team) + + config_1: RunnableConfig = { + "configurable": { + "thread_id": thread1.id, + "checkpoint_ns": "", + } + } + chkpnt_1: Checkpoint = empty_checkpoint() + + config_2: RunnableConfig = { + "configurable": { + "thread_id": thread2.id, + "checkpoint_ns": "", + } + } + chkpnt_2: Checkpoint = create_checkpoint(chkpnt_1, {}, 1) + + config_3: RunnableConfig = { + "configurable": { + "thread_id": thread2.id, + "checkpoint_id": chkpnt_2["id"], + "checkpoint_ns": "inner", + } + } + chkpnt_3: Checkpoint = empty_checkpoint() + + metadata_1: CheckpointMetadata = { + "source": "input", + "step": 2, + "writes": {}, + "score": 1, + } + metadata_2: CheckpointMetadata = { + "source": "loop", + "step": 1, + "writes": {"foo": "bar"}, + "score": None, + } + metadata_3: CheckpointMetadata = {} + + test_data = { + "configs": [config_1, config_2, 
config_3], + "checkpoints": [chkpnt_1, chkpnt_2, chkpnt_3], + "metadata": [metadata_1, metadata_2, metadata_3], + } + + saver = DjangoCheckpointer() + + configs = test_data["configs"] + checkpoints = test_data["checkpoints"] + metadata = test_data["metadata"] + + saver.put(configs[0], checkpoints[0], metadata[0], {}) + saver.put(configs[1], checkpoints[1], metadata[1], {}) + saver.put(configs[2], checkpoints[2], metadata[2], {}) + + # call method / assertions + query_1 = {"source": "input"} # search by 1 key + query_2 = { + "step": 1, + "writes": {"foo": "bar"}, + } # search by multiple keys + query_3: dict[str, Any] = {} # search by no keys, return all checkpoints + query_4 = {"source": "update", "step": 1} # no match + + search_results_1 = list(saver.list(None, filter=query_1)) + assert len(search_results_1) == 1 + assert search_results_1[0].metadata == metadata[0] + + search_results_2 = list(saver.list(None, filter=query_2)) + assert len(search_results_2) == 1 + assert search_results_2[0].metadata == metadata[1] + + search_results_3 = list(saver.list(None, filter=query_3)) + assert len(search_results_3) == 3 + + search_results_4 = list(saver.list(None, filter=query_4)) + assert len(search_results_4) == 0 + + # search by config (defaults to checkpoints across all namespaces) + search_results_5 = list(saver.list({"configurable": {"thread_id": thread2.id}})) + assert len(search_results_5) == 2 + assert { + search_results_5[0].config["configurable"]["checkpoint_ns"], + search_results_5[1].config["configurable"]["checkpoint_ns"], + } == {"", "inner"} + + def test_channel_versions(self): + thread1 = Conversation.objects.create(user=self.user, team=self.team) + + chkpnt = { + "v": 1, + "ts": "2024-07-31T20:14:19.804150+00:00", + "id": str(uuid6(clock_seq=-2)), + "channel_values": { + "post": "hog", + "node": "node", + }, + "channel_versions": { + "__start__": 2, + "my_key": 3, + "start:node": 3, + "node": 3, + }, + "versions_seen": { + "__input__": {}, + "__start__": {"__start__": 1}, + "node": {"start:node": 2}, + }, + "pending_sends": [], + } + metadata = {"meta": "key"} + + write_config = {"configurable": {"thread_id": thread1.id, "checkpoint_ns": ""}} + read_config = {"configurable": {"thread_id": thread1.id}} + + saver = DjangoCheckpointer() + saver.put(write_config, chkpnt, metadata, {}) + + checkpoint = ConversationCheckpoint.objects.first() + self.assertIsNotNone(checkpoint) + self.assertEqual(checkpoint.thread, thread1) + self.assertEqual(checkpoint.checkpoint_ns, "") + self.assertEqual(str(checkpoint.id), chkpnt["id"]) + self.assertIsNone(checkpoint.parent_checkpoint) + chkpnt.pop("channel_values") + self.assertEqual(checkpoint.checkpoint, chkpnt) + self.assertEqual(checkpoint.metadata, metadata) + + checkpoints = list(saver.list(read_config)) + self.assertEqual(len(checkpoints), 1) + + checkpoint = saver.get(read_config) + self.assertEqual(checkpoint, checkpoints[0].checkpoint) + + def test_put_copies_checkpoint(self): + thread1 = Conversation.objects.create(user=self.user, team=self.team) + chkpnt = { + "v": 1, + "ts": "2024-07-31T20:14:19.804150+00:00", + "id": str(uuid6(clock_seq=-2)), + "channel_values": { + "post": "hog", + "node": "node", + }, + "channel_versions": { + "__start__": 2, + "my_key": 3, + "start:node": 3, + "node": 3, + }, + "versions_seen": { + "__input__": {}, + "__start__": {"__start__": 1}, + "node": {"start:node": 2}, + }, + "pending_sends": [], + } + metadata = {"meta": "key"} + write_config = {"configurable": {"thread_id": thread1.id, "checkpoint_ns": ""}} 
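+        # `put` copies the checkpoint and pops "channel_values" from the copy only,
+        # so the caller's dict should still contain "channel_values" afterwards.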
+ saver = DjangoCheckpointer() + saver.put(write_config, chkpnt, metadata, {}) + self.assertIn("channel_values", chkpnt) + + def test_concurrent_puts_and_put_writes(self): + graph: CompiledStateGraph = self._build_graph(DjangoCheckpointer()) + thread = Conversation.objects.create(user=self.user, team=self.team) + config = {"configurable": {"thread_id": str(thread.id)}} + graph.invoke( + {"val": 0}, + config=config, + ) + self.assertEqual(len(ConversationCheckpoint.objects.all()), 4) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 10) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 6) + + def test_resuming(self): + checkpointer = DjangoCheckpointer() + graph: CompiledStateGraph = self._build_graph(checkpointer) + thread = Conversation.objects.create(user=self.user, team=self.team) + config = {"configurable": {"thread_id": str(thread.id)}} + + graph.invoke( + {"val": 1}, + config=config, + ) + snapshot = graph.get_state(config) + self.assertIsNotNone(snapshot.next) + self.assertEqual(snapshot.tasks[0].interrupts[0].value, "test") + + self.assertEqual(len(ConversationCheckpoint.objects.all()), 2) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 4) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 3) + self.assertEqual(len(list(checkpointer.list(config))), 2) + + latest_checkpoint = ConversationCheckpoint.objects.last() + latest_write = ConversationCheckpointWrite.objects.filter(checkpoint=latest_checkpoint).first() + actual_checkpoint = checkpointer.get_tuple(config) + self.assertIsNotNone(actual_checkpoint) + self.assertIsNotNone(latest_write) + self.assertEqual(len(latest_checkpoint.writes.all()), 1) + blobs = list(latest_checkpoint.blobs.all()) + self.assertEqual(len(blobs), 3) + self.assertEqual(actual_checkpoint.checkpoint["id"], str(latest_checkpoint.id)) + self.assertEqual(len(actual_checkpoint.pending_writes), 1) + self.assertEqual(actual_checkpoint.pending_writes[0][0], str(latest_write.task_id)) + + graph.update_state(config, {"val": 2}) + # add the value update checkpoint + self.assertEqual(len(ConversationCheckpoint.objects.all()), 3) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 6) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 5) + self.assertEqual(len(list(checkpointer.list(config))), 3) + + res = graph.invoke(None, config=config) + self.assertEqual(len(ConversationCheckpoint.objects.all()), 5) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 12) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 9) + self.assertEqual(len(list(checkpointer.list(config))), 5) + self.assertEqual(res, {"val": 3}) + snapshot = graph.get_state(config) + self.assertFalse(snapshot.next) diff --git a/ee/hogai/eval/tests/test_eval_funnel_generator.py b/ee/hogai/eval/tests/test_eval_funnel_generator.py index cd7e93b260ae9..4d7876ca6f73c 100644 --- a/ee/hogai/eval/tests/test_eval_funnel_generator.py +++ b/ee/hogai/eval/tests/test_eval_funnel_generator.py @@ -1,9 +1,11 @@ +from typing import cast + from langgraph.graph.state import CompiledStateGraph from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName -from posthog.schema import AssistantFunnelsQuery, HumanMessage +from ee.hogai.utils.types import AssistantNodeName, AssistantState +from posthog.schema import AssistantFunnelsQuery, HumanMessage, VisualizationMessage class TestEvalFunnelGenerator(EvalBaseTest): @@ -14,8 +16,11 @@ def 
_call_node(self, query: str, plan: str) -> AssistantFunnelsQuery: .add_funnel_generator(AssistantNodeName.END) .compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)], "plan": plan}) - return state["messages"][-1].answer + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)], plan=plan), + self._get_config(), + ) + return cast(VisualizationMessage, AssistantState.model_validate(state).messages[-1]).answer def test_node_replaces_equals_with_contains(self): query = "what is the conversion rate from a page view to sign up for users with name John?" diff --git a/ee/hogai/eval/tests/test_eval_funnel_planner.py b/ee/hogai/eval/tests/test_eval_funnel_planner.py index 3760961f9bb03..9adbd75e77c6c 100644 --- a/ee/hogai/eval/tests/test_eval_funnel_planner.py +++ b/ee/hogai/eval/tests/test_eval_funnel_planner.py @@ -5,7 +5,7 @@ from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName +from ee.hogai.utils.types import AssistantNodeName, AssistantState from posthog.schema import HumanMessage @@ -40,8 +40,11 @@ def _call_node(self, query): .add_funnel_planner(AssistantNodeName.END) .compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)]}) - return state["plan"] + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)]), + self._get_config(), + ) + return AssistantState.model_validate(state).plan or "" def test_basic_funnel(self): query = "what was the conversion from a page view to sign up?" diff --git a/ee/hogai/eval/tests/test_eval_router.py b/ee/hogai/eval/tests/test_eval_router.py index 25a84769dbfc8..c1307e9d40f00 100644 --- a/ee/hogai/eval/tests/test_eval_router.py +++ b/ee/hogai/eval/tests/test_eval_router.py @@ -1,8 +1,10 @@ +from typing import cast + from langgraph.graph.state import CompiledStateGraph from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName +from ee.hogai.utils.types import AssistantNodeName, AssistantState from posthog.schema import HumanMessage, RouterMessage @@ -15,8 +17,11 @@ def _call_node(self, query: str | list): .compile() ) messages = [HumanMessage(content=query)] if isinstance(query, str) else query - state = graph.invoke({"messages": messages}) - return state["messages"][-1].content + state = graph.invoke( + AssistantState(messages=messages), + self._get_config(), + ) + return cast(RouterMessage, AssistantState.model_validate(state).messages[-1]).content def test_outputs_basic_trends_insight(self): query = "Show the $pageview trend" diff --git a/ee/hogai/eval/tests/test_eval_trends_generator.py b/ee/hogai/eval/tests/test_eval_trends_generator.py index c5341584ca2f7..496bbf0100b51 100644 --- a/ee/hogai/eval/tests/test_eval_trends_generator.py +++ b/ee/hogai/eval/tests/test_eval_trends_generator.py @@ -1,9 +1,11 @@ +from typing import cast + from langgraph.graph.state import CompiledStateGraph from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName -from posthog.schema import AssistantTrendsQuery, HumanMessage +from ee.hogai.utils.types import AssistantNodeName, AssistantState +from posthog.schema import AssistantTrendsQuery, HumanMessage, VisualizationMessage class TestEvalTrendsGenerator(EvalBaseTest): @@ -14,8 +16,11 @@ def _call_node(self, query: str, plan: str) -> AssistantTrendsQuery: .add_trends_generator(AssistantNodeName.END) 
.compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)], "plan": plan}) - return state["messages"][-1].answer + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)], plan=plan), + self._get_config(), + ) + return cast(VisualizationMessage, AssistantState.model_validate(state).messages[-1]).answer def test_node_replaces_equals_with_contains(self): query = "what is pageview trend for users with name John?" diff --git a/ee/hogai/eval/tests/test_eval_trends_planner.py b/ee/hogai/eval/tests/test_eval_trends_planner.py index e7ea741d03687..d4fbff456a91c 100644 --- a/ee/hogai/eval/tests/test_eval_trends_planner.py +++ b/ee/hogai/eval/tests/test_eval_trends_planner.py @@ -5,7 +5,7 @@ from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName +from ee.hogai.utils.types import AssistantNodeName, AssistantState from posthog.schema import HumanMessage @@ -40,8 +40,11 @@ def _call_node(self, query): .add_trends_planner(AssistantNodeName.END) .compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)]}) - return state["plan"] + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)]), + self._get_config(), + ) + return AssistantState.model_validate(state).plan or "" def test_no_excessive_property_filters(self): query = "Show the $pageview trend" diff --git a/ee/hogai/eval/utils.py b/ee/hogai/eval/utils.py index 1e50a75daefa2..6e03c4cfafa9f 100644 --- a/ee/hogai/eval/utils.py +++ b/ee/hogai/eval/utils.py @@ -3,15 +3,25 @@ import pytest from django.test import override_settings from flaky import flaky +from langchain_core.runnables import RunnableConfig +from ee.models.assistant import Conversation from posthog.demo.matrix.manager import MatrixManager from posthog.tasks.demo_create_data import HedgeboxMatrix -from posthog.test.base import BaseTest +from posthog.test.base import NonAtomicBaseTest @pytest.mark.skipif(os.environ.get("DEEPEVAL") != "YES", reason="Only runs for the assistant evaluation") @flaky(max_runs=3, min_passes=1) -class EvalBaseTest(BaseTest): +class EvalBaseTest(NonAtomicBaseTest): + def _get_config(self) -> RunnableConfig: + conversation = Conversation.objects.create(team=self.team, user=self.user) + return { + "configurable": { + "thread_id": conversation.id, + } + } + @classmethod def setUpTestData(cls): super().setUpTestData() diff --git a/ee/hogai/funnels/nodes.py b/ee/hogai/funnels/nodes.py index a55bc223847f2..6f71305e0b796 100644 --- a/ee/hogai/funnels/nodes.py +++ b/ee/hogai/funnels/nodes.py @@ -6,12 +6,12 @@ from ee.hogai.schema_generator.nodes import SchemaGeneratorNode, SchemaGeneratorToolsNode from ee.hogai.schema_generator.utils import SchemaGeneratorOutput from ee.hogai.taxonomy_agent.nodes import TaxonomyAgentPlannerNode, TaxonomyAgentPlannerToolsNode -from ee.hogai.utils import AssistantState +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import AssistantFunnelsQuery class FunnelPlannerNode(TaxonomyAgentPlannerNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = FunnelsTaxonomyAgentToolkit(self._team) prompt = ChatPromptTemplate.from_messages( [ @@ -23,7 +23,7 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: class FunnelPlannerToolsNode(TaxonomyAgentPlannerToolsNode): - def run(self, state: 
AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = FunnelsTaxonomyAgentToolkit(self._team) return super()._run_with_toolkit(state, toolkit, config=config) @@ -36,7 +36,7 @@ class FunnelGeneratorNode(SchemaGeneratorNode[AssistantFunnelsQuery]): OUTPUT_MODEL = FunnelsSchemaGeneratorOutput OUTPUT_SCHEMA = FUNNEL_SCHEMA - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt = ChatPromptTemplate.from_messages( [ ("system", FUNNEL_SYSTEM_PROMPT), diff --git a/ee/hogai/funnels/prompts.py b/ee/hogai/funnels/prompts.py index b2deec894a070..3808809c173a7 100644 --- a/ee/hogai/funnels/prompts.py +++ b/ee/hogai/funnels/prompts.py @@ -12,6 +12,8 @@ {{react_format}} +{{react_human_in_the_loop}} + Below you will find information on how to correctly discover the taxonomy of the user's data. diff --git a/ee/hogai/funnels/test/test_nodes.py b/ee/hogai/funnels/test/test_nodes.py index 5c65b14110599..4f4e9fca0e5d4 100644 --- a/ee/hogai/funnels/test/test_nodes.py +++ b/ee/hogai/funnels/test/test_nodes.py @@ -4,6 +4,7 @@ from langchain_core.runnables import RunnableLambda from ee.hogai.funnels.nodes import FunnelGeneratorNode, FunnelsSchemaGeneratorOutput +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import ( AssistantFunnelsQuery, HumanMessage, @@ -15,6 +16,7 @@ @override_settings(IN_UNIT_TESTING=True) class TestFunnelsGeneratorNode(ClickhouseTestMixin, APIBaseTest): def setUp(self): + super().setUp() self.schema = AssistantFunnelsQuery(series=[]) def test_node_runs(self): @@ -24,16 +26,13 @@ def test_node_runs(self): lambda _: FunnelsSchemaGeneratorOutput(query=self.schema).model_dump() ) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "plan": "Plan", - }, + AssistantState(messages=[HumanMessage(content="Text")], plan="Plan"), {}, ) self.assertEqual( new_state, - { - "messages": [VisualizationMessage(answer=self.schema, plan="Plan", done=True)], - "intermediate_steps": None, - }, + PartialAssistantState( + messages=[VisualizationMessage(answer=self.schema, plan="Plan", id=new_state.messages[0].id)], + intermediate_steps=None, + ), ) diff --git a/ee/hogai/funnels/toolkit.py b/ee/hogai/funnels/toolkit.py index 8d6407027aac1..ae603519cc331 100644 --- a/ee/hogai/funnels/toolkit.py +++ b/ee/hogai/funnels/toolkit.py @@ -1,5 +1,5 @@ from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentToolkit, ToolkitTool -from ee.hogai.utils import dereference_schema +from ee.hogai.utils.helpers import dereference_schema from posthog.schema import AssistantFunnelsQuery diff --git a/ee/hogai/graph.py b/ee/hogai/graph.py index 79e5f914097ce..bf961d6bb9aa8 100644 --- a/ee/hogai/graph.py +++ b/ee/hogai/graph.py @@ -1,10 +1,10 @@ from collections.abc import Hashable from typing import Optional, cast -from langfuse.callback import CallbackHandler +from langchain_core.runnables.base import RunnableLike from langgraph.graph.state import StateGraph -from ee import settings +from ee.hogai.django_checkpoint.checkpointer import DjangoCheckpointer from ee.hogai.funnels.nodes import ( FunnelGeneratorNode, FunnelGeneratorToolsNode, @@ -19,15 +19,10 @@ TrendsPlannerNode, TrendsPlannerToolsNode, ) -from ee.hogai.utils import AssistantNodeName, AssistantState +from ee.hogai.utils.types import AssistantNodeName, AssistantState from 
posthog.models.team.team import Team -if settings.LANGFUSE_PUBLIC_KEY: - langfuse_handler = CallbackHandler( - public_key=settings.LANGFUSE_PUBLIC_KEY, secret_key=settings.LANGFUSE_SECRET_KEY, host=settings.LANGFUSE_HOST - ) -else: - langfuse_handler = None +checkpointer = DjangoCheckpointer() class AssistantGraph: @@ -45,10 +40,14 @@ def add_edge(self, from_node: AssistantNodeName, to_node: AssistantNodeName): self._graph.add_edge(from_node, to_node) return self + def add_node(self, node: AssistantNodeName, action: RunnableLike): + self._graph.add_node(node, action) + return self + def compile(self): if not self._has_start_node: raise ValueError("Start node not added to the graph") - return self._graph.compile() + return self._graph.compile(checkpointer=checkpointer) def add_start(self): return self.add_edge(AssistantNodeName.START, AssistantNodeName.ROUTER) diff --git a/ee/hogai/router/nodes.py b/ee/hogai/router/nodes.py index c9151faaabc29..f6aeacdebbe6b 100644 --- a/ee/hogai/router/nodes.py +++ b/ee/hogai/router/nodes.py @@ -1,4 +1,5 @@ from typing import Literal, cast +from uuid import uuid4 from langchain_core.messages import AIMessage as LangchainAIMessage, BaseMessage from langchain_core.prompts import ChatPromptTemplate @@ -11,7 +12,8 @@ ROUTER_SYSTEM_PROMPT, ROUTER_USER_PROMPT, ) -from ee.hogai.utils import AssistantState, AssistantNode +from ee.hogai.utils.nodes import AssistantNode +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import HumanMessage, RouterMessage RouteName = Literal["trends", "funnel"] @@ -22,7 +24,7 @@ class RouterOutput(BaseModel): class RouterNode(AssistantNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt = ChatPromptTemplate.from_messages( [ ("system", ROUTER_SYSTEM_PROMPT), @@ -31,10 +33,10 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: ) + self._construct_messages(state) chain = prompt | self._model output: RouterOutput = chain.invoke({}, config) - return {"messages": [RouterMessage(content=output.visualization_type)]} + return PartialAssistantState(messages=[RouterMessage(content=output.visualization_type, id=str(uuid4()))]) def router(self, state: AssistantState) -> RouteName: - last_message = state["messages"][-1] + last_message = state.messages[-1] if isinstance(last_message, RouterMessage): return cast(RouteName, last_message.content) raise ValueError("Invalid route.") @@ -47,7 +49,7 @@ def _model(self): def _construct_messages(self, state: AssistantState): history: list[BaseMessage] = [] - for message in state["messages"]: + for message in state.messages: if isinstance(message, HumanMessage): history += ChatPromptTemplate.from_messages( [("user", ROUTER_USER_PROMPT.strip())], template_format="mustache" diff --git a/ee/hogai/router/test/test_nodes.py b/ee/hogai/router/test/test_nodes.py index 06014fb0b9f59..53074a381b804 100644 --- a/ee/hogai/router/test/test_nodes.py +++ b/ee/hogai/router/test/test_nodes.py @@ -2,11 +2,11 @@ from unittest.mock import patch from django.test import override_settings -from langchain_core.messages import AIMessage as LangchainAIMessage -from langchain_core.messages import HumanMessage as LangchainHumanMessage +from langchain_core.messages import AIMessage as LangchainAIMessage, HumanMessage as LangchainHumanMessage from langchain_core.runnables import RunnableLambda from ee.hogai.router.nodes import 
RouterNode, RouterOutput +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import ( HumanMessage, RouterMessage, @@ -19,7 +19,7 @@ class TestRouterNode(ClickhouseTestMixin, APIBaseTest): def test_router(self): node = RouterNode(self.team) - state: Any = {"messages": [RouterMessage(content="trends")]} + state: Any = AssistantState(messages=[RouterMessage(content="trends")]) self.assertEqual(node.router(state), "trends") def test_node_runs(self): @@ -28,28 +28,36 @@ def test_node_runs(self): return_value=RunnableLambda(lambda _: RouterOutput(visualization_type="funnel")), ): node = RouterNode(self.team) - state: Any = {"messages": [HumanMessage(content="generate trends")]} - self.assertEqual(node.run(state, {}), {"messages": [RouterMessage(content="funnel")]}) + state: Any = AssistantState(messages=[HumanMessage(content="generate trends")]) + next_state = node.run(state, {}) + self.assertEqual( + next_state, + PartialAssistantState(messages=[RouterMessage(content="funnel", id=next_state.messages[0].id)]), + ) with patch( "ee.hogai.router.nodes.RouterNode._model", return_value=RunnableLambda(lambda _: RouterOutput(visualization_type="trends")), ): node = RouterNode(self.team) - state: Any = {"messages": [HumanMessage(content="generate trends")]} - self.assertEqual(node.run(state, {}), {"messages": [RouterMessage(content="trends")]}) + state: Any = AssistantState(messages=[HumanMessage(content="generate trends")]) + next_state = node.run(state, {}) + self.assertEqual( + next_state, + PartialAssistantState(messages=[RouterMessage(content="trends", id=next_state.messages[0].id)]), + ) def test_node_reconstructs_conversation(self): node = RouterNode(self.team) - state: Any = {"messages": [HumanMessage(content="generate trends")]} + state: Any = AssistantState(messages=[HumanMessage(content="generate trends")]) self.assertEqual(node._construct_messages(state), [LangchainHumanMessage(content="Question: generate trends")]) - state = { - "messages": [ + state = AssistantState( + messages=[ HumanMessage(content="generate trends"), RouterMessage(content="trends"), VisualizationMessage(), ] - } + ) self.assertEqual( node._construct_messages(state), [LangchainHumanMessage(content="Question: generate trends"), LangchainAIMessage(content="trends")], diff --git a/ee/hogai/schema_generator/nodes.py b/ee/hogai/schema_generator/nodes.py index c5e7ffbba85c4..4bed02fd462cc 100644 --- a/ee/hogai/schema_generator/nodes.py +++ b/ee/hogai/schema_generator/nodes.py @@ -1,10 +1,16 @@ -import itertools import xml.etree.ElementTree as ET +from collections.abc import Sequence from functools import cached_property from typing import Generic, Optional, TypeVar +from uuid import uuid4 from langchain_core.agents import AgentAction -from langchain_core.messages import AIMessage as LangchainAssistantMessage, BaseMessage, merge_message_runs +from langchain_core.messages import ( + AIMessage as LangchainAssistantMessage, + BaseMessage, + HumanMessage as LangchainHumanMessage, + merge_message_runs, +) from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate from langchain_core.runnables import RunnableConfig from langchain_openai import ChatOpenAI @@ -23,10 +29,14 @@ QUESTION_PROMPT, ) from ee.hogai.schema_generator.utils import SchemaGeneratorOutput -from ee.hogai.utils import AssistantNode, AssistantState, filter_visualization_conversation +from ee.hogai.utils.helpers import find_last_message_of_type, slice_messages_to_conversation_start +from 
ee.hogai.utils.nodes import AssistantNode +from ee.hogai.utils.types import AssistantMessageUnion, AssistantState, PartialAssistantState from posthog.models.group_type_mapping import GroupTypeMapping from posthog.schema import ( + AssistantMessage, FailureMessage, + HumanMessage, VisualizationMessage, ) @@ -63,9 +73,10 @@ def _run_with_prompt( state: AssistantState, prompt: ChatPromptTemplate, config: Optional[RunnableConfig] = None, - ) -> AssistantState: - generated_plan = state.get("plan", "") - intermediate_steps = state.get("intermediate_steps") or [] + ) -> PartialAssistantState: + start_id = state.start_id + generated_plan = state.plan or "" + intermediate_steps = state.intermediate_steps or [] validation_error_message = intermediate_steps[-1][1] if intermediate_steps else None generation_prompt = prompt + self._construct_messages(state, validation_error_message=validation_error_message) @@ -79,41 +90,42 @@ def _run_with_prompt( except PydanticOutputParserException as e: # Generation step is expensive. After a second unsuccessful attempt, it's better to send a failure message. if len(intermediate_steps) >= 2: - return { - "messages": [ + return PartialAssistantState( + messages=[ FailureMessage( content=f"Oops! It looks like I’m having trouble generating this {self.INSIGHT_NAME} insight. Could you please try again?" ) ], - "intermediate_steps": None, - } + intermediate_steps=None, + ) - return { - "intermediate_steps": [ + return PartialAssistantState( + intermediate_steps=[ *intermediate_steps, (AgentAction("handle_incorrect_response", e.llm_output, e.validation_message), None), ], - } + ) - return { - "messages": [ + return PartialAssistantState( + messages=[ VisualizationMessage( plan=generated_plan, answer=message.query, - done=True, + initiator=start_id, + id=str(uuid4()), ) ], - "intermediate_steps": None, - } + intermediate_steps=None, + ) def router(self, state: AssistantState): - if state.get("intermediate_steps") is not None: + if state.intermediate_steps: return "tools" return "next" @cached_property def _group_mapping_prompt(self) -> str: - groups = GroupTypeMapping.objects.filter(team=self._team).order_by("group_type_index") + groups = GroupTypeMapping.objects.filter(project_id=self._team.project_id).order_by("group_type_index") if not groups: return "The user has not defined any groups." @@ -123,15 +135,25 @@ def _group_mapping_prompt(self) -> str: ) return ET.tostring(root, encoding="unicode") + def _get_human_viz_message_mapping(self, messages: Sequence[AssistantMessageUnion]) -> dict[str, int]: + mapping: dict[str, int] = {} + for idx, msg in enumerate(messages): + if isinstance(msg, VisualizationMessage) and msg.initiator is not None: + mapping[msg.initiator] = idx + return mapping + def _construct_messages( self, state: AssistantState, validation_error_message: Optional[str] = None ) -> list[BaseMessage]: """ Reconstruct the conversation for the generation. Take all previously generated questions, plans, and schemas, and return the history. 
""" - messages = state.get("messages", []) - generated_plan = state.get("plan", "") + messages = state.messages + generated_plan = state.plan + start_id = state.start_id + if start_id is not None: + messages = slice_messages_to_conversation_start(messages, start_id) if len(messages) == 0: return [] @@ -141,43 +163,61 @@ def _construct_messages( ) ] - human_messages, visualization_messages = filter_visualization_conversation(messages) - first_ai_message = True + msg_mapping = self._get_human_viz_message_mapping(messages) + initiator_message = messages[-1] + last_viz_message = find_last_message_of_type(messages, VisualizationMessage) + + for message in messages: + # The initial human message and the new plan are added to the end of the conversation. + if message == initiator_message: + continue + if isinstance(message, HumanMessage): + if message.id and (viz_message_idx := msg_mapping.get(message.id)): + # Plans go first. + viz_message = messages[viz_message_idx] + if isinstance(viz_message, VisualizationMessage): + conversation.append( + HumanMessagePromptTemplate.from_template(PLAN_PROMPT, template_format="mustache").format( + plan=viz_message.plan or "" + ) + ) - for idx, (human_message, ai_message) in enumerate( - itertools.zip_longest(human_messages, visualization_messages) - ): - # Plans go first - if ai_message: - conversation.append( - HumanMessagePromptTemplate.from_template( - PLAN_PROMPT if first_ai_message else NEW_PLAN_PROMPT, - template_format="mustache", - ).format(plan=ai_message.plan or "") - ) - first_ai_message = False - elif generated_plan: - conversation.append( - HumanMessagePromptTemplate.from_template( - PLAN_PROMPT if first_ai_message else NEW_PLAN_PROMPT, - template_format="mustache", - ).format(plan=generated_plan) + # Augment with the prompt previous initiator messages. + conversation.append( + HumanMessagePromptTemplate.from_template(QUESTION_PROMPT, template_format="mustache").format( + question=message.content + ) + ) + # Otherwise, just append the human message. + else: + conversation.append(LangchainHumanMessage(content=message.content)) + # Summary, human-in-the-loop messages. + elif isinstance(message, AssistantMessage): + conversation.append(LangchainAssistantMessage(content=message.content)) + + # Include only last generated schema because it doesn't need more context. + if last_viz_message: + conversation.append( + LangchainAssistantMessage( + content=last_viz_message.answer.model_dump_json() if last_viz_message.answer else "" ) - - # Then questions - if human_message: + ) + # Add the initiator message and the generated plan to the end, so instructions are clear. + if isinstance(initiator_message, HumanMessage): + if generated_plan: + plan_prompt = PLAN_PROMPT if messages[0] == initiator_message else NEW_PLAN_PROMPT conversation.append( - HumanMessagePromptTemplate.from_template(QUESTION_PROMPT, template_format="mustache").format( - question=human_message.content + HumanMessagePromptTemplate.from_template(plan_prompt, template_format="mustache").format( + plan=generated_plan or "" ) ) - - # Then schemas, but include only last generated schema because it doesn't need more context. 
- if ai_message and idx + 1 == len(visualization_messages): - conversation.append( - LangchainAssistantMessage(content=ai_message.answer.model_dump_json() if ai_message.answer else "") + conversation.append( + HumanMessagePromptTemplate.from_template(QUESTION_PROMPT, template_format="mustache").format( + question=initiator_message.content ) + ) + # Retries must be added to the end of the conversation. if validation_error_message: conversation.append( HumanMessagePromptTemplate.from_template(FAILOVER_PROMPT, template_format="mustache").format( @@ -193,10 +233,10 @@ class SchemaGeneratorToolsNode(AssistantNode): Used for failover from generation errors. """ - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: - intermediate_steps = state.get("intermediate_steps", []) + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: + intermediate_steps = state.intermediate_steps or [] if not intermediate_steps: - return state + return PartialAssistantState() action, _ = intermediate_steps[-1] prompt = ( @@ -205,9 +245,9 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: .content ) - return { - "intermediate_steps": [ + return PartialAssistantState( + intermediate_steps=[ *intermediate_steps[:-1], (action, str(prompt)), ] - } + ) diff --git a/ee/hogai/schema_generator/test/test_nodes.py b/ee/hogai/schema_generator/test/test_nodes.py index 795045af50b56..b44154b93b927 100644 --- a/ee/hogai/schema_generator/test/test_nodes.py +++ b/ee/hogai/schema_generator/test/test_nodes.py @@ -4,10 +4,11 @@ from django.test import override_settings from langchain_core.agents import AgentAction from langchain_core.prompts import ChatPromptTemplate -from langchain_core.runnables import RunnableLambda +from langchain_core.runnables import RunnableConfig, RunnableLambda from ee.hogai.schema_generator.nodes import SchemaGeneratorNode, SchemaGeneratorToolsNode from ee.hogai.schema_generator.utils import SchemaGeneratorOutput +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import ( AssistantMessage, AssistantTrendsQuery, @@ -16,7 +17,7 @@ RouterMessage, VisualizationMessage, ) -from posthog.test.base import APIBaseTest, ClickhouseTestMixin +from posthog.test.base import BaseTest TestSchema = SchemaGeneratorOutput[AssistantTrendsQuery] @@ -26,7 +27,7 @@ class DummyGeneratorNode(SchemaGeneratorNode[AssistantTrendsQuery]): OUTPUT_MODEL = SchemaGeneratorOutput[AssistantTrendsQuery] OUTPUT_SCHEMA = {} - def run(self, state, config): + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt = ChatPromptTemplate.from_messages( [ ("system", "system_prompt"), @@ -36,8 +37,9 @@ def run(self, state, config): @override_settings(IN_UNIT_TESTING=True) -class TestSchemaGeneratorNode(ClickhouseTestMixin, APIBaseTest): +class TestSchemaGeneratorNode(BaseTest): def setUp(self): + super().setUp() self.schema = AssistantTrendsQuery(series=[]) def test_node_runs(self): @@ -45,23 +47,23 @@ def test_node_runs(self): with patch.object(DummyGeneratorNode, "_model") as generator_model_mock: generator_model_mock.return_value = RunnableLambda(lambda _: TestSchema(query=self.schema).model_dump()) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "plan": "Plan", - }, + AssistantState( + messages=[HumanMessage(content="Text", id="0")], + plan="Plan", + start_id="0", + ), {}, ) - self.assertEqual( - new_state, - { - "messages": 
[VisualizationMessage(answer=self.schema, plan="Plan", done=True)], - "intermediate_steps": None, - }, - ) + self.assertIsNone(new_state.intermediate_steps) + self.assertEqual(len(new_state.messages), 1) + self.assertEqual(new_state.messages[0].type, "ai/viz") + self.assertEqual(new_state.messages[0].answer, self.schema) - def test_agent_reconstructs_conversation(self): + def test_agent_reconstructs_conversation_and_does_not_add_an_empty_plan(self): node = DummyGeneratorNode(self.team) - history = node._construct_messages({"messages": [HumanMessage(content="Text")]}) + history = node._construct_messages( + AssistantState(messages=[HumanMessage(content="Text", id="0")], start_id="0") + ) self.assertEqual(len(history), 2) self.assertEqual(history[0].type, "human") self.assertIn("mapping", history[0].content) @@ -69,7 +71,11 @@ def test_agent_reconstructs_conversation(self): self.assertIn("Answer to this question:", history[1].content) self.assertNotIn("{{question}}", history[1].content) - history = node._construct_messages({"messages": [HumanMessage(content="Text")], "plan": "randomplan"}) + def test_agent_reconstructs_conversation_adds_plan(self): + node = DummyGeneratorNode(self.team) + history = node._construct_messages( + AssistantState(messages=[HumanMessage(content="Text", id="0")], plan="randomplan", start_id="0") + ) self.assertEqual(len(history), 3) self.assertEqual(history[0].type, "human") self.assertIn("mapping", history[0].content) @@ -82,16 +88,18 @@ def test_agent_reconstructs_conversation(self): self.assertNotIn("{{question}}", history[2].content) self.assertIn("Text", history[2].content) + def test_agent_reconstructs_conversation_can_handle_follow_ups(self): node = DummyGeneratorNode(self.team) history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - VisualizationMessage(answer=self.schema, plan="randomplan"), - HumanMessage(content="Follow Up"), + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"), + HumanMessage(content="Follow Up", id="2"), ], - "plan": "newrandomplan", - } + plan="newrandomplan", + start_id="2", + ) ) self.assertEqual(len(history), 6) @@ -116,13 +124,41 @@ def test_agent_reconstructs_conversation(self): self.assertNotIn("{{question}}", history[5].content) self.assertIn("Follow Up", history[5].content) - def test_agent_reconstructs_conversation_and_merges_messages(self): + def test_agent_reconstructs_conversation_and_does_not_merge_messages(self): + node = DummyGeneratorNode(self.team) + history = node._construct_messages( + AssistantState( + messages=[HumanMessage(content="Te", id="0"), HumanMessage(content="xt", id="1")], + plan="randomplan", + start_id="1", + ) + ) + self.assertEqual(len(history), 4) + self.assertEqual(history[0].type, "human") + self.assertIn("mapping", history[0].content) + self.assertIn("Te", history[1].content) + self.assertEqual(history[2].type, "human") + self.assertNotIn("{{plan}}", history[2].content) + self.assertIn("randomplan", history[2].content) + self.assertEqual(history[3].type, "human") + self.assertIn("Answer to this question:", history[3].content) + self.assertNotIn("{{question}}", history[3].content) + self.assertEqual(history[3].type, "human") + self.assertIn("xt", history[3].content) + + def test_filters_out_human_in_the_loop_after_initiator(self): node = DummyGeneratorNode(self.team) history = node._construct_messages( - { - "messages": [HumanMessage(content="Te"), 
HumanMessage(content="xt")], - "plan": "randomplan", - } + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + VisualizationMessage(answer=self.schema, plan="randomplan", initiator="0", id="1"), + HumanMessage(content="Follow", id="2"), + HumanMessage(content="Up", id="3"), + ], + plan="newrandomplan", + start_id="0", + ) ) self.assertEqual(len(history), 3) self.assertEqual(history[0].type, "human") @@ -134,104 +170,114 @@ def test_agent_reconstructs_conversation_and_merges_messages(self): self.assertEqual(history[2].type, "human") self.assertIn("Answer to this question:", history[2].content) self.assertNotIn("{{question}}", history[2].content) - self.assertIn("Te\nxt", history[2].content) + self.assertIn("Text", history[2].content) + def test_preserves_human_in_the_loop_before_initiator(self): node = DummyGeneratorNode(self.team) history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - VisualizationMessage(answer=self.schema, plan="randomplan"), - HumanMessage(content="Follow"), - HumanMessage(content="Up"), + AssistantState( + messages=[ + HumanMessage(content="Question 1", id="0"), + AssistantMessage(content="Loop", id="1"), + HumanMessage(content="Answer", id="2"), + VisualizationMessage(answer=self.schema, plan="randomplan", initiator="0", id="3"), + HumanMessage(content="Question 2", id="4"), ], - "plan": "newrandomplan", - } + plan="newrandomplan", + start_id="4", + ) ) - - self.assertEqual(len(history), 6) + self.assertEqual(len(history), 8) self.assertEqual(history[0].type, "human") self.assertIn("mapping", history[0].content) self.assertEqual(history[1].type, "human") self.assertIn("the plan", history[1].content) self.assertNotIn("{{plan}}", history[1].content) self.assertIn("randomplan", history[1].content) - self.assertEqual(history[2].type, "human") - self.assertIn("Answer to this question:", history[2].content) self.assertNotIn("{{question}}", history[2].content) - self.assertIn("Text", history[2].content) + self.assertIn("Question 1", history[2].content) self.assertEqual(history[3].type, "ai") - self.assertEqual(history[3].content, self.schema.model_dump_json()) + self.assertEqual("Loop", history[3].content) self.assertEqual(history[4].type, "human") - self.assertIn("the new plan", history[4].content) - self.assertNotIn("{{plan}}", history[4].content) - self.assertIn("newrandomplan", history[4].content) - self.assertEqual(history[5].type, "human") - self.assertIn("Answer to this question:", history[5].content) - self.assertNotIn("{{question}}", history[5].content) - self.assertIn("Follow\nUp", history[5].content) + self.assertEqual("Answer", history[4].content) + self.assertEqual(history[5].type, "ai") + self.assertEqual(history[6].type, "human") + self.assertIn("the new plan", history[6].content) + self.assertIn("newrandomplan", history[6].content) + self.assertEqual(history[7].type, "human") + self.assertNotIn("{{question}}", history[7].content) + self.assertIn("Question 2", history[7].content) def test_agent_reconstructs_typical_conversation(self): node = DummyGeneratorNode(self.team) history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Question 1"), - RouterMessage(content="trends"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"), - AssistantMessage(content="Summary 1"), - HumanMessage(content="Question 2"), - RouterMessage(content="funnel"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"), - AssistantMessage(content="Summary 2"), - 
HumanMessage(content="Question 3"), - RouterMessage(content="funnel"), + AssistantState( + messages=[ + HumanMessage(content="Question 1", id="0"), + RouterMessage(content="trends", id="1"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1", initiator="0", id="2"), + AssistantMessage(content="Summary 1", id="3"), + HumanMessage(content="Question 2", id="4"), + RouterMessage(content="funnel", id="5"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2", initiator="4", id="6"), + AssistantMessage(content="Summary 2", id="7"), + HumanMessage(content="Question 3", id="8"), + RouterMessage(content="funnel", id="9"), ], - "plan": "Plan 3", - } + plan="Plan 3", + start_id="8", + ) ) - self.assertEqual(len(history), 8) + + self.assertEqual(len(history), 10) self.assertEqual(history[0].type, "human") self.assertIn("mapping", history[0].content) self.assertEqual(history[1].type, "human") self.assertIn("Plan 1", history[1].content) self.assertEqual(history[2].type, "human") self.assertIn("Question 1", history[2].content) - self.assertEqual(history[3].type, "human") - self.assertIn("Plan 2", history[3].content) + self.assertEqual(history[3].type, "ai") + self.assertEqual(history[3].content, "Summary 1") self.assertEqual(history[4].type, "human") - self.assertIn("Question 2", history[4].content) - self.assertEqual(history[5].type, "ai") - self.assertEqual(history[6].type, "human") - self.assertIn("Plan 3", history[6].content) - self.assertEqual(history[7].type, "human") - self.assertIn("Question 3", history[7].content) - - def test_prompt(self): + self.assertIn("Plan 2", history[4].content) + self.assertEqual(history[5].type, "human") + self.assertIn("Question 2", history[5].content) + self.assertEqual(history[6].type, "ai") + self.assertEqual(history[6].content, "Summary 2") + self.assertEqual(history[7].type, "ai") + self.assertEqual(history[8].type, "human") + self.assertIn("Plan 3", history[8].content) + self.assertEqual(history[9].type, "human") + self.assertIn("Question 3", history[9].content) + + def test_prompt_messages_merged(self): node = DummyGeneratorNode(self.team) - state = { - "messages": [ - HumanMessage(content="Question 1"), - RouterMessage(content="trends"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"), - AssistantMessage(content="Summary 1"), - HumanMessage(content="Question 2"), - RouterMessage(content="funnel"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"), - AssistantMessage(content="Summary 2"), - HumanMessage(content="Question 3"), - RouterMessage(content="funnel"), + state = AssistantState( + messages=[ + HumanMessage(content="Question 1", id="0"), + RouterMessage(content="trends", id="1"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1", initiator="0", id="2"), + AssistantMessage(content="Summary 1", id="3"), + HumanMessage(content="Question 2", id="4"), + RouterMessage(content="funnel", id="5"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2", initiator="4", id="6"), + AssistantMessage(content="Summary 2", id="7"), + HumanMessage(content="Question 3", id="8"), + RouterMessage(content="funnel", id="9"), ], - "plan": "Plan 3", - } + plan="Plan 3", + start_id="8", + ) with patch.object(DummyGeneratorNode, "_model") as generator_model_mock: def assert_prompt(prompt): - self.assertEqual(len(prompt), 4) + self.assertEqual(len(prompt), 6) self.assertEqual(prompt[0].type, "system") 
self.assertEqual(prompt[1].type, "human") self.assertEqual(prompt[2].type, "ai") self.assertEqual(prompt[3].type, "human") + self.assertEqual(prompt[4].type, "ai") + self.assertEqual(prompt[5].type, "human") generator_model_mock.return_value = RunnableLambda(assert_prompt) node.run(state, {}) @@ -244,19 +290,17 @@ def test_failover_with_incorrect_schema(self): schema["query"] = [] generator_model_mock.return_value = RunnableLambda(lambda _: json.dumps(schema)) - new_state = node.run({"messages": [HumanMessage(content="Text")]}, {}) - self.assertIn("intermediate_steps", new_state) - self.assertEqual(len(new_state["intermediate_steps"]), 1) + new_state = node.run(AssistantState(messages=[HumanMessage(content="Text")]), {}) + self.assertEqual(len(new_state.intermediate_steps), 1) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [(AgentAction(tool="", tool_input="", log="exception"), "exception")], - }, + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[(AgentAction(tool="", tool_input="", log="exception"), "exception")], + ), {}, ) - self.assertIn("intermediate_steps", new_state) - self.assertEqual(len(new_state["intermediate_steps"]), 2) + self.assertEqual(len(new_state.intermediate_steps), 2) def test_node_leaves_failover(self): node = DummyGeneratorNode(self.team) @@ -266,25 +310,25 @@ def test_node_leaves_failover(self): return_value=RunnableLambda(lambda _: TestSchema(query=self.schema).model_dump()), ): new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [(AgentAction(tool="", tool_input="", log="exception"), "exception")], - }, + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[(AgentAction(tool="", tool_input="", log="exception"), "exception")], + ), {}, ) - self.assertIsNone(new_state["intermediate_steps"]) + self.assertIsNone(new_state.intermediate_steps) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [ + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[ (AgentAction(tool="", tool_input="", log="exception"), "exception"), (AgentAction(tool="", tool_input="", log="exception"), "exception"), ], - }, + ), {}, ) - self.assertIsNone(new_state["intermediate_steps"]) + self.assertIsNone(new_state.intermediate_steps) def test_node_leaves_failover_after_second_unsuccessful_attempt(self): node = DummyGeneratorNode(self.team) @@ -295,29 +339,30 @@ def test_node_leaves_failover_after_second_unsuccessful_attempt(self): generator_model_mock.return_value = RunnableLambda(lambda _: json.dumps(schema)) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [ + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[ (AgentAction(tool="", tool_input="", log="exception"), "exception"), (AgentAction(tool="", tool_input="", log="exception"), "exception"), ], - }, + ), {}, ) - self.assertIsNone(new_state["intermediate_steps"]) - self.assertEqual(len(new_state["messages"]), 1) - self.assertIsInstance(new_state["messages"][0], FailureMessage) + self.assertIsNone(new_state.intermediate_steps) + self.assertEqual(len(new_state.messages), 1) + self.assertIsInstance(new_state.messages[0], FailureMessage) def test_agent_reconstructs_conversation_with_failover(self): action = AgentAction(tool="fix", tool_input="validation error", log="exception") node = DummyGeneratorNode(self.team) history = 
node._construct_messages( - { - "messages": [HumanMessage(content="Text")], - "plan": "randomplan", - "intermediate_steps": [(action, "uniqexception")], - }, - "uniqexception", + AssistantState( + messages=[HumanMessage(content="Text", id="0")], + plan="randomplan", + intermediate_steps=[(action, "uniqexception")], + start_id="0", + ), + validation_error_message="uniqexception", ) self.assertEqual(len(history), 4) self.assertEqual(history[0].type, "human") @@ -337,14 +382,14 @@ def test_agent_reconstructs_conversation_with_failover(self): def test_agent_reconstructs_conversation_with_failed_messages(self): node = DummyGeneratorNode(self.team) history = node._construct_messages( - { - "messages": [ + AssistantState( + messages=[ HumanMessage(content="Text"), FailureMessage(content="Error"), HumanMessage(content="Text"), ], - "plan": "randomplan", - }, + plan="randomplan", + ), ) self.assertEqual(len(history), 3) self.assertEqual(history[0].type, "human") @@ -360,19 +405,19 @@ def test_agent_reconstructs_conversation_with_failed_messages(self): def test_router(self): node = DummyGeneratorNode(self.team) - state = node.router({"messages": [], "intermediate_steps": None}) + state = node.router(AssistantState(messages=[], intermediate_steps=None)) self.assertEqual(state, "next") state = node.router( - {"messages": [], "intermediate_steps": [(AgentAction(tool="", tool_input="", log=""), None)]} + AssistantState(messages=[], intermediate_steps=[(AgentAction(tool="", tool_input="", log=""), None)]) ) self.assertEqual(state, "tools") -class TestSchemaGeneratorToolsNode(ClickhouseTestMixin, APIBaseTest): +class TestSchemaGeneratorToolsNode(BaseTest): def test_tools_node(self): node = SchemaGeneratorToolsNode(self.team) action = AgentAction(tool="fix", tool_input="validationerror", log="pydanticexception") - state = node.run({"messages": [], "intermediate_steps": [(action, None)]}, {}) - self.assertIsNotNone("validationerror", state["intermediate_steps"][0][1]) - self.assertIn("validationerror", state["intermediate_steps"][0][1]) - self.assertIn("pydanticexception", state["intermediate_steps"][0][1]) + state = node.run(AssistantState(messages=[], intermediate_steps=[(action, None)]), {}) + self.assertIsNotNone(state.intermediate_steps[0][1]) + self.assertIn("validationerror", state.intermediate_steps[0][1]) + self.assertIn("pydanticexception", state.intermediate_steps[0][1]) diff --git a/ee/hogai/summarizer/nodes.py b/ee/hogai/summarizer/nodes.py index 8d5e8a406f45e..513246bcc1238 100644 --- a/ee/hogai/summarizer/nodes.py +++ b/ee/hogai/summarizer/nodes.py @@ -1,15 +1,18 @@ import json from time import sleep +from uuid import uuid4 + from django.conf import settings +from django.core.serializers.json import DjangoJSONEncoder from langchain_core.prompts import ChatPromptTemplate from langchain_core.runnables import RunnableConfig from langchain_openai import ChatOpenAI -from django.core.serializers.json import DjangoJSONEncoder from rest_framework.exceptions import APIException from sentry_sdk import capture_exception -from ee.hogai.summarizer.prompts import SUMMARIZER_SYSTEM_PROMPT, SUMMARIZER_INSTRUCTION_PROMPT -from ee.hogai.utils import AssistantNode, AssistantNodeName, AssistantState +from ee.hogai.summarizer.prompts import SUMMARIZER_INSTRUCTION_PROMPT, SUMMARIZER_SYSTEM_PROMPT +from ee.hogai.utils.nodes import AssistantNode +from ee.hogai.utils.types import AssistantNodeName, AssistantState, PartialAssistantState from posthog.api.services.query import process_query_dict from
posthog.clickhouse.client.execute_async import get_query_status from posthog.errors import ExposedCHQueryError @@ -21,8 +24,8 @@ class SummarizerNode(AssistantNode): name = AssistantNodeName.SUMMARIZER - def run(self, state: AssistantState, config: RunnableConfig): - viz_message = state["messages"][-1] + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: + viz_message = state.messages[-1] if not isinstance(viz_message, VisualizationMessage): raise ValueError("Can only run summarization with a visualization message as the last one in the state") if viz_message.answer is None: @@ -58,10 +61,16 @@ def run(self, state: AssistantState, config: RunnableConfig): err_message = ", ".join(f"{key}: {value}" for key, value in err.detail.items()) elif isinstance(err.detail, list): err_message = ", ".join(map(str, err.detail)) - return {"messages": [FailureMessage(content=f"There was an error running this query: {err_message}")]} + return PartialAssistantState( + messages=[ + FailureMessage(content=f"There was an error running this query: {err_message}", id=str(uuid4())) + ] + ) except Exception as err: capture_exception(err) - return {"messages": [FailureMessage(content="There was an unknown error running this query.")]} + return PartialAssistantState( + messages=[FailureMessage(content="There was an unknown error running this query.", id=str(uuid4()))] + ) summarization_prompt = ChatPromptTemplate(self._construct_messages(state), template_format="mustache") @@ -76,7 +85,7 @@ def run(self, state: AssistantState, config: RunnableConfig): config, ) - return {"messages": [AssistantMessage(content=str(message.content), done=True)]} + return PartialAssistantState(messages=[AssistantMessage(content=str(message.content), id=str(uuid4()))]) @property def _model(self): @@ -85,7 +94,7 @@ def _model(self): def _construct_messages(self, state: AssistantState) -> list[tuple[str, str]]: conversation: list[tuple[str, str]] = [("system", SUMMARIZER_SYSTEM_PROMPT)] - for message in state.get("messages", []): + for message in state.messages: if isinstance(message, HumanMessage): conversation.append(("human", message.content)) elif isinstance(message, AssistantMessage): diff --git a/ee/hogai/summarizer/test/test_nodes.py b/ee/hogai/summarizer/test/test_nodes.py index b38d88275aa19..9c54517717b5f 100644 --- a/ee/hogai/summarizer/test/test_nodes.py +++ b/ee/hogai/summarizer/test/test_nodes.py @@ -1,23 +1,23 @@ from unittest.mock import patch from django.test import override_settings -from langchain_core.runnables import RunnableLambda from langchain_core.messages import ( HumanMessage as LangchainHumanMessage, ) +from langchain_core.runnables import RunnableLambda +from rest_framework.exceptions import ValidationError + from ee.hogai.summarizer.nodes import SummarizerNode from ee.hogai.summarizer.prompts import SUMMARIZER_INSTRUCTION_PROMPT, SUMMARIZER_SYSTEM_PROMPT +from ee.hogai.utils.types import AssistantState +from posthog.api.services.query import process_query_dict from posthog.schema import ( - AssistantMessage, AssistantTrendsEventsNode, AssistantTrendsQuery, - FailureMessage, HumanMessage, VisualizationMessage, ) -from rest_framework.exceptions import ValidationError from posthog.test.base import APIBaseTest, ClickhouseTestMixin -from posthog.api.services.query import process_query_dict @override_settings(IN_UNIT_TESTING=True) @@ -32,28 +32,26 @@ def test_node_runs(self, mock_process_query_dict): lambda _: LangchainHumanMessage(content="The results indicate foobar.") ) 
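# The stubbed _model above returns a canned completion, so the assertions below exercise prompt construction and query handling without a real LLM call.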
new_state = node.run( - { - "messages": [ - HumanMessage(content="Text"), + AssistantState( + messages=[ + HumanMessage(content="Text", id="test"), VisualizationMessage( answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), plan="Plan", - done=True, + id="test2", + initiator="test", ), ], - "plan": "Plan", - }, + plan="Plan", + start_id="test", + ), {}, ) mock_process_query_dict.assert_called_once() # Query processing started - self.assertEqual( - new_state, - { - "messages": [ - AssistantMessage(content="The results indicate foobar.", done=True), - ], - }, - ) + msg = new_state.messages[0] + self.assertEqual(msg.content, "The results indicate foobar.") + self.assertEqual(msg.type, "ai") + self.assertIsNotNone(msg.id) @patch( "ee.hogai.summarizer.nodes.process_query_dict", @@ -66,28 +64,26 @@ def test_node_handles_internal_error(self, mock_process_query_dict): lambda _: LangchainHumanMessage(content="The results indicate foobar.") ) new_state = node.run( - { - "messages": [ - HumanMessage(content="Text"), + AssistantState( + messages=[ + HumanMessage(content="Text", id="test"), VisualizationMessage( answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), plan="Plan", - done=True, + id="test2", + initiator="test", ), ], - "plan": "Plan", - }, + plan="Plan", + start_id="test", + ), {}, ) mock_process_query_dict.assert_called_once() # Query processing started - self.assertEqual( - new_state, - { - "messages": [ - FailureMessage(content="There was an unknown error running this query."), - ], - }, - ) + msg = new_state.messages[0] + self.assertEqual(msg.content, "There was an unknown error running this query.") + self.assertEqual(msg.type, "ai/failure") + self.assertIsNotNone(msg.id) @patch( "ee.hogai.summarizer.nodes.process_query_dict", @@ -102,33 +98,29 @@ def test_node_handles_exposed_error(self, mock_process_query_dict): lambda _: LangchainHumanMessage(content="The results indicate foobar.") ) new_state = node.run( - { - "messages": [ - HumanMessage(content="Text"), + AssistantState( + messages=[ + HumanMessage(content="Text", id="test"), VisualizationMessage( answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), plan="Plan", - done=True, + id="test2", + initiator="test", ), ], - "plan": "Plan", - }, + plan="Plan", + start_id="test", + ), {}, ) mock_process_query_dict.assert_called_once() # Query processing started + msg = new_state.messages[0] self.assertEqual( - new_state, - { - "messages": [ - FailureMessage( - content=( - "There was an error running this query: This query exceeds the capabilities of our picolator. " - "Try de-brolling its flim-flam." - ) - ), - ], - }, + msg.content, + "There was an error running this query: This query exceeds the capabilities of our picolator. 
Try de-brolling its flim-flam.", ) + self.assertEqual(msg.type, "ai/failure") + self.assertIsNotNone(msg.id) def test_node_requires_a_viz_message_in_state(self): node = SummarizerNode(self.team) @@ -137,12 +129,13 @@ def test_node_requires_a_viz_message_in_state(self): ValueError, "Can only run summarization with a visualization message as the last one in the state" ): node.run( - { - "messages": [ + AssistantState( + messages=[ HumanMessage(content="Text"), ], - "plan": "Plan", - }, + plan="Plan", + start_id="test", + ), {}, ) @@ -151,16 +144,13 @@ def test_node_requires_viz_message_in_state_to_have_query(self): with self.assertRaisesMessage(ValueError, "Did not found query in the visualization message"): node.run( - { - "messages": [ - VisualizationMessage( - answer=None, - plan="Plan", - done=True, - ), + AssistantState( + messages=[ + VisualizationMessage(answer=None, plan="Plan", id="test"), ], - "plan": "Plan", - }, + plan="Plan", + start_id="test", + ), {}, ) @@ -170,16 +160,18 @@ def test_agent_reconstructs_conversation(self): node = SummarizerNode(self.team) history = node._construct_messages( - { - "messages": [ - HumanMessage(content="What's the trends in signups?"), + AssistantState( + messages=[ + HumanMessage(content="What's the trends in signups?", id="test"), VisualizationMessage( answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), plan="Plan", - done=True, + id="test2", + initiator="test", ), - ] - } + ], + start_id="test", + ) ) self.assertEqual( history, diff --git a/ee/hogai/taxonomy_agent/nodes.py b/ee/hogai/taxonomy_agent/nodes.py index d499269a8ca95..bd26a7a93918f 100644 --- a/ee/hogai/taxonomy_agent/nodes.py +++ b/ee/hogai/taxonomy_agent/nodes.py @@ -1,4 +1,3 @@ -import itertools import xml.etree.ElementTree as ET from abc import ABC from functools import cached_property @@ -7,10 +6,16 @@ from git import Optional from langchain.agents.format_scratchpad import format_log_to_str from langchain_core.agents import AgentAction -from langchain_core.messages import AIMessage as LangchainAssistantMessage, BaseMessage, merge_message_runs +from langchain_core.messages import ( + AIMessage as LangchainAssistantMessage, + BaseMessage, + HumanMessage as LangchainHumanMessage, + merge_message_runs, +) from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate from langchain_core.runnables import RunnableConfig from langchain_openai import ChatOpenAI +from langgraph.errors import NodeInterrupt from pydantic import ValidationError from ee.hogai.taxonomy import CORE_FILTER_DEFINITIONS_BY_GROUP @@ -24,6 +29,7 @@ REACT_FOLLOW_UP_PROMPT, REACT_FORMAT_PROMPT, REACT_FORMAT_REMINDER_PROMPT, + REACT_HUMAN_IN_THE_LOOP_PROMPT, REACT_MALFORMED_JSON_PROMPT, REACT_MISSING_ACTION_CORRECTION_PROMPT, REACT_MISSING_ACTION_PROMPT, @@ -33,13 +39,18 @@ REACT_USER_PROMPT, ) from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentTool, TaxonomyAgentToolkit -from ee.hogai.utils import AssistantNode, AssistantState, filter_visualization_conversation, remove_line_breaks +from ee.hogai.utils.helpers import filter_messages, remove_line_breaks, slice_messages_to_conversation_start +from ee.hogai.utils.nodes import AssistantNode +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.hogql_queries.ai.team_taxonomy_query_runner import TeamTaxonomyQueryRunner from posthog.hogql_queries.query_runner import ExecutionMode from posthog.models.group_type_mapping import GroupTypeMapping from posthog.schema import ( + AssistantMessage, 
CachedTeamTaxonomyQueryResponse, + HumanMessage, TeamTaxonomyQuery, + VisualizationMessage, ) @@ -50,8 +61,8 @@ def _run_with_prompt_and_toolkit( prompt: ChatPromptTemplate, toolkit: TaxonomyAgentToolkit, config: Optional[RunnableConfig] = None, - ) -> AssistantState: - intermediate_steps = state.get("intermediate_steps") or [] + ) -> PartialAssistantState: + intermediate_steps = state.intermediate_steps or [] conversation = ( prompt + ChatPromptTemplate.from_messages( @@ -79,6 +90,7 @@ def _run_with_prompt_and_toolkit( "react_format": self._get_react_format_prompt(toolkit), "react_format_reminder": REACT_FORMAT_REMINDER_PROMPT, "react_property_filters": self._get_react_property_filters_prompt(), + "react_human_in_the_loop": REACT_HUMAN_IN_THE_LOOP_PROMPT, "product_description": self._team.project.product_description, "groups": self._team_group_types, "events": self._events_prompt, @@ -108,12 +120,12 @@ def _run_with_prompt_and_toolkit( e.llm_output, ) - return { - "intermediate_steps": [*intermediate_steps, (result, None)], - } + return PartialAssistantState( + intermediate_steps=[*intermediate_steps, (result, None)], + ) def router(self, state: AssistantState): - if state.get("intermediate_steps", []): + if state.intermediate_steps: return "tools" raise ValueError("Invalid state.") @@ -179,7 +191,7 @@ def _events_prompt(self) -> str: @cached_property def _team_group_types(self) -> list[str]: return list( - GroupTypeMapping.objects.filter(team=self._team) + GroupTypeMapping.objects.filter(project_id=self._team.project_id) .order_by("group_type_index") .values_list("group_type", flat=True) ) @@ -188,33 +200,34 @@ def _construct_messages(self, state: AssistantState) -> list[BaseMessage]: """ Reconstruct the conversation for the agent. On this step we only care about previously asked questions and generated plans. All other messages are filtered out. """ - human_messages, visualization_messages = filter_visualization_conversation(state.get("messages", [])) - - if not human_messages: - return [] - + start_id = state.start_id + filtered_messages = filter_messages(slice_messages_to_conversation_start(state.messages, start_id)) conversation = [] - for idx, messages in enumerate(itertools.zip_longest(human_messages, visualization_messages)): - human_message, viz_message = messages - - if human_message: + for idx, message in enumerate(filtered_messages): + if isinstance(message, HumanMessage): + # Add initial instructions. if idx == 0: conversation.append( HumanMessagePromptTemplate.from_template(REACT_USER_PROMPT, template_format="mustache").format( - question=human_message.content + question=message.content ) ) - else: + # Add follow-up instructions only for the human message that initiated a generation. + elif message.id == start_id: conversation.append( HumanMessagePromptTemplate.from_template( REACT_FOLLOW_UP_PROMPT, template_format="mustache", - ).format(feedback=human_message.content) + ).format(feedback=message.content) ) - - if viz_message: - conversation.append(LangchainAssistantMessage(content=viz_message.plan or "")) + # Everything else leave as is. 
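# For instance, the user's answers to earlier ask_user_for_help questions pass through as plain human messages.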
+ else: + conversation.append(LangchainHumanMessage(content=message.content)) + elif isinstance(message, VisualizationMessage): + conversation.append(LangchainAssistantMessage(content=message.plan or "")) + elif isinstance(message, AssistantMessage): + conversation.append(LangchainAssistantMessage(content=message.content)) return conversation @@ -230,26 +243,37 @@ def _get_agent_scratchpad(self, scratchpad: list[tuple[AgentAction, str | None]] class TaxonomyAgentPlannerToolsNode(AssistantNode, ABC): def _run_with_toolkit( self, state: AssistantState, toolkit: TaxonomyAgentToolkit, config: Optional[RunnableConfig] = None - ) -> AssistantState: - intermediate_steps = state.get("intermediate_steps") or [] - action, _ = intermediate_steps[-1] + ) -> PartialAssistantState: + intermediate_steps = state.intermediate_steps or [] + action, observation = intermediate_steps[-1] try: input = TaxonomyAgentTool.model_validate({"name": action.tool, "arguments": action.tool_input}).root except ValidationError as e: - observation = ( + observation = str( ChatPromptTemplate.from_template(REACT_PYDANTIC_VALIDATION_EXCEPTION_PROMPT, template_format="mustache") .format_messages(exception=e.errors(include_url=False))[0] .content ) - return {"intermediate_steps": [*intermediate_steps[:-1], (action, str(observation))]} + return PartialAssistantState( + intermediate_steps=[*intermediate_steps[:-1], (action, str(observation))], + ) # The plan has been found. Move to the generation. if input.name == "final_answer": - return { - "plan": input.arguments, - "intermediate_steps": None, - } + return PartialAssistantState( + plan=input.arguments, + intermediate_steps=[], + ) + if input.name == "ask_user_for_help": + # The agent has requested help, so we interrupt the graph. + if not observation: + raise NodeInterrupt(input.arguments) + + # Feedback was provided. + return PartialAssistantState( + intermediate_steps=[*intermediate_steps[:-1], (action, observation)], + ) output = "" if input.name == "retrieve_event_properties": @@ -263,9 +287,11 @@ def _run_with_toolkit( else: output = toolkit.handle_incorrect_response(input.arguments) - return {"intermediate_steps": [*intermediate_steps[:-1], (action, output)]} + return PartialAssistantState( + intermediate_steps=[*intermediate_steps[:-1], (action, output)], + ) def router(self, state: AssistantState): - if state.get("plan") is not None: + if state.plan is not None: return "plan_found" return "continue" diff --git a/ee/hogai/taxonomy_agent/prompts.py b/ee/hogai/taxonomy_agent/prompts.py index f63a7dfe15455..c9d409bcdf103 100644 --- a/ee/hogai/taxonomy_agent/prompts.py +++ b/ee/hogai/taxonomy_agent/prompts.py @@ -81,6 +81,15 @@ """.strip() +REACT_HUMAN_IN_THE_LOOP_PROMPT = """ + +Ask the user for clarification if: +- The user's question is ambiguous. +- You can't find matching events or properties. +- You're unable to build a plan that effectively answers the user's question. + +""".strip() + REACT_FORMAT_REMINDER_PROMPT = """ Begin! Reminder that you must ALWAYS respond with a valid JSON blob of a single action. Use tools if necessary. Respond directly if appropriate. Format is Action:```$JSON_BLOB``` then Observation. 
""".strip() diff --git a/ee/hogai/taxonomy_agent/test/test_nodes.py b/ee/hogai/taxonomy_agent/test/test_nodes.py index 40127c19370b6..cb25331664331 100644 --- a/ee/hogai/taxonomy_agent/test/test_nodes.py +++ b/ee/hogai/taxonomy_agent/test/test_nodes.py @@ -11,7 +11,7 @@ TaxonomyAgentPlannerToolsNode, ) from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentToolkit, ToolkitTool -from ee.hogai.utils import AssistantState +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.models import GroupTypeMapping from posthog.schema import ( AssistantMessage, @@ -37,7 +37,7 @@ def setUp(self): def _get_node(self): class Node(TaxonomyAgentPlannerNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt: ChatPromptTemplate = ChatPromptTemplate.from_messages([("user", "test")]) toolkit = DummyToolkit(self._team) return super()._run_with_prompt_and_toolkit(state, prompt, toolkit, config=config) @@ -46,19 +46,20 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: def test_agent_reconstructs_conversation(self): node = self._get_node() - history = node._construct_messages({"messages": [HumanMessage(content="Text")]}) + history = node._construct_messages(AssistantState(messages=[HumanMessage(content="Text")])) self.assertEqual(len(history), 1) self.assertEqual(history[0].type, "human") self.assertIn("Text", history[0].content) self.assertNotIn(f"{{question}}", history[0].content) history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - VisualizationMessage(answer=self.schema, plan="randomplan"), - ] - } + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"), + ], + start_id="1", + ) ) self.assertEqual(len(history), 2) self.assertEqual(history[0].type, "human") @@ -68,13 +69,14 @@ def test_agent_reconstructs_conversation(self): self.assertEqual(history[1].content, "randomplan") history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - VisualizationMessage(answer=self.schema, plan="randomplan"), - HumanMessage(content="Text"), - ] - } + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"), + HumanMessage(content="Text", id="2"), + ], + start_id="2", + ) ) self.assertEqual(len(history), 3) self.assertEqual(history[0].type, "human") @@ -89,12 +91,14 @@ def test_agent_reconstructs_conversation(self): def test_agent_reconstructs_conversation_and_omits_unknown_messages(self): node = self._get_node() history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - AssistantMessage(content="test"), - ] - } + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + RouterMessage(content="trends", id="1"), + AssistantMessage(content="test", id="2"), + ], + start_id="0", + ) ) self.assertEqual(len(history), 1) self.assertEqual(history[0].type, "human") @@ -104,13 +108,13 @@ def test_agent_reconstructs_conversation_and_omits_unknown_messages(self): def test_agent_reconstructs_conversation_with_failures(self): node = self._get_node() history = node._construct_messages( - { - "messages": [ + AssistantState( + messages=[ HumanMessage(content="Text"), FailureMessage(content="Error"), HumanMessage(content="Text"), - ] - } + ], + ) ) 
self.assertEqual(len(history), 1) self.assertEqual(history[0].type, "human") @@ -120,32 +124,60 @@ def test_agent_reconstructs_conversation_with_failures(self): def test_agent_reconstructs_typical_conversation(self): node = self._get_node() history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Question 1"), - RouterMessage(content="trends"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"), - AssistantMessage(content="Summary 1"), - HumanMessage(content="Question 2"), - RouterMessage(content="funnel"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"), - AssistantMessage(content="Summary 2"), - HumanMessage(content="Question 3"), - RouterMessage(content="funnel"), - ] - } + AssistantState( + messages=[ + HumanMessage(content="Question 1", id="0"), + RouterMessage(content="trends", id="1"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1", id="2", initiator="0"), + AssistantMessage(content="Summary 1", id="3"), + HumanMessage(content="Question 2", id="4"), + RouterMessage(content="funnel", id="5"), + AssistantMessage(content="Loop 1", id="6"), + HumanMessage(content="Loop Answer 1", id="7"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2", id="8", initiator="4"), + AssistantMessage(content="Summary 2", id="9"), + HumanMessage(content="Question 3", id="10"), + RouterMessage(content="funnel", id="11"), + ], + start_id="10", + ) ) - self.assertEqual(len(history), 5) + self.assertEqual(len(history), 9) self.assertEqual(history[0].type, "human") self.assertIn("Question 1", history[0].content) self.assertEqual(history[1].type, "ai") self.assertEqual(history[1].content, "Plan 1") - self.assertEqual(history[2].type, "human") - self.assertIn("Question 2", history[2].content) - self.assertEqual(history[3].type, "ai") - self.assertEqual(history[3].content, "Plan 2") - self.assertEqual(history[4].type, "human") - self.assertIn("Question 3", history[4].content) + self.assertEqual(history[2].type, "ai") + self.assertEqual(history[2].content, "Summary 1") + self.assertEqual(history[3].type, "human") + self.assertIn("Question 2", history[3].content) + self.assertEqual(history[4].type, "ai") + self.assertEqual(history[4].content, "Loop 1") + self.assertEqual(history[5].type, "human") + self.assertEqual(history[5].content, "Loop Answer 1") + self.assertEqual(history[6].content, "Plan 2") + self.assertEqual(history[6].type, "ai") + self.assertEqual(history[7].type, "ai") + self.assertEqual(history[7].content, "Summary 2") + self.assertEqual(history[8].type, "human") + self.assertIn("Question 3", history[8].content) + + def test_agent_reconstructs_conversation_without_messages_after_parent(self): + node = self._get_node() + history = node._construct_messages( + AssistantState( + messages=[ + HumanMessage(content="Question 1", id="0"), + RouterMessage(content="trends", id="1"), + AssistantMessage(content="Loop 1", id="2"), + HumanMessage(content="Loop Answer 1", id="3"), + ], + start_id="0", + ) + ) + self.assertEqual(len(history), 1) + self.assertEqual(history[0].type, "human") + self.assertIn("Question 1", history[0].content) def test_agent_filters_out_low_count_events(self): _create_person(distinct_ids=["test"], team=self.team) @@ -182,9 +214,9 @@ def test_agent_handles_output_without_action_block(self): return_value=RunnableLambda(lambda _: LangchainAIMessage(content="I don't want to output an action.")), ): node = self._get_node() - state_update = node.run({"messages": 
[HumanMessage(content="Question")]}, {}) - self.assertEqual(len(state_update["intermediate_steps"]), 1) - action, obs = state_update["intermediate_steps"][0] + state_update = node.run(AssistantState(messages=[HumanMessage(content="Question")]), {}) + self.assertEqual(len(state_update.intermediate_steps), 1) + action, obs = state_update.intermediate_steps[0] self.assertIsNone(obs) self.assertIn("I don't want to output an action.", action.log) self.assertIn("Action:", action.log) @@ -196,9 +228,9 @@ def test_agent_handles_output_with_malformed_json(self): return_value=RunnableLambda(lambda _: LangchainAIMessage(content="Thought.\nAction: abc")), ): node = self._get_node() - state_update = node.run({"messages": [HumanMessage(content="Question")]}, {}) - self.assertEqual(len(state_update["intermediate_steps"]), 1) - action, obs = state_update["intermediate_steps"][0] + state_update = node.run(AssistantState(messages=[HumanMessage(content="Question")]), {}) + self.assertEqual(len(state_update.intermediate_steps), 1) + action, obs = state_update.intermediate_steps[0] self.assertIsNone(obs) self.assertIn("Thought.\nAction: abc", action.log) self.assertIn("action", action.tool_input) @@ -232,34 +264,34 @@ def test_property_filters_prompt(self): class TestTaxonomyAgentPlannerToolsNode(ClickhouseTestMixin, APIBaseTest): def _get_node(self): class Node(TaxonomyAgentPlannerToolsNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = DummyToolkit(self._team) return super()._run_with_toolkit(state, toolkit, config=config) return Node(self.team) def test_node_handles_action_name_validation_error(self): - state = { - "intermediate_steps": [(AgentAction(tool="does not exist", tool_input="input", log="log"), "test")], - "messages": [], - } + state = AssistantState( + intermediate_steps=[(AgentAction(tool="does not exist", tool_input="input", log="log"), "test")], + messages=[], + ) node = self._get_node() state_update = node.run(state, {}) - self.assertEqual(len(state_update["intermediate_steps"]), 1) - action, observation = state_update["intermediate_steps"][0] + self.assertEqual(len(state_update.intermediate_steps), 1) + action, observation = state_update.intermediate_steps[0] self.assertIsNotNone(observation) self.assertIn("", observation) def test_node_handles_action_input_validation_error(self): - state = { - "intermediate_steps": [ + state = AssistantState( + intermediate_steps=[ (AgentAction(tool="retrieve_entity_property_values", tool_input="input", log="log"), "test") ], - "messages": [], - } + messages=[], + ) node = self._get_node() state_update = node.run(state, {}) - self.assertEqual(len(state_update["intermediate_steps"]), 1) - action, observation = state_update["intermediate_steps"][0] + self.assertEqual(len(state_update.intermediate_steps), 1) + action, observation = state_update.intermediate_steps[0] self.assertIsNotNone(observation) self.assertIn("", observation) diff --git a/ee/hogai/taxonomy_agent/toolkit.py b/ee/hogai/taxonomy_agent/toolkit.py index 2af39253b9e68..d05b6f0c933ef 100644 --- a/ee/hogai/taxonomy_agent/toolkit.py +++ b/ee/hogai/taxonomy_agent/toolkit.py @@ -55,6 +55,7 @@ class SingleArgumentTaxonomyAgentTool(BaseModel): "retrieve_event_properties", "final_answer", "handle_incorrect_response", + "ask_user_for_help", ] arguments: str @@ -145,6 +146,16 @@ def _default_tools(self) -> list[ToolkitTool]: property_name: The name of the property that 
you want to retrieve values for. """, }, + { + "name": "ask_user_for_help", + "signature": "(question: str)", + "description": """ + Use this tool to ask a question to the user. Your question must be concise and clear. + + Args: + question: The question you want to ask. + """, + }, ] def render_text_description(self) -> str: @@ -169,7 +180,7 @@ def render_text_description(self) -> str: @property def _groups(self): - return GroupTypeMapping.objects.filter(team=self._team).order_by("group_type_index") + return GroupTypeMapping.objects.filter(project_id=self._team.project_id).order_by("group_type_index") @cached_property def _entity_names(self) -> list[str]: diff --git a/ee/hogai/test/test_assistant.py b/ee/hogai/test/test_assistant.py index b6cd65bd4ec12..6d0bb8807d629 100644 --- a/ee/hogai/test/test_assistant.py +++ b/ee/hogai/test/test_assistant.py @@ -1,31 +1,63 @@ import json -from typing import Any +from typing import Any, Optional, cast from unittest.mock import patch -from uuid import uuid4 -from ee.hogai.utils import Conversation -from posthog.schema import AssistantMessage, HumanMessage -from ..assistant import Assistant + +from langchain_core import messages +from langchain_core.agents import AgentAction +from langchain_core.runnables import RunnableConfig, RunnableLambda from langgraph.graph.state import CompiledStateGraph +from langgraph.types import StateSnapshot +from pydantic import BaseModel + +from ee.models.assistant import Conversation +from posthog.schema import AssistantMessage, HumanMessage, ReasoningMessage +from posthog.test.base import NonAtomicBaseTest + +from ..assistant import Assistant from ..graph import AssistantGraph, AssistantNodeName -from posthog.test.base import BaseTest -from langchain_core.agents import AgentAction -class TestAssistant(BaseTest): - def _run_assistant_graph(self, test_graph: CompiledStateGraph) -> list[tuple[str, Any]]: +class TestAssistant(NonAtomicBaseTest): + CLASS_DATA_LEVEL_SETUP = False + + def setUp(self): + super().setUp() + self.conversation = Conversation.objects.create(team=self.team, user=self.user) + + def _run_assistant_graph( + self, + test_graph: Optional[CompiledStateGraph] = None, + message: Optional[str] = "Hello", + conversation: Optional[Conversation] = None, + is_new_conversation: bool = False, + ) -> list[tuple[str, Any]]: # Create assistant instance with our test graph assistant = Assistant( - team=self.team, - conversation=Conversation(messages=[HumanMessage(content="Hello")], session_id=str(uuid4())), + self.team, + conversation or self.conversation, + HumanMessage(content=message), + self.user, + is_new_conversation=is_new_conversation, ) - assistant._graph = test_graph + if test_graph: + assistant._graph = test_graph # Capture and parse output of assistant.stream() output: list[tuple[str, Any]] = [] for message in assistant.stream(): - event_line, data_line, *_ = message.split("\n") + event_line, data_line, *_ = cast(str, message).split("\n") output.append((event_line.removeprefix("event: "), json.loads(data_line.removeprefix("data: ")))) return output + def assertConversationEqual(self, output: list[tuple[str, Any]], expected_output: list[tuple[str, Any]]): + for i, ((output_msg_type, output_msg), (expected_msg_type, expected_msg)) in enumerate( + zip(output, expected_output) + ): + self.assertEqual(output_msg_type, expected_msg_type, f"Message type mismatch at index {i}") + msg_dict = ( + expected_msg.model_dump(exclude_none=True) if isinstance(expected_msg, BaseModel) else expected_msg + ) + 
self.assertDictContainsSubset(msg_dict, output_msg, f"Message content mismatch at index {i}") + @patch( "ee.hogai.trends.nodes.TrendsPlannerNode.run", return_value={"intermediate_steps": [(AgentAction(tool="final_answer", tool_input="", log=""), None)]}, @@ -39,19 +71,22 @@ def test_reasoning_messages_added(self, _mock_summarizer_run, _mock_funnel_plann .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER) .add_trends_planner(AssistantNodeName.SUMMARIZER) .add_summarizer(AssistantNodeName.END) - .compile() + .compile(), + conversation=self.conversation, ) # Assert that ReasoningMessages are added - assert output == [ - ("status", {"type": "ack"}), + expected_output = [ + ( + "message", + HumanMessage(content="Hello").model_dump(exclude_none=True), + ), ( "message", { "type": "ai/reasoning", "content": "Picking relevant events and properties", # For TrendsPlannerNode "substeps": [], - "done": True, }, ), ( @@ -60,7 +95,6 @@ def test_reasoning_messages_added(self, _mock_summarizer_run, _mock_funnel_plann "type": "ai/reasoning", "content": "Picking relevant events and properties", # For TrendsPlannerToolsNode "substeps": [], - "done": True, }, ), ( @@ -71,6 +105,7 @@ def test_reasoning_messages_added(self, _mock_summarizer_run, _mock_funnel_plann }, ), ] + self.assertConversationEqual(output, expected_output) @patch( "ee.hogai.trends.nodes.TrendsPlannerNode.run", @@ -105,19 +140,22 @@ def test_reasoning_messages_with_substeps_added(self, _mock_funnel_planner_run): AssistantGraph(self.team) .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER) .add_trends_planner(AssistantNodeName.END) - .compile() + .compile(), + conversation=self.conversation, ) # Assert that ReasoningMessages are added - assert output == [ - ("status", {"type": "ack"}), + expected_output = [ + ( + "message", + HumanMessage(content="Hello").model_dump(exclude_none=True), + ), ( "message", { "type": "ai/reasoning", "content": "Picking relevant events and properties", # For TrendsPlannerNode "substeps": [], - "done": True, }, ), ( @@ -131,7 +169,153 @@ def test_reasoning_messages_with_substeps_added(self, _mock_funnel_planner_run): "Analyzing `currency` event's property `purchase`", "Analyzing person property `country_of_birth`", ], - "done": True, }, ), ] + self.assertConversationEqual(output, expected_output) + + def _test_human_in_the_loop(self, graph: CompiledStateGraph): + with patch("ee.hogai.taxonomy_agent.nodes.TaxonomyAgentPlannerNode._model") as mock: + config: RunnableConfig = { + "configurable": { + "thread_id": self.conversation.id, + } + } + + # Interrupt the graph + message = """ + Thought: Let's ask for help. + Action: + ``` + { + "action": "ask_user_for_help", + "action_input": "Need help with this query" + } + ``` + """ + mock.return_value = RunnableLambda(lambda _: messages.AIMessage(content=message)) + output = self._run_assistant_graph(graph, conversation=self.conversation) + expected_output = [ + ("message", HumanMessage(content="Hello")), + ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])), + ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])), + ("message", AssistantMessage(content="Need help with this query")), + ] + self.assertConversationEqual(output, expected_output) + snapshot: StateSnapshot = graph.get_state(config) + self.assertTrue(snapshot.next) + self.assertIn("intermediate_steps", snapshot.values) + + # Resume the graph from the interruption point. 
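+            # A follow-up human message on the same conversation resumes the graph
+            # from this checkpoint; its content becomes the observation recorded for
+            # the pending `ask_user_for_help` action (see
+            # test_intermediate_steps_are_updated_after_feedback below).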
+ message = """ + Thought: Finish. + Action: + ``` + { + "action": "final_answer", + "action_input": "Plan" + } + ``` + """ + mock.return_value = RunnableLambda(lambda _: messages.AIMessage(content=message)) + output = self._run_assistant_graph(graph, conversation=self.conversation, message="It's straightforward") + expected_output = [ + ("message", HumanMessage(content="It's straightforward")), + ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])), + ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])), + ] + self.assertConversationEqual(output, expected_output) + snapshot: StateSnapshot = graph.get_state(config) + self.assertFalse(snapshot.next) + self.assertEqual(snapshot.values.get("intermediate_steps"), []) + self.assertEqual(snapshot.values["plan"], "Plan") + + def test_trends_interrupt_when_asking_for_help(self): + graph = ( + AssistantGraph(self.team) + .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER) + .add_trends_planner(AssistantNodeName.END) + .compile() + ) + self._test_human_in_the_loop(graph) + + def test_funnels_interrupt_when_asking_for_help(self): + graph = ( + AssistantGraph(self.team) + .add_edge(AssistantNodeName.START, AssistantNodeName.FUNNEL_PLANNER) + .add_funnel_planner(AssistantNodeName.END) + .compile() + ) + self._test_human_in_the_loop(graph) + + def test_intermediate_steps_are_updated_after_feedback(self): + with patch("ee.hogai.taxonomy_agent.nodes.TaxonomyAgentPlannerNode._model") as mock: + graph = ( + AssistantGraph(self.team) + .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER) + .add_trends_planner(AssistantNodeName.END) + .compile() + ) + config: RunnableConfig = { + "configurable": { + "thread_id": self.conversation.id, + } + } + + # Interrupt the graph + message = """ + Thought: Let's ask for help. 
+ Action: + ``` + { + "action": "ask_user_for_help", + "action_input": "Need help with this query" + } + ``` + """ + mock.return_value = RunnableLambda(lambda _: messages.AIMessage(content=message)) + self._run_assistant_graph(graph, conversation=self.conversation) + snapshot: StateSnapshot = graph.get_state(config) + self.assertTrue(snapshot.next) + self.assertIn("intermediate_steps", snapshot.values) + self.assertEqual(len(snapshot.values["intermediate_steps"]), 1) + action, observation = snapshot.values["intermediate_steps"][0] + self.assertEqual(action.tool, "ask_user_for_help") + self.assertIsNone(observation) + + self._run_assistant_graph(graph, conversation=self.conversation, message="It's straightforward") + snapshot: StateSnapshot = graph.get_state(config) + self.assertTrue(snapshot.next) + self.assertIn("intermediate_steps", snapshot.values) + self.assertEqual(len(snapshot.values["intermediate_steps"]), 2) + action, observation = snapshot.values["intermediate_steps"][0] + self.assertEqual(action.tool, "ask_user_for_help") + self.assertEqual(observation, "It's straightforward") + action, observation = snapshot.values["intermediate_steps"][1] + self.assertEqual(action.tool, "ask_user_for_help") + self.assertIsNone(observation) + + def test_new_conversation_handles_serialized_conversation(self): + graph = ( + AssistantGraph(self.team) + .add_node(AssistantNodeName.ROUTER, lambda _: {"messages": [AssistantMessage(content="Hello")]}) + .add_edge(AssistantNodeName.START, AssistantNodeName.ROUTER) + .add_edge(AssistantNodeName.ROUTER, AssistantNodeName.END) + .compile() + ) + output = self._run_assistant_graph( + graph, + conversation=self.conversation, + is_new_conversation=True, + ) + expected_output = [ + ("conversation", {"id": str(self.conversation.id)}), + ] + self.assertConversationEqual(output[:1], expected_output) + + output = self._run_assistant_graph( + graph, + conversation=self.conversation, + is_new_conversation=False, + ) + self.assertNotEqual(output[0][0], "conversation") diff --git a/ee/hogai/test/test_utils.py b/ee/hogai/test/test_utils.py index 42e54d058c556..8c32471c88508 100644 --- a/ee/hogai/test/test_utils.py +++ b/ee/hogai/test/test_utils.py @@ -1,6 +1,4 @@ -from langchain_core.messages import HumanMessage as LangchainHumanMessage - -from ee.hogai.utils import filter_visualization_conversation, merge_human_messages +from ee.hogai.utils.helpers import filter_messages from posthog.schema import ( AssistantMessage, AssistantTrendsQuery, @@ -13,40 +11,29 @@ class TestTrendsUtils(BaseTest): - def test_merge_human_messages(self): - res = merge_human_messages( - [ - LangchainHumanMessage(content="Text"), - LangchainHumanMessage(content="Text"), - LangchainHumanMessage(content="Te"), - LangchainHumanMessage(content="xt"), - ] - ) - self.assertEqual(len(res), 1) - self.assertEqual(res, [LangchainHumanMessage(content="Text\nTe\nxt")]) - - def test_filter_trends_conversation(self): - human_messages, visualization_messages = filter_visualization_conversation( + def test_filters_and_merges_human_messages(self): + conversation = [ + HumanMessage(content="Text"), + FailureMessage(content="Error"), + HumanMessage(content="Text"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="plan"), + HumanMessage(content="Text2"), + VisualizationMessage(answer=None, plan="plan"), + ] + messages = filter_messages(conversation) + self.assertEqual(len(messages), 4) + self.assertEqual( [ - HumanMessage(content="Text"), - FailureMessage(content="Error"), - 
HumanMessage(content="Text"), + HumanMessage(content="Text\nText"), VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="plan"), HumanMessage(content="Text2"), VisualizationMessage(answer=None, plan="plan"), - ] - ) - self.assertEqual(len(human_messages), 2) - self.assertEqual(len(visualization_messages), 1) - self.assertEqual( - human_messages, [LangchainHumanMessage(content="Text"), LangchainHumanMessage(content="Text2")] - ) - self.assertEqual( - visualization_messages, [VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="plan")] + ], + messages, ) def test_filters_typical_conversation(self): - human_messages, visualization_messages = filter_visualization_conversation( + messages = filter_messages( [ HumanMessage(content="Question 1"), RouterMessage(content="trends"), @@ -58,15 +45,30 @@ def test_filters_typical_conversation(self): AssistantMessage(content="Summary 2"), ] ) - self.assertEqual(len(human_messages), 2) - self.assertEqual(len(visualization_messages), 2) - self.assertEqual( - human_messages, [LangchainHumanMessage(content="Question 1"), LangchainHumanMessage(content="Question 2")] - ) + self.assertEqual(len(messages), 6) self.assertEqual( - visualization_messages, + messages, [ + HumanMessage(content="Question 1"), VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"), + AssistantMessage(content="Summary 1"), + HumanMessage(content="Question 2"), VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"), + AssistantMessage(content="Summary 2"), + ], + ) + + def test_joins_human_messages(self): + messages = filter_messages( + [ + HumanMessage(content="Question 1"), + HumanMessage(content="Question 2"), + ] + ) + self.assertEqual(len(messages), 1) + self.assertEqual( + messages, + [ + HumanMessage(content="Question 1\nQuestion 2"), ], ) diff --git a/ee/hogai/trends/nodes.py b/ee/hogai/trends/nodes.py index b6b33cf6d8354..e430b4036e043 100644 --- a/ee/hogai/trends/nodes.py +++ b/ee/hogai/trends/nodes.py @@ -6,12 +6,12 @@ from ee.hogai.taxonomy_agent.nodes import TaxonomyAgentPlannerNode, TaxonomyAgentPlannerToolsNode from ee.hogai.trends.prompts import REACT_SYSTEM_PROMPT, TRENDS_SYSTEM_PROMPT from ee.hogai.trends.toolkit import TRENDS_SCHEMA, TrendsTaxonomyAgentToolkit -from ee.hogai.utils import AssistantState +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import AssistantTrendsQuery class TrendsPlannerNode(TaxonomyAgentPlannerNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = TrendsTaxonomyAgentToolkit(self._team) prompt = ChatPromptTemplate.from_messages( [ @@ -23,7 +23,7 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: class TrendsPlannerToolsNode(TaxonomyAgentPlannerToolsNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = TrendsTaxonomyAgentToolkit(self._team) return super()._run_with_toolkit(state, toolkit, config=config) @@ -36,7 +36,7 @@ class TrendsGeneratorNode(SchemaGeneratorNode[AssistantTrendsQuery]): OUTPUT_MODEL = TrendsSchemaGeneratorOutput OUTPUT_SCHEMA = TRENDS_SCHEMA - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt = 
ChatPromptTemplate.from_messages( [ ("system", TRENDS_SYSTEM_PROMPT), diff --git a/ee/hogai/trends/prompts.py b/ee/hogai/trends/prompts.py index 2ac9496480cdd..dcc1daeaa5a00 100644 --- a/ee/hogai/trends/prompts.py +++ b/ee/hogai/trends/prompts.py @@ -12,6 +12,8 @@ {{react_format}} +{{react_human_in_the_loop}} + Below you will find information on how to correctly discover the taxonomy of the user's data. diff --git a/ee/hogai/trends/test/test_nodes.py b/ee/hogai/trends/test/test_nodes.py index 44973b3195377..369ce8bc9b292 100644 --- a/ee/hogai/trends/test/test_nodes.py +++ b/ee/hogai/trends/test/test_nodes.py @@ -4,6 +4,7 @@ from langchain_core.runnables import RunnableLambda from ee.hogai.trends.nodes import TrendsGeneratorNode, TrendsSchemaGeneratorOutput +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import ( AssistantTrendsQuery, HumanMessage, @@ -17,6 +18,7 @@ class TestTrendsGeneratorNode(ClickhouseTestMixin, APIBaseTest): maxDiff = None def setUp(self): + super().setUp() self.schema = AssistantTrendsQuery(series=[]) def test_node_runs(self): @@ -26,16 +28,16 @@ def test_node_runs(self): lambda _: TrendsSchemaGeneratorOutput(query=self.schema).model_dump() ) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "plan": "Plan", - }, + AssistantState( + messages=[HumanMessage(content="Text")], + plan="Plan", + ), {}, ) self.assertEqual( new_state, - { - "messages": [VisualizationMessage(answer=self.schema, plan="Plan", done=True)], - "intermediate_steps": None, - }, + PartialAssistantState( + messages=[VisualizationMessage(answer=self.schema, plan="Plan", id=new_state.messages[0].id)], + intermediate_steps=None, + ), ) diff --git a/ee/hogai/trends/toolkit.py b/ee/hogai/trends/toolkit.py index d69830d2f2cd6..5fd7a35f0f18a 100644 --- a/ee/hogai/trends/toolkit.py +++ b/ee/hogai/trends/toolkit.py @@ -1,8 +1,6 @@ from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentToolkit, ToolkitTool -from ee.hogai.utils import dereference_schema -from posthog.schema import ( - AssistantTrendsQuery, -) +from ee.hogai.utils.helpers import dereference_schema +from posthog.schema import AssistantTrendsQuery class TrendsTaxonomyAgentToolkit(TaxonomyAgentToolkit): diff --git a/ee/hogai/utils.py b/ee/hogai/utils.py deleted file mode 100644 index 278e2f4076495..0000000000000 --- a/ee/hogai/utils.py +++ /dev/null @@ -1,117 +0,0 @@ -import operator -from abc import ABC, abstractmethod -from collections.abc import Sequence -from enum import StrEnum -from typing import Annotated, Optional, TypedDict, Union - -from jsonref import replace_refs -from langchain_core.agents import AgentAction -from langchain_core.messages import ( - HumanMessage as LangchainHumanMessage, - merge_message_runs, -) -from langchain_core.runnables import RunnableConfig -from langgraph.graph import END, START -from pydantic import BaseModel, Field - -from posthog.models.team.team import Team -from posthog.schema import ( - AssistantMessage, - FailureMessage, - HumanMessage, - ReasoningMessage, - RootAssistantMessage, - RouterMessage, - VisualizationMessage, -) - -AssistantMessageUnion = Union[ - AssistantMessage, HumanMessage, VisualizationMessage, FailureMessage, RouterMessage, ReasoningMessage -] - - -class Conversation(BaseModel): - messages: list[RootAssistantMessage] = Field(..., min_length=1, max_length=20) - session_id: str - - -class AssistantState(TypedDict, total=False): - messages: Annotated[Sequence[AssistantMessageUnion], operator.add] - intermediate_steps: 
Optional[list[tuple[AgentAction, Optional[str]]]] - plan: Optional[str] - - -class AssistantNodeName(StrEnum): - START = START - END = END - ROUTER = "router" - TRENDS_PLANNER = "trends_planner" - TRENDS_PLANNER_TOOLS = "trends_planner_tools" - TRENDS_GENERATOR = "trends_generator" - TRENDS_GENERATOR_TOOLS = "trends_generator_tools" - FUNNEL_PLANNER = "funnel_planner" - FUNNEL_PLANNER_TOOLS = "funnel_planner_tools" - FUNNEL_GENERATOR = "funnel_generator" - FUNNEL_GENERATOR_TOOLS = "funnel_generator_tools" - SUMMARIZER = "summarizer" - - -class AssistantNode(ABC): - _team: Team - - def __init__(self, team: Team): - self._team = team - - @abstractmethod - def run(cls, state: AssistantState, config: RunnableConfig) -> AssistantState: - raise NotImplementedError - - -def remove_line_breaks(line: str) -> str: - return line.replace("\n", " ") - - -def merge_human_messages(messages: list[LangchainHumanMessage]) -> list[LangchainHumanMessage]: - """ - Filters out duplicated human messages and merges them into one message. - """ - contents = set() - filtered_messages = [] - for message in messages: - if message.content in contents: - continue - contents.add(message.content) - filtered_messages.append(message) - return merge_message_runs(filtered_messages) - - -def filter_visualization_conversation( - messages: Sequence[AssistantMessageUnion], -) -> tuple[list[LangchainHumanMessage], list[VisualizationMessage]]: - """ - Splits, filters and merges the message history to be consumable by agents. Returns human and visualization messages. - """ - stack: list[LangchainHumanMessage] = [] - human_messages: list[LangchainHumanMessage] = [] - visualization_messages: list[VisualizationMessage] = [] - - for message in messages: - if isinstance(message, HumanMessage): - stack.append(LangchainHumanMessage(content=message.content)) - elif isinstance(message, VisualizationMessage) and message.answer: - if stack: - human_messages += merge_human_messages(stack) - stack = [] - visualization_messages.append(message) - - if stack: - human_messages += merge_human_messages(stack) - - return human_messages, visualization_messages - - -def dereference_schema(schema: dict) -> dict: - new_schema: dict = replace_refs(schema, proxies=False, lazy_load=False) - if "$defs" in new_schema: - new_schema.pop("$defs") - return new_schema diff --git a/ee/hogai/utils/__init__.py b/ee/hogai/utils/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ee/hogai/utils/helpers.py b/ee/hogai/utils/helpers.py new file mode 100644 index 0000000000000..4fc8cf3b5d6a0 --- /dev/null +++ b/ee/hogai/utils/helpers.py @@ -0,0 +1,79 @@ +from collections.abc import Sequence +from typing import Optional, TypeVar, Union + +from jsonref import replace_refs +from langchain_core.messages import ( + HumanMessage as LangchainHumanMessage, + merge_message_runs, +) + +from posthog.schema import ( + AssistantMessage, + HumanMessage, + VisualizationMessage, +) + +from .types import AIMessageUnion, AssistantMessageUnion + + +def remove_line_breaks(line: str) -> str: + return line.replace("\n", " ") + + +def filter_messages( + messages: Sequence[AssistantMessageUnion], + entity_filter: Union[tuple[type[AIMessageUnion], ...], type[AIMessageUnion]] = ( + AssistantMessage, + VisualizationMessage, + ), +) -> list[AssistantMessageUnion]: + """ + Filters and merges the message history to be consumable by agents. Returns human and AI messages. 
+ """ + stack: list[LangchainHumanMessage] = [] + filtered_messages: list[AssistantMessageUnion] = [] + + def _merge_stack(stack: list[LangchainHumanMessage]) -> list[HumanMessage]: + return [ + HumanMessage(content=langchain_message.content, id=langchain_message.id) + for langchain_message in merge_message_runs(stack) + ] + + for message in messages: + if isinstance(message, HumanMessage): + stack.append(LangchainHumanMessage(content=message.content, id=message.id)) + elif isinstance(message, entity_filter): + if stack: + filtered_messages += _merge_stack(stack) + stack = [] + filtered_messages.append(message) + + if stack: + filtered_messages += _merge_stack(stack) + + return filtered_messages + + +T = TypeVar("T", bound=AssistantMessageUnion) + + +def find_last_message_of_type(messages: Sequence[AssistantMessageUnion], message_type: type[T]) -> Optional[T]: + return next((msg for msg in reversed(messages) if isinstance(msg, message_type)), None) + + +def slice_messages_to_conversation_start( + messages: Sequence[AssistantMessageUnion], start_id: Optional[str] = None +) -> Sequence[AssistantMessageUnion]: + result = [] + for msg in messages: + result.append(msg) + if msg.id == start_id: + break + return result + + +def dereference_schema(schema: dict) -> dict: + new_schema: dict = replace_refs(schema, proxies=False, lazy_load=False) + if "$defs" in new_schema: + new_schema.pop("$defs") + return new_schema diff --git a/ee/hogai/utils/nodes.py b/ee/hogai/utils/nodes.py new file mode 100644 index 0000000000000..6a4358243b666 --- /dev/null +++ b/ee/hogai/utils/nodes.py @@ -0,0 +1,18 @@ +from abc import ABC, abstractmethod + +from langchain_core.runnables import RunnableConfig + +from posthog.models.team.team import Team + +from .types import AssistantState, PartialAssistantState + + +class AssistantNode(ABC): + _team: Team + + def __init__(self, team: Team): + self._team = team + + @abstractmethod + def run(cls, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: + raise NotImplementedError diff --git a/ee/hogai/utils/state.py b/ee/hogai/utils/state.py new file mode 100644 index 0000000000000..3392f3362adb9 --- /dev/null +++ b/ee/hogai/utils/state.py @@ -0,0 +1,70 @@ +from typing import Any, Literal, TypedDict, TypeGuard, Union + +from langchain_core.messages import AIMessageChunk + +from ee.hogai.utils.types import AssistantNodeName, AssistantState, PartialAssistantState + +# A state update can have a partial state or a LangGraph's reserved dataclasses like Interrupt. +GraphValueUpdate = dict[AssistantNodeName, dict[Any, Any] | Any] + +GraphValueUpdateTuple = tuple[Literal["values"], GraphValueUpdate] + + +def is_value_update(update: list[Any]) -> TypeGuard[GraphValueUpdateTuple]: + """ + Transition between nodes. + + Returns: + PartialAssistantState, Interrupt, or other LangGraph reserved dataclasses. 
+ """ + return len(update) == 2 and update[0] == "updates" + + +def validate_value_update(update: GraphValueUpdate) -> dict[AssistantNodeName, PartialAssistantState | Any]: + validated_update = {} + for node_name, value in update.items(): + if isinstance(value, dict): + validated_update[node_name] = PartialAssistantState.model_validate(value) + else: + validated_update[node_name] = value + return validated_update + + +class LangGraphState(TypedDict): + langgraph_node: AssistantNodeName + + +GraphMessageUpdateTuple = tuple[Literal["messages"], tuple[Union[AIMessageChunk, Any], LangGraphState]] + + +def is_message_update(update: list[Any]) -> TypeGuard[GraphMessageUpdateTuple]: + """ + Streaming of messages. + """ + return len(update) == 2 and update[0] == "messages" + + +GraphStateUpdateTuple = tuple[Literal["updates"], dict[Any, Any]] + + +def is_state_update(update: list[Any]) -> TypeGuard[GraphStateUpdateTuple]: + """ + Update of the state. Returns a full state. + """ + return len(update) == 2 and update[0] == "values" + + +def validate_state_update(state_update: dict[Any, Any]) -> AssistantState: + return AssistantState.model_validate(state_update) + + +GraphTaskStartedUpdateTuple = tuple[Literal["debug"], tuple[Union[AIMessageChunk, Any], LangGraphState]] + + +def is_task_started_update( + update: list[Any], +) -> TypeGuard[GraphTaskStartedUpdateTuple]: + """ + Streaming of messages. + """ + return len(update) == 2 and update[0] == "debug" and update[1]["type"] == "task" diff --git a/ee/hogai/utils/types.py b/ee/hogai/utils/types.py new file mode 100644 index 0000000000000..2df027b6f85af --- /dev/null +++ b/ee/hogai/utils/types.py @@ -0,0 +1,52 @@ +import operator +from collections.abc import Sequence +from enum import StrEnum +from typing import Annotated, Optional, Union + +from langchain_core.agents import AgentAction +from langgraph.graph import END, START +from pydantic import BaseModel, Field + +from posthog.schema import ( + AssistantMessage, + FailureMessage, + HumanMessage, + ReasoningMessage, + RouterMessage, + VisualizationMessage, +) + +AIMessageUnion = Union[AssistantMessage, VisualizationMessage, FailureMessage, RouterMessage, ReasoningMessage] +AssistantMessageUnion = Union[HumanMessage, AIMessageUnion] + + +class _SharedAssistantState(BaseModel): + intermediate_steps: Optional[list[tuple[AgentAction, Optional[str]]]] = Field(default=None) + start_id: Optional[str] = Field(default=None) + """ + The ID of the message from which the conversation started. 
+ """ + plan: Optional[str] = Field(default=None) + + +class AssistantState(_SharedAssistantState): + messages: Annotated[Sequence[AssistantMessageUnion], operator.add] + + +class PartialAssistantState(_SharedAssistantState): + messages: Optional[Annotated[Sequence[AssistantMessageUnion], operator.add]] = Field(default=None) + + +class AssistantNodeName(StrEnum): + START = START + END = END + ROUTER = "router" + TRENDS_PLANNER = "trends_planner" + TRENDS_PLANNER_TOOLS = "trends_planner_tools" + TRENDS_GENERATOR = "trends_generator" + TRENDS_GENERATOR_TOOLS = "trends_generator_tools" + FUNNEL_PLANNER = "funnel_planner" + FUNNEL_PLANNER_TOOLS = "funnel_planner_tools" + FUNNEL_GENERATOR = "funnel_generator" + FUNNEL_GENERATOR_TOOLS = "funnel_generator_tools" + SUMMARIZER = "summarizer" diff --git a/ee/management/commands/materialize_columns.py b/ee/management/commands/materialize_columns.py index c1ca3b3fd2287..6d54f8362ff1d 100644 --- a/ee/management/commands/materialize_columns.py +++ b/ee/management/commands/materialize_columns.py @@ -1,3 +1,4 @@ +import argparse import logging from django.core.management.base import BaseCommand @@ -69,8 +70,14 @@ def add_arguments(self, parser): default=MATERIALIZE_COLUMNS_MAX_AT_ONCE, help="Max number of columns to materialize via single invocation. Same as MATERIALIZE_COLUMNS_MAX_AT_ONCE env variable.", ) + parser.add_argument( + "--nullable", + action=argparse.BooleanOptionalAction, + default=True, + dest="is_nullable", + ) - def handle(self, *args, **options): + def handle(self, *, is_nullable: bool, **options): logger.setLevel(logging.INFO) if options["dry_run"]: @@ -80,7 +87,7 @@ def handle(self, *args, **options): logger.info(f"Materializing column. table={options['property_table']}, property_name={options['property']}") materialize_properties_task( - columns_to_materialize=[ + properties_to_materialize=[ ( options["property_table"], options["table_column"], @@ -90,6 +97,7 @@ def handle(self, *args, **options): ], backfill_period_days=options["backfill_period"], dry_run=options["dry_run"], + is_nullable=is_nullable, ) else: materialize_properties_task( @@ -99,4 +107,5 @@ def handle(self, *args, **options): backfill_period_days=options["backfill_period"], dry_run=options["dry_run"], team_id_to_analyze=options["analyze_team_id"], + is_nullable=is_nullable, ) diff --git a/ee/management/commands/update_materialized_column.py b/ee/management/commands/update_materialized_column.py index b45444eb4831b..bb55a61545dd6 100644 --- a/ee/management/commands/update_materialized_column.py +++ b/ee/management/commands/update_materialized_column.py @@ -1,7 +1,7 @@ import logging from typing import Any -from collections.abc import Callable +from collections.abc import Callable, Iterable from django.core.management.base import BaseCommand, CommandParser from posthog.clickhouse.materialized_columns import ColumnName, TablesWithMaterializedColumns @@ -9,9 +9,9 @@ logger = logging.getLogger(__name__) -COLUMN_OPERATIONS: dict[str, Callable[[TablesWithMaterializedColumns, ColumnName], Any]] = { - "enable": lambda table, column_name: update_column_is_disabled(table, column_name, is_disabled=False), - "disable": lambda table, column_name: update_column_is_disabled(table, column_name, is_disabled=True), +COLUMN_OPERATIONS: dict[str, Callable[[TablesWithMaterializedColumns, Iterable[ColumnName]], Any]] = { + "enable": lambda table, column_names: update_column_is_disabled(table, column_names, is_disabled=False), + "disable": lambda table, column_names: 
update_column_is_disabled(table, column_names, is_disabled=True), "drop": drop_column, } @@ -20,10 +20,12 @@ class Command(BaseCommand): def add_arguments(self, parser: CommandParser) -> None: parser.add_argument("operation", choices=COLUMN_OPERATIONS.keys()) parser.add_argument("table") - parser.add_argument("column_name") + parser.add_argument("column_names", nargs="+", metavar="column") - def handle(self, operation: str, table: TablesWithMaterializedColumns, column_name: ColumnName, **options): - logger.info("Running %r for %r.%r...", operation, table, column_name) + def handle( + self, operation: str, table: TablesWithMaterializedColumns, column_names: Iterable[ColumnName], **options + ): + logger.info("Running %r on %r for %r...", operation, table, column_names) fn = COLUMN_OPERATIONS[operation] - fn(table, column_name) + fn(table, column_names) logger.info("Success!") diff --git a/ee/migrations/0018_conversation_conversationcheckpoint_and_more.py b/ee/migrations/0018_conversation_conversationcheckpoint_and_more.py new file mode 100644 index 0000000000000..ec48cc780ad57 --- /dev/null +++ b/ee/migrations/0018_conversation_conversationcheckpoint_and_more.py @@ -0,0 +1,147 @@ +# Generated by Django 4.2.15 on 2024-12-11 15:51 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import posthog.models.utils + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0528_project_field_in_taxonomy"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("ee", "0017_accesscontrol_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="Conversation", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="ConversationCheckpoint", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ( + "checkpoint_ns", + models.TextField( + default="", + help_text='Checkpoint namespace. Denotes the path to the subgraph node the checkpoint originates from, separated by `|` character, e.g. `"child|grandchild"`. Defaults to "" (root graph).', + ), + ), + ("checkpoint", models.JSONField(help_text="Serialized checkpoint data.", null=True)), + ("metadata", models.JSONField(help_text="Serialized checkpoint metadata.", null=True)), + ( + "parent_checkpoint", + models.ForeignKey( + help_text="Parent checkpoint ID.", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="children", + to="ee.conversationcheckpoint", + ), + ), + ( + "thread", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="checkpoints", to="ee.conversation" + ), + ), + ], + ), + migrations.CreateModel( + name="ConversationCheckpointWrite", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ("task_id", models.UUIDField(help_text="Identifier for the task creating the checkpoint write.")), + ( + "idx", + models.IntegerField( + help_text="Index of the checkpoint write. 
It is an integer value where negative numbers are reserved for special cases, such as node interruption."
+                    ),
+                ),
+                (
+                    "channel",
+                    models.TextField(
+                        help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph enum."
+                    ),
+                ),
+                ("type", models.TextField(help_text="Type of the serialized blob. For example, `json`.", null=True)),
+                ("blob", models.BinaryField(null=True)),
+                (
+                    "checkpoint",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="writes",
+                        to="ee.conversationcheckpoint",
+                    ),
+                ),
+            ],
+        ),
+        migrations.CreateModel(
+            name="ConversationCheckpointBlob",
+            fields=[
+                (
+                    "id",
+                    models.UUIDField(
+                        default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False
+                    ),
+                ),
+                (
+                    "channel",
+                    models.TextField(
+                        help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph enum."
+                    ),
+                ),
+                ("version", models.TextField(help_text="Monotonically increasing version of the channel.")),
+                ("type", models.TextField(help_text="Type of the serialized blob. For example, `json`.", null=True)),
+                ("blob", models.BinaryField(null=True)),
+                (
+                    "checkpoint",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="blobs",
+                        to="ee.conversationcheckpoint",
+                    ),
+                ),
+            ],
+        ),
+        migrations.AddConstraint(
+            model_name="conversationcheckpointwrite",
+            constraint=models.UniqueConstraint(
+                fields=("checkpoint_id", "task_id", "idx"), name="unique_checkpoint_write"
+            ),
+        ),
+        migrations.AddConstraint(
+            model_name="conversationcheckpointblob",
+            constraint=models.UniqueConstraint(
+                fields=("checkpoint_id", "channel", "version"), name="unique_checkpoint_blob"
+            ),
+        ),
+        migrations.AddConstraint(
+            model_name="conversationcheckpoint",
+            constraint=models.UniqueConstraint(fields=("id", "checkpoint_ns", "thread"), name="unique_checkpoint"),
+        ),
+    ]
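To make the schema concrete, here is a hypothetical ORM round-trip against the models this migration creates (defined in ee/models/assistant.py below); `team` and `user` are assumed to already exist:

from uuid import uuid4

from ee.models.assistant import Conversation, ConversationCheckpoint, ConversationCheckpointWrite

conversation = Conversation.objects.create(team=team, user=user)
checkpoint = ConversationCheckpoint.objects.create(thread=conversation, checkpoint={}, metadata={})
ConversationCheckpointWrite.objects.create(
    checkpoint=checkpoint,
    task_id=uuid4(),
    idx=0,
    channel="messages",
    type="json",
    blob=b"{}",
)
# Writes replay in (idx, task_id) order; pending sends come from the parent
# checkpoint's TASKS channel (see the `pending_sends` property).
assert [write.idx for write in checkpoint.pending_writes] == [0]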
diff --git a/ee/migrations/max_migration.txt b/ee/migrations/max_migration.txt
index 449d87290c304..fb889f1cc34cf 100644
--- a/ee/migrations/max_migration.txt
+++ b/ee/migrations/max_migration.txt
@@ -1 +1 @@
-0017_accesscontrol_and_more
+0018_conversation_conversationcheckpoint_and_more
diff --git a/ee/models/__init__.py b/ee/models/__init__.py
index df7cfcba704e6..2067d11f7618f 100644
--- a/ee/models/__init__.py
+++ b/ee/models/__init__.py
@@ -1,3 +1,4 @@
+from .assistant import Conversation, ConversationCheckpoint, ConversationCheckpointBlob, ConversationCheckpointWrite
 from .dashboard_privilege import DashboardPrivilege
 from .event_definition import EnterpriseEventDefinition
 from .explicit_team_membership import ExplicitTeamMembership
@@ -10,7 +11,11 @@
 __all__ = [
     "AccessControl",
+    "ConversationCheckpoint",
+    "ConversationCheckpointBlob",
+    "ConversationCheckpointWrite",
     "DashboardPrivilege",
+    "Conversation",
     "EnterpriseEventDefinition",
     "EnterprisePropertyDefinition",
     "ExplicitTeamMembership",
diff --git a/ee/models/assistant.py b/ee/models/assistant.py
new file mode 100644
index 0000000000000..390a7ab7a117f
--- /dev/null
+++ b/ee/models/assistant.py
@@ -0,0 +1,83 @@
+from collections.abc import Iterable
+
+from django.db import models
+from langgraph.checkpoint.serde.types import TASKS
+
+from posthog.models.team.team import Team
+from posthog.models.user import User
+from posthog.models.utils import UUIDModel
+
+
+class Conversation(UUIDModel):
+    user = models.ForeignKey(User, on_delete=models.CASCADE)
+    team = models.ForeignKey(Team, on_delete=models.CASCADE)
+
+
+class ConversationCheckpoint(UUIDModel):
+    thread = models.ForeignKey(Conversation, on_delete=models.CASCADE, related_name="checkpoints")
+    checkpoint_ns = models.TextField(
+        default="",
+        help_text='Checkpoint namespace. Denotes the path to the subgraph node the checkpoint originates from, separated by `|` character, e.g. `"child|grandchild"`. Defaults to "" (root graph).',
+    )
+    parent_checkpoint = models.ForeignKey(
+        "self", null=True, on_delete=models.CASCADE, related_name="children", help_text="Parent checkpoint ID."
+    )
+    checkpoint = models.JSONField(null=True, help_text="Serialized checkpoint data.")
+    metadata = models.JSONField(null=True, help_text="Serialized checkpoint metadata.")
+
+    class Meta:
+        constraints = [
+            models.UniqueConstraint(
+                fields=["id", "checkpoint_ns", "thread"],
+                name="unique_checkpoint",
+            )
+        ]
+
+    @property
+    def pending_sends(self) -> Iterable["ConversationCheckpointWrite"]:
+        if self.parent_checkpoint is None:
+            return []
+        return self.parent_checkpoint.writes.filter(channel=TASKS).order_by("task_id", "idx")
+
+    @property
+    def pending_writes(self) -> Iterable["ConversationCheckpointWrite"]:
+        return self.writes.order_by("idx", "task_id")
+
+
+class ConversationCheckpointBlob(UUIDModel):
+    checkpoint = models.ForeignKey(ConversationCheckpoint, on_delete=models.CASCADE, related_name="blobs")
+    channel = models.TextField(
+        help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph enum."
+    )
+    version = models.TextField(help_text="Monotonically increasing version of the channel.")
+    type = models.TextField(null=True, help_text="Type of the serialized blob. For example, `json`.")
+    blob = models.BinaryField(null=True)
+
+    class Meta:
+        constraints = [
+            models.UniqueConstraint(
+                fields=["checkpoint_id", "channel", "version"],
+                name="unique_checkpoint_blob",
+            )
+        ]
+
+
+class ConversationCheckpointWrite(UUIDModel):
+    checkpoint = models.ForeignKey(ConversationCheckpoint, on_delete=models.CASCADE, related_name="writes")
+    task_id = models.UUIDField(help_text="Identifier for the task creating the checkpoint write.")
+    idx = models.IntegerField(
+        help_text="Index of the checkpoint write. It is an integer value where negative numbers are reserved for special cases, such as node interruption."
+    )
+    channel = models.TextField(
+        help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph enum."
+    )
+    type = models.TextField(null=True, help_text="Type of the serialized blob.
For example, `json`.") + blob = models.BinaryField(null=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["checkpoint_id", "task_id", "idx"], + name="unique_checkpoint_write", + ) + ] diff --git a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr deleted file mode 100644 index 389933177e2ca..0000000000000 --- a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr +++ /dev/null @@ -1,1649 +0,0 @@ -# serializer version: 1 -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_0_test_poe_v1_still_falls_back_to_person_subquery - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_1_test_poe_being_unavailable_we_fall_back_to_person_id_overrides - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - 
any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_15)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, 
%(hogql_val_16)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_2_test_poe_being_unavailable_we_fall_back_to_person_subquery_but_still_use_mat_props - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 
13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_15)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_16)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_3_test_allow_denormalised_props_fix_does_not_stop_all_poe_processing - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_4_test_poe_v2_available_person_properties_are_used_in_replay_listing - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - 
min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_00_poe_v2_and_materialized_columns_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_00_poe_v2_and_materialized_columns_allowed_with_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY 
s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_01_poe_v2_and_materialized_columns_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_01_poe_v2_and_materialized_columns_allowed_without_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_02_poe_v2_and_materialized_columns_off_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 
'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_02_poe_v2_and_materialized_columns_off_with_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_03_poe_v2_and_materialized_columns_off_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS 
start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_03_poe_v2_and_materialized_columns_off_without_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_04_poe_off_and_materialized_columns_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_04_poe_off_and_materialized_columns_allowed_with_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), 
- min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY 
start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_05_poe_off_and_materialized_columns_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_05_poe_off_and_materialized_columns_allowed_without_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_06_poe_off_and_materialized_columns_not_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - 
argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_06_poe_off_and_materialized_columns_not_allowed_with_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), 
in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_07_poe_off_and_materialized_columns_not_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, 
end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_07_poe_off_and_materialized_columns_not_allowed_without_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), 
ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_08_poe_v1_and_materialized_columns_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - 
max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_08_poe_v1_and_materialized_columns_allowed_with_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_09_poe_v1_and_materialized_columns_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS 
mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_09_poe_v1_and_materialized_columns_allowed_without_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization.1 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, 
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_00_poe_v2_and_materialized_columns_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_01_poe_v2_and_materialized_columns_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_02_poe_v2_and_materialized_columns_off_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_03_poe_v2_and_materialized_columns_off_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_04_poe_off_and_materialized_columns_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_05_poe_off_and_materialized_columns_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_06_poe_off_and_materialized_columns_not_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_07_poe_off_and_materialized_columns_not_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_08_poe_v1_and_materialized_columns_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_09_poe_v1_and_materialized_columns_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT DISTINCT events.`$session_id` AS `$session_id`
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
diff --git a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
index bcd1ed1e3c8cb..97bc6424e1cbd 100644
--- a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
+++ b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
@@ -19,12 +19,12 @@
          ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
          round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -51,24 +51,24 @@
          ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
          round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
-        FROM person
-        WHERE equals(person.team_id, 99999)
-        GROUP BY person.id
-        HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
+        FROM person
+        WHERE equals(person.team_id, 99999)
+        GROUP BY person.id
+        HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -95,24 +95,24 @@
          ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
          round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
-        FROM person
-        WHERE equals(person.team_id, 99999)
-        GROUP BY person.id
-        HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
+        FROM person
+        WHERE equals(person.team_id, 99999)
+        GROUP BY person.id
+        HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -139,12 +139,12 @@
          ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
          round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -171,12 +171,12 @@
          ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
          round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28
23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -203,7 +203,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -238,12 +238,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -278,7 +278,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -313,12 +313,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), 
sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -353,7 +353,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -388,12 +388,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), 
toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -428,7 +428,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -463,12 +463,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -503,7 +503,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -538,27 +538,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -593,7 +593,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 
'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -628,27 +628,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, 
person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -683,7 +683,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -718,27 +718,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), 
sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), 
ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -773,7 +773,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -808,27 +808,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT 
argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 
99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -863,7 +863,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -898,12 +898,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 
0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -938,7 +938,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -973,12 +973,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1013,7 +1013,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1048,12 +1048,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1088,7 +1088,7 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1123,12 +1123,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM 
session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1163,10 +1163,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1201,10 +1201,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1239,10 +1239,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1277,10 +1277,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), 
sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1315,16 +1315,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, 
person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1359,16 +1359,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 
0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1403,16 +1403,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 
notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1447,16 +1447,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), 
events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1491,16 +1491,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS 
distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1535,16 +1535,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 
'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1579,16 +1579,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1623,16 +1623,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING 
ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT DISTINCT events.`$session_id` AS `$session_id`
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
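(A note on the snapshot hunks above, which swap `in(s.session_id, (SELECT ...))` for `globalIn(s.session_id, (SELECT ...))`: on a sharded ClickHouse deployment, a plain `IN` re-evaluates its subquery on every shard against that shard's local data only, so a replay whose matching events were routed to a different shard silently drops out of the result. `GLOBAL IN` instead runs the subquery once on the initiating node and broadcasts the resulting set to all shards. A minimal sketch of the difference, assuming a multi-shard cluster, with table and team values invented; it uses `clickhouse-driver`, which this PR already imports elsewhere.)

```python
# Sketch only: IN vs GLOBAL IN semantics on a distributed table.
from clickhouse_driver import Client

client = Client("localhost")  # initiator node of the (assumed) sharded cluster

# Plain IN: each shard runs the subquery over its *local* slice of `events`,
# so sessions whose events live on another shard never match.
local_in = client.execute(
    "SELECT count() FROM session_replay_events "
    "WHERE session_id IN (SELECT `$session_id` FROM events WHERE team_id = 1)"
)

# GLOBAL IN: the initiator materializes the subquery once into a temporary
# table and ships it to every shard, so cross-shard sessions are kept.
global_in = client.execute(
    "SELECT count() FROM session_replay_events "
    "WHERE session_id GLOBAL IN (SELECT `$session_id` FROM events WHERE team_id = 1)"
)
```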
diff --git a/ee/session_recordings/queries/test/test_session_recording_list_from_filters.py b/ee/session_recordings/queries/test/test_session_recording_list_from_filters.py
deleted file mode 100644
index 391d12071966f..0000000000000
--- a/ee/session_recordings/queries/test/test_session_recording_list_from_filters.py
+++ /dev/null
@@ -1,353 +0,0 @@
-import re
-from itertools import product
-from uuid import uuid4
-
-from dateutil.relativedelta import relativedelta
-from django.utils.timezone import now
-from freezegun import freeze_time
-from parameterized import parameterized
-
-from ee.clickhouse.materialized_columns.columns import materialize
-from posthog.clickhouse.client import sync_execute
-from posthog.hogql.ast import CompareOperation, And, SelectQuery
-from posthog.hogql.base import Expr
-from posthog.hogql.context import HogQLContext
-from posthog.hogql.printer import print_ast
-from posthog.models import Person
-from posthog.models.filters import SessionRecordingsFilter
-from posthog.schema import PersonsOnEventsMode
-from posthog.session_recordings.queries.session_recording_list_from_filters import (
-    SessionRecordingListFromFilters,
-)
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
-from posthog.session_recordings.sql.session_replay_event_sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL
-from posthog.test.base import (
-    APIBaseTest,
-    ClickhouseTestMixin,
-    QueryMatchingTest,
-    snapshot_clickhouse_queries,
-    _create_event,
-)
-
-
-# The HogQL pair of TestClickhouseSessionRecordingsListFromSessionReplay can be renamed when delete the old one
-@freeze_time("2021-01-01T13:46:23")
-class TestClickhouseSessionRecordingsListFromFilters(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest):
-    def _print_query(self, query: SelectQuery) -> str:
-        return print_ast(
-            query,
-            HogQLContext(team_id=self.team.pk, enable_select_queries=True),
-            "clickhouse",
-            pretty=True,
-        )
-
-    def tearDown(self) -> None:
-        sync_execute(TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL())
-
-    @property
-    def base_time(self):
-        return (now() - relativedelta(hours=1)).replace(microsecond=0, second=0)
-
-    def create_event(
-        self,
-        distinct_id,
-        timestamp,
-        team=None,
-        event_name="$pageview",
-        properties=None,
-    ):
-        if team is None:
-            team = self.team
-        if properties is None:
-            properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"}
-        return _create_event(
-            team=team,
-            event=event_name,
-            timestamp=timestamp,
-            distinct_id=distinct_id,
-            properties=properties,
-        )
-
-    @parameterized.expand(
-        [
-            [
-                "test_poe_v1_still_falls_back_to_person_subquery",
-                True,
-                False,
-                False,
-                PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS,
-            ],
-            [
-                "test_poe_being_unavailable_we_fall_back_to_person_id_overrides",
-                False,
-                False,
-                False,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED,
-            ],
-            [
-                "test_poe_being_unavailable_we_fall_back_to_person_subquery_but_still_use_mat_props",
-                False,
-                False,
-                False,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED,
-            ],
-            [
-                "test_allow_denormalised_props_fix_does_not_stop_all_poe_processing",
-                False,
-                True,
-                False,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
-            ],
-            [
-                "test_poe_v2_available_person_properties_are_used_in_replay_listing",
-                False,
-                True,
-                True,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
-            ],
-        ]
-    )
-    def test_effect_of_poe_settings_on_query_generated(
-        self,
-        _name: str,
-        poe_v1: bool,
-        poe_v2: bool,
-        allow_denormalized_props: bool,
-        expected_poe_mode: PersonsOnEventsMode,
-    ) -> None:
-        with self.settings(
-            PERSON_ON_EVENTS_OVERRIDE=poe_v1,
-            PERSON_ON_EVENTS_V2_OVERRIDE=poe_v2,
-            ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalized_props,
-        ):
-            assert self.team.person_on_events_mode == expected_poe_mode
-            materialize("events", "rgInternal", table_column="person_properties")
-
-            filter = SessionRecordingsFilter(
-                team=self.team,
-                data={
-                    "properties": [
-                        {
-                            "key": "rgInternal",
-                            "value": ["false"],
-                            "operator": "exact",
-                            "type": "person",
-                        }
-                    ]
-                },
-            )
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=filter, team=self.team, hogql_query_modifiers=None
-            )
-
-            hogql_parsed_select = session_recording_list_instance.get_query()
-            printed_query = self._print_query(hogql_parsed_select)
-
-            person_filtering_expr = self._matching_person_filter_expr_from(hogql_parsed_select)
-
-            self._assert_is_events_person_filter(person_filtering_expr)
-
-            if poe_v1 or poe_v2:
-                # Property used directly from event (from materialized column)
-                assert "ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null')" in printed_query
-            else:
-                # We get the person property value from the persons JOIN
-                assert re.search(
-                    r"argMax\(replaceRegexpAll\(nullIf\(nullIf\(JSONExtractRaw\(person\.properties, %\(hogql_val_\d+\)s\), ''\), 'null'\), '^\"|\"\$', ''\), person\.version\) AS properties___rgInternal",
-                    printed_query,
-                )
-                # Then we actually filter on that property value
-                assert re.search(
-                    r"ifNull\(equals\(events__person\.properties___rgInternal, %\(hogql_val_\d+\)s\), 0\)",
-                    printed_query,
-                )
-            self.assertQueryMatchesSnapshot(printed_query)
-
-    def _assert_is_pdi_filter(self, person_filtering_expr: list[Expr]) -> None:
-        assert person_filtering_expr[0].right.select_from.table.chain == ["person_distinct_ids"]
-        assert person_filtering_expr[0].right.where.left.chain == ["person", "properties", "rgInternal"]
-
-    def _assert_is_events_person_filter(self, person_filtering_expr: list[Expr]) -> None:
-        assert person_filtering_expr[0].right.select_from.table.chain == ["events"]
-        event_person_condition = [
-            x
-            for x in person_filtering_expr[0].right.where.exprs
-            if isinstance(x, CompareOperation) and x.left.chain == ["person", "properties", "rgInternal"]
-        ]
-        assert len(event_person_condition) == 1
-
-    def _matching_person_filter_expr_from(self, hogql_parsed_select: SelectQuery) -> list[Expr]:
-        where_conditions: list[Expr] = hogql_parsed_select.where.exprs
-        ands = [x for x in where_conditions if isinstance(x, And)]
-        assert len(ands) == 1
-        and_comparisons = [x for x in ands[0].exprs if isinstance(x, CompareOperation)]
-        assert len(and_comparisons) == 1
-        assert isinstance(and_comparisons[0].right, SelectQuery)
-        return and_comparisons
-
-    settings_combinations = [
-        ["poe v2 and materialized columns allowed", False, True, True],
-        ["poe v2 and materialized columns off", False, True, False],
-        ["poe off and materialized columns allowed", False, False, True],
-        ["poe off and materialized columns not allowed", False, False, False],
-        ["poe v1 and materialized columns allowed", True, False, True],
-        ["poe v1 and not materialized columns not allowed", True, False, False],
-    ]
-
-    # Options for "materialize person columns"
-    materialization_options = [
-        [" with materialization", True],
-        [" without materialization", False],
-    ]
-
-    # Expand the parameter list to the product of all combinations with "materialize person columns"
-    # e.g. [a, b] x [c, d] = [a, c], [a, d], [b, c], [b, d]
-    test_case_combinations = [
-        [f"{name}{mat_option}", poe_v1, poe, mat_columns, mat_person]
-        for (name, poe_v1, poe, mat_columns), (mat_option, mat_person) in product(
-            settings_combinations, materialization_options
-        )
-    ]
-
-    @parameterized.expand(test_case_combinations)
-    @snapshot_clickhouse_queries
-    def test_event_filter_with_person_properties_materialized(
-        self,
-        _name: str,
-        poe1_enabled: bool,
-        poe2_enabled: bool,
-        allow_denormalised_props: bool,
-        materialize_person_props: bool,
-    ) -> None:
-        # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"])
-        # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos
-        # KLUDGE: for materialization on and off to test both sides the way the decorator would have
-        if materialize_person_props:
-            materialize("events", "email", table_column="person_properties")
-            materialize("person", "email")
-
-        with self.settings(
-            PERSON_ON_EVENTS_OVERRIDE=poe1_enabled,
-            PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled,
-            ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props,
-        ):
-            user_one = "test_event_filter_with_person_properties-user"
-            user_two = "test_event_filter_with_person_properties-user2"
-            session_id_one = f"test_event_filter_with_person_properties-1-{str(uuid4())}"
-            session_id_two = f"test_event_filter_with_person_properties-2-{str(uuid4())}"
-
-            Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"})
-            Person.objects.create(team=self.team, distinct_ids=[user_two], properties={"email": "bla2"})
-
-            self._add_replay_with_pageview(session_id_one, user_one)
-            produce_replay_summary(
-                distinct_id=user_one,
-                session_id=session_id_one,
-                first_timestamp=(self.base_time + relativedelta(seconds=30)),
-                team_id=self.team.id,
-            )
-            self._add_replay_with_pageview(session_id_two, user_two)
-            produce_replay_summary(
-                distinct_id=user_two,
-                session_id=session_id_two,
-                first_timestamp=(self.base_time + relativedelta(seconds=30)),
-                team_id=self.team.id,
-            )
-
-            match_everyone_filter = SessionRecordingsFilter(
-                team=self.team,
-                data={"properties": []},
-            )
-
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=match_everyone_filter, team=self.team, hogql_query_modifiers=None
-            )
-            (session_recordings, _, _) = session_recording_list_instance.run()
-
-            assert sorted([x["session_id"] for x in session_recordings]) == sorted([session_id_one, session_id_two])
-
-            match_bla_filter = SessionRecordingsFilter(
-                team=self.team,
-                data={
-                    "properties": [
-                        {
-                            "key": "email",
-                            "value": ["bla"],
-                            "operator": "exact",
-                            "type": "person",
-                        }
-                    ]
-                },
-            )
-
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=match_bla_filter, team=self.team, hogql_query_modifiers=None
-            )
-            (session_recordings, _, _) = session_recording_list_instance.run()
-
-            assert len(session_recordings) == 1
-            assert session_recordings[0]["session_id"] == session_id_one
-
-    def _add_replay_with_pageview(self, session_id: str, user: str) -> None:
-        self.create_event(
-            user,
-            self.base_time,
-            properties={"$session_id": session_id, "$window_id": str(uuid4())},
-        )
-        produce_replay_summary(
-            distinct_id=user,
-            session_id=session_id,
-            first_timestamp=self.base_time,
-            team_id=self.team.id,
-        )
-
-    @parameterized.expand(test_case_combinations)
-    @snapshot_clickhouse_queries
-    def test_person_id_filter(
-        self,
-        _name: str,
-        poe2_enabled: bool,
-        poe1_enabled: bool,
-        allow_denormalised_props: bool,
-        materialize_person_props: bool,
-    ) -> None:
-        # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"])
-        # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos
-        # KLUDGE: for materialization on and off to test both sides the way the decorator would have
-        if materialize_person_props:
-            # it shouldn't matter to this test whether any column is materialized
-            # but let's keep the tests in this file similar so we flush out any unexpected interactions
-            materialize("events", "email", table_column="person_properties")
-            materialize("person", "email")
-
-        with self.settings(
-            PERSON_ON_EVENTS_OVERRIDE=poe1_enabled,
-            PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled,
-            ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props,
-        ):
-            three_user_ids = ["person-1-distinct-1", "person-1-distinct-2", "person-2"]
-            session_id_one = f"test_person_id_filter-session-one"
-            session_id_two = f"test_person_id_filter-session-two"
-            session_id_three = f"test_person_id_filter-session-three"
-
-            p = Person.objects.create(
-                team=self.team,
-                distinct_ids=[three_user_ids[0], three_user_ids[1]],
-                properties={"email": "bla"},
-            )
-            Person.objects.create(
-                team=self.team,
-                distinct_ids=[three_user_ids[2]],
-                properties={"email": "bla2"},
-            )
-
-            self._add_replay_with_pageview(session_id_one, three_user_ids[0])
-            self._add_replay_with_pageview(session_id_two, three_user_ids[1])
-            self._add_replay_with_pageview(session_id_three, three_user_ids[2])
-
-            filter = SessionRecordingsFilter(team=self.team, data={"person_uuid": str(p.uuid)})
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=filter, team=self.team, hogql_query_modifiers=None
-            )
-            (session_recordings, _, _) = session_recording_list_instance.run()
-            assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one])
diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py
index a3dc50c1228f5..7b1a962b187b7 100644
--- a/ee/session_recordings/session_recording_playlist.py
+++ b/ee/session_recordings/session_recording_playlist.py
@@ -1,4 +1,3 @@
-import json
 from typing import Any, Optional
 
 import structlog
@@ -13,7 +12,7 @@
 from posthog.api.forbid_destroy_model import ForbidDestroyModel
 from posthog.api.routing import TeamAndOrgViewSetMixin
 from posthog.api.shared import UserBasicSerializer
-from posthog.constants import SESSION_RECORDINGS_FILTER_IDS, AvailableFeature
+from posthog.constants import AvailableFeature
 from posthog.models import (
     SessionRecording,
     SessionRecordingPlaylist,
@@ -27,7 +26,6 @@
     changes_between,
     log_activity,
 )
-from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
 from posthog.models.team.team import check_is_feature_available_for_team
 from posthog.models.utils import UUIDT
 from posthog.rate_limit import (
@@ -37,7 +35,6 @@
 from posthog.schema import RecordingsQuery
 from posthog.session_recordings.session_recording_api import (
     list_recordings_response,
-    list_recordings,
     query_as_params_to_dict,
     list_recordings_from_query,
 )
@@ -230,19 +227,12 @@ def recordings(self, request: request.Request, *args: Any, **kwargs: Any) -> res
             .values_list("recording_id", flat=True)
         )
 
-        use_query_type = (request.GET.get("as_query", "False")).lower() == "true"
-
-        if use_query_type:
-            data_dict = query_as_params_to_dict(request.GET.dict())
-            query = RecordingsQuery.model_validate(data_dict)
-            query.session_ids = playlist_items
-            return list_recordings_response(
-                list_recordings_from_query(query, request, context=self.get_serializer_context())
-            )
-        else:
-            filter = SessionRecordingsFilter(request=request, team=self.team)
-            filter = filter.shallow_clone({SESSION_RECORDINGS_FILTER_IDS: json.dumps(playlist_items)})
-            return list_recordings_response(list_recordings(filter, request, context=self.get_serializer_context()))
+        data_dict = query_as_params_to_dict(request.GET.dict())
+        query = RecordingsQuery.model_validate(data_dict)
+        query.session_ids = playlist_items
+        return list_recordings_response(
+            list_recordings_from_query(query, request, context=self.get_serializer_context())
+        )
 
     # As of now, you can only "update" a session recording by adding or removing a recording from a static playlist
     @action(
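(With the legacy `SessionRecordingsFilter` branch deleted in the hunk above, every playlist listing request now flows through the schema-validated `RecordingsQuery`. A small self-contained sketch of that path; the parameter values are invented for illustration, while `RecordingsQuery`, `model_validate`, and `session_ids` come from the hunk itself.)

```python
from posthog.schema import RecordingsQuery

# Hypothetical raw params, i.e. what query_as_params_to_dict(request.GET.dict())
# might return for a request like /recordings?date_from=-7d
raw_params = {"date_from": "-7d"}

query = RecordingsQuery.model_validate(raw_params)  # pydantic validation replaces the old filter object
query.session_ids = ["session-a", "session-b"]  # pinned to the playlist's stored recording ids
print(query.date_from, query.session_ids)
```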
= RecordingsQuery.model_validate(data_dict) - query.session_ids = playlist_items - return list_recordings_response( - list_recordings_from_query(query, request, context=self.get_serializer_context()) - ) - else: - filter = SessionRecordingsFilter(request=request, team=self.team) - filter = filter.shallow_clone({SESSION_RECORDINGS_FILTER_IDS: json.dumps(playlist_items)}) - return list_recordings_response(list_recordings(filter, request, context=self.get_serializer_context())) + data_dict = query_as_params_to_dict(request.GET.dict()) + query = RecordingsQuery.model_validate(data_dict) + query.session_ids = playlist_items + return list_recordings_response( + list_recordings_from_query(query, request, context=self.get_serializer_context()) + ) # As of now, you can only "update" a session recording by adding or removing a recording from a static playlist @action( diff --git a/ee/surveys/summaries/summarize_surveys.py b/ee/surveys/summaries/summarize_surveys.py index 1e4b088484f55..1b74ca04d60c8 100644 --- a/ee/surveys/summaries/summarize_surveys.py +++ b/ee/surveys/summaries/summarize_surveys.py @@ -121,7 +121,7 @@ def summarize_survey_responses( we're trying to identify what to work on use as concise and simple language as is possible. generate no text other than the summary. - the aim is to let people see themes in the responses received. return the text in github flavoured markdown format""", + the aim is to let people see themes in the responses received. return the text in markdown format without using any paragraph formatting""", }, ], user=f"{instance_region}/{user.pk}", @@ -131,5 +131,7 @@ def summarize_survey_responses( if usage: TOKENS_IN_PROMPT_HISTOGRAM.observe(usage) + logger.info("survey_summary_response", result=result) + content: str = result.choices[0].message.content or "" return {"content": content, "timings": timer.get_all_timings()} diff --git a/ee/tasks/materialized_columns.py b/ee/tasks/materialized_columns.py index d05cdddc0b0ca..98091c3b1d00a 100644 --- a/ee/tasks/materialized_columns.py +++ b/ee/tasks/materialized_columns.py @@ -1,50 +1,49 @@ +from collections.abc import Iterator +from dataclasses import dataclass from celery.utils.log import get_task_logger +from clickhouse_driver import Client -from ee.clickhouse.materialized_columns.columns import ( - TRIM_AND_EXTRACT_PROPERTY, - get_materialized_columns, -) +from ee.clickhouse.materialized_columns.columns import MaterializedColumn, get_cluster, tables as table_infos from posthog.client import sync_execute -from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE +from posthog.settings import CLICKHOUSE_DATABASE from posthog.clickhouse.materialized_columns import ColumnName, TablesWithMaterializedColumns logger = get_task_logger(__name__) -def mark_all_materialized() -> None: - if any_ongoing_mutations(): - logger.info("There are running mutations, skipping marking as materialized") - return - - for ( - table, - property_name, - table_column, - column_name, - ) in get_materialized_columns_with_default_expression(): - updated_table = "sharded_events" if table == "events" else table - - # :TRICKY: On cloud, we ON CLUSTER updates to events/sharded_events but not to persons. Why? 
¯\_(ツ)_/¯ - execute_on_cluster = f"ON CLUSTER '{CLICKHOUSE_CLUSTER}'" if table == "events" else "" - - sync_execute( - f""" - ALTER TABLE {updated_table} - {execute_on_cluster} - MODIFY COLUMN - {column_name} VARCHAR MATERIALIZED {TRIM_AND_EXTRACT_PROPERTY.format(table_column=table_column)} - """, - {"property": property_name}, +@dataclass +class MarkMaterializedTask: + table: str + column: MaterializedColumn + + def execute(self, client: Client) -> None: + expression, parameters = self.column.get_expression_and_parameters() + client.execute( + f"ALTER TABLE {self.table} MODIFY COLUMN {self.column.name} {self.column.type} MATERIALIZED {expression}", + parameters, ) -def get_materialized_columns_with_default_expression(): - tables: list[TablesWithMaterializedColumns] = ["events", "person"] - for table in tables: - materialized_columns = get_materialized_columns(table) - for (property_name, table_column), column_name in materialized_columns.items(): - if is_default_expression(table, column_name): - yield table, property_name, table_column, column_name +def mark_all_materialized() -> None: + cluster = get_cluster() + + for table_name, column in get_materialized_columns_with_default_expression(): + table_info = table_infos[table_name] + table_info.map_data_nodes( + cluster, + MarkMaterializedTask( + table_info.data_table, + column, + ).execute, + ).result() + + +def get_materialized_columns_with_default_expression() -> Iterator[tuple[str, MaterializedColumn]]: + table_names: list[TablesWithMaterializedColumns] = ["events", "person"] + for table_name in table_names: + for column in MaterializedColumn.get_all(table_name): + if is_default_expression(table_name, column.name): + yield table_name, column def any_ongoing_mutations() -> bool: diff --git a/ee/tasks/test/test_calculate_cohort.py b/ee/tasks/test/test_calculate_cohort.py index c5264bbe12631..ed0dd3e4290cc 100644 --- a/ee/tasks/test/test_calculate_cohort.py +++ b/ee/tasks/test/test_calculate_cohort.py @@ -45,6 +45,7 @@ def test_create_stickiness_cohort(self, _insert_cohort_from_insight_filter): "stickiness_days": "1", "label": "$pageview", }, + self.team.pk, ) insert_cohort_from_insight_filter( @@ -118,6 +119,7 @@ def test_create_trends_cohort(self, _insert_cohort_from_insight_filter): "date_to": "2021-01-01", "label": "$pageview", }, + self.team.pk, ) insert_cohort_from_insight_filter( cohort_id, @@ -228,6 +230,7 @@ def test_create_trends_cohort_arg_test(self, _insert_cohort_from_insight_filter) "interval": "day", "properties": '[{"key": "$domain", "value": "app.posthog.com", "operator": "icontains", "type": "event"}]', }, + self.team.pk, ) insert_cohort_from_insight_filter( cohort_id, @@ -357,6 +360,7 @@ def test_create_funnels_cohort(self, _insert_cohort_from_insight_filter): "date_to": "2021-01-07", "funnel_step": "1", }, + self.team.pk, ) insert_cohort_from_insight_filter(cohort_id, params) @@ -445,6 +449,7 @@ def _create_events(data, event="$pageview"): "entity_order": "0", "lifecycle_type": "returning", }, + self.team.pk, ) insert_cohort_from_insight_filter( @@ -507,6 +512,7 @@ def _create_events(data, event="$pageview"): "entity_order": "0", "lifecycle_type": "dormant", }, + self.team.pk, ) self.assertEqual(_insert_cohort_from_insight_filter.call_count, 2) diff --git a/ee/urls.py b/ee/urls.py index 7c722bc31852f..91b58e0fcb238 100644 --- a/ee/urls.py +++ b/ee/urls.py @@ -6,11 +6,11 @@ from django.urls.conf import path from ee.api import integration -from .api.rbac import organization_resource_access, role from .api import ( 
     authentication,
     billing,
+    conversation,
     dashboard_collaborator,
     explicit_team_member,
     feature_flag_role_access,
@@ -19,18 +19,20 @@
     sentry_stats,
     subscription,
 )
+from .api.rbac import organization_resource_access, role
 from .session_recordings import session_recording_playlist


 def extend_api_router() -> None:
     from posthog.api import (
-        router as root_router,
-        register_grandfathered_environment_nested_viewset,
-        projects_router,
-        organizations_router,
-        project_feature_flags_router,
         environment_dashboards_router,
+        environments_router,
         legacy_project_dashboards_router,
+        organizations_router,
+        project_feature_flags_router,
+        projects_router,
+        register_grandfathered_environment_nested_viewset,
+        router as root_router,
     )

     root_router.register(r"billing", billing.BillingViewset, "billing")
@@ -93,6 +95,10 @@ def extend_api_router() -> None:
         ["project_id"],
     )

+    environments_router.register(
+        r"conversations", conversation.ConversationViewSet, "environment_conversations", ["team_id"]
+    )
+

 # The admin interface is disabled on self-hosted instances, as its misuse can be unsafe
 admin_urlpatterns = (
[Binary snapshot updates: roughly 100 Storybook PNGs under frontend/__snapshots__/ regenerated in dark and light variants — components-cards-insight-details, components-cards-text-card, components-errors-error-display, components-hogfetti, components-hogqleditor, components-html-elements-display, components-networkrequest-navigationitem, components-playerinspector, components-properties-table, components-subscriptions, lemon-ui-code-snippet, lemon-ui-icons, lemon-ui-lemon-badge, lemon-ui-lemon-banner, lemon-ui-lemon-button, lemon-ui-lemon-calendar, lemon-ui-lemon-calendar-select (two new with-time-toggle-and-multiple-months snapshots added), lemon-ui-lemon-dialog, lemon-ui-lemon-modal, lemon-ui-lemon-segmented-button, lemon-ui-lemon-segmented-select, lemon-ui-lemon-toast, lemon-ui-spinner, posthog-3000-navigation, posthog-3000-sidebar, replay-player-failure, and replay-player-success.]
diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png
b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png index 03870e07340f4..93ad7064fd68b 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png index 576caf4f434a2..537566ccb05ab 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png index 5db2152719be9..df722bd0f2d26 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png index 0aad92c68ce5f..477239b6b7587 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png index a5621ecc99e11..7fbea863d2331 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png index 9bbbf1b37f239..fa92d131cd17f 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png index b2f928130be50..2449e1ce566bd 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--experiments-list--dark.png b/frontend/__snapshots__/scenes-app-experiments--experiments-list--dark.png index 4978c38f5e986..b969c8e32854a 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--experiments-list--dark.png and b/frontend/__snapshots__/scenes-app-experiments--experiments-list--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png index d5637e0b03a16..a6d3889b23a43 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png 
b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png index c50f33e6d898c..daf12837bf697 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png index 08312752ac112..835b68b2ea10d 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png index 8a55f274bf3f3..64f704c6a75bc 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png differ diff --git a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-no-pages-available--dark.png b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-no-pages-available--dark.png index f8dde32675261..388f05bff2379 100644 Binary files a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-no-pages-available--dark.png and b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-no-pages-available--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-page-selected--dark.png b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-page-selected--dark.png index 79b726f68b1cd..b8a56a896b3d1 100644 Binary files a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-page-selected--dark.png and b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-page-selected--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--dark.png b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--dark.png index 8821684eea618..1b0608ca30a28 100644 Binary files a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--dark.png and b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--light.png b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--light.png index b6ce8b8910607..c2f535216405b 100644 Binary files a/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--light.png and b/frontend/__snapshots__/scenes-app-heatmaps--heatmaps-browser-with-unauthorized-page-selected--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png index 587abfccb4aa5..a1c8d5a8d59af 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png index ac19e318a3f94..695536b9bcac8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png index 8ad5ae4d07ef6..73e5f193806a7 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png index 64bcb81eb0d5d..6512dbd87e1db 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png index 64260538a2c7b..ee27b11bed568 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png index 25ab8cd06fd02..00916265cd38b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png index d4389462f0b58..248d1f34b318a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png index 6b73a796de5fe..8193f6628caff 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png index 9b5ea77184db9..0cd89979f05a3 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--dark--webkit.png index 
3551a39f9d5b0..76fa0b9d70502 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png index 53a38ebe97627..7f17c93cf8695 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png index b6956d05b33b5..149a6d0c31e64 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png index ab8433877a9e5..64cc334785eda 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png index e5bac9eeee8f1..756eb74f6156b 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png and b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png index db43716077ebe..27244118ba4a0 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png index cc409784b1c45..6f13b687b5156 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png index 6f6b491670b99..78d4b44700226 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png index c456bd0c5fde0..d300a75be4dc6 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png index 803390e1b9822..f7e1627246573 100644 Binary files 
a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png index b785e83e2206f..1bcb94198a8b6 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png b/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png index c844325595b28..3fdfad1c20908 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png index be67ab75d125b..eee71a3783a06 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png index cf026b65c4994..358c01f1ccdae 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--dark.png b/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--dark.png index 11ea424b098d0..160760e0f576d 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--dark.png and b/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--light.png b/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--light.png index 20411145690cb..92db4ba9ff45e 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--light.png and b/frontend/__snapshots__/scenes-app-notebooks--notebooks-list--light.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png index d606d44e5e440..1c682a169746a 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png index 19fd725a92aca..4a8389daacd3a 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png index 536e789b3e2bf..2f712d0266fe1 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png and 
b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page-without-pipelines--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page-without-pipelines--dark.png index e7ee037d2db5a..6df95747f77ef 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page-without-pipelines--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page-without-pipelines--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-big-query--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-big-query--dark.png index 4093411b824a6..6f93fdda8e5f4 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-big-query--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-big-query--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png index b8b549ad31a31..d39232bc20b73 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png index 4afdf7cf81cfb..a128cf65fde2e 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-nodes-management-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-nodes-management-page--dark.png index cc34a11485191..3749d39986466 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-nodes-management-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-nodes-management-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png index 820998e556ba5..aeb5c204bdf51 100644 Binary files a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png and b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png index e7ec3a315684b..720fc8ce9e6c3 100644 Binary files a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png and b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png index fc93723027d33..10d3e242df26a 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png and 
b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png index 98c143d93d5b7..180284766d7da 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png index 169cab94e78cc..a55904927d4df 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-notebooks--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png index 923fe9d53a9c8..51fe54193e7dd 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png index 8ac25dbcb685c..a5428e6294540 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings--light.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png index 907f97aa1f5b2..03fe4c6ea65ee 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png index 918b0b0d8cf6e..0a32462d3ed9e 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png index ae7822ea5b860..16719722cec24 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png b/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png index 70d439fbf26a9..4ca173794e1a0 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png and b/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png b/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png index 0ae472abeef7e..7979111b8b036 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png and 
b/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png index 0cd41b32b0322..631019cafacdd 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png index 000cf871d37cd..74aee901a398c 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png index 505db4406be40..3287c8e015277 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--dark.png b/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--dark.png index b2062e06f4a10..c5067b4f13221 100644 Binary files a/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--dark.png and b/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--light.png b/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--light.png index fe5652295d73c..06ddf95b64e5f 100644 Binary files a/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--light.png and b/frontend/__snapshots__/scenes-other-billing-product--billing-product-temporarily-free--light.png differ diff --git a/frontend/__snapshots__/scenes-other-login--second-factor--dark.png b/frontend/__snapshots__/scenes-other-login--second-factor--dark.png index 775f77110f211..8dc59e1e8c104 100644 Binary files a/frontend/__snapshots__/scenes-other-login--second-factor--dark.png and b/frontend/__snapshots__/scenes-other-login--second-factor--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-login--second-factor--light.png b/frontend/__snapshots__/scenes-other-login--second-factor--light.png index 54ed53c9fdb7f..f6db8d0a35316 100644 Binary files a/frontend/__snapshots__/scenes-other-login--second-factor--light.png and b/frontend/__snapshots__/scenes-other-login--second-factor--light.png differ diff --git a/frontend/__snapshots__/scenes-other-login--sso-error--dark.png b/frontend/__snapshots__/scenes-other-login--sso-error--dark.png index fbc54832bece1..172967f784968 100644 Binary files a/frontend/__snapshots__/scenes-other-login--sso-error--dark.png and b/frontend/__snapshots__/scenes-other-login--sso-error--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png b/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png index 93338096ad6c9..95511e628778d 
100644 Binary files a/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png and b/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png differ diff --git a/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--dark.png b/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--dark.png index e23d1c60a85ce..535804d77de4e 100644 Binary files a/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--dark.png and b/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--light.png b/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--light.png index 774cb7beb60c7..9494cdccd7b4a 100644 Binary files a/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--light.png and b/frontend/__snapshots__/scenes-other-onboarding--onboarding-sd-ks--light.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png index 2108bf08e480f..9533045495757 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png index e797cc6467cfc..e6876dcaf9403 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png index 644dc1a22f0eb..c299d9caf9d5d 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png index 750ff00450e15..3a632563f3dd4 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png index 2108bf08e480f..9533045495757 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png index e797cc6467cfc..e6876dcaf9403 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png and 
b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png differ diff --git a/frontend/__snapshots__/scenes-other-password-reset--initial--dark.png b/frontend/__snapshots__/scenes-other-password-reset--initial--dark.png index 0abf0df2c194b..901a5be7244e5 100644 Binary files a/frontend/__snapshots__/scenes-other-password-reset--initial--dark.png and b/frontend/__snapshots__/scenes-other-password-reset--initial--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png index c063bc66591f6..32d397abf284c 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png index 53d8c00a4492e..7ae06bd540053 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png index 23e20084b753d..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png index f7b7f6ebd06e6..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png index eb8542bfdcfcc..d2302cf2491a4 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png index 999b15cfdd917..ccab613598663 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png index 23e20084b753d..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png 
b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png index f7b7f6ebd06e6..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png index 23e20084b753d..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png index f7b7f6ebd06e6..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png index 23e20084b753d..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png index f7b7f6ebd06e6..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png index 23e20084b753d..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png index f7b7f6ebd06e6..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png index 23e20084b753d..bcef1677c71eb 100644 Binary files 
a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png index f7b7f6ebd06e6..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png index 23e20084b753d..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png index f7b7f6ebd06e6..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png index b76714d7b9006..f0f85a24787f5 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png index 5d1b0d30a4d1c..4cd020a3df66e 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png index 7da712a894a4c..d16b8c42a3cd8 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png index cadfa35ff0a63..44413dfa61cd9 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--experiments-disabled-in-parent--dark.png b/frontend/__snapshots__/scenes-other-toolbar--experiments-disabled-in-parent--dark.png index 31d6746bc2752..bb50e05d84042 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--experiments-disabled-in-parent--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--experiments-disabled-in-parent--dark.png differ diff --git 
diff --git a/frontend/src/layout/GlobalModals.tsx b/frontend/src/layout/GlobalModals.tsx
index 040510d4d532f..81bdae758064a 100644
--- a/frontend/src/layout/GlobalModals.tsx
+++ b/frontend/src/layout/GlobalModals.tsx
@@ -1,19 +1,16 @@
-import { LemonModal } from '@posthog/lemon-ui'
 import { actions, kea, path, reducers, useActions, useValues } from 'kea'
 import { ConfirmUpgradeModal } from 'lib/components/ConfirmUpgradeModal/ConfirmUpgradeModal'
 import { HedgehogBuddyWithLogic } from 'lib/components/HedgehogBuddy/HedgehogBuddyWithLogic'
 import { TimeSensitiveAuthenticationModal } from 'lib/components/TimeSensitiveAuthentication/TimeSensitiveAuthentication'
 import { UpgradeModal } from 'lib/components/UpgradeModal/UpgradeModal'
-import { Setup2FA } from 'scenes/authentication/Setup2FA'
+import { TwoFactorSetupModal } from 'scenes/authentication/TwoFactorSetupModal'
 import { CreateOrganizationModal } from 'scenes/organization/CreateOrganizationModal'
-import { membersLogic } from 'scenes/organization/membersLogic'
 import { CreateEnvironmentModal } from 'scenes/project/CreateEnvironmentModal'
 import { CreateProjectModal } from 'scenes/project/CreateProjectModal'
 import { SessionPlayerModal } from 'scenes/session-recordings/player/modal/SessionPlayerModal'
 import { inviteLogic } from 'scenes/settings/organization/inviteLogic'
 import { InviteModal } from 'scenes/settings/organization/InviteModal'
 import { PreviewingCustomCssModal } from 'scenes/themes/PreviewingCustomCssModal'
-import { userLogic } from 'scenes/userLogic'

 import type { globalModalsLogicType } from './GlobalModalsType'

@@ -59,7 +56,6 @@ export function GlobalModals(): JSX.Element {
     useActions(globalModalsLogic)
     const { isInviteModalShown } = useValues(inviteLogic)
     const { hideInviteModal } = useActions(inviteLogic)
-    const { user } = useValues(userLogic)

     return (
         <>
@@ -72,24 +68,7 @@ export function GlobalModals(): JSX.Element {
[wrapper element tags in the removed block below were stripped in this dump — per the deleted imports, the text and the Setup2FA form sat inside a LemonModal]
-            {user && user.organization?.enforce_2fa && !user.is_2fa_enabled && (
-                    Your organization requires you to set up 2FA.
-                    Use an authenticator app like Google Authenticator or 1Password to scan the QR code below.
-                <Setup2FA
-                    onSuccess={() => {
-                        userLogic.actions.loadUser()
-                        membersLogic.actions.loadAllMembers()
-                    }}
-                />
-            )}
+            <TwoFactorSetupModal />
     )
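The block removed above was the inline enforced-2FA modal: a LemonModal hosting Setup2FA, reloading the current user and the member list on success. It is replaced by a single self-contained component. A plausible sketch of that component — illustrative only, since the real TwoFactorSetupModal implementation is not part of this diff, and the modal title and copy below are assumptions:

```tsx
import { LemonModal } from '@posthog/lemon-ui'
import { useValues } from 'kea'
import { Setup2FA } from 'scenes/authentication/Setup2FA'
import { membersLogic } from 'scenes/organization/membersLogic'
import { userLogic } from 'scenes/userLogic'

export function TwoFactorSetupModal(): JSX.Element | null {
    const { user } = useValues(userLogic)
    // The enforcement check that previously lived inline in GlobalModals:
    const mustSetUp2FA = !!user && !!user.organization?.enforce_2fa && !user.is_2fa_enabled
    if (!mustSetUp2FA) {
        return null
    }
    return (
        <LemonModal title="Set up 2FA" closable={false}>
            <p>Your organization requires you to set up 2FA.</p>
            <Setup2FA
                onSuccess={() => {
                    // Refresh the user and the member list so the requirement clears everywhere
                    userLogic.actions.loadUser()
                    membersLogic.actions.loadAllMembers()
                }}
            />
        </LemonModal>
    )
}
```

Moving the check into the component keeps GlobalModals free of per-modal wiring — each modal decides for itself whether it should render.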
diff --git a/frontend/src/layout/navigation-3000/Navigation.scss b/frontend/src/layout/navigation-3000/Navigation.scss
index df5f78ab272c6..42bb779a54d82 100644
--- a/frontend/src/layout/navigation-3000/Navigation.scss
+++ b/frontend/src/layout/navigation-3000/Navigation.scss
@@ -175,7 +175,7 @@
 .Sidebar3000 {
     --sidebar-slider-padding: 0.125rem;
     --sidebar-horizontal-padding: 0.5rem;
-    --sidebar-row-height: 2.5rem;
+    --sidebar-row-height: 3rem;
     --sidebar-background: var(--bg-3000);

     position: relative;
@@ -533,8 +533,6 @@
     position: relative;
     display: flex;
-    flex-direction: column;
-    justify-content: center;
     width: 100%;
     height: 100%;
     color: inherit;
@@ -549,7 +547,9 @@
 }

 .SidebarListItem__link {
+    flex-direction: column;
     row-gap: 1px;
+    justify-content: center;
     padding: 0 var(--sidebar-horizontal-padding) 0 var(--sidebar-list-item-inset);
     color: inherit !important; // Disable link color

     .SidebarListItem[aria-disabled='true'] & {
@@ -558,17 +558,33 @@
 }

 .SidebarListItem__button {
+    flex-direction: row;
+    gap: 0.25rem;
     row-gap: 1px;
+    align-items: center;
     padding: 0 var(--sidebar-horizontal-padding) 0 var(--sidebar-list-item-inset);
+    font-size: 1.125rem; // Make icons bigger
     color: inherit !important; // Disable link color
     cursor: pointer;

     &:hover {
         background: var(--border-3000);
     }
+
+    .SidebarListItem__icon {
+        flex-shrink: 0;
+    }
+
+    .SidebarListItem__name {
+        overflow: hidden;
+        text-overflow: ellipsis;
+    }
 }

 .SidebarListItem__rename {
+    flex-direction: column;
+    justify-content: center;
+
     // Pseudo-elements don't work on inputs, so we use a wrapper div
     background: var(--bg-light);
diff --git a/frontend/src/layout/navigation-3000/components/SidebarList.tsx b/frontend/src/layout/navigation-3000/components/SidebarList.tsx
index 2b63b9a61e9c6..65cd05d65c4d4 100644
--- a/frontend/src/layout/navigation-3000/components/SidebarList.tsx
+++ b/frontend/src/layout/navigation-3000/components/SidebarList.tsx
@@ -232,7 +232,8 @@ function SidebarListItem({ item, validateName, active, style }: SidebarListItemP
     if (isItemClickable(item)) {
         content = (
-                {item.name}
+                {item.icon && <span className="SidebarListItem__icon">{item.icon}</span>}
+                <span className="SidebarListItem__name">{item.name}</span>
         )
     } else if (!save || (!isItemTentative(item) && newName === null)) {
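Two notes on the item markup above: the `span` wrappers are an assumption (the dump stripped the element tags; only the class names are certain, from the SCSS), and the ellipsis on `.SidebarListItem__name` only engages because `overflow: hidden` is also set — `text-overflow: ellipsis` alone never truncates. A reduced sketch of the render logic under those assumptions:

```tsx
// Reduced sketch — the real item types and renderer live in navigation-3000;
// `icon` is the newly added optional field on sidebar items.
interface SidebarItem {
    name: string
    icon?: JSX.Element
}

export function SidebarItemContent({ item }: { item: SidebarItem }): JSX.Element {
    return (
        <>
            {/* flex-shrink: 0 in the SCSS keeps the icon from being squeezed by long names */}
            {item.icon && <span className="SidebarListItem__icon">{item.icon}</span>}
            {/* overflow: hidden + text-overflow: ellipsis truncate long names */}
            <span className="SidebarListItem__name">{item.name}</span>
        </>
    )
}
```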
diff --git a/frontend/src/layout/navigation-3000/components/TopBar.scss b/frontend/src/layout/navigation-3000/components/TopBar.scss
index 6273642ace326..b797fd97bf2f8 100644
--- a/frontend/src/layout/navigation-3000/components/TopBar.scss
+++ b/frontend/src/layout/navigation-3000/components/TopBar.scss
@@ -155,7 +155,6 @@
 .TopBar3000__actions {
     display: flex;
-    flex-grow: 1;
     gap: 0.5rem;
     align-items: center;
     justify-content: flex-end;
diff --git a/frontend/src/layout/navigation-3000/components/TopBar.tsx b/frontend/src/layout/navigation-3000/components/TopBar.tsx
index e666bc094e911..0e9f9dde9cd5d 100644
--- a/frontend/src/layout/navigation-3000/components/TopBar.tsx
+++ b/frontend/src/layout/navigation-3000/components/TopBar.tsx
@@ -6,6 +6,8 @@ import clsx from 'clsx'
 import { useActions, useValues } from 'kea'
 import { router } from 'kea-router'
 import { EditableField } from 'lib/components/EditableField/EditableField'
+import { FlaggedFeature } from 'lib/components/FlaggedFeature'
+import { MetalyticsSummary } from 'lib/components/Metalytics/MetalyticsSummary'
 import { IconMenu } from 'lib/lemon-ui/icons'
 import { Link } from 'lib/lemon-ui/Link'
 import { Popover } from 'lib/lemon-ui/Popover/Popover'
@@ -101,7 +103,12 @@ export function TopBar(): JSX.Element | null {
                 )}
[hunk body garbled in this dump — all element tags stripped: one line removed and several added; per the new imports, the additions render <MetalyticsSummary /> behind a <FlaggedFeature> gate in the top bar's actions area]
             ) : null
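The TopBar changes remove `flex-grow: 1` from the actions container and, in the tag-stripped hunk above, mount the Metalytics summary behind a feature gate. The gating pattern, shown in isolation — the flag key is an assumption here, since the stripped TopBar markup doesn't show which flag it uses (FEATURE_FLAGS.METALYTICS appears in the side-panel diff further down):

```tsx
import { FlaggedFeature } from 'lib/components/FlaggedFeature'
import { MetalyticsSummary } from 'lib/components/Metalytics/MetalyticsSummary'
import { FEATURE_FLAGS } from 'lib/constants'

// FlaggedFeature renders its children only when the flag evaluates truthy
// for the current user, so the summary stays invisible outside the rollout.
export function TopBarMetalyticsSlot(): JSX.Element {
    return (
        <FlaggedFeature flag={FEATURE_FLAGS.METALYTICS}>
            <MetalyticsSummary />
        </FlaggedFeature>
    )
}
```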
diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx
index be44236c6dc9b..6f6105aaa9c54 100644
--- a/frontend/src/layout/navigation-3000/navigationLogic.tsx
+++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx
@@ -1,17 +1,18 @@
 import {
-    IconChat,
     IconCursorClick,
     IconDashboard,
     IconDatabase,
-    IconDecisionTree,
+    IconFeatures,
     IconGraph,
     IconHome,
     IconLive,
     IconLogomark,
     IconMegaphone,
+    IconMessage,
     IconNotebook,
     IconPeople,
     IconPieChart,
+    IconPlug,
     IconPlusSmall,
     IconRewindPlay,
     IconRocket,
@@ -429,6 +430,16 @@ export const navigation3000Logic = kea<navigation3000LogicType>([
                 })
             }

+            if (featureFlags[FEATURE_FLAGS.FEATURE_MANAGEMENT_UI]) {
+                sectionOne.splice(4, 0, {
+                    identifier: Scene.FeatureManagement,
+                    label: 'Features',
+                    icon: <IconFeatures />,
+                    logic: isUsingSidebar ? featureFlagsSidebarLogic : undefined,
+                    to: isUsingSidebar ? undefined : urls.featureManagement(),
+                })
+            }
+
             return [
                 sectionOne,
                 [
@@ -492,7 +503,7 @@
                     {
                         identifier: Scene.Surveys,
                         label: 'Surveys',
-                        icon: <IconChat />,
+                        icon: <IconMessage />,
                         to: urls.surveys(),
                     },
                     featureFlags[FEATURE_FLAGS.PRODUCT_INTRO_PAGES] !== 'test' || hasOnboardedFeatureFlags
[icon elements in the next hunk were stripped in this dump and are not recoverable]
@@ -506,13 +517,13 @@
                     {
                         identifier: Scene.DataWarehouse,
                         label: 'Data warehouse',
-                        icon: ,
+                        icon: ,
                        to: isUsingSidebar ? undefined : urls.dataWarehouse(),
                     },
                     featureFlags[FEATURE_FLAGS.SQL_EDITOR]
                         ? {
                               identifier: Scene.SQLEditor,
-                              label: 'Data warehouse 3000',
+                              label: 'SQL Editor',
                               icon: ,
                               to: urls.sqlEditor(),
                               logic: editorSidebarLogic,
@@ -529,8 +540,8 @@
                 hasOnboardedAnyProduct
                     ? {
                           identifier: Scene.Pipeline,
-                          label: 'Data pipeline',
-                          icon: <IconDecisionTree />,
+                          label: 'Data pipelines',
+                          icon: <IconPlug />,
                           to: urls.pipeline(),
                       }
                     : null,
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
index db89c7e5401b8..7701538ffd36c 100644
--- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
+++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
@@ -2,13 +2,13 @@ import {
     IconAI,
     IconChevronDown,
     IconDatabase,
-    IconDecisionTree,
     IconFeatures,
     IconGraph,
     IconHelmet,
     IconMap,
     IconMessage,
     IconPieChart,
+    IconPlug,
     IconRewindPlay,
     IconStack,
     IconTestTube,
@@ -74,7 +74,7 @@ const PRODUCTS = [
     {
         name: 'Data pipelines',
         slug: 'cdp',
-        icon: <IconDecisionTree />,
+        icon: <IconPlug />,
     },
     {
         name: 'Data warehouse',
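In the two hunks above, inline icon elements were stripped by the dump; the ones filled back in (IconMessage for Surveys, IconPlug for Data pipelines, IconFeatures for the new Features entry) follow directly from the import changes, while the data warehouse and SQL editor icons are left blank because nothing in the dump identifies them. Since the nav sections are plain arrays, the flag-gated "Features" entry is injected with `splice` — straightforward, though a hard-coded index drifts silently if the items above it are ever reordered. The pattern in isolation, with the repo's Scene/urls helpers reduced to illustrative stand-ins:

```tsx
interface NavbarItem {
    identifier: string
    label: string
    to?: string
}

// Sketch of the splice pattern from navigationLogic; 'FeatureManagement'
// and '/features' stand in for the real Scene enum and urls helper.
export function withFeatureManagement(sectionOne: NavbarItem[], flagEnabled: boolean): NavbarItem[] {
    const items = [...sectionOne]
    if (flagEnabled) {
        // Insert at index 4, mirroring the diff: the entry lands at a fixed
        // position within section one without disturbing its neighbours.
        items.splice(4, 0, { identifier: 'FeatureManagement', label: 'Features', to: '/features' })
    }
    return items
}
```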
    -
    +
    +
    setActiveTab(key)} @@ -143,76 +144,82 @@ export const SidePanelActivity = (): JSX.Element => { key: SidePanelActivityTab.All, label: 'All activity', }, + ...(featureFlags[FEATURE_FLAGS.METALYTICS] + ? [ + { + key: SidePanelActivityTab.Metalytics, + label: 'Analytics', + }, + ] + : []), ]} />
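The tab list above gates the new Analytics entry behind the metalytics flag by spreading a conditional array, so no falsy placeholder ever reaches the tabs prop. A minimal self-contained sketch of the idiom (the Tab shape and the plain-object flag record are stand-ins for the kea-provided values):

    type Tab = { key: string; label: string }

    function buildActivityTabs(featureFlags: Record<string, boolean>): Tab[] {
        return [
            { key: 'all', label: 'All activity' },
            // The spread contributes either one element or none, so the array
            // never contains false/null entries that would need filtering out.
            ...(featureFlags['metalytics'] ? [{ key: 'metalytics', label: 'Analytics' }] : []),
        ]
    }

    // buildActivityTabs({ metalytics: true }).map((t) => t.key) -> ['all', 'metalytics']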
    {/* Controls */} -
    - {activeTab === SidePanelActivityTab.Unread ? ( - <> - - Notifications shows you changes others make to{' '} - Insights and{' '} - Feature Flags that you created. Come - join our community forum and tell us - what else should be here! - + {activeTab === SidePanelActivityTab.Unread ? ( +
    + + Notifications shows you changes others make to{' '} + Insights and{' '} + Feature Flags that you created. Come join{' '} + our community forum and tell us what + else should be here! + -
    - {toggleExtendedDescription} - {hasUnread ? ( - markAllAsRead()}> - Mark all as read - - ) : null} -
    - - ) : activeTab === SidePanelActivityTab.All ? (
    -
    - {toggleExtendedDescription} - {allActivityResponseLoading ? : null} -
    + {toggleExtendedDescription} + {hasUnread ? ( + markAllAsRead()}> + Mark all as read + + ) : null} +
    +
    + ) : activeTab === SidePanelActivityTab.All ? ( +
    +
    + {toggleExtendedDescription} + {allActivityResponseLoading ? : null} +
    -
    - Filter for activity on: - - setFilters({ - ...filters, - scope: value ?? undefined, - item_id: undefined, - }) - } - dropdownMatchSelectWidth={false} - /> +
    + Filter for activity on: + + setFilters({ + ...filters, + scope: value ?? undefined, + item_id: undefined, + }) + } + dropdownMatchSelectWidth={false} + /> - by - - setFilters({ - ...filters, - user: user?.id ?? undefined, - }) - } - /> -
    + by + + setFilters({ + ...filters, + user: user?.id ?? undefined, + }) + } + />
    - ) : null} -
    +
    + ) : null}
    {activeTab === SidePanelActivityTab.Unread ? ( <> {importantChangesLoading && !hasNotifications ? ( - + ) : hasNotifications ? ( notifications.map((logItem, index) => ( { /> )) ) : ( -
    +
    You're all caught up!
    )} @@ -230,7 +237,7 @@ export const SidePanelActivity = (): JSX.Element => { ) : activeTab === SidePanelActivityTab.All ? ( <> {allActivityResponseLoading && !allActivity.length ? ( - + ) : allActivity.length ? ( <> {allActivity.map((logItem, index) => ( @@ -241,7 +248,7 @@ export const SidePanelActivity = (): JSX.Element => { /> ))} -
    +
    {allActivityResponseLoading ? ( <> Loading older activity @@ -261,7 +268,7 @@ export const SidePanelActivity = (): JSX.Element => {
    ) : ( -
    +
    No activity yet {filters ? ( setFilters(null)}> @@ -271,6 +278,8 @@ export const SidePanelActivity = (): JSX.Element => {
    )} + ) : activeTab === SidePanelActivityTab.Metalytics ? ( + ) : null}
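Downstream of this hunk, the new tab becomes deep-linkable: MetalyticsSummary (added later in this diff) calls openSidePanel(SidePanelTab.Activity, 'metalytics'), and the openSidePanel listener added to sidePanelActivityLogic casts that options string onto the tab enum. A standalone sketch of the resolution step, assuming options is the second argument forwarded by sidePanelStateLogic:

    enum SidePanelActivityTab {
        Unread = 'unread',
        All = 'all',
        Metalytics = 'metalytics',
    }

    function resolveActiveTab(
        options: string | undefined,
        current: SidePanelActivityTab
    ): SidePanelActivityTab {
        // Mirrors the listener: only switch tabs when a hint was provided.
        return options ? (options as SidePanelActivityTab) : current
    }

    // resolveActiveTab('metalytics', SidePanelActivityTab.All) === SidePanelActivityTab.Metalytics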
    diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/SidePanelActivityMetalytics.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/SidePanelActivityMetalytics.tsx new file mode 100644 index 0000000000000..146cb6f96f5fa --- /dev/null +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/SidePanelActivityMetalytics.tsx @@ -0,0 +1,74 @@ +import { Spinner, Tooltip } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { humanizeScope } from 'lib/components/ActivityLog/humanizeActivity' +import { metalyticsLogic } from 'lib/components/Metalytics/metalyticsLogic' +import { ProfileBubbles } from 'lib/lemon-ui/ProfilePicture/ProfileBubbles' + +export function SidePanelActivityMetalytics(): JSX.Element { + const { scope, instanceId, viewCount, recentUserMembers, viewCountLoading, recentUsersLoading } = + useValues(metalyticsLogic) + + if (!instanceId) { + return ( +

    + You can see internal analytics of how your organization members are using PostHog for things such as + Dashboards, Insights, and Playlists. Open an app to see the viewership data here.

    + ) + } + + const humanizedScope = `this ${scope ? humanizeScope(scope, true) : 'app'}` + + return ( +
    +

    + You are viewing "meta" analytics of how your organization members are interacting with{' '} + {humanizedScope}. +

    +
    + +
    +
    Views
    +
    + {viewCountLoading ? : viewCount?.views ?? 0} +
    +
    +
    + + +
    +
    Viewers
    +
    + {viewCountLoading ? : viewCount?.users ?? 0} +
    +
    +
    + + +
    +
    Recent viewers (30 days)
    + {recentUsersLoading ? ( + + ) : ( + ({ + email: member.user.email, + name: member.user.first_name, + title: member.user.email, + }))} + limit={3} + /> + )} +
    +
    +
    +
    + ) +} diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts index 180461c465996..641c0900638ef 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts @@ -37,6 +37,7 @@ export const activityForSceneLogic = kea([ connect({ values: [sceneLogic, ['sceneConfig']], }), + selectors({ sceneActivityFilters: [ (s) => [ diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx index 69554124d4fbf..244e42c52d936 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx @@ -10,6 +10,7 @@ import { toParams } from 'lib/utils' import posthog from 'posthog-js' import { projectLogic } from 'scenes/projectLogic' +import { sidePanelStateLogic } from '../../sidePanelStateLogic' import { ActivityFilters, activityForSceneLogic } from './activityForSceneLogic' import type { sidePanelActivityLogicType } from './sidePanelActivityLogicType' @@ -29,12 +30,14 @@ export interface ChangesResponse { export enum SidePanelActivityTab { Unread = 'unread', All = 'all', + Metalytics = 'metalytics', } export const sidePanelActivityLogic = kea([ path(['scenes', 'navigation', 'sidepanel', 'sidePanelActivityLogic']), connect({ values: [activityForSceneLogic, ['sceneActivityFilters'], projectLogic, ['currentProjectId']], + actions: [sidePanelStateLogic, ['openSidePanel']], }), actions({ togglePolling: (pageIsVisible: boolean) => ({ pageIsVisible }), @@ -171,7 +174,7 @@ export const sidePanelActivityLogic = kea([ ], })), - listeners(({ values, actions }) => ({ + listeners(({ values, actions, cache }) => ({ setActiveTab: ({ tab }) => { if (tab === SidePanelActivityTab.All && !values.allActivityResponseLoading) { actions.loadAllActivity() @@ -183,6 +186,18 @@ export const sidePanelActivityLogic = kea([ actions.loadOlderActivity() } }, + openSidePanel: ({ options }) => { + if (options) { + actions.setActiveTab(options as SidePanelActivityTab) + } + }, + togglePolling: ({ pageIsVisible }) => { + if (pageIsVisible) { + actions.loadImportantChanges() + } else { + clearTimeout(cache.pollTimeout) + } + }, })), selectors({ allActivity: [ diff --git a/frontend/src/layout/navigation-3000/types.ts b/frontend/src/layout/navigation-3000/types.ts index 3f79f6dbda42f..a941e7dfaad74 100644 --- a/frontend/src/layout/navigation-3000/types.ts +++ b/frontend/src/layout/navigation-3000/types.ts @@ -151,4 +151,5 @@ export interface TentativeListItem { export interface ButtonListItem extends BasicListItem { key: '__button__' onClick: () => void + icon?: JSX.Element } diff --git a/frontend/src/layout/navigation/ProjectNotice.tsx b/frontend/src/layout/navigation/ProjectNotice.tsx index 5a0a9c4dd28ce..8d4df0246a3a0 100644 --- a/frontend/src/layout/navigation/ProjectNotice.tsx +++ b/frontend/src/layout/navigation/ProjectNotice.tsx @@ -1,4 +1,5 @@ import { IconGear, IconPlus } from '@posthog/icons' +import { Spinner } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { dayjs } from 'lib/dayjs' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' @@ -22,17 +23,29 @@ interface 
ProjectNoticeBlueprint { closeable?: boolean } -function CountDown({ datetime }: { datetime: dayjs.Dayjs }): JSX.Element { +function CountDown({ datetime, callback }: { datetime: dayjs.Dayjs; callback?: () => void }): JSX.Element { const [now, setNow] = useState(dayjs()) + // Format the time difference as 00:00:00 + const duration = dayjs.duration(datetime.diff(now)) + const pastCountdown = duration.seconds() < 0 + + const countdown = pastCountdown + ? 'Expired' + : duration.hours() > 0 + ? duration.format('HH:mm:ss') + : duration.format('mm:ss') + useEffect(() => { const interval = setInterval(() => setNow(dayjs()), 1000) return () => clearInterval(interval) }, []) - // Format the time difference as 00:00:00 - const duration = dayjs.duration(datetime.diff(now)) - const countdown = duration.hours() > 0 ? duration.format('HH:mm:ss') : duration.format('mm:ss') + useEffect(() => { + if (pastCountdown) { + callback?.() + } + }, [pastCountdown]) return <>{countdown} } @@ -40,8 +53,8 @@ function CountDown({ datetime }: { datetime: dayjs.Dayjs }): JSX.Element { export function ProjectNotice(): JSX.Element | null { const { projectNoticeVariant } = useValues(navigationLogic) const { currentOrganization } = useValues(organizationLogic) - const { logout } = useActions(userLogic) - const { user } = useValues(userLogic) + const { logout, loadUser } = useActions(userLogic) + const { user, userLoading } = useValues(userLogic) const { closeProjectNotice } = useActions(navigationLogic) const { showInviteModal } = useActions(inviteLogic) const { requestVerificationLink } = useActions(verifyEmailLogic) @@ -124,7 +137,14 @@ export function ProjectNotice(): JSX.Element | null { You are currently logged in as a customer.{' '} {user?.is_impersonated_until && ( <> - Expires in + Expires in + {userLoading ? 
( + + ) : ( + loadUser()}> + Refresh + + )} )} diff --git a/frontend/src/lib/api.mock.ts b/frontend/src/lib/api.mock.ts index 0dacdcfa74554..e6dac16290e92 100644 --- a/frontend/src/lib/api.mock.ts +++ b/frontend/src/lib/api.mock.ts @@ -84,6 +84,7 @@ export const MOCK_DEFAULT_TEAM: TeamType = { autocapture_web_vitals_opt_in: false, autocapture_exceptions_errors_to_ignore: [], effective_membership_level: OrganizationMembershipLevel.Admin, + user_access_level: 'admin', access_control: true, has_group_types: true, primary_dashboard: 1, diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 7be5df3d764d6..f1497f937c334 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -54,6 +54,7 @@ import { ExternalDataSourceSyncSchema, ExternalDataSourceType, FeatureFlagAssociatedRoleType, + FeatureFlagStatusResponse, FeatureFlagType, Group, GroupListParams, @@ -424,8 +425,8 @@ class ApiRequest { return this.events(teamId).addPathComponent(id) } - public tags(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('tags') + public tags(projectId?: ProjectType['id']): ApiRequest { + return this.projectsDetail(projectId).addPathComponent('tags') } // # Data management @@ -520,8 +521,11 @@ class ApiRequest { return this.dashboards(teamId).addPathComponent(dashboardId) } - public dashboardCollaborators(dashboardId: DashboardType['id'], teamId?: TeamType['id']): ApiRequest { - return this.dashboardsDetail(dashboardId, teamId).addPathComponent('collaborators') + public dashboardCollaborators( + dashboardId: DashboardType['id'], + projectId: ProjectType['id'] = ApiConfig.getCurrentProjectId() // Collaborators endpoint is project-level, not team-level + ): ApiRequest { + return this.dashboardsDetail(dashboardId, projectId).addPathComponent('collaborators') } public dashboardSharing(dashboardId: DashboardType['id'], teamId?: TeamType['id']): ApiRequest { @@ -531,9 +535,9 @@ class ApiRequest { public dashboardCollaboratorsDetail( dashboardId: DashboardType['id'], userUuid: UserType['uuid'], - teamId?: TeamType['id'] + projectId?: ProjectType['id'] ): ApiRequest { - return this.dashboardCollaborators(dashboardId, teamId).addPathComponent(userUuid) + return this.dashboardCollaborators(dashboardId, projectId).addPathComponent(userUuid) } // # Dashboard templates @@ -663,6 +667,13 @@ class ApiRequest { ) } + public featureFlagStatus(teamId: TeamType['id'], featureFlagId: FeatureFlagType['id']): ApiRequest { + return this.projectsDetail(teamId) + .addPathComponent('feature_flags') + .addPathComponent(String(featureFlagId)) + .addPathComponent('status') + } + public featureFlagCreateScheduledChange(teamId: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('scheduled_changes') } @@ -834,9 +845,9 @@ class ApiRequest { return apiRequest } - // Chat - public chat(teamId?: TeamType['id']): ApiRequest { - return this.environmentsDetail(teamId).addPathComponent('query').addPathComponent('chat') + // Conversations + public conversations(teamId?: TeamType['id']): ApiRequest { + return this.environmentsDetail(teamId).addPathComponent('conversations') } // Notebooks @@ -921,6 +932,10 @@ class ApiRequest { return await api.update(this.assembleFullUrl(), options?.data, options) } + public async put(options?: ApiMethodOptions & { data: any }): Promise { + return await api.put(this.assembleFullUrl(), options?.data, options) + } + public async create(options?: ApiMethodOptions & { data: any }): Promise { return await 
api.create(this.assembleFullUrl(), options?.data, options) } @@ -1038,6 +1053,12 @@ const api = { ): Promise<{ scheduled_change: ScheduledChangeType }> { return await new ApiRequest().featureFlagDeleteScheduledChange(teamId, scheduledChangeId).delete() }, + async getStatus( + teamId: TeamType['id'], + featureFlagId: FeatureFlagType['id'] + ): Promise { + return await new ApiRequest().featureFlagStatus(teamId, featureFlagId).get() + }, }, organizationFeatureFlags: { @@ -1279,8 +1300,8 @@ const api = { }, tags: { - async list(teamId: TeamType['id'] = ApiConfig.getCurrentTeamId()): Promise { - return new ApiRequest().tags(teamId).get() + async list(projectId: TeamType['id'] = ApiConfig.getCurrentProjectId()): Promise { + return new ApiRequest().tags(projectId).get() }, }, @@ -2526,12 +2547,10 @@ const api = { }) }, - chatURL: (): string => { - return new ApiRequest().chat().assembleFullUrl() - }, - - async chat(data: any): Promise { - return await api.createResponse(this.chatURL(), data) + conversations: { + async create(data: { content: string; conversation?: string | null }): Promise { + return api.createResponse(new ApiRequest().conversations().assembleFullUrl(), data) + }, }, /** Fetch data from specified URL. The result already is JSON-parsed. */ @@ -2554,14 +2573,19 @@ const api = { }) }, - async update(url: string, data: any, options?: ApiMethodOptions): Promise { + async _update( + method: 'PATCH' | 'PUT', + url: string, + data: P, + options?: ApiMethodOptions + ): Promise { url = prepareUrl(url) ensureProjectIdNotInvalid(url) const isFormData = data instanceof FormData - const response = await handleFetch(url, 'PATCH', async () => { + const response = await handleFetch(url, method, async () => { return await fetch(url, { - method: 'PATCH', + method: method, headers: { ...objectClean(options?.headers ?? {}), ...(isFormData ? {} : { 'Content-Type': 'application/json' }), @@ -2576,7 +2600,15 @@ const api = { return await getJSONOrNull(response) }, - async create(url: string, data?: any, options?: ApiMethodOptions): Promise { + async update(url: string, data: P, options?: ApiMethodOptions): Promise { + return api._update('PATCH', url, data, options) + }, + + async put(url: string, data: P, options?: ApiMethodOptions): Promise { + return api._update('PUT', url, data, options) + }, + + async create(url: string, data?: P, options?: ApiMethodOptions): Promise { const res = await api.createResponse(url, data, options) return await getJSONOrNull(res) }, diff --git a/frontend/src/lib/components/AuthorizedUrlList/AuthorizedUrlList.tsx b/frontend/src/lib/components/AuthorizedUrlList/AuthorizedUrlList.tsx index 1169702c255fa..aa95376c5fa9c 100644 --- a/frontend/src/lib/components/AuthorizedUrlList/AuthorizedUrlList.tsx +++ b/frontend/src/lib/components/AuthorizedUrlList/AuthorizedUrlList.tsx @@ -186,7 +186,7 @@ export function AuthorizedUrlList({ type === AuthorizedUrlListType.TOOLBAR_URLS ? launchUrl(keyedURL.url) : // other urls are simply opened directly - keyedURL.url + query + `${keyedURL.url}${query ? query : ''}` } targetBlank tooltip={ diff --git a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx index 34c889fd00cee..698530f338d4a 100644 --- a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx +++ b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx @@ -34,8 +34,8 @@ interface TextCardBodyProps extends Pick, ' export function TextContent({ text, closeDetails, className }: TextCardBodyProps): JSX.Element { return ( -
    closeDetails?.()}> - {text} +
    closeDetails?.()}> + {text}
    ) } @@ -143,7 +143,7 @@ export function TextCardInternal( )}
    - +
    {showResizeHandles && ( diff --git a/frontend/src/lib/components/CompareFilter/CompareFilter.tsx b/frontend/src/lib/components/CompareFilter/CompareFilter.tsx index 23999b9dce4db..3cb57cd84642a 100644 --- a/frontend/src/lib/components/CompareFilter/CompareFilter.tsx +++ b/frontend/src/lib/components/CompareFilter/CompareFilter.tsx @@ -1,29 +1,31 @@ import { LemonSelect } from '@posthog/lemon-ui' -import { useActions, useValues } from 'kea' import { RollingDateRangeFilter } from 'lib/components/DateFilter/RollingDateRangeFilter' import { dateFromToText } from 'lib/utils' import { useEffect, useState } from 'react' -import { insightLogic } from 'scenes/insights/insightLogic' -import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -export function CompareFilter(): JSX.Element | null { - const { insightProps, canEditInsight } = useValues(insightLogic) +import { CompareFilter as CompareFilterType } from '~/queries/schema' - const { compareFilter, supportsCompare } = useValues(insightVizDataLogic(insightProps)) - const { updateCompareFilter } = useActions(insightVizDataLogic(insightProps)) +type CompareFilterProps = { + compareFilter?: CompareFilterType | null + updateCompareFilter: (compareFilter: CompareFilterType) => void + disabled?: boolean +} +export function CompareFilter({ + compareFilter, + updateCompareFilter, + disabled, +}: CompareFilterProps): JSX.Element | null { // This keeps the state of the rolling date range filter, even when different drop down options are selected // The default value for this is one month const [tentativeCompareTo, setTentativeCompareTo] = useState(compareFilter?.compare_to || '-1m') - const disabled: boolean = !canEditInsight || !supportsCompare - useEffect(() => { const newCompareTo = compareFilter?.compare_to if (!!newCompareTo && tentativeCompareTo != newCompareTo) { setTentativeCompareTo(newCompareTo) } - }, [compareFilter?.compare_to]) + }, [compareFilter?.compare_to]) // eslint-disable-line react-hooks/exhaustive-deps // Hide compare filter control when disabled to avoid states where control is "disabled but checked" if (disabled) { diff --git a/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx b/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx index f1d7410f53ca0..e9ef0b32bbb80 100644 --- a/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx +++ b/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx @@ -24,6 +24,7 @@ import { DataWarehouseTableForInsight } from 'scenes/data-warehouse/types' import { ActionType, CohortType, EventDefinition, PropertyDefinition } from '~/types' +import { HogQLDropdown } from '../HogQLDropdown/HogQLDropdown' import { taxonomicFilterLogic } from '../TaxonomicFilter/taxonomicFilterLogic' import { TZLabel } from '../TZLabel' @@ -291,8 +292,21 @@ function DefinitionView({ group }: { group: TaxonomicFilterGroup }): JSX.Element label: column.name + ' (' + column.type + ')', value: column.name, })) + const hogqlOption = { label: 'HogQL Expression', value: '' } const itemValue = localDefinition ? group?.getValue?.(localDefinition) : null + const isUsingHogQLExpression = (value: string | undefined): boolean => { + if (value === undefined) { + return false + } + const column = Object.values(_definition.fields ?? {}).find((n) => n.name == value) + return !column + } + + const distinct_id_field_value = + 'distinct_id_field' in localDefinition ? 
localDefinition.distinct_id_field : undefined + const timestamp_field_value = 'timestamp_field' in localDefinition ? localDefinition.timestamp_field : undefined + return (
    @@ -310,23 +324,33 @@ function DefinitionView({ group }: { group: TaxonomicFilterGroup }): JSX.Element Distinct ID field setLocalDefinition({ distinct_id_field: value })} /> + {isUsingHogQLExpression(distinct_id_field_value) && ( + setLocalDefinition({ distinct_id_field: value })} + /> + )} setLocalDefinition({ timestamp_field: value })} /> + {isUsingHogQLExpression(timestamp_field_value) && ( + setLocalDefinition({ timestamp_field: value })} + /> + )}
    diff --git a/frontend/src/lib/components/HogQLDropdown/HogQLDropdown.tsx b/frontend/src/lib/components/HogQLDropdown/HogQLDropdown.tsx new file mode 100644 +export const HogQLDropdown = ({ + hogQLValue, + onHogQLValueChange, +}: { + hogQLValue: string + onHogQLValueChange: (value: string) => void +}): JSX.Element => { + const [isHogQLDropdownVisible, setIsHogQLDropdownVisible] = useState(false) + + return ( +
    + setIsHogQLDropdownVisible(false)} + overlay={ + // eslint-disable-next-line react/forbid-dom-props +
    + { + onHogQLValueChange(currentValue) + setIsHogQLDropdownVisible(false) + }} + /> +
    + } + > + setIsHogQLDropdownVisible(!isHogQLDropdownVisible)} + > + {hogQLValue} + +
    +
    + ) +} diff --git a/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx b/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx index 9d0cf1f42b893..95b809878a98d 100644 --- a/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx +++ b/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx @@ -58,7 +58,7 @@ export function HogQLEditor({ {placeholder ?? (metadataSource && isActorsQuery(metadataSource) ? "Enter HogQL expression, such as:\n- properties.$geoip_country_name\n- toInt(properties.$browser_version) * 10\n- concat(properties.name, ' <', properties.email, '>')\n- is_identified ? 'user' : 'anon'" - : "Enter HogQL Expression, such as:\n- properties.$current_url\n- person.properties.$geoip_country_name\n- toInt(properties.`Long Field Name`) * 10\n- concat(event, ' ', distinct_id)\n- if(1 < 2, 'small', 'large')")} + : "Enter HogQL Expression, such as:\n- properties.$current_url\n- person.properties.$geoip_country_name\n- pdi.person.properties.email\n- toInt(properties.`Long Field Name`) * 10\n- concat(event, ' ', distinct_id)\n- if(1 < 2, 'small', 'large')")}
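Hypothetical usage of the HogQLDropdown component defined above; the wrapper below is illustrative only, but matches how DefinitionPopoverContents feeds it a current expression and a change callback:

    import { HogQLDropdown } from 'lib/components/HogQLDropdown/HogQLDropdown'

    // Illustrative: rendered when the selected field holds a HogQL expression
    // instead of one of the table's plain columns.
    function TimestampFieldExpressionEditor({
        value,
        onChange,
    }: {
        value: string
        onChange: (value: string) => void
    }): JSX.Element {
        return <HogQLDropdown hogQLValue={value} onHogQLValueChange={onChange} />
    }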
    grouped by - { - updateQuerySource({ interval: value } as Partial) - }} - options={Object.entries(enabledIntervals).map(([value, { label, disabledReason, hidden }]) => ({ - value: value as IntervalType, - label, - hidden, - disabledReason, - }))} - /> + {isIntervalManuallySet ? ( + { + setIsIntervalManuallySet(false) + }} + tooltip="Unpin interval" + className="flex-1" + center + size="small" + icon={} + > + {interval || 'day'} + + ) : ( + { + updateQuerySource({ interval: value } as Partial) + }} + options={Object.entries(enabledIntervals).map(([value, { label, disabledReason, hidden }]) => ({ + value: value as IntervalType, + label, + hidden, + disabledReason, + }))} + /> + )} ) } diff --git a/frontend/src/lib/components/JSSnippet.tsx b/frontend/src/lib/components/JSSnippet.tsx index 0d93f5d71f845..50ccf855ffcb3 100644 --- a/frontend/src/lib/components/JSSnippet.tsx +++ b/frontend/src/lib/components/JSSnippet.tsx @@ -6,7 +6,7 @@ import { apiHostOrigin } from 'lib/utils/apiHost' import posthog from 'posthog-js' import { teamLogic } from 'scenes/teamLogic' -export function snippetFunctions(): string { +function snippetFunctions(arrayJs = '/static/array.js'): string { const methods: string[] = [] const posthogPrototype = Object.getPrototypeOf(posthog) for (const key of Object.getOwnPropertyNames(posthogPrototype)) { @@ -20,23 +20,24 @@ export function snippetFunctions(): string { } const snippetMethods = methods.join(' ') - return `!function(t,e){var o,n,p,r;e.__SV||(window.posthog=e,e._i=[],e.init=function(i,s,a){function g(t,e){var o=e.split(".");2==o.length&&(t=t[o[0]],e=o[1]),t[e]=function(){t.push([e].concat(Array.prototype.slice.call(arguments,0)))}}(p=t.createElement("script")).type="text/javascript",p.crossOrigin="anonymous",p.async=!0,p.src=s.api_host.replace(".i.posthog.com","-assets.i.posthog.com")+"/static/array.js",(r=t.getElementsByTagName("script")[0]).parentNode.insertBefore(p,r);var u=e;for(void 0!==a?u=e[a]=[]:a="posthog",u.people=u.people||[],u.toString=function(t){var e="posthog";return"posthog"!==a&&(e+="."+a),t||(e+=" (stub)"),e},u.people.toString=function(){return u.toString(1)+".people (stub)"},o="${snippetMethods}".split(" "),n=0;n', - ` ${snippetFunctions()}`, - ` posthog.init('${currentTeam?.api_token}',{api_host:'${apiHostOrigin()}', ${ + ` ${snippetFunctions(arrayJs)}`, + ` posthog.init('${currentTeam?.api_token}', { + api_host:'${apiHostOrigin()}'${ isPersonProfilesDisabled ? 
`` - : `person_profiles: 'identified_only' // or 'always' to create profiles for anonymous users as well` + : `,\n person_profiles: 'identified_only' // or 'always' to create profiles for anonymous users as well` } - })`, + })`, '', ] .map((x) => ' '.repeat(indent) + x) @@ -48,3 +49,11 @@ export function JSSnippet(): JSX.Element { return {snippet} } + +export function JSSnippetV2(): JSX.Element { + const { currentTeam } = useValues(teamLogic) + + const snippet = useJsSnippet(0, `/array/${currentTeam?.api_token}/array.js`) + + return {snippet} +} diff --git a/frontend/src/lib/components/Metalytics/MetalyticsSummary.tsx b/frontend/src/lib/components/Metalytics/MetalyticsSummary.tsx new file mode 100644 index 0000000000000..67230d017641c --- /dev/null +++ b/frontend/src/lib/components/Metalytics/MetalyticsSummary.tsx @@ -0,0 +1,39 @@ +import { IconPulse } from '@posthog/icons' +import { LemonBadge, LemonButton } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' + +import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' +import { SidePanelTab } from '~/types' + +import { metalyticsLogic } from './metalyticsLogic' + +export function MetalyticsSummary(): JSX.Element | null { + const { instanceId, viewCount, viewCountLoading } = useValues(metalyticsLogic) + const safeViewCount = viewCount?.views ?? 0 + const safeUniqueUsers = viewCount?.users ?? 0 + const { openSidePanel } = useActions(sidePanelStateLogic) + + if (!instanceId || viewCountLoading) { + return null + } + + return ( + + } + size="small" + onClick={() => openSidePanel(SidePanelTab.Activity, 'metalytics')} + tooltip={`${safeUniqueUsers} PostHog members have viewed this a total of ${safeViewCount} times. Click to see more.`} + /> + + + ) +} diff --git a/frontend/src/lib/components/Metalytics/metalyticsLogic.ts b/frontend/src/lib/components/Metalytics/metalyticsLogic.ts new file mode 100644 index 0000000000000..8ddc838701121 --- /dev/null +++ b/frontend/src/lib/components/Metalytics/metalyticsLogic.ts @@ -0,0 +1,97 @@ +import { connect, kea, path, selectors } from 'kea' +import { loaders } from 'kea-loaders' +import { subscriptions } from 'kea-subscriptions' +import api from 'lib/api' +import { membersLogic } from 'scenes/organization/membersLogic' + +import { activityForSceneLogic } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { HogQLQuery, NodeKind } from '~/queries/schema' +import { hogql } from '~/queries/utils' + +import type { metalyticsLogicType } from './metalyticsLogicType' + +export const metalyticsLogic = kea([ + path(['lib', 'components', 'metalytics', 'metalyticsLogic']), + connect({ + values: [activityForSceneLogic, ['sceneActivityFilters'], membersLogic, ['members']], + }), + + loaders(({ values }) => ({ + viewCount: [ + null as { views: number; users: number } | null, + { + loadViewCount: async () => { + const query: HogQLQuery = { + kind: NodeKind.HogQLQuery, + query: hogql`SELECT SUM(count) AS count, COUNT(DISTINCT app_source_id) AS unique_users + FROM app_metrics + WHERE app_source = 'metalytics' + AND instance_id = ${values.instanceId}`, + } + + // NOTE: I think this gets cached heavily - how to correctly invalidate? 
+ const response = await api.query(query, undefined, undefined, true) + const result = response.results as number[][] + return { + views: result[0][0], + users: result[0][1], + } + }, + }, + ], + recentUsers: [ + [] as string[], + { + loadUsersLast30days: async () => { + const query: HogQLQuery = { + kind: NodeKind.HogQLQuery, + query: hogql`SELECT DISTINCT app_source_id + FROM app_metrics + WHERE app_source = 'metalytics' + AND instance_id = ${values.instanceId} + AND timestamp >= NOW() - INTERVAL 30 DAY + ORDER BY timestamp DESC`, + } + + const response = await api.query(query, undefined, undefined, true) + return response.results.map((result) => result[0]) as string[] + }, + }, + ], + })), + + selectors({ + instanceId: [ + (s) => [s.sceneActivityFilters], + (sceneActivityFilters) => + sceneActivityFilters?.item_id ? `${sceneActivityFilters.scope}:${sceneActivityFilters.item_id}` : null, + ], + scope: [(s) => [s.sceneActivityFilters], (sceneActivityFilters) => sceneActivityFilters?.scope], + + recentUserMembers: [ + (s) => [s.recentUsers, s.members], + (recentUsers, members) => { + if (!members || !recentUsers) { + return [] + } + // Filter members whose IDs match the recentUsers array + const filteredMembers = members.filter((member) => recentUsers.includes(String(member.user.id))) + return filteredMembers + }, + ], + }), + + subscriptions(({ actions }) => ({ + instanceId: async (instanceId) => { + if (instanceId) { + actions.loadViewCount() + actions.loadUsersLast30days() + + await api.create('/api/projects/@current/metalytics/', { + metric_name: 'viewed', + instance_id: instanceId, + }) + } + }, + })), +]) diff --git a/frontend/src/lib/components/Playlist/Playlist.tsx b/frontend/src/lib/components/Playlist/Playlist.tsx index 7e95122e77b68..41853e0600a2b 100644 --- a/frontend/src/lib/components/Playlist/Playlist.tsx +++ b/frontend/src/lib/components/Playlist/Playlist.tsx @@ -26,7 +26,7 @@ export type PlaylistSection = { export type PlaylistProps = { sections: PlaylistSection[] listEmptyState: JSX.Element - content: ({ activeItem }: { activeItem: T | null }) => JSX.Element + content: (({ activeItem }: { activeItem: T | null }) => JSX.Element) | null title?: string notebooksHref?: string embedded?: boolean @@ -149,7 +149,7 @@ export function Playlist< onDoubleClick={() => setListCollapsed(!listCollapsed)} />
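One detail of metalyticsLogic above worth spelling out: everything is keyed by an instance ID derived from the scene's activity filters, and that same key drives both the HogQL reads and the view-count write in the subscription. A self-contained sketch of the derivation (the filter type is simplified from sceneActivityFilters):

    type SceneActivityFilters = { scope: string; item_id?: string | number }

    function toInstanceId(filters: SceneActivityFilters | null): string | null {
        // e.g. an insight scene yields 'Insight:42'; scenes without an item id
        // produce null, which is why the side panel shows its empty state there.
        return filters?.item_id ? `${filters.scope}:${filters.item_id}` : null
    }

    // toInstanceId({ scope: 'Insight', item_id: 42 }) -> 'Insight:42'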
    -
    {content({ activeItem })}
    + {content &&
    {content({ activeItem })}
    }
    ) } diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx index 756844702c3e5..54389b51c0271 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx @@ -166,7 +166,7 @@ function ValueDisplay({ } > : undefined} > @@ -222,8 +222,8 @@ export function PropertiesTable({ parent, }: PropertiesTableType): JSX.Element { const [searchTerm, setSearchTerm] = useState('') - const { hidePostHogPropertiesInTable } = useValues(userPreferencesLogic) - const { setHidePostHogPropertiesInTable } = useActions(userPreferencesLogic) + const { hidePostHogPropertiesInTable, hideNullValues } = useValues(userPreferencesLogic) + const { setHidePostHogPropertiesInTable, setHideNullValues } = useActions(userPreferencesLogic) const { isCloudOrDev } = useValues(preflightLogic) const objectProperties = useMemo(() => { @@ -283,11 +283,18 @@ export function PropertiesTable({ }) } - if (filterable && hidePostHogPropertiesInTable) { - entries = entries.filter(([key]) => { - const isPostHogProperty = key.startsWith('$') || PROPERTY_KEYS.includes(key) - const isNonDollarPostHogProperty = isCloudOrDev && CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) - return !isPostHogProperty && !isNonDollarPostHogProperty + if (filterable) { + entries = entries.filter(([key, value]) => { + if (hideNullValues && value === null) { + return false + } + if (hidePostHogPropertiesInTable) { + const isPostHogProperty = key.startsWith('$') || PROPERTY_KEYS.includes(key) + const isNonDollarPostHogProperty = + isCloudOrDev && CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) + return !isPostHogProperty && !isNonDollarPostHogProperty + } + return true }) } @@ -299,7 +306,7 @@ export function PropertiesTable({ }) } return entries - }, [properties, sortProperties, searchTerm, hidePostHogPropertiesInTable]) + }, [properties, sortProperties, searchTerm, hidePostHogPropertiesInTable, hideNullValues]) if (Array.isArray(properties)) { return ( @@ -424,7 +431,7 @@ export function PropertiesTable({ return ( <> {(searchable || filterable) && ( -
    +
    {searchable && ( )} {filterable && ( - + <> + + + + )} @@ -467,6 +483,7 @@ export function PropertiesTable({ onClick={() => { setSearchTerm('') setHidePostHogPropertiesInTable(false) + setHideNullValues(false) }} > Clear filters diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index a9389640cbeb6..3e0f670074598 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -46,6 +46,29 @@ function getSessionReplayLink(): string { return `\nSession: ${replayUrl}` } +function getErrorTrackingLink(): string { + const filterGroup = encodeURIComponent( + JSON.stringify({ + type: 'AND', + values: [ + { + type: 'AND', + values: [ + { + key: '$session_id', + value: [posthog.get_session_id()], + operator: 'exact', + type: 'event', + }, + ], + }, + ], + }) + ) + + return `\nExceptions: https://us.posthog.com/project/2/error_tracking?filterGroup=${filterGroup}` +} + function getDjangoAdminLink( user: UserType | null, cloudRegion: Region | null | undefined, @@ -178,6 +201,11 @@ export const TARGET_AREA_TO_NAME = [ 'data-attr': `support-form-target-area-web_analytics`, label: 'Web Analytics', }, + { + value: 'error_tracking', + 'data-attr': `support-form-target-area-error_tracking`, + label: 'Error tracking', + }, ], }, ] @@ -211,6 +239,7 @@ export type SupportTicketTargetArea = | 'toolbar' | 'surveys' | 'web_analytics' + | 'error_tracking' export type SupportTicketSeverityLevel = keyof typeof SEVERITY_LEVEL_TO_NAME export type SupportTicketKind = keyof typeof SUPPORT_KIND_TO_SUBJECT @@ -446,6 +475,7 @@ export const supportLogic = kea([ `\nTarget area: ${target_area}` + `\nReport event: http://go/ticketByUUID/${zendesk_ticket_uuid}` + getSessionReplayLink() + + getErrorTrackingLink() + getCurrentLocationLink() + getDjangoAdminLink( userLogic.values.user, diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts index 103e89b2bfed9..77e7e0e9eb52e 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts @@ -73,35 +73,41 @@ describe('versionCheckerLogic', () => { { versionCount: 1, expectation: null }, { versionCount: 11, + expectation: null, + }, + { + versionCount: 51, expectation: { latestUsedVersion: '1.0.0', - latestAvailableVersion: '1.0.10', - numVersionsBehind: 10, - level: 'info', + latestAvailableVersion: '1.0.50', + numVersionsBehind: 50, + level: 'error', }, }, { - versionCount: 15, + minorUsedVersion: 40, + versionCount: 1, expectation: { latestUsedVersion: '1.0.0', - latestAvailableVersion: '1.0.14', - numVersionsBehind: 14, - level: 'info', + latestAvailableVersion: '1.40.0', + numVersionsBehind: 40, + level: 'warning', }, }, { - versionCount: 25, + majorUsedVersion: 2, + versionCount: 1, expectation: { latestUsedVersion: '1.0.0', - latestAvailableVersion: '1.0.24', - numVersionsBehind: 24, - level: 'error', + latestAvailableVersion: '2.0.0', + numVersionsBehind: 1, + level: 'info', }, }, ])('return a version warning if diff is great enough', async (options) => { // TODO: How do we clear the persisted value? 
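The rewritten expectations above pair with the new leveling rules in versionCheckerLogic further down: a major-version gap is only informational, a minor gap warns from 40 versions behind, and 50 or more releases behind is always an error. A standalone sketch of that decision table:

    type WarningLevel = 'info' | 'warning' | 'error' | undefined

    function versionWarningLevel(kind: 'major' | 'minor' | 'patch', numVersionsBehind: number): WarningLevel {
        let level: WarningLevel
        if (kind === 'major') {
            level = 'info' // upgrading is desirable, but not critical
        } else if (kind === 'minor') {
            level = numVersionsBehind >= 40 ? 'warning' : undefined
        }
        return level === undefined && numVersionsBehind >= 50 ? 'error' : level
    }

    // versionWarningLevel('patch', 51) -> 'error'; versionWarningLevel('major', 1) -> 'info'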
const versionsList = Array.from({ length: options.versionCount }, (_, i) => ({ - version: `1.0.${i}`, + version: `${options.majorUsedVersion || 1}.${options.minorUsedVersion || 0}.${i}`, })).reverse() useMockedVersions( @@ -143,13 +149,14 @@ describe('versionCheckerLogic', () => { }, { usedVersions: [ - { version: '1.80.0', timestamp: '2023-01-01T12:00:00Z' }, - { version: '1.83.1-beta', timestamp: '2023-01-01T10:00:00Z' }, - { version: '1.84.0-delta', timestamp: '2023-01-01T08:00:00Z' }, + { version: '1.40.0', timestamp: '2023-01-01T12:00:00Z' }, + { version: '1.41.1-beta', timestamp: '2023-01-01T10:00:00Z' }, + { version: '1.42.0', timestamp: '2023-01-01T08:00:00Z' }, + { version: '1.42.0-delta', timestamp: '2023-01-01T08:00:00Z' }, ], expectation: { - latestUsedVersion: '1.84.0-delta', - numVersionsBehind: 1, + latestUsedVersion: '1.42.0', + numVersionsBehind: 42, latestAvailableVersion: '1.84.0', level: 'warning', }, diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts index 7ffecbbf89c82..4c6067adf4afc 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts @@ -174,6 +174,7 @@ export const versionCheckerLogic = kea([ if (!warning && sdkVersions && latestAvailableVersion) { const diff = diffVersions(latestAvailableVersion, latestUsedVersion) + if (diff && diff.diff > 0) { // there's a difference between the latest used version and the latest available version @@ -188,18 +189,14 @@ export const versionCheckerLogic = kea([ } let level: 'warning' | 'info' | 'error' | undefined - if (diff.kind === 'major' || numVersionsBehind >= 20) { + if (diff.kind === 'major') { + level = 'info' // it is desirable to be on the latest major version, but not critical + } else if (diff.kind === 'minor') { + level = numVersionsBehind >= 40 ? 'warning' : undefined + } + + if (level === undefined && numVersionsBehind >= 50) { level = 'error' - } else if (diff.kind === 'minor' && diff.diff >= 15) { - level = 'warning' - } else if ((diff.kind === 'minor' && diff.diff >= 10) || numVersionsBehind >= 10) { - level = 'info' - } else if (latestUsedVersion.extra) { - // if we have an extra (alpha/beta/rc/etc.) version, we should always show a warning if they aren't on the latest - level = 'warning' - } else { - // don't warn for a small number of patch versions behind - level = undefined } // we check if there is a "latest user version string" to avoid returning odd data in unexpected cases diff --git a/frontend/src/lib/components/ViewRecordingButton.tsx b/frontend/src/lib/components/ViewRecordingButton.tsx index 1d0c7adb1d4b0..37de0ba0eaae0 100644 --- a/frontend/src/lib/components/ViewRecordingButton.tsx +++ b/frontend/src/lib/components/ViewRecordingButton.tsx @@ -10,20 +10,27 @@ import { EventType } from '~/types' export default function ViewRecordingButton({ sessionId, timestamp, + inModal = false, ...props }: Pick & { sessionId: string timestamp?: string | Dayjs + // whether to open in a modal or navigate to the replay page + inModal?: boolean }): JSX.Element { const { openSessionPlayer } = useActions(sessionPlayerModalLogic) return ( { - const fiveSecondsBeforeEvent = dayjs(timestamp).valueOf() - 5000 - openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0)) - }} + to={inModal ? undefined : urls.replaySingle(sessionId)} + onClick={ + inModal + ? () => { + const fiveSecondsBeforeEvent = timestamp ? 
dayjs(timestamp).valueOf() - 5000 : 0 + openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0)) + } + : undefined + } sideIcon={} {...props} > diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 623848eeba79f..d41a232518b18 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -165,6 +165,7 @@ export const FEATURE_FLAGS = { PERSON_FEED_CANVAS: 'person-feed-canvas', // owner: #project-canvas FEATURE_FLAG_COHORT_CREATION: 'feature-flag-cohort-creation', // owner: @neilkakkar #team-feature-success INSIGHT_HORIZONTAL_CONTROLS: 'insight-horizontal-controls', // owner: @benjackwhite + SURVEYS_ADAPTIVE_LIMITS: 'surveys-adaptive-limits', // owner: #team-feature-success SURVEYS_WIDGETS: 'surveys-widgets', // owner: #team-feature-success SURVEYS_EVENTS: 'surveys-events', // owner: #team-feature-success SURVEYS_ACTIONS: 'surveys-actions', // owner: #team-feature-success @@ -197,19 +198,14 @@ export const FEATURE_FLAGS = { SETTINGS_BOUNCE_RATE_PAGE_VIEW_MODE: 'settings-bounce-rate-page-view-mode', // owner: @robbie-c ONBOARDING_DASHBOARD_TEMPLATES: 'onboarding-dashboard-templates', // owner: @raquelmsmith MULTIPLE_BREAKDOWNS: 'multiple-breakdowns', // owner: @skoob13 #team-product-analytics - WEB_ANALYTICS_LIVE_USER_COUNT: 'web-analytics-live-user-count', // owner: @robbie-c SETTINGS_SESSION_TABLE_VERSION: 'settings-session-table-version', // owner: @robbie-c INSIGHT_FUNNELS_USE_UDF: 'insight-funnels-use-udf', // owner: @aspicer #team-product-analytics INSIGHT_FUNNELS_USE_UDF_TRENDS: 'insight-funnels-use-udf-trends', // owner: @aspicer #team-product-analytics FIRST_TIME_FOR_USER_MATH: 'first-time-for-user-math', // owner: @skoob13 #team-product-analytics MULTITAB_EDITOR: 'multitab-editor', // owner: @EDsCODE #team-data-warehouse - WEB_ANALYTICS_REPLAY: 'web-analytics-replay', // owner: @robbie-c BATCH_EXPORTS_POSTHOG_HTTP: 'posthog-http-batch-exports', EXPERIMENT_MAKE_DECISION: 'experiment-make-decision', // owner: @jurajmajerik #team-feature-success DATA_MODELING: 'data-modeling', // owner: @EDsCODE #team-data-warehouse - WEB_ANALYTICS_CONVERSION_GOALS: 'web-analytics-conversion-goals', // owner: @robbie-c - WEB_ANALYTICS_LAST_CLICK: 'web-analytics-last-click', // owner: @robbie-c - WEB_ANALYTICS_LCP_SCORE: 'web-analytics-lcp-score', // owner: @robbie-c HEDGEHOG_SKIN_SPIDERHOG: 'hedgehog-skin-spiderhog', // owner: @benjackwhite INSIGHT_VARIABLES: 'insight_variables', // owner: @Gilbert09 #team-data-warehouse WEB_EXPERIMENTS: 'web-experiments', // owner: @team-feature-success @@ -221,21 +217,25 @@ export const FEATURE_FLAGS = { EXPERIMENTS_HOGQL: 'experiments-hogql', // owner: @jurajmajerik #team-experiments ROLE_BASED_ACCESS_CONTROL: 'role-based-access-control', // owner: @zach MESSAGING: 'messaging', // owner @mariusandra #team-cdp - SESSION_REPLAY_URL_BLOCKLIST: 'session-replay-url-blocklist', // owner: @richard-better #team-replay BILLING_TRIAL_FLOW: 'billing-trial-flow', // owner: @zach EDIT_DWH_SOURCE_CONFIG: 'edit_dwh_source_config', // owner: @Gilbert09 #team-data-warehouse AI_SURVEY_RESPONSE_SUMMARY: 'ai-survey-response-summary', // owner: @pauldambra - CUSTOM_CHANNEL_TYPE_RULES: 'custom-channel-type-rules', // owner: @robbie-c #team-web-analytics SELF_SERVE_CREDIT_OVERRIDE: 'self-serve-credit-override', // owner: @zach + FEATURE_MANAGEMENT_UI: 'feature-management-ui', // owner: @haven #team-feature-flags CUSTOM_CSS_THEMES: 'custom-css-themes', // owner: @daibhin + METALYTICS: 'metalytics', // owner: @surbhi 
EXPERIMENTS_MULTIPLE_METRICS: 'experiments-multiple-metrics', // owner: @jurajmajerik #team-experiments - WEB_ANALYTICS_WARN_CUSTOM_EVENT_NO_SESSION: 'web-analytics-warn-custom-event-no-session', // owner: @robbie-c #team-web-analytics - TWO_FACTOR_UI: 'two-factor-ui', // owner: @zach + REMOTE_CONFIG: 'remote-config', // owner: @benjackwhite SITE_DESTINATIONS: 'site-destinations', // owner: @mariusandra #team-cdp SITE_APP_FUNCTIONS: 'site-app-functions', // owner: @mariusandra #team-cdp + HOG_TRANSFORMATIONS: 'hog-transformations', // owner: #team-cdp REPLAY_HOGQL_FILTERS: 'replay-hogql-filters', // owner: @pauldambra #team-replay REPLAY_LIST_RECORDINGS_AS_QUERY: 'replay-list-recordings-as-query', // owner: @pauldambra #team-replay SUPPORT_MESSAGE_OVERRIDE: 'support-message-override', // owner: @abigail + BILLING_SKIP_FORECASTING: 'billing-skip-forecasting', // owner: @zach + EXPERIMENT_STATS_V2: 'experiment-stats-v2', // owner: @danielbachhuber #team-experiments + WEB_ANALYTICS_PERIOD_COMPARISON: 'web-analytics-period-comparison', // owner: @rafaeelaudibert #team-web-analytics + WEB_ANALYTICS_CONVERSION_GOAL_FILTERS: 'web-analytics-conversion-goal-filters', // owner: @rafaeelaudibert #team-web-analytics } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/integrations/IntegrationScopesWarning.tsx b/frontend/src/lib/integrations/IntegrationScopesWarning.tsx new file mode 100644 index 0000000000000..c9e6c7a61d764 --- /dev/null +++ b/frontend/src/lib/integrations/IntegrationScopesWarning.tsx @@ -0,0 +1,65 @@ +import api from 'lib/api' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { Link } from 'lib/lemon-ui/Link' +import { useMemo } from 'react' + +import { HogFunctionInputSchemaType, IntegrationType } from '~/types' + +export function IntegrationScopesWarning({ + integration, + schema, +}: { + integration: IntegrationType + schema?: HogFunctionInputSchemaType +}): JSX.Element { + const getScopes = useMemo((): string[] => { + const scopes: any[] = [] + const possibleScopeLocation = [integration.config.scope, integration.config.scopes] + + possibleScopeLocation.map((scope) => { + if (typeof scope === 'string') { + scopes.push(scope.split(' ')) + scopes.push(scope.split(',')) + } + if (typeof scope === 'object') { + scopes.push(scope) + } + }) + return scopes + .filter((scope: any) => typeof scope === 'object') + .reduce((a, b) => (a.length > b.length ? a : b), []) + }, [integration.config]) + + const requiredScopes = schema?.requiredScopes?.split(' ') || [] + const missingScopes = requiredScopes.filter((scope: string) => !getScopes.includes(scope)) + + if (missingScopes.length === 0 || getScopes.length === 0) { + return <> + } + return ( +
    + + Required scopes are missing: [{missingScopes.join(', ')}]. + {integration.kind === 'hubspot' ? ( + + Note that some features may not be available on your current HubSpot plan. Check out{' '} + + this page + {' '} + for more details. + + ) : null} + +
    + ) +} diff --git a/frontend/src/lib/integrations/IntegrationView.tsx b/frontend/src/lib/integrations/IntegrationView.tsx index 31cd12e82eb40..80590299bda4d 100644 --- a/frontend/src/lib/integrations/IntegrationView.tsx +++ b/frontend/src/lib/integrations/IntegrationView.tsx @@ -1,15 +1,18 @@ import { LemonBanner } from '@posthog/lemon-ui' import api from 'lib/api' import { UserActivityIndicator } from 'lib/components/UserActivityIndicator/UserActivityIndicator' +import { IntegrationScopesWarning } from 'lib/integrations/IntegrationScopesWarning' -import { IntegrationType } from '~/types' +import { HogFunctionInputSchemaType, IntegrationType } from '~/types' export function IntegrationView({ integration, suffix, + schema, }: { integration: IntegrationType suffix?: JSX.Element + schema?: HogFunctionInputSchemaType }): JSX.Element { const errors = (integration.errors && integration.errors?.split(',')) || [] @@ -36,7 +39,7 @@ export function IntegrationView({ {suffix}
    - {errors.length > 0 && ( + {errors.length > 0 ? (
    + ) : ( + )}
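For reference, the scope check performed by IntegrationScopesWarning above boils down to normalizing whatever shape the provider stored: a space- or comma-separated string, or an array. A self-contained sketch, with the config type simplified from integration.config:

    function normalizeScopes(config: { scope?: string | string[]; scopes?: string | string[] }): string[] {
        const candidates: string[][] = []
        for (const raw of [config.scope, config.scopes]) {
            if (typeof raw === 'string') {
                // Try both separators; only one will produce a multi-element list.
                candidates.push(raw.split(' '))
                candidates.push(raw.split(','))
            } else if (Array.isArray(raw)) {
                candidates.push(raw)
            }
        }
        // Keep the longest interpretation, as the getScopes memo does.
        return candidates.reduce((a, b) => (a.length > b.length ? a : b), [])
    }

    // normalizeScopes({ scope: 'read write' }) -> ['read', 'write']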
    ) diff --git a/frontend/src/lib/integrations/SlackIntegrationHelpers.tsx b/frontend/src/lib/integrations/SlackIntegrationHelpers.tsx index 14d533bf9fca9..b5690b4c55333 100644 --- a/frontend/src/lib/integrations/SlackIntegrationHelpers.tsx +++ b/frontend/src/lib/integrations/SlackIntegrationHelpers.tsx @@ -1,7 +1,7 @@ import { LemonBanner, LemonButton, LemonInputSelect, LemonInputSelectOption, Link } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { IconSlackExternal } from 'lib/lemon-ui/icons' -import { useMemo } from 'react' +import { useEffect, useMemo } from 'react' import { IntegrationType, SlackChannelType } from '~/types' @@ -53,6 +53,12 @@ export function SlackChannelPicker({ onChange, value, integration, disabled }: S return value }, [value, slackChannels]) + useEffect(() => { + if (!disabled) { + loadSlackChannels() + } + }, [loadSlackChannels, disabled]) + return ( <> - + {showLeftMonth && ( ([ path(['lib', 'logic', 'userPreferencesLogic']), actions({ setHidePostHogPropertiesInTable: (enabled: boolean) => ({ enabled }), + setHideNullValues: (enabled: boolean) => ({ enabled }), }), reducers(() => ({ hidePostHogPropertiesInTable: [ @@ -16,5 +17,6 @@ export const userPreferencesLogic = kea([ setHidePostHogPropertiesInTable: (_, { enabled }) => enabled, }, ], + hideNullValues: [true, { persist: true }, { setHideNullValues: (_, { enabled }) => enabled }], })), ]) diff --git a/frontend/src/lib/monaco/CodeEditor.tsx b/frontend/src/lib/monaco/CodeEditor.tsx index aaa5303d35451..10dabd94c1bb4 100644 --- a/frontend/src/lib/monaco/CodeEditor.tsx +++ b/frontend/src/lib/monaco/CodeEditor.tsx @@ -16,7 +16,7 @@ import * as monaco from 'monaco-editor' import { useEffect, useMemo, useRef, useState } from 'react' import { themeLogic } from '~/layout/navigation-3000/themeLogic' -import { AnyDataNode, HogLanguage } from '~/queries/schema' +import { AnyDataNode, HogLanguage, HogQLMetadataResponse } from '~/queries/schema' if (loader) { loader.config({ monaco }) @@ -32,9 +32,15 @@ export interface CodeEditorProps extends Omit sourceQuery?: AnyDataNode globals?: Record schema?: Record | null + onMetadata?: (metadata: HogQLMetadataResponse) => void + onError?: (error: string | null, isValidView: boolean) => void } let codeEditorIndex = 0 +export function initModel(model: editor.ITextModel, builtCodeEditorLogic: BuiltLogic): void { + ;(model as any).codeEditorLogic = builtCodeEditorLogic +} + function initEditor( monaco: Monaco, editor: importedEditor.IStandaloneCodeEditor, @@ -44,7 +50,9 @@ function initEditor( ): void { // This gives autocomplete access to the specific editor const model = editor.getModel() - ;(model as any).codeEditorLogic = builtCodeEditorLogic + if (model) { + initModel(model, builtCodeEditorLogic) + } if (editorProps?.language === 'hog') { initHogLanguage(monaco) @@ -112,6 +120,8 @@ export function CodeEditor({ globals, sourceQuery, schema, + onError, + onMetadata, ...editorProps }: CodeEditorProps): JSX.Element { const { isDarkModeOn } = useValues(themeLogic) @@ -130,6 +140,8 @@ export function CodeEditor({ sourceQuery, monaco: monaco, editor: editor, + onError, + onMetadata, }) useMountedLogic(builtCodeEditorLogic) diff --git a/frontend/src/lib/monaco/codeEditorLogic.tsx b/frontend/src/lib/monaco/codeEditorLogic.tsx index 05506028c1c93..42e95f25209a4 100644 --- a/frontend/src/lib/monaco/codeEditorLogic.tsx +++ b/frontend/src/lib/monaco/codeEditorLogic.tsx @@ -1,6 +1,7 @@ import type { Monaco } from '@monaco-editor/react' import { actions, connect, 
kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' +import { subscriptions } from 'kea-subscriptions' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' // Note: we can oly import types and not values from monaco-editor, because otherwise some Monaco code breaks @@ -48,6 +49,8 @@ export interface CodeEditorLogicProps { editor?: editor.IStandaloneCodeEditor | null globals?: Record multitab?: boolean + onError?: (error: string | null, isValidView: boolean) => void + onMetadata?: (metadata: HogQLMetadataResponse) => void } export const codeEditorLogic = kea([ @@ -98,6 +101,7 @@ export const codeEditorLogic = kea([ variables, }) breakpoint() + props.onMetadata?.(response) return [query, response] }, }, @@ -270,6 +274,14 @@ export const codeEditorLogic = kea([ }, ], }), + subscriptions(({ props, values }) => ({ + isValidView: (isValidView) => { + props.onError?.(values.error, isValidView) + }, + error: (error) => { + props.onError?.(error, values.isValidView) + }, + })), propsChanged(({ actions, props }, oldProps) => { if ( props.query !== oldProps.query || diff --git a/frontend/src/lib/utils/concurrencyController.ts b/frontend/src/lib/utils/concurrencyController.ts index 47683b954be86..941af92f33b74 100644 --- a/frontend/src/lib/utils/concurrencyController.ts +++ b/frontend/src/lib/utils/concurrencyController.ts @@ -1,5 +1,5 @@ +import FastPriorityQueue from 'fastpriorityqueue' import { promiseResolveReject } from 'lib/utils' - class ConcurrencyControllerItem { _debugTag?: string _runFn: () => Promise @@ -52,7 +52,9 @@ export class ConcurrencyController { _concurrencyLimit: number _current: ConcurrencyControllerItem[] = [] - private _queue: ConcurrencyControllerItem[] = [] + private _queue: FastPriorityQueue> = new FastPriorityQueue( + (a, b) => a._priority < b._priority + ) constructor(concurrencyLimit: number) { this._concurrencyLimit = concurrencyLimit @@ -79,7 +81,7 @@ export class ConcurrencyController { }): Promise => { const item = new ConcurrencyControllerItem(this, fn, abortController, priority, debugTag) - this._queue.push(item) + this._queue.add(item) this._tryRunNext() @@ -87,8 +89,7 @@ export class ConcurrencyController { } _runNext(): void { - this._queue.sort((a, b) => a._priority - b._priority) - const next = this._queue.shift() + const next = this._queue.poll() if (next) { next._runFn() .catch(() => { diff --git a/frontend/src/loadPostHogJS.tsx b/frontend/src/loadPostHogJS.tsx index badabf1105246..4dfc4e30ee47f 100644 --- a/frontend/src/loadPostHogJS.tsx +++ b/frontend/src/loadPostHogJS.tsx @@ -67,6 +67,7 @@ export function loadPostHogJS(): void { capture_copied_text: true, }, person_profiles: 'always', + __preview_remote_config: true, // Helper to capture events for assertions in Cypress _onCapture: (window as any)._cypress_posthog_captures diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index 9d14c1b3c0acf..de9e072e7f69b 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -119,6 +119,7 @@ export const defaultMocks: Mocks = { }, }, ], + '/api/users/@me/two_factor_status/': () => [200, { is_enabled: true, backup_codes: [], method: 'TOTP' }], '/api/environments/@current/': MOCK_DEFAULT_TEAM, '/api/projects/@current/': MOCK_DEFAULT_TEAM, '/api/projects/:team_id/comments/count': { count: 0 }, diff --git a/frontend/src/models/dashboardsModel.tsx b/frontend/src/models/dashboardsModel.tsx index 
99c4cdf01ea96..2c23d08675c2a 100644 --- a/frontend/src/models/dashboardsModel.tsx +++ b/frontend/src/models/dashboardsModel.tsx @@ -114,10 +114,10 @@ export const dashboardsModel = kea([ const beforeChange = { ...values.rawDashboards[id] } - const response = (await api.update( + const response = await api.update( `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, payload - )) as DashboardType + ) const updatedAttribute = Object.keys(payload)[0] if (updatedAttribute === 'name' || updatedAttribute === 'description' || updatedAttribute === 'tags') { eventUsageLogic.actions.reportDashboardFrontEndUpdate( @@ -134,10 +134,10 @@ export const dashboardsModel = kea([ button: { label: 'Undo', action: async () => { - const reverted = (await api.update( + const reverted = await api.update( `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, beforeChange - )) as DashboardType + ) actions.updateDashboardSuccess(getQueryBasedDashboard(reverted)) lemonToast.success('Dashboard change reverted') }, @@ -160,31 +160,34 @@ export const dashboardsModel = kea([ }) ) as DashboardType, pinDashboard: async ({ id, source }) => { - const response = (await api.update( + const response = await api.update( `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, { pinned: true, } - )) as DashboardType + ) eventUsageLogic.actions.reportDashboardPinToggled(true, source) return getQueryBasedDashboard(response)! }, unpinDashboard: async ({ id, source }) => { - const response = (await api.update( + const response = await api.update( `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, { pinned: false, } - )) as DashboardType + ) eventUsageLogic.actions.reportDashboardPinToggled(false, source) return getQueryBasedDashboard(response)! 
}, duplicateDashboard: async ({ id, name, show, duplicateTiles }) => { - const result = (await api.create(`api/environments/${teamLogic.values.currentTeamId}/dashboards/`, { - use_dashboard: id, - name: `${name} (Copy)`, - duplicate_tiles: duplicateTiles, - })) as DashboardType + const result = await api.create( + `api/environments/${teamLogic.values.currentTeamId}/dashboards/`, + { + use_dashboard: id, + name: `${name} (Copy)`, + duplicate_tiles: duplicateTiles, + } + ) if (show) { router.actions.push(urls.dashboard(result.id)) } diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index b3cada69a6a89..f1bbaa4516c55 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -81,6 +81,8 @@ export interface DataNodeLogicProps { filtersOverride?: DashboardFilter | null /** Dashboard variables to override the ones in the query */ variablesOverride?: Record | null + + autoLoad?: boolean } export const AUTOLOAD_INTERVAL = 30000 @@ -114,7 +116,7 @@ export const dataNodeLogic = kea([ ], ], })), - props({ query: {}, variablesOverride: undefined } as DataNodeLogicProps), + props({ query: {}, variablesOverride: undefined, autoLoad: true } as DataNodeLogicProps), propsChanged(({ actions, props }, oldProps) => { if (!props.query) { return // Can't do anything without a query @@ -131,6 +133,7 @@ export const dataNodeLogic = kea([ actions.loadData(queryVarsHaveChanged, queryStatus.id) } else if ( hasQueryChanged && + props.autoLoad && !(props.cachedResults && props.key.includes('dashboard')) && // Don't load data on dashboard if cached results are available (!props.cachedResults || (isInsightQueryNode(props.query) && !props.cachedResults['result'] && !props.cachedResults['results'])) @@ -188,6 +191,10 @@ export const dataNodeLogic = kea([ if (cache.localResults[stringifiedQuery] && !refresh) { return cache.localResults[stringifiedQuery] } + + if (!query.query) { + return null + } } if (!values.currentTeamId) { @@ -334,6 +341,12 @@ export const dataNodeLogic = kea([ ], })), reducers(({ props }) => ({ + isRefresh: [ + false, + { + loadData: (_, { refresh }) => !!refresh, + }, + ], dataLoading: [ false, { @@ -471,8 +484,12 @@ export const dataNodeLogic = kea([ (variablesOverride) => !!variablesOverride, ], isShowingCachedResults: [ - () => [(_, props) => props.cachedResults ?? null, (_, props) => props.query], - (cachedResults: AnyResponseType | null, query: DataNode): boolean => { + (s) => [(_, props) => props.cachedResults ?? 
null, (_, props) => props.query, s.isRefresh], + (cachedResults: AnyResponseType | null, query: DataNode, isRefresh): boolean => { + if (isRefresh) { + return false + } + return ( !!cachedResults || (cache.localResults && 'query' in query && JSON.stringify(query.query) in cache.localResults) @@ -627,23 +644,26 @@ (s) => [s.nextAllowedRefresh, s.lastRefresh], (nextAllowedRefresh: string | null, lastRefresh: string | null) => (): string => { const now = dayjs() - let disabledReason = '' - if (!!nextAllowedRefresh && now.isBefore(dayjs(nextAllowedRefresh))) { - // If this is a saved insight, the result will contain nextAllowedRefresh, and we use that to disable the button - disabledReason = `You can refresh this insight again ${dayjs(nextAllowedRefresh).from(now)}` - } else if ( - !!lastRefresh && - now.subtract(UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES - 0.5, 'minutes').isBefore(lastRefresh) - ) { - // Unsaved insights don't get cached and get refreshed on every page load, but we avoid allowing users to click - // 'refresh' more than once every UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES. This can be bypassed by simply - // refreshing the page though, as there's no cache layer on the backend - disabledReason = `You can refresh this insight again ${dayjs(lastRefresh) - .add(UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES, 'minutes') - .from(now)}` + // Saved insights have a nextAllowedRefresh we use to check if the user can refresh again + if (nextAllowedRefresh) { + const nextRefreshTime = dayjs(nextAllowedRefresh) + if (now.isBefore(nextRefreshTime)) { + return `You can refresh this insight again ${nextRefreshTime.from(now)}` + } } - - return disabledReason + // For unsaved insights we check the last refresh time + if (lastRefresh) { + const earliestRefresh = dayjs(lastRefresh).add( + UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES, + 'minutes' + ) + if (now.isBefore(earliestRefresh)) { + return `You can refresh this insight again ${earliestRefresh.from(now)}` + } + } + // If we don't have a nextAllowedRefresh or lastRefresh, we can refresh, so we + // return an empty string + return '' }, ], timings: [ diff --git a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx index 5a2830a135ef2..5ac043b46654f 100644 --- a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx +++ b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx @@ -55,6 +55,7 @@ export function EventRowActions({ event }: EventActionProps): JSX.Element { )} { @@ -59,6 +61,11 @@ export const variablesLogic = kea([ setEditorQuery: (query: string) => ({ query }), updateSourceQuery: true, })), + propsChanged(({ props, actions }, oldProps) => { + if (oldProps.queryInput !== props.queryInput) { + actions.setEditorQuery(props.queryInput ?? 
'') + } + }), reducers({ internalSelectedVariables: [ [] as HogQLVariable[], @@ -167,7 +174,6 @@ export const variablesLogic = kea([ }, {} as Record), }, } - const queryVarsHaveChanged = haveVariablesOrFiltersChanged(query.source, values.query.source) if (!queryVarsHaveChanged) { return diff --git a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx index 61c189df39ec8..6fb22cff1ea55 100644 --- a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx +++ b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx @@ -157,7 +157,7 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX const setQuerySource = useCallback( (source: HogQLQuery) => props.setQuery?.({ ...props.query, source }), - [props.setQuery] + [props.setQuery, props.query] ) let component: JSX.Element | null = null diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts index e1053f61571aa..4480fe9977755 100644 --- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts @@ -1,7 +1,7 @@ -import { actions, afterMount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' +import { actions, afterMount, connect, kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea' import { subscriptions } from 'kea-subscriptions' import { dayjs } from 'lib/dayjs' -import { lightenDarkenColor, RGBToHex, uuid } from 'lib/utils' +import { lightenDarkenColor, objectsEqual, RGBToHex, uuid } from 'lib/utils' import mergeObject from 'lodash.merge' import { teamLogic } from 'scenes/teamLogic' @@ -242,6 +242,11 @@ export const dataVisualizationLogic = kea([ ['loadData'], ], })), + propsChanged(({ actions, props }, oldProps) => { + if (props.query && !objectsEqual(props.query, oldProps.query)) { + actions._setQuery(props.query) + } + }), props({ query: { source: {} } } as DataVisualizationLogicProps), actions(({ values }) => ({ setVisualizationType: (visualizationType: ChartDisplayType) => ({ visualizationType }), @@ -280,12 +285,14 @@ export const dataVisualizationLogic = kea([ colorMode: values.isDarkModeOn ? 
'dark' : 'light', }), setConditionalFormattingRulesPanelActiveKeys: (keys: string[]) => ({ keys }), + _setQuery: (node: DataVisualizationNode) => ({ node }), })), reducers(({ props }) => ({ query: [ props.query, { setQuery: (_, { node }) => node, + _setQuery: (_, { node }) => node, }, ], visualizationType: [ @@ -583,7 +590,7 @@ (state, props) => [props.key, state.dashboardId], (key, dashboardId) => { // Key for SQL editor based visualizations return !key.includes('new-SQL') && !dashboardId }, ], sourceFeatures: [(_, props) => [props.query], (query): Set => getQueryFeatures(query.source)], diff --git a/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx b/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx index d8364d21c2508..d532805db83a5 100644 --- a/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx +++ b/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx @@ -1,7 +1,9 @@ +import { useValues } from 'kea' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { useState } from 'react' +import { groupsModel } from '~/models/groupsModel' import { EventsNode, EventsQuery, HogQLQuery, SessionAttributionExplorerQuery } from '~/queries/schema' import { isHogQLQuery, isSessionAttributionExplorerQuery } from '~/queries/utils' import { AnyPropertyFilter } from '~/types' @@ -21,6 +23,7 @@ export function EventPropertyFilters< isHogQLQuery(query) || isSessionAttributionExplorerQuery(query) ? query.filters?.properties : query.properties const eventNames = isHogQLQuery(query) || isSessionAttributionExplorerQuery(query) ? [] : query.event ? [query.event] : [] + const { groupsTaxonomicTypes } = useValues(groupsModel) return !properties || Array.isArray(properties) ? ( - loadData(true)}> - Refresh - + + loadData(true)} + className={disabledReason ? 'opacity-50' : ''} + disabledReason={canBypassRefreshDisabled ? '' : disabledReason} + > + Refresh + + )}
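
For reference, the throttling rule that feeds the Refresh button's `disabledReason` above can be read as one small pure function. Below is a minimal standalone sketch of the logic the refactored dataNodeLogic selector implements; the dayjs relativeTime extension and the value of the interval constant are assumptions for illustration:

```ts
import dayjs from 'dayjs'
import relativeTime from 'dayjs/plugin/relativeTime'

dayjs.extend(relativeTime) // required for .from()

// Illustrative value only; the real constant is defined alongside dataNodeLogic
const UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES = 3

// Saved insights carry an explicit `nextAllowedRefresh` timestamp from the backend;
// unsaved insights are throttled relative to their last refresh.
// An empty string means refreshing is currently allowed.
function refreshDisabledReason(nextAllowedRefresh: string | null, lastRefresh: string | null): string {
    const now = dayjs()
    if (nextAllowedRefresh) {
        const nextRefreshTime = dayjs(nextAllowedRefresh)
        if (now.isBefore(nextRefreshTime)) {
            return `You can refresh this insight again ${nextRefreshTime.from(now)}`
        }
    }
    if (lastRefresh) {
        const earliestRefresh = dayjs(lastRefresh).add(UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES, 'minutes')
        if (now.isBefore(earliestRefresh)) {
            return `You can refresh this insight again ${earliestRefresh.from(now)}`
        }
    }
    return ''
}
```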
    diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx index 58bcbf6154cde..90f036c734bc4 100644 --- a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx +++ b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx @@ -77,7 +77,7 @@ export function EditorFilters({ query, showing, embedded }: EditorFiltersProps): isStepsFunnel || isTrendsFunnel const hasPathsAdvanced = hasAvailableFeature(AvailableFeature.PATHS_ADVANCED) - const hasAttribution = isStepsFunnel + const hasAttribution = isStepsFunnel || isTrendsFunnel const hasPathsHogQL = isPaths && pathsFilter?.includeEventTypes?.includes(PathType.HogQL) const editorFilters: InsightEditorFilterGroup[] = [ diff --git a/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx b/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx index d75685def2228..ab8b30abc4f5e 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx +++ b/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx @@ -34,7 +34,7 @@ import { isValidBreakdown } from '~/queries/utils' import { ChartDisplayType } from '~/types' export function InsightDisplayConfig(): JSX.Element { - const { insightProps } = useValues(insightLogic) + const { insightProps, canEditInsight } = useValues(insightLogic) const { isTrends, @@ -54,11 +54,15 @@ export function InsightDisplayConfig(): JSX.Element { supportsPercentStackView, yAxisScaleType, isNonTimeSeriesDisplay, + compareFilter, + supportsCompare, } = useValues(insightVizDataLogic(insightProps)) const { isTrendsFunnel, isStepsFunnel, isTimeToConvertFunnel, isEmptyFunnel } = useValues( funnelDataLogic(insightProps) ) + const { updateCompareFilter } = useActions(insightVizDataLogic(insightProps)) + const showCompare = (isTrends && display !== ChartDisplayType.ActionsAreaGraph) || isStickiness const showInterval = isTrendsFunnel || @@ -161,7 +165,11 @@ export function InsightDisplayConfig(): JSX.Element { {showCompare && ( - + )}
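
The compare controls wired into InsightDisplayConfig above now share a single `CompareFilter` shape with the assistant schema (the separate `AssistantCompareFilter` definition is dropped from schema.json and schema.ts below). A hedged sketch of the consolidated shape on a trends query; the series entry and date values are illustrative, not taken from this PR:

```ts
import { CompareFilter, NodeKind, TrendsQuery } from '~/queries/schema'

// `compare` switches the comparison on; `compare_to` is a relative offset,
// e.g. '-7d' compares against the period starting 7 days earlier.
const compareFilter: CompareFilter = {
    compare: true,
    compare_to: '-7d',
}

// Illustrative trends query: the last week against the week before.
// Note that `dateRange` is now the shared DateRange type rather than InsightDateRange.
const query: TrendsQuery = {
    kind: NodeKind.TrendsQuery,
    series: [{ kind: NodeKind.EventsNode, event: '$pageview' }],
    dateRange: { date_from: '-7d', date_to: null },
    compareFilter,
}
```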
    diff --git a/frontend/src/queries/nodes/WebOverview/WebOverview.tsx b/frontend/src/queries/nodes/WebOverview/WebOverview.tsx index 925087cd8f9dc..83af7956c55da 100644 --- a/frontend/src/queries/nodes/WebOverview/WebOverview.tsx +++ b/frontend/src/queries/nodes/WebOverview/WebOverview.tsx @@ -2,11 +2,9 @@ import { IconTrending } from '@posthog/icons' import { LemonSkeleton } from '@posthog/lemon-ui' import { useValues } from 'kea' import { getColorVar } from 'lib/colors' -import { FEATURE_FLAGS } from 'lib/constants' import { IconTrendingDown, IconTrendingFlat } from 'lib/lemon-ui/icons' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { humanFriendlyDuration, humanFriendlyLargeNumber, isNotNil, range } from 'lib/utils' import { useState } from 'react' @@ -42,14 +40,13 @@ export function WebOverview(props: { onData, dataNodeCollectionId: dataNodeCollectionId ?? key, }) - const { featureFlags } = useValues(featureFlagLogic) const { response, responseLoading } = useValues(logic) const webOverviewQueryResponse = response as WebOverviewQueryResponse | undefined const samplingRate = webOverviewQueryResponse?.samplingRate - const numSkeletons = props.query.conversionGoal ? 4 : featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_LCP_SCORE] ? 6 : 5 + const numSkeletons = props.query.conversionGoal ? 4 : 6 return ( <> diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 4cc9cdcdb99f1..b81e4669af38a 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -546,22 +546,6 @@ }, "type": "object" }, - "AssistantCompareFilter": { - "additionalProperties": false, - "properties": { - "compare": { - "default": false, - "description": "Whether to compare the current date range to a previous date range.", - "type": "boolean" - }, - "compare_to": { - "default": "-7d", - "description": "The date range to compare to. The value is a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-100w` for 100 weeks ago, `-14d` for 14 days ago, `-30h` for 30 hours ago.", - "type": "string" - } - }, - "type": "object" - }, "AssistantDateTimePropertyFilter": { "additionalProperties": false, "properties": { @@ -585,7 +569,7 @@ "type": "string" }, "AssistantEventType": { - "enum": ["status", "message"], + "enum": ["status", "message", "conversation"], "type": "string" }, "AssistantFunnelsBreakdownFilter": { @@ -742,7 +726,7 @@ "description": "Breakdown the chart by a property" }, "dateRange": { - "$ref": "#/definitions/AssistantInsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -1043,27 +1027,11 @@ } ] }, - "AssistantInsightDateRange": { - "additionalProperties": false, - "properties": { - "date_from": { - "default": "-7d", - "description": "Start date. The value can be:\n- a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-1w` for 1 week ago, `-14d` for 14 days ago, `-30h` for 30 hours ago.\n- an absolute ISO 8601 date string. a constant `yStart` for the current year start. a constant `mStart` for the current month start. a constant `dStart` for the current day start. Prefer using relative dates.", - "type": ["string", "null"] - }, - "date_to": { - "default": null, - "description": "Right boundary of the date range. Use `null` for the current date. 
You can not use relative dates here.", - "type": ["string", "null"] - } - }, - "type": "object" - }, "AssistantInsightsQueryBase": { "additionalProperties": false, "properties": { "dateRange": { - "$ref": "#/definitions/AssistantInsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -1092,9 +1060,8 @@ "content": { "type": "string" }, - "done": { - "description": "We only need this \"done\" value to tell when the particular message is finished during its streaming. It won't be necessary when we optimize streaming to NOT send the entire message every time a character is added.", - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "ai", @@ -1365,7 +1332,7 @@ "description": "Compare to date range" }, "dateRange": { - "$ref": "#/definitions/AssistantInsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -1469,8 +1436,25 @@ ], "type": "string" }, + "BaseAssistantMessage": { + "additionalProperties": false, + "properties": { + "id": { + "type": "string" + } + }, + "type": "object" + }, "BaseMathType": { - "enum": ["total", "dau", "weekly_active", "monthly_active", "unique_session", "first_time_for_user"], + "enum": [ + "total", + "dau", + "weekly_active", + "monthly_active", + "unique_session", + "first_time_for_user", + "first_matching_event_for_user" + ], "type": "string" }, "BinCountValue": { @@ -2246,6 +2230,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "timezone": { "type": "string" }, @@ -3792,9 +3779,12 @@ "additionalProperties": false, "properties": { "compare": { + "default": false, + "description": "Whether to compare the current date range to a previous date range.", "type": "boolean" }, "compare_to": { + "description": "The date range to compare to. The value is a relative date. 
Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-100w` for 100 weeks ago, `-14d` for 14 days ago, `-30h` for 30 hours ago.", "type": "string" } }, @@ -4585,6 +4575,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -6090,6 +6083,9 @@ }, "response": { "$ref": "#/definitions/ExperimentTrendsQueryResponse" + }, + "stats_version": { + "type": "integer" } }, "required": ["count_query", "kind"], @@ -6140,6 +6136,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -6200,16 +6199,15 @@ "content": { "type": "string" }, - "done": { - "const": true, - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "ai/failure", "type": "string" } }, - "required": ["type", "done"], + "required": ["type"], "type": "object" }, "FeaturePropertyFilter": { @@ -6853,7 +6851,7 @@ "description": "Breakdown of the events and actions" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -7245,6 +7243,12 @@ "query": { "type": "string" }, + "table_names": { + "items": { + "type": "string" + }, + "type": "array" + }, "warnings": { "items": { "$ref": "#/definitions/HogQLNotice" @@ -7533,17 +7537,15 @@ "content": { "type": "string" }, - "done": { - "const": true, - "description": "Human messages are only appended when done.", - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "human", "type": "string" } }, - "required": ["type", "content", "done"], + "required": ["type", "content"], "type": "object" }, "InsightActorsQuery": { @@ -7766,24 +7768,6 @@ }, "type": "object" }, - "InsightDateRange": { - "additionalProperties": false, - "properties": { - "date_from": { - "default": "-7d", - "type": ["string", "null"] - }, - "date_to": { - "type": ["string", "null"] - }, - "explicitDate": { - "default": false, - "description": "Whether the date_from and date_to should be used verbatim. 
Disables rounding to the start and end of period.", - "type": ["boolean", "null"] - } - }, - "type": "object" - }, "InsightFilter": { "anyOf": [ { @@ -7946,7 +7930,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8003,7 +7987,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8060,7 +8044,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8117,7 +8101,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8174,7 +8158,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8284,7 +8268,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8693,7 +8677,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -9406,6 +9390,12 @@ "query": { "type": "string" }, + "table_names": { + "items": { + "type": "string" + }, + "type": "array" + }, "warnings": { "items": { "$ref": "#/definitions/HogQLNotice" @@ -9855,6 +9845,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -10483,6 +10476,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -11137,9 +11133,8 @@ "content": { "type": "string" }, - "done": { - "const": true, - "type": "boolean" + "id": { + "type": "string" }, "substeps": { "items": { @@ -11152,7 +11147,7 @@ "type": "string" } }, - "required": ["type", "content", "done"], + "required": ["type", "content"], "type": "object" }, "RecordingOrder": { @@ -11455,7 +11450,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -11599,17 +11594,15 @@ "content": { "type": "string" }, - "done": { - "const": true, - "description": "Router messages are not streamed, so they can only be done.", - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "ai/router", "type": "string" } }, - "required": ["type", "content", "done"], + "required": ["type", "content"], "type": "object" }, "SamplingRate": { @@ -12127,7 +12120,7 @@ "description": "Compare to date range" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -12666,8 +12659,19 @@ "$ref": "#/definitions/CompareFilter", 
"description": "Compare to date range" }, + "conversionGoal": { + "anyOf": [ + { + "$ref": "#/definitions/WebAnalyticsConversionGoal" + }, + { + "type": "null" + } + ], + "description": "Whether we should be comparing against a specific conversion goal" + }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -12778,8 +12782,11 @@ } ] }, - "done": { - "type": "boolean" + "id": { + "type": "string" + }, + "initiator": { + "type": "string" }, "plan": { "type": "string" @@ -12858,6 +12865,9 @@ "WebExternalClicksTableQuery": { "additionalProperties": false, "properties": { + "compareFilter": { + "$ref": "#/definitions/CompareFilter" + }, "conversionGoal": { "anyOf": [ { @@ -12971,6 +12981,9 @@ "WebGoalsQuery": { "additionalProperties": false, "properties": { + "compareFilter": { + "$ref": "#/definitions/CompareFilter" + }, "conversionGoal": { "anyOf": [ { @@ -13110,8 +13123,8 @@ "WebOverviewQuery": { "additionalProperties": false, "properties": { - "compare": { - "type": "boolean" + "compareFilter": { + "$ref": "#/definitions/CompareFilter" }, "conversionGoal": { "anyOf": [ @@ -13242,6 +13255,9 @@ "breakdownBy": { "$ref": "#/definitions/WebStatsBreakdown" }, + "compareFilter": { + "$ref": "#/definitions/CompareFilter" + }, "conversionGoal": { "anyOf": [ { diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 790bfd5b681c3..7375910003a3f 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -367,6 +367,7 @@ export interface HogQLMetadataResponse { warnings: HogQLNotice[] notices: HogQLNotice[] query_status?: never + table_names?: string[] } export type AutocompleteCompletionItemKind = @@ -819,7 +820,7 @@ interface InsightVizNodeViewProps { /** Base class for insight query nodes. Should not be used directly. */ export interface InsightsQueryBase> extends Node { /** Date range for the query */ - dateRange?: InsightDateRange + dateRange?: DateRange /** * Exclude internal and test users by applying the respective filters * @@ -910,6 +911,8 @@ export interface TrendsQuery extends InsightsQueryBase { breakdownFilter?: BreakdownFilter /** Compare to date range */ compareFilter?: CompareFilter + /** Whether we should be comparing against a specific conversion goal */ + conversionGoal?: WebAnalyticsConversionGoal | null } export type AssistantArrayPropertyFilterOperator = PropertyOperator.Exact | PropertyOperator.IsNot @@ -1002,31 +1005,11 @@ export type AssistantGroupPropertyFilter = AssistantBasePropertyFilter & { export type AssistantPropertyFilter = AssistantGenericPropertyFilter | AssistantGroupPropertyFilter -export interface AssistantInsightDateRange { - /** - * Start date. The value can be: - * - a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-1w` for 1 week ago, `-14d` for 14 days ago, `-30h` for 30 hours ago. - * - an absolute ISO 8601 date string. - * a constant `yStart` for the current year start. - * a constant `mStart` for the current month start. - * a constant `dStart` for the current day start. - * Prefer using relative dates. - * @default -7d - */ - date_from?: string | null - - /** - * Right boundary of the date range. Use `null` for the current date. You can not use relative dates here. 
- * @default null - */ - date_to?: string | null -} - export interface AssistantInsightsQueryBase { /** * Date range for the query */ - dateRange?: AssistantInsightDateRange + dateRange?: DateRange /** * Exclude internal and test users by applying the respective filters @@ -1169,7 +1152,7 @@ export interface AssistantTrendsFilter { yAxisScaleType?: TrendsFilterLegacy['y_axis_scale_type'] } -export interface AssistantCompareFilter { +export interface CompareFilter { /** * Whether to compare the current date range to a previous date range. * @default false @@ -1178,7 +1161,6 @@ export interface AssistantCompareFilter { /** * The date range to compare to. The value is a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-100w` for 100 weeks ago, `-14d` for 14 days ago, `-30h` for 30 hours ago. - * @default -7d */ compare_to?: string } @@ -1787,6 +1769,7 @@ interface WebAnalyticsQueryBase> extends DataNode< dateRange?: DateRange properties: WebAnalyticsPropertyFilters conversionGoal?: WebAnalyticsConversionGoal | null + compareFilter?: CompareFilter sampling?: { enabled?: boolean forceSamplingRate?: SamplingRate @@ -1798,7 +1781,6 @@ interface WebAnalyticsQueryBase> extends DataNode< export interface WebOverviewQuery extends WebAnalyticsQueryBase { kind: NodeKind.WebOverviewQuery - compare?: boolean includeLCPScore?: boolean } @@ -2004,6 +1986,7 @@ export interface ExperimentTrendsQueryResponse { probability: Record significant: boolean significance_code: ExperimentSignificanceCode + stats_version?: integer p_value: number credible_intervals: Record } @@ -2039,6 +2022,7 @@ export interface ExperimentTrendsQuery extends DataNode export interface Breakdown { @@ -2353,11 +2326,6 @@ export interface BreakdownFilter { breakdown_hide_other_aggregation?: boolean | null // hides the "other" field for trends } -export interface CompareFilter { - compare?: boolean - compare_to?: string -} - // TODO: Rename to `DashboardFilters` for consistency with `HogQLFilters` export interface DashboardFilter { date_from?: string | null @@ -2484,48 +2452,41 @@ export enum AssistantMessageType { Router = 'ai/router', } -export interface HumanMessage { +export interface BaseAssistantMessage { + id?: string +} + +export interface HumanMessage extends BaseAssistantMessage { type: AssistantMessageType.Human content: string - /** Human messages are only appended when done. */ - done: true } -export interface AssistantMessage { +export interface AssistantMessage extends BaseAssistantMessage { type: AssistantMessageType.Assistant content: string - /** - * We only need this "done" value to tell when the particular message is finished during its streaming. - * It won't be necessary when we optimize streaming to NOT send the entire message every time a character is added. 
- */ - done?: boolean } -export interface ReasoningMessage { +export interface ReasoningMessage extends BaseAssistantMessage { type: AssistantMessageType.Reasoning content: string substeps?: string[] - done: true } -export interface VisualizationMessage { +export interface VisualizationMessage extends BaseAssistantMessage { type: AssistantMessageType.Visualization plan?: string answer?: AssistantTrendsQuery | AssistantFunnelsQuery - done?: boolean + initiator?: string } -export interface FailureMessage { +export interface FailureMessage extends BaseAssistantMessage { type: AssistantMessageType.Failure content?: string - done: true } -export interface RouterMessage { +export interface RouterMessage extends BaseAssistantMessage { type: AssistantMessageType.Router content: string - /** Router messages are not streamed, so they can only be done. */ - done: true } export type RootAssistantMessage = @@ -2539,6 +2500,7 @@ export type RootAssistantMessage = export enum AssistantEventType { Status = 'status', Message = 'message', + Conversation = 'conversation', } export enum AssistantGenerationStatusType { diff --git a/frontend/src/scenes/actions/actionLogic.ts b/frontend/src/scenes/actions/actionLogic.ts index e3a7791b6cdc1..a3101cb8d9daf 100644 --- a/frontend/src/scenes/actions/actionLogic.ts +++ b/frontend/src/scenes/actions/actionLogic.ts @@ -5,7 +5,8 @@ import { DataManagementTab } from 'scenes/data-management/DataManagementScene' import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' -import { ActionType, Breadcrumb, HogFunctionType } from '~/types' +import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { ActionType, ActivityScope, Breadcrumb, HogFunctionType } from '~/types' import { actionEditLogic } from './actionEditLogic' import type { actionLogicType } from './actionLogicType' @@ -104,6 +105,18 @@ export const actionLogic = kea([ (s) => [s.action], (action) => action?.steps?.some((step) => step.properties?.find((p) => p.type === 'cohort')) ?? false, ], + + activityFilters: [ + (s) => [s.action], + (action): ActivityFilters | null => { + return action?.id + ? { + scope: ActivityScope.ACTION, + item_id: String(action.id), + } + : null + }, + ], }), listeners(({ actions, values }) => ({ checkIsFinished: ({ action }) => { diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index 5beec89927201..f6a646f64f7c4 100644 --- a/frontend/src/scenes/appScenes.ts +++ b/frontend/src/scenes/appScenes.ts @@ -30,6 +30,7 @@ export const appScenes: Record any> = { [Scene.Experiments]: () => import('./experiments/Experiments'), [Scene.Experiment]: () => import('./experiments/Experiment'), [Scene.FeatureFlags]: () => import('./feature-flags/FeatureFlags'), + [Scene.FeatureManagement]: () => import('./feature-flags/FeatureManagement'), [Scene.FeatureFlag]: () => import('./feature-flags/FeatureFlag'), [Scene.EarlyAccessFeatures]: () => import('./early-access-features/EarlyAccessFeatures'), [Scene.EarlyAccessFeature]: () => import('./early-access-features/EarlyAccessFeature'), diff --git a/frontend/src/scenes/authentication/Login2FA.tsx b/frontend/src/scenes/authentication/Login2FA.tsx index f620856bf4541..b420c0fc79a3c 100644 --- a/frontend/src/scenes/authentication/Login2FA.tsx +++ b/frontend/src/scenes/authentication/Login2FA.tsx @@ -25,7 +25,7 @@ export function Login2FA(): JSX.Element { >

    Two-Factor Authentication

    - Enter a token from your authenticator app.
    + Enter a token from your authenticator app or a backup code.

    {generalError && {generalError.detail}} diff --git a/frontend/src/scenes/authentication/Setup2FA.tsx b/frontend/src/scenes/authentication/TwoFactorSetup.tsx similarity index 78% rename from frontend/src/scenes/authentication/Setup2FA.tsx rename to frontend/src/scenes/authentication/TwoFactorSetup.tsx index 617cd208267cd..57c73f3a655a4 100644 --- a/frontend/src/scenes/authentication/Setup2FA.tsx +++ b/frontend/src/scenes/authentication/TwoFactorSetup.tsx @@ -6,17 +6,17 @@ import { Form } from 'kea-forms' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonField } from 'lib/lemon-ui/LemonField' -import { setup2FALogic } from './setup2FALogic' +import { twoFactorLogic } from './twoFactorLogic' -export function Setup2FA({ onSuccess }: { onSuccess: () => void }): JSX.Element | null { - const { startSetupLoading, generalError } = useValues(setup2FALogic({ onSuccess })) +export function TwoFactorSetup({ onSuccess }: { onSuccess: () => void }): JSX.Element | null { + const { startSetupLoading, generalError } = useValues(twoFactorLogic({ onSuccess })) if (startSetupLoading) { return null } return ( <> - +
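
Alongside this rename, twoFactorLogic (further down) gains modal state with an optional force-open mode, which the new TwoFactorSetupModal below consumes. A minimal sketch of driving that state imperatively via the actions added in this PR; mounting the logic by hand is for illustration only, as components keep it mounted through useValues/useActions:

```ts
import { twoFactorLogic } from 'scenes/authentication/twoFactorLogic'

// kea logics must be mounted before their actions can be dispatched
const unmount = twoFactorLogic.mount()

// Voluntary setup: the modal opens and the user may dismiss it
twoFactorLogic.actions.openTwoFactorSetupModal()

// Enforced setup: `forceOpen` renders the modal non-closable, which is how an
// organization-level `enforce_2fa` requirement is surfaced on mount
twoFactorLogic.actions.openTwoFactorSetupModal(true)

// Closing also resets the token form via the closeTwoFactorSetupModal listener
twoFactorLogic.actions.closeTwoFactorSetupModal()

unmount()
```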
    diff --git a/frontend/src/scenes/authentication/TwoFactorSetupModal.tsx b/frontend/src/scenes/authentication/TwoFactorSetupModal.tsx new file mode 100644 index 0000000000000..ae63d8649e87d --- /dev/null +++ b/frontend/src/scenes/authentication/TwoFactorSetupModal.tsx @@ -0,0 +1,38 @@ +import { useActions, useValues } from 'kea' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { LemonModal } from 'lib/lemon-ui/LemonModal' +import { membersLogic } from 'scenes/organization/membersLogic' +import { userLogic } from 'scenes/userLogic' + +import { twoFactorLogic } from './twoFactorLogic' +import { TwoFactorSetup } from './TwoFactorSetup' + +export function TwoFactorSetupModal(): JSX.Element { + const { isTwoFactorSetupModalOpen, forceOpenTwoFactorSetupModal } = useValues(twoFactorLogic) + const { closeTwoFactorSetupModal } = useActions(twoFactorLogic) + + return ( + closeTwoFactorSetupModal() : undefined} + closable={!forceOpenTwoFactorSetupModal} + > +
    + {forceOpenTwoFactorSetupModal && ( + + Your organization requires you to set up 2FA. + + )} +

    Use an authenticator app like Google Authenticator or 1Password to scan the QR code below.

    + { + closeTwoFactorSetupModal() + userLogic.actions.loadUser() + membersLogic.actions.loadAllMembers() + }} + /> +
    +
    + ) +} diff --git a/frontend/src/scenes/authentication/inviteSignupLogic.ts b/frontend/src/scenes/authentication/inviteSignupLogic.ts index 67d66bbecf30e..2e5be913f1f92 100644 --- a/frontend/src/scenes/authentication/inviteSignupLogic.ts +++ b/frontend/src/scenes/authentication/inviteSignupLogic.ts @@ -81,9 +81,12 @@ export const inviteSignupLogic = kea([ forms(({ actions, values }) => ({ signup: { defaults: { role_at_organization: '' } as AcceptInvitePayloadInterface, - errors: ({ password, first_name }) => ({ - password: !password ? 'Please enter your password to continue' : values.validatedPassword.feedback, + errors: ({ password, first_name, role_at_organization }) => ({ + password: !password + ? 'Please enter your password to continue' + : values.validatedPassword.feedback || undefined, first_name: !first_name ? 'Please enter your name' : undefined, + role_at_organization: !role_at_organization ? 'Please select your role to continue' : undefined, }), submit: async (payload, breakpoint) => { breakpoint() diff --git a/frontend/src/scenes/authentication/login2FALogic.ts b/frontend/src/scenes/authentication/login2FALogic.ts index 6232f6225312a..179462746a3b5 100644 --- a/frontend/src/scenes/authentication/login2FALogic.ts +++ b/frontend/src/scenes/authentication/login2FALogic.ts @@ -45,16 +45,12 @@ export const login2FALogic = kea([ twofactortoken: { defaults: { token: '' } as TwoFactorForm, errors: ({ token }) => ({ - token: !token - ? 'Please enter a token to continue' - : token.length !== 6 || isNaN(parseInt(token)) - ? 'A token must consist of 6 digits' - : null, + token: !token ? 'Please enter a token to continue' : null, }), submit: async ({ token }, breakpoint) => { breakpoint() try { - return await api.create('api/login/token', { token }) + return await api.create('api/login/token', { token }) } catch (e) { const { code, detail } = e as Record actions.setGeneralError(code, detail) diff --git a/frontend/src/scenes/authentication/loginLogic.ts b/frontend/src/scenes/authentication/loginLogic.ts index 19d73edc00158..6ace53926171f 100644 --- a/frontend/src/scenes/authentication/loginLogic.ts +++ b/frontend/src/scenes/authentication/loginLogic.ts @@ -86,7 +86,7 @@ export const loginLogic = kea([ } breakpoint() - const response = await api.create('api/login/precheck', { email }) + const response = await api.create('api/login/precheck', { email }) return { status: 'completed', ...response } }, }, @@ -102,7 +102,7 @@ export const loginLogic = kea([ submit: async ({ email, password }, breakpoint) => { breakpoint() try { - return await api.create('api/login', { email, password }) + return await api.create('api/login', { email, password }) } catch (e) { const { code } = e as Record let { detail } = e as Record diff --git a/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts b/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts index 55ef8dc85375d..b1b15274f4a44 100644 --- a/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts +++ b/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts @@ -31,7 +31,7 @@ export const verifyEmailLogic = kea([ { validateEmailToken: async ({ uuid, token }: { uuid: string; token: string }, breakpoint) => { try { - await api.create(`api/users/${uuid}/verify_email/`, { token, uuid }) + await api.create(`api/users/verify_email/`, { token, uuid }) actions.setView('success') await breakpoint(2000) window.location.href = '/' @@ -48,7 +48,7 @@ export const verifyEmailLogic = kea([ 
{ requestVerificationLink: async ({ uuid }: { uuid: string }) => { try { - await api.create(`api/users/${uuid}/request_email_verification/`, { uuid }) + await api.create(`api/users/request_email_verification/`, { uuid }) lemonToast.success( 'A new verification link has been sent to the associated email address. Please check your inbox.' ) diff --git a/frontend/src/scenes/authentication/setup2FALogic.ts b/frontend/src/scenes/authentication/twoFactorLogic.ts similarity index 58% rename from frontend/src/scenes/authentication/setup2FALogic.ts rename to frontend/src/scenes/authentication/twoFactorLogic.ts index 7ea0f4a498510..37c331b809868 100644 --- a/frontend/src/scenes/authentication/setup2FALogic.ts +++ b/frontend/src/scenes/authentication/twoFactorLogic.ts @@ -4,9 +4,11 @@ import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import api from 'lib/api' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { membersLogic } from 'scenes/organization/membersLogic' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' +import { userLogic } from 'scenes/userLogic' -import type { setup2FALogicType } from './setup2FALogicType' +import type { twoFactorLogicType } from './twoFactorLogicType' export interface TwoFactorForm { token: number | null @@ -18,24 +20,55 @@ export interface TwoFactorStatus { method: string | null } -export interface Setup2FALogicProps { +export interface TwoFactorLogicProps { onSuccess?: () => void } -export const setup2FALogic = kea([ +export const twoFactorLogic = kea([ path(['scenes', 'authentication', 'loginLogic']), - props({} as Setup2FALogicProps), + props({} as TwoFactorLogicProps), connect({ - values: [preflightLogic, ['preflight'], featureFlagLogic, ['featureFlags']], + values: [preflightLogic, ['preflight'], featureFlagLogic, ['featureFlags'], userLogic, ['user']], + actions: [userLogic, ['loadUser'], membersLogic, ['loadAllMembers']], }), actions({ setGeneralError: (code: string, detail: string) => ({ code, detail }), clearGeneralError: true, - setup: true, loadStatus: true, generateBackupCodes: true, + disable2FA: true, + openTwoFactorSetupModal: (forceOpen?: boolean) => ({ forceOpen }), + closeTwoFactorSetupModal: true, + toggleDisable2FAModal: (open: boolean) => ({ open }), + toggleBackupCodesModal: (open: boolean) => ({ open }), }), reducers({ + isTwoFactorSetupModalOpen: [ + false, + { + openTwoFactorSetupModal: () => true, + closeTwoFactorSetupModal: () => false, + }, + ], + forceOpenTwoFactorSetupModal: [ + false, + { + openTwoFactorSetupModal: (_, { forceOpen }) => !!forceOpen, + closeTwoFactorSetupModal: () => false, + }, + ], + isDisable2FAModalOpen: [ + false, + { + toggleDisable2FAModal: (_, { open }) => open, + }, + ], + isBackupCodesModalOpen: [ + false, + { + toggleBackupCodesModal: (_, { open }) => open, + }, + ], generalError: [ null as { code: string; detail: string } | null, { @@ -67,9 +100,9 @@ export const setup2FALogic = kea([ startSetup: [ {}, { - setup: async (_, breakpoint) => { + openTwoFactorSetupModal: async (_, breakpoint) => { breakpoint() - await api.get('api/users/@me/start_2fa_setup/') + await api.get('api/users/@me/two_factor_start_setup/') return { status: 'completed' } }, }, @@ -86,21 +119,7 @@ export const setup2FALogic = kea([ null as { backup_codes: string[] } | null, { generateBackupCodes: async () => { - return await api.create('api/users/@me/two_factor_backup_codes/') - }, - }, - ], - disable2FA: [ - false, - { - disable2FA: async () => { - try { - await 
api.create('api/users/@me/two_factor_disable/') - return true - } catch (e) { - const { code, detail } = e as Record - throw { code, detail } - } + return await api.create('api/users/@me/two_factor_backup_codes/') }, }, ], @@ -114,7 +133,7 @@ export const setup2FALogic = kea([ submit: async ({ token }, breakpoint) => { breakpoint() try { - return await api.create('api/users/@me/validate_2fa/', { token }) + return await api.create('api/users/@me/two_factor_validate/', { token }) } catch (e) { const { code, detail } = e as Record actions.setGeneralError(code, detail) @@ -129,16 +148,35 @@ export const setup2FALogic = kea([ actions.loadStatus() props.onSuccess?.() }, - disable2FASuccess: () => { - lemonToast.success('2FA disabled successfully') + disable2FA: async () => { + try { + await api.create('api/users/@me/two_factor_disable/') + lemonToast.success('2FA disabled successfully') + actions.loadStatus() + + // Refresh user and members + actions.loadUser() + actions.loadAllMembers() + } catch (e) { + const { code, detail } = e as Record + actions.setGeneralError(code, detail) + throw e + } }, generateBackupCodesSuccess: () => { lemonToast.success('Backup codes generated successfully') }, + closeTwoFactorSetupModal: () => { + // Clear the form when closing the modal + actions.resetToken() + }, })), - afterMount(({ actions }) => { - actions.setup() + afterMount(({ actions, values }) => { actions.loadStatus() + + if (values.user && values.user.organization?.enforce_2fa && !values.user.is_2fa_enabled) { + actions.openTwoFactorSetupModal(true) + } }), ]) diff --git a/frontend/src/scenes/billing/billingLogic.tsx b/frontend/src/scenes/billing/billingLogic.tsx index ac78f13424b72..d6f0fa86493a1 100644 --- a/frontend/src/scenes/billing/billingLogic.tsx +++ b/frontend/src/scenes/billing/billingLogic.tsx @@ -4,6 +4,7 @@ import { FieldNamePath, forms } from 'kea-forms' import { loaders } from 'kea-loaders' import { router, urlToAction } from 'kea-router' import api, { getJSONOrNull } from 'lib/api' +import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { LemonBannerAction } from 'lib/lemon-ui/LemonBanner/LemonBanner' import { lemonBannerLogic } from 'lib/lemon-ui/LemonBanner/lemonBannerLogic' @@ -211,7 +212,13 @@ export const billingLogic = kea([ null as BillingType | null, { loadBilling: async () => { - const response = await api.get('api/billing') + // Note: this is a temporary flag to skip forecasting in the billing page + // for customers running into performance issues until we have a more permanent fix + // of splitting the billing and forecasting data. + const skipForecasting = values.featureFlags[FEATURE_FLAGS.BILLING_SKIP_FORECASTING] + const response = await api.get( + 'api/billing' + (skipForecasting ? 
'?include_forecasting=false' : '') + ) return parseBillingResponse(response) }, diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx b/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx index 03757f853741f..e694c94ea1ad0 100644 --- a/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx +++ b/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx @@ -1,36 +1,46 @@ +import { NodeKind } from '~/queries/schema' + import { applyTemplate } from './newDashboardLogic' describe('template function in newDashboardLogic', () => { it('ignores unused variables', () => { expect( - applyTemplate({ a: 'hello', b: 'hi' }, [ - { - id: 'VARIABLE_1', - name: 'a', - default: { - event: '$pageview', + applyTemplate( + { a: 'hello', b: 'hi' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + event: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', }, - description: 'The description of the variable', - required: true, - type: 'event', - }, - ]) + ], + null + ) ).toEqual({ a: 'hello', b: 'hi' }) }) it('uses identified variables', () => { expect( - applyTemplate({ a: '{VARIABLE_1}', b: 'hi' }, [ - { - id: 'VARIABLE_1', - name: 'a', - default: { - event: '$pageview', + applyTemplate( + { a: '{VARIABLE_1}', b: 'hi' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + event: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', }, - description: 'The description of the variable', - required: true, - type: 'event', - }, - ]) + ], + null + ) ).toEqual({ a: { event: '$pageview', @@ -38,4 +48,85 @@ describe('template function in newDashboardLogic', () => { b: 'hi', }) }) + + it('replaces variables in query based tiles', () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.TrendsQuery + ) + ).toEqual({ + a: { + event: '$pageview', + kind: 'EventsNode', + math: 'total', + }, + }) + }) + + it("removes the math property from query based tiles that don't support it", () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.LifecycleQuery + ) + ).toEqual({ + a: { + event: '$pageview', + kind: 'EventsNode', + }, + }) + }) + + it('removes the math property from retention insight tiles', () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + math: 'dau' as any, + type: 'events' as any, + }, + description: 'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.RetentionQuery + ) + ).toEqual({ + a: { + id: '$pageview', + type: 'events', + }, + }) + }) }) diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.ts b/frontend/src/scenes/dashboard/newDashboardLogic.ts index 6749067872258..564a24f736c1f 100644 --- a/frontend/src/scenes/dashboard/newDashboardLogic.ts +++ b/frontend/src/scenes/dashboard/newDashboardLogic.ts @@ -5,11 +5,15 @@ import api from 'lib/api' import { DashboardRestrictionLevel } from 'lib/constants' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { MathAvailability } from 
'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { dashboardsModel } from '~/models/dashboardsModel' +import { legacyEntityToNode, sanitizeRetentionEntity } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { getQueryBasedDashboard } from '~/queries/nodes/InsightViz/utils' +import { NodeKind } from '~/queries/schema' +import { isInsightVizNode } from '~/queries/utils' import { DashboardTemplateType, DashboardTemplateVariableType, DashboardTile, DashboardType, JsonType } from '~/types' import type { newDashboardLogicType } from './newDashboardLogicType' @@ -35,24 +39,47 @@ } // Currently this is a very generic recursive function in case we want to add template variables to aspects beyond events -export function applyTemplate(obj: DashboardTile | JsonType, variables: DashboardTemplateVariableType[]): JsonType { +export function applyTemplate( + obj: DashboardTile | JsonType, + variables: DashboardTemplateVariableType[], + queryKind: NodeKind | null +): JsonType { if (typeof obj === 'string') { if (obj.startsWith('{') && obj.endsWith('}')) { const variableId = obj.substring(1, obj.length - 1) const variable = variables.find((variable) => variable.id === variableId) if (variable && variable.default) { + // added for future compatibility - at the moment we only have event variables + const isEventVariable = variable.type === 'event' + + if (queryKind && isEventVariable) { + let mathAvailability = MathAvailability.None + if (queryKind === NodeKind.TrendsQuery) { + mathAvailability = MathAvailability.All + } else if (queryKind === NodeKind.StickinessQuery) { + mathAvailability = MathAvailability.ActorsOnly + } else if (queryKind === NodeKind.FunnelsQuery) { + mathAvailability = MathAvailability.FunnelsOnly + } + return ( + queryKind === NodeKind.RetentionQuery + ? sanitizeRetentionEntity(variable.default as any) + : legacyEntityToNode(variable.default as any, true, mathAvailability) + ) as JsonType + } + return variable.default as JsonType } return obj } } if (Array.isArray(obj)) { - return obj.map((item) => applyTemplate(item, variables)) + return obj.map((item) => applyTemplate(item, variables, queryKind)) } if (typeof obj === 'object' && obj !== null) { const newObject: JsonType = {} for (const [key, value] of Object.entries(obj)) { - newObject[key] = applyTemplate(value, variables) + newObject[key] = applyTemplate(value, variables, queryKind) } return newObject } @@ -60,7 +87,15 @@ } function makeTilesUsingVariables(tiles: DashboardTile[], variables: DashboardTemplateVariableType[]): JsonType[] { - return tiles.map((tile: DashboardTile) => applyTemplate(tile, variables)) + return tiles.map((tile: DashboardTile) => { + const isQueryBased = 'query' in tile && tile.query != null + const queryKind: NodeKind | null = isQueryBased + ? isInsightVizNode(tile.query as any) + ? 
(tile.query as any)?.source.kind + : (tile.query as any)?.kind + : null + return applyTemplate(tile, variables, queryKind) + }) } export const newDashboardLogic = kea([ diff --git a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx index 3004b8ee60daf..e888ee9abb21d 100644 --- a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx +++ b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx @@ -5,7 +5,6 @@ import { LemonButton, LemonCheckbox, LemonDivider, - LemonDropdown, LemonInput, LemonModal, LemonSelect, @@ -14,12 +13,12 @@ import { import { useActions, useValues } from 'kea' import { Field, Form } from 'kea-forms' import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { HogQLEditor } from 'lib/components/HogQLEditor/HogQLEditor' +import { HogQLDropdown } from 'lib/components/HogQLDropdown/HogQLDropdown' import { IconSwapHoriz } from 'lib/lemon-ui/icons' import { useState } from 'react' import { viewLinkLogic } from 'scenes/data-warehouse/viewLinkLogic' -import { DatabaseSchemaField, NodeKind } from '~/queries/schema' +import { DatabaseSchemaField } from '~/queries/schema' export function ViewLinkModal(): JSX.Element { const { isJoinTableModalOpen } = useValues(viewLinkLogic) @@ -122,6 +121,7 @@ export function ViewLinkForm(): JSX.Element { /> {sourceIsUsingHogQLExpression && ( {joiningIsUsingHogQLExpression && ( void -}): JSX.Element => { - const [isHogQLDropdownVisible, setIsHogQLDropdownVisible] = useState(false) - - return ( -
    - setIsHogQLDropdownVisible(false)} - overlay={ - // eslint-disable-next-line react/forbid-dom-props -
    - { - onHogQLValueChange(currentValue) - setIsHogQLDropdownVisible(false) - }} - /> -
    - } - > - setIsHogQLDropdownVisible(!isHogQLDropdownVisible)} - > - {hogQLValue} - -
    -
    - ) -} - interface KeyLabelProps { column: DatabaseSchemaField } diff --git a/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx b/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx index 3576303ebddd9..edf174a7cf216 100644 --- a/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx +++ b/frontend/src/scenes/data-warehouse/editor/EditorScene.tsx @@ -8,6 +8,7 @@ import { useRef } from 'react' import { Sidebar } from '~/layout/navigation-3000/components/Sidebar' import { navigation3000Logic } from '~/layout/navigation-3000/navigationLogic' +import { ViewLinkModal } from '../ViewLinkModal' import { editorSceneLogic } from './editorSceneLogic' import { editorSizingLogic } from './editorSizingLogic' import { QueryWindow } from './QueryWindow' @@ -47,6 +48,7 @@ export function EditorScene(): JSX.Element { )}
    + ) } diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx index 988323bd02093..2fd8adf883adb 100644 --- a/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx +++ b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx @@ -1,95 +1,52 @@ import 'react-data-grid/lib/styles.css' import { IconGear } from '@posthog/icons' -import { LemonButton, LemonTabs, Spinner } from '@posthog/lemon-ui' +import { LemonButton, LemonTabs } from '@posthog/lemon-ui' import clsx from 'clsx' -import { BindLogic, useActions, useValues } from 'kea' -import { router } from 'kea-router' +import { useActions, useValues } from 'kea' import { AnimationType } from 'lib/animations/animations' import { Animation } from 'lib/components/Animation/Animation' import { ExportButton } from 'lib/components/ExportButton/ExportButton' import { useMemo } from 'react' import DataGrid from 'react-data-grid' -import { InsightErrorState } from 'scenes/insights/EmptyStates' -import { insightDataLogic } from 'scenes/insights/insightDataLogic' -import { insightLogic } from 'scenes/insights/insightLogic' +import { InsightErrorState, StatelessInsightLoadingState } from 'scenes/insights/EmptyStates' import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber' import { KeyboardShortcut } from '~/layout/navigation-3000/components/KeyboardShortcut' import { themeLogic } from '~/layout/navigation-3000/themeLogic' -import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' +import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic' +import { ElapsedTime } from '~/queries/nodes/DataNode/ElapsedTime' import { LineGraph } from '~/queries/nodes/DataVisualization/Components/Charts/LineGraph' import { SideBar } from '~/queries/nodes/DataVisualization/Components/SideBar' import { Table } from '~/queries/nodes/DataVisualization/Components/Table' import { TableDisplay } from '~/queries/nodes/DataVisualization/Components/TableDisplay' -import { variableModalLogic } from '~/queries/nodes/DataVisualization/Components/Variables/variableModalLogic' +import { AddVariableButton } from '~/queries/nodes/DataVisualization/Components/Variables/AddVariableButton' import { VariablesForInsight } from '~/queries/nodes/DataVisualization/Components/Variables/Variables' import { variablesLogic } from '~/queries/nodes/DataVisualization/Components/Variables/variablesLogic' import { DataTableVisualizationProps } from '~/queries/nodes/DataVisualization/DataVisualization' -import { - dataVisualizationLogic, - DataVisualizationLogicProps, -} from '~/queries/nodes/DataVisualization/dataVisualizationLogic' -import { displayLogic } from '~/queries/nodes/DataVisualization/displayLogic' -import { DataVisualizationNode, HogQLQueryResponse, NodeKind } from '~/queries/schema' -import { ChartDisplayType, ExporterFormat, ItemMode } from '~/types' +import { dataVisualizationLogic } from '~/queries/nodes/DataVisualization/dataVisualizationLogic' +import { HogQLQueryResponse } from '~/queries/schema' +import { ChartDisplayType, ExporterFormat } from '~/types' -import { DATAWAREHOUSE_EDITOR_ITEM_ID } from '../external/dataWarehouseExternalSceneLogic' import { dataWarehouseViewsLogic } from '../saved_queries/dataWarehouseViewsLogic' import { multitabEditorLogic } from './multitabEditorLogic' import { outputPaneLogic, OutputTab } from './outputPaneLogic' +import { InfoTab } from './OutputPaneTabs/InfoTab' -interface OutputPaneProps { - onSave: () => 
void - saveDisabledReason?: string - onQueryInputChange: () => void - logicKey: string - query: string -} - -export function OutputPane({ - onQueryInputChange, - onSave, - saveDisabledReason, - logicKey, - query, -}: OutputPaneProps): JSX.Element { +export function OutputPane(): JSX.Element { const { activeTab } = useValues(outputPaneLogic) const { setActiveTab } = useActions(outputPaneLogic) + const { variablesForInsight } = useValues(variablesLogic) - const codeEditorKey = `hogQLQueryEditor/${router.values.location.pathname}` - - const { editingView, queryInput } = useValues( - multitabEditorLogic({ - key: codeEditorKey, - }) - ) + const { editingView, sourceQuery, exportContext, isValidView, error, editorKey } = useValues(multitabEditorLogic) + const { saveAsInsight, saveAsView, setSourceQuery, runQuery } = useActions(multitabEditorLogic) const { isDarkModeOn } = useValues(themeLogic) - const { response, responseLoading } = useValues( - dataNodeLogic({ - key: logicKey, - query: { - kind: NodeKind.HogQLQuery, - query, - }, - doNotLoad: !query, - }) - ) + const { response, responseLoading, responseError, queryId, pollResponse } = useValues(dataNodeLogic) const { dataWarehouseSavedQueriesLoading } = useValues(dataWarehouseViewsLogic) const { updateDataWarehouseSavedQuery } = useActions(dataWarehouseViewsLogic) + const { visualizationType, queryCancelled } = useValues(dataVisualizationLogic) - const { insightProps } = useValues( - insightLogic({ - dashboardItemId: DATAWAREHOUSE_EDITOR_ITEM_ID, - cachedInsight: null, - doNotLoad: true, - }) - ) - const { setQuery } = useActions( - insightDataLogic({ - ...insightProps, - }) - ) + const vizKey = useMemo(() => `SQLEditorScene`, []) const columns = useMemo(() => { return ( @@ -114,50 +71,13 @@ export function OutputPane({ }) }, [response]) - const Content = (): JSX.Element | null => { - if (activeTab === OutputTab.Results) { - return responseLoading ? ( - - ) : !response ? ( - Query results will appear here - ) : ( -
    - -
    - ) - } - - if (activeTab === OutputTab.Visualization) { - return !response ? ( -
    - Query results will visualized here -
    - ) : ( -
    - -
    - ) - } - - return null - } - return (
    + {variablesForInsight.length > 0 && ( +
    + +
    + )}
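The rewrite above drops OutputPane's `onSave`, `saveDisabledReason`, `onQueryInputChange`, `logicKey`, and `query` props entirely: the pane now reads shared state straight from `multitabEditorLogic` and the bound `dataNodeLogic`/`dataVisualizationLogic`. A minimal sketch of that pattern, with a hypothetical `exampleEditorLogic` standing in for the real logics:

```ts
import { actions, kea, reducers, useActions, useValues } from 'kea'

// Illustrative only: a tiny logic holding the state that used to be threaded
// through OutputPane's props.
const exampleEditorLogic = kea([
    actions({
        setQueryInput: (queryInput: string) => ({ queryInput }),
        runQuery: true,
    }),
    reducers({
        queryInput: ['', { setQueryInput: (_, { queryInput }) => queryInput }],
    }),
])

export function ExampleOutputPane(): JSX.Element {
    // No props: any component in the tree binds to the same logic instance
    const { queryInput } = useValues(exampleEditorLogic)
    const { runQuery } = useActions(exampleEditorLogic)
    return <button onClick={() => runQuery()}>{`Run: ${queryInput}`}</button>
}
```

Because any component can bind to the same logic instance, the tab bar, editor, and output pane stay in sync without callbacks threaded through intermediate components.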
    + + + {exportContext && ( + + )} + {editingView ? ( <> updateDataWarehouseSavedQuery({ id: editingView.id, - query: { - kind: NodeKind.HogQLQuery, - query: queryInput, - }, + query: sourceQuery.source, types: response?.types ?? [], }) } @@ -194,82 +137,55 @@ export function OutputPane({ ) : ( - onSave()} disabledReason={saveDisabledReason}> + saveAsView()} + disabledReason={isValidView ? '' : 'Some fields may need an alias'} + > Save as view )} - onQueryInputChange()}> + runQuery()} + > Run
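The export button above only renders when `exportContext` exists; the `multitabEditorLogic` hunk later in this diff derives it from the current source query via `queryExportContext`. Roughly, with the fixed `'export'` filename the hunk itself marks as a TODO:

```ts
import { queryExportContext } from '~/queries/query'
import { HogQLQuery } from '~/queries/schema'
import { ExportContext } from '~/types'

// Sketch of the exportContext selector: wrap the query's export context and
// attach a filename. 'export' is the hard-coded placeholder from the hunk.
function exportContextForQuery(source: HogQLQuery): ExportContext {
    return {
        ...queryExportContext(source, undefined, undefined),
        filename: 'export', // TODO in the hunk: derive from the active tab
    } as ExportContext
}
```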
    -
    - +
    + +
    +
    +
    ) } -function DataTableVisualizationContent({ - query, - setQuery, - activeTab, -}: { - query: DataVisualizationNode - setQuery: (query: DataVisualizationNode) => void - activeTab: OutputTab -}): JSX.Element { - const vizKey = `SQLEditorScene.${activeTab}` - const dataVisualizationLogicProps: DataVisualizationLogicProps = { - key: vizKey, - query, - dashboardId: undefined, - dataNodeCollectionId: vizKey, - insightMode: ItemMode.Edit, - loadPriority: undefined, - setQuery, - cachedResults: undefined, - variablesOverride: undefined, - } - - const dataNodeLogicProps: DataNodeLogicProps = { - query: query.source, - key: vizKey, - cachedResults: undefined, - loadPriority: undefined, - dataNodeCollectionId: vizKey, - variablesOverride: undefined, - } - - return ( - - - - - - - - - - - - ) -} - -function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX.Element { - const logic = insightLogic({ - dashboardItemId: DATAWAREHOUSE_EDITOR_ITEM_ID, - cachedInsight: null, - }) - const { saveAs } = useActions(logic) - +function InternalDataTableVisualization( + props: DataTableVisualizationProps & { onSaveInsight: () => void } +): JSX.Element { const { query, visualizationType, @@ -277,8 +193,6 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX showResultControls, response, responseLoading, - responseError, - queryCancelled, isChartSettingsPanelOpen, } = useValues(dataVisualizationLogic) @@ -322,29 +236,7 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX
    )} -
    - {visualizationType !== ChartDisplayType.ActionsTable && responseError ? ( -
    - -
    - ) : ( - component - )} -
    +
    {component}
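`InternalDataTableVisualization` loses its inline `responseError` branch here; error handling now happens once, per tab, in the new `Content` helper further down. A condensed sketch of that short-circuit, with simplified names and markup:

```ts
// Simplified from the new Content/ErrorState helpers: every output tab
// funnels through one error check before rendering its own body.
type TabContentArgs = {
    responseError: string | null
    queryCancelled: boolean
    renderBody: () => JSX.Element
}

function renderTabContent({ responseError, queryCancelled, renderBody }: TabContentArgs): JSX.Element {
    if (responseError) {
        // Same treatment on every tab, so the branch lives here rather than
        // inside the visualization component
        return <div className="px-6 py-4">{queryCancelled ? 'The query was cancelled' : responseError}</div>
    }
    return renderBody()
}
```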
    {showResultControls && ( <> @@ -361,27 +253,7 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX tooltip="Visualization settings" /> - {props.exportContext && ( - - )} - - saveAs(true, false)}> + props.onSaveInsight()}> Create insight
    @@ -389,9 +261,114 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX
    )} - -
    ) } + +const ErrorState = ({ responseError, sourceQuery, queryCancelled, response }: any): JSX.Element | null => { + return ( +
    + +
    + ) +} + +const Content = ({ + activeTab, + responseError, + responseLoading, + response, + sourceQuery, + queryCancelled, + columns, + rows, + isDarkModeOn, + vizKey, + setSourceQuery, + exportContext, + saveAsInsight, + queryId, + pollResponse, + editorKey, +}: any): JSX.Element | null => { + if (activeTab === OutputTab.Results) { + if (responseError) { + return ( + + ) + } + + return responseLoading ? ( + + ) : !response ? ( +
    + Query results will appear here +
    + ) : ( +
    + +
    + ) + } + + if (activeTab === OutputTab.Visualization) { + if (responseError) { + return ( + + ) + } + + return !response ? ( +
    + Query results will be visualized here +
    + ) : ( +
    + +
    + ) + } + + if (activeTab === OutputTab.Info) { + return ( +
    + +
    + ) + } + + return null +} diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx new file mode 100644 index 0000000000000..1c3bbe26558cc --- /dev/null +++ b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx @@ -0,0 +1,111 @@ +import { LemonButton, LemonTag, Tooltip } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { LemonTable } from 'lib/lemon-ui/LemonTable' +import { humanFriendlyDetailedTime } from 'lib/utils' + +import { multitabEditorLogic } from '../multitabEditorLogic' +import { infoTabLogic } from './infoTabLogic' + +interface InfoTabProps { + codeEditorKey: string +} + +export function InfoTab({ codeEditorKey }: InfoTabProps): JSX.Element { + const { sourceTableItems } = useValues(infoTabLogic({ codeEditorKey: codeEditorKey })) + const { editingView, isEditingMaterializedView } = useValues(multitabEditorLogic) + const { runDataWarehouseSavedQuery } = useActions(multitabEditorLogic) + + return ( +
    +
    +
    +

    Materialization

    + BETA +
    +
    + {isEditingMaterializedView ? ( +
    + {editingView?.last_run_at ? ( + `Last run at ${humanFriendlyDetailedTime(editingView.last_run_at)}` + ) : ( +
    + Materialization scheduled +
    + )} + editingView && runDataWarehouseSavedQuery(editingView.id)} + className="mt-2" + type="secondary" + > + Run now + +
    + ) : ( +
    +

    + Materialized views are a way to pre-compute data in your data warehouse. This allows you + to run queries faster and more efficiently. +

    + editingView && runDataWarehouseSavedQuery(editingView.id)} + type="primary" + disabledReason={editingView ? undefined : 'You must save the view first'} + > + Materialize + +
    + )} +
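The `Materialize` button above is gated on a saved view, since materialization runs against a saved query id. Distilled into a standalone component, with `onMaterialize` standing in for `runDataWarehouseSavedQuery`:

```ts
import { LemonButton } from '@posthog/lemon-ui'

// `editingViewId` is only available once the query has been saved as a view,
// which is exactly what the disabledReason communicates.
interface MaterializeButtonProps {
    editingViewId?: string
    onMaterialize: (viewId: string) => void
}

function MaterializeButton({ editingViewId, onMaterialize }: MaterializeButtonProps): JSX.Element {
    return (
        <LemonButton
            type="primary"
            onClick={() => editingViewId && onMaterialize(editingViewId)}
            disabledReason={editingViewId ? undefined : 'You must save the view first'}
        >
            Materialize
        </LemonButton>
    )
}
```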
    +
    +
    +

    Dependencies

    +

    + Dependencies are tables that this query uses. See when a source or materialized table was last run. +

    +
    + name, + }, + { + key: 'Type', + title: 'Type', + render: (_, { type }) => type, + }, + { + key: 'Status', + title: 'Status', + render: (_, { type, status }) => { + if (type === 'source') { + return ( + + N/A + + ) + } + return status + }, + }, + { + key: 'Last run at', + title: 'Last run at', + render: (_, { type, last_run_at }) => { + if (type === 'source') { + return ( + + N/A + + ) + } + return humanFriendlyDetailedTime(last_run_at) + }, + }, + ]} + dataSource={sourceTableItems} + /> +
    + ) +} diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts new file mode 100644 index 0000000000000..4510e80db6693 --- /dev/null +++ b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts @@ -0,0 +1,63 @@ +import { connect, kea, key, path, props, selectors } from 'kea' +import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' +import { dataWarehouseViewsLogic } from 'scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic' + +import { multitabEditorLogic } from '../multitabEditorLogic' +import type { infoTabLogicType } from './infoTabLogicType' + +export interface InfoTableRow { + name: string + type: 'source' | 'table' + status?: string + last_run_at?: string +} + +export interface InfoTabLogicProps { + codeEditorKey: string +} + +export const infoTabLogic = kea([ + path(['data-warehouse', 'editor', 'outputPaneTabs', 'infoTabLogic']), + props({} as InfoTabLogicProps), + key((props) => props.codeEditorKey), + connect((props: InfoTabLogicProps) => ({ + values: [ + multitabEditorLogic({ key: props.codeEditorKey }), + ['metadata'], + databaseTableListLogic, + ['posthogTablesMap', 'dataWarehouseTablesMap'], + dataWarehouseViewsLogic, + ['dataWarehouseSavedQueryMap'], + ], + })), + selectors({ + sourceTableItems: [ + (s) => [s.metadata, s.dataWarehouseSavedQueryMap], + (metadata, dataWarehouseSavedQueryMap) => { + if (!metadata) { + return [] + } + return ( + metadata.table_names?.map((table_name) => { + const view = dataWarehouseSavedQueryMap[table_name] + if (view) { + return { + name: table_name, + type: 'table', + status: view.status, + last_run_at: view.last_run_at || 'never', + } + } + + return { + name: table_name, + type: 'source', + status: undefined, + last_run_at: undefined, + } + }) || [] + ) + }, + ], + }), +]) diff --git a/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx b/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx index 10e36c436e739..502084bee80e6 100644 --- a/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx +++ b/frontend/src/scenes/data-warehouse/editor/QueryPane.tsx @@ -3,12 +3,15 @@ import { Resizer } from 'lib/components/Resizer/Resizer' import { CodeEditor, CodeEditorProps } from 'lib/monaco/CodeEditor' import { AutoSizer } from 'react-virtualized/dist/es/AutoSizer' +import { HogQLQuery } from '~/queries/schema' + import { editorSizingLogic } from './editorSizingLogic' interface QueryPaneProps { queryInput: string promptError: string | null codeEditorProps: Partial + sourceQuery: HogQLQuery } export function QueryPane(props: QueryPaneProps): JSX.Element { @@ -31,6 +34,7 @@ export function QueryPane(props: QueryPaneProps): JSX.Element { className="border" language="hogQL" value={props.queryInput} + sourceQuery={props.sourceQuery} height={height} width={width} {...props.codeEditorProps} diff --git a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx index 35a41c0f402b7..85c9d80ef6270 100644 --- a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx +++ b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx @@ -14,7 +14,7 @@ interface QueryTabsProps { export function QueryTabs({ models, onClear, onClick, onAdd, activeModelUri }: QueryTabsProps): JSX.Element { return ( -
    +
    {models.map((model: QueryTab) => ( - +
    +
    + +
    {editingView && (
    - Editing view "{editingView.name}" + + Editing {editingView.status ? 'materialized view' : 'view'} "{editingView.name}" +
    )} { setQueryInput(v ?? '') }, @@ -66,17 +76,70 @@ export function QueryWindow(): JSX.Element { runQuery() } }, + onError: (error, isValidView) => { + setError(error) + setIsValidView(isValidView) + }, + onMetadata: (metadata) => { + setMetadata(metadata) + }, }} /> - + + +
    ) } + +function InternalQueryWindow(): JSX.Element | null { + const { cacheLoading, sourceQuery, queryInput } = useValues(multitabEditorLogic) + const { setSourceQuery } = useActions(multitabEditorLogic) + + // NOTE: hacky way to avoid flicker loading + if (cacheLoading) { + return null + } + + const dataVisualizationLogicProps: DataVisualizationLogicProps = { + key: dataNodeKey, + query: sourceQuery, + dashboardId: undefined, + dataNodeCollectionId: dataNodeKey, + insightMode: ItemMode.Edit, + loadPriority: undefined, + cachedResults: undefined, + variablesOverride: undefined, + setQuery: setSourceQuery, + } + + const dataNodeLogicProps: DataNodeLogicProps = { + query: sourceQuery.source, + key: dataNodeKey, + cachedResults: undefined, + loadPriority: undefined, + dataNodeCollectionId: dataNodeKey, + variablesOverride: undefined, + autoLoad: false, + } + + const variablesLogicProps: VariablesLogicProps = { + key: dataVisualizationLogicProps.key, + readOnly: false, + queryInput, + } + + return ( + + + + + + + + + + + + ) +} diff --git a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx similarity index 63% rename from frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts rename to frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx index c7128d138beea..c45ea5559fb5a 100644 --- a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts +++ b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx @@ -1,9 +1,9 @@ +import { Tooltip } from '@posthog/lemon-ui' import Fuse from 'fuse.js' import { connect, kea, path, selectors } from 'kea' import { router } from 'kea-router' import { subscriptions } from 'kea-subscriptions' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { IconCalculate, IconClipboardEdit } from 'lib/lemon-ui/icons' import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' import { sceneLogic } from 'scenes/sceneLogic' import { Scene } from 'scenes/sceneTypes' @@ -42,20 +42,6 @@ const savedQueriesfuse = new Fuse([], { includeMatches: true, }) -const nonMaterializedViewsfuse = new Fuse([], { - keys: [{ name: 'name', weight: 2 }], - threshold: 0.3, - ignoreLocation: true, - includeMatches: true, -}) - -const materializedViewsfuse = new Fuse([], { - keys: [{ name: 'name', weight: 2 }], - threshold: 0.3, - ignoreLocation: true, - includeMatches: true, -}) - export const editorSidebarLogic = kea([ path(['data-warehouse', 'editor', 'editorSidebarLogic']), connect({ @@ -66,8 +52,6 @@ export const editorSidebarLogic = kea([ ['dataWarehouseSavedQueries', 'dataWarehouseSavedQueryMapById', 'dataWarehouseSavedQueriesLoading'], databaseTableListLogic, ['posthogTables', 'dataWarehouseTables', 'databaseLoading', 'views', 'viewsMapById'], - featureFlagLogic, - ['featureFlags'], ], actions: [ editorSceneLogic, @@ -86,19 +70,13 @@ export const editorSidebarLogic = kea([ s.relevantPosthogTables, s.relevantDataWarehouseTables, s.databaseLoading, - s.relevantNonMaterializedViews, - s.relevantMaterializedViews, - s.featureFlags, ], ( relevantSavedQueries, dataWarehouseSavedQueriesLoading, relevantPosthogTables, relevantDataWarehouseTables, - databaseLoading, - relevantNonMaterializedViews, - relevantMaterializedViews, - featureFlags + databaseLoading ) => [ { key: 'data-warehouse-sources', @@ -163,13 +141,19 @@ export const editorSidebarLogic = kea([ key: 
'data-warehouse-views', noun: ['view', 'views'], loading: dataWarehouseSavedQueriesLoading, - items: (featureFlags[FEATURE_FLAGS.DATA_MODELING] - ? relevantNonMaterializedViews - : relevantSavedQueries - ).map(([savedQuery, matches]) => ({ + items: relevantSavedQueries.map(([savedQuery, matches]) => ({ key: savedQuery.id, name: savedQuery.name, url: '', + icon: savedQuery.status ? ( + + + + ) : ( + + + + ), searchMatch: matches ? { matchingFields: matches.map((match) => match.key), @@ -185,7 +169,7 @@ export const editorSidebarLogic = kea([ onClick: () => { multitabEditorLogic({ key: `hogQLQueryEditor/${router.values.location.pathname}`, - }).actions.createTab(savedQuery.query.query, savedQuery) + }).actions.editView(savedQuery.query.query, savedQuery) }, }, { @@ -195,16 +179,6 @@ export const editorSidebarLogic = kea([ actions.toggleJoinTableModal() }, }, - ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && !savedQuery.status - ? [ - { - label: 'Materialize', - onClick: () => { - actions.runDataWarehouseSavedQuery(savedQuery.id) - }, - }, - ] - : []), { label: 'Delete', status: 'danger', @@ -215,63 +189,6 @@ export const editorSidebarLogic = kea([ ], })), } as SidebarCategory, - ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] - ? [ - { - key: 'data-warehouse-materialized-views', - noun: ['materialized view', 'materialized views'], - loading: dataWarehouseSavedQueriesLoading, - items: relevantMaterializedViews.map(([materializedView, matches]) => ({ - key: materializedView.id, - name: materializedView.name, - url: '', - searchMatch: matches - ? { - matchingFields: matches.map((match) => match.key), - nameHighlightRanges: matches.find((match) => match.key === 'name')?.indices, - } - : null, - onClick: () => { - actions.selectSchema(materializedView) - }, - menuItems: [ - { - label: 'Edit view definition', - onClick: () => { - multitabEditorLogic({ - key: `hogQLQueryEditor/${router.values.location.pathname}`, - }).actions.createTab(materializedView.query.query, materializedView) - }, - }, - { - label: 'Add join', - onClick: () => { - actions.selectSourceTable(materializedView.name) - actions.toggleJoinTableModal() - }, - }, - ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && materializedView.status - ? 
[ - { - label: 'Run', - onClick: () => { - actions.runDataWarehouseSavedQuery(materializedView.id) - }, - }, - ] - : []), - { - label: 'Delete', - status: 'danger', - onClick: () => { - actions.deleteDataWarehouseSavedQuery(materializedView.id) - }, - }, - ], - })), - }, - ] - : []), ], ], nonMaterializedViews: [ @@ -327,28 +244,6 @@ export const editorSidebarLogic = kea([ return dataWarehouseSavedQueries.map((savedQuery) => [savedQuery, null]) }, ], - relevantNonMaterializedViews: [ - (s) => [s.nonMaterializedViews, navigation3000Logic.selectors.searchTerm], - (nonMaterializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => { - if (searchTerm) { - return nonMaterializedViewsfuse - .search(searchTerm) - .map((result) => [result.item, result.matches as FuseSearchMatch[]]) - } - return nonMaterializedViews.map((view) => [view, null]) - }, - ], - relevantMaterializedViews: [ - (s) => [s.materializedViews, navigation3000Logic.selectors.searchTerm], - (materializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => { - if (searchTerm) { - return materializedViewsfuse - .search(searchTerm) - .map((result) => [result.item, result.matches as FuseSearchMatch[]]) - } - return materializedViews.map((view) => [view, null]) - }, - ], })), subscriptions({ dataWarehouseTables: (dataWarehouseTables) => { diff --git a/frontend/src/scenes/data-warehouse/editor/editorSizingLogic.tsx b/frontend/src/scenes/data-warehouse/editor/editorSizingLogic.tsx index 25bc04388e5a0..397bad5ff33d5 100644 --- a/frontend/src/scenes/data-warehouse/editor/editorSizingLogic.tsx +++ b/frontend/src/scenes/data-warehouse/editor/editorSizingLogic.tsx @@ -13,7 +13,7 @@ export interface EditorSizingLogicProps { const MINIMUM_NAVIGATOR_WIDTH = 100 const NAVIGATOR_DEFAULT_WIDTH = 350 const MINIMUM_QUERY_PANE_HEIGHT = 100 -const DEFAULT_QUERY_PANE_HEIGHT = 600 +const DEFAULT_QUERY_PANE_HEIGHT = 400 export const editorSizingLogic = kea([ path(['scenes', 'data-warehouse', 'editor', 'editorSizingLogic']), diff --git a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx index c6b9e4ed575c9..94995a446ae2d 100644 --- a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx +++ b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx @@ -1,19 +1,32 @@ import { Monaco } from '@monaco-editor/react' import { LemonDialog, LemonInput, lemonToast } from '@posthog/lemon-ui' import { actions, connect, kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea' +import { router } from 'kea-router' import { subscriptions } from 'kea-subscriptions' import { LemonField } from 'lib/lemon-ui/LemonField' -import { ModelMarker } from 'lib/monaco/codeEditorLogic' -import { editor, MarkerSeverity, Uri } from 'monaco-editor' +import { initModel } from 'lib/monaco/CodeEditor' +import { codeEditorLogic } from 'lib/monaco/codeEditorLogic' +import { editor, Uri } from 'monaco-editor' +import { insightsApi } from 'scenes/insights/utils/api' +import { urls } from 'scenes/urls' import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic' -import { performQuery } from '~/queries/query' -import { HogLanguage, HogQLMetadata, HogQLMetadataResponse, HogQLNotice, HogQLQuery, NodeKind } from '~/queries/schema' -import { DataWarehouseSavedQuery } from '~/types' +import { insightVizDataNodeKey } from '~/queries/nodes/InsightViz/InsightViz' +import { queryExportContext } from '~/queries/query' 
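`multitabEditorLogic` now exports a module-level `dataNodeKey` (built with `insightVizDataNodeKey` from `DATAWAREHOUSE_EDITOR_ITEM_ID`), and every consumer, from the editor to the `BindLogic` tree in `InternalQueryWindow` to the output pane, addresses `dataNodeLogic` through it. A sketch of why one stable key is enough, assuming kea's usual keyed-logic caching; `exampleKeyedLogic` is illustrative, not the real logic:

```ts
import { actions, kea, key, props, reducers } from 'kea'

const exampleKeyedLogic = kea([
    props({} as { key: string }),
    key((props) => props.key),
    actions({ setLoaded: (loaded: boolean) => ({ loaded }) }),
    reducers({ loaded: [false, { setLoaded: (_, { loaded }) => loaded }] }),
])

// Both builds resolve to the same instance because the keys match, so an
// action dispatched through one is observable through the other:
const fromEditor = exampleKeyedLogic({ key: 'SQLEditorScene' })
const fromOutputPane = exampleKeyedLogic({ key: 'SQLEditorScene' })
// fromEditor === fromOutputPane
```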
+import { HogQLMetadataResponse, HogQLQuery, NodeKind } from '~/queries/schema' +import { DataVisualizationNode } from '~/queries/schema' +import { DataWarehouseSavedQuery, ExportContext } from '~/types' +import { DATAWAREHOUSE_EDITOR_ITEM_ID } from '../external/dataWarehouseExternalSceneLogic' import { dataWarehouseViewsLogic } from '../saved_queries/dataWarehouseViewsLogic' import type { multitabEditorLogicType } from './multitabEditorLogicType' +export const dataNodeKey = insightVizDataNodeKey({ + dashboardItemId: DATAWAREHOUSE_EDITOR_ITEM_ID, + cachedInsight: null, + doNotLoad: true, +}) + export interface MultitabEditorLogicProps { key: string monaco?: Monaco | null @@ -35,13 +48,18 @@ export const multitabEditorLogic = kea([ connect({ actions: [ dataWarehouseViewsLogic, - ['deleteDataWarehouseSavedQuerySuccess', 'createDataWarehouseSavedQuerySuccess'], + [ + 'loadDataWarehouseSavedQueriesSuccess', + 'deleteDataWarehouseSavedQuerySuccess', + 'createDataWarehouseSavedQuerySuccess', + 'runDataWarehouseSavedQuery', + ], ], }), actions({ setQueryInput: (queryInput: string) => ({ queryInput }), updateState: true, - runQuery: (queryOverride?: string) => ({ queryOverride }), + runQuery: (queryOverride?: string, switchTab?: boolean) => ({ queryOverride, switchTab }), setActiveQuery: (query: string) => ({ query }), setTabs: (tabs: QueryTab[]) => ({ tabs }), addTab: (tab: QueryTab) => ({ tab }), @@ -53,8 +71,14 @@ export const multitabEditorLogic = kea([ initialize: true, saveAsView: true, saveAsViewSubmit: (name: string) => ({ name }), - reloadMetadata: true, - setMetadata: (query: string, metadata: HogQLMetadataResponse) => ({ query, metadata }), + saveAsInsight: true, + saveAsInsightSubmit: (name: string) => ({ name }), + setCacheLoading: (loading: boolean) => ({ loading }), + setError: (error: string | null) => ({ error }), + setIsValidView: (isValidView: boolean) => ({ isValidView }), + setSourceQuery: (sourceQuery: DataVisualizationNode) => ({ sourceQuery }), + setMetadata: (metadata: HogQLMetadataResponse) => ({ metadata }), + editView: (query: string, view: DataWarehouseSavedQuery) => ({ query, view }), }), propsChanged(({ actions, props }, oldProps) => { if (!oldProps.monaco && !oldProps.editor && props.monaco && props.editor) { @@ -62,6 +86,24 @@ export const multitabEditorLogic = kea([ } }), reducers(({ props }) => ({ + cacheLoading: [ + true, + { + setCacheLoading: (_, { loading }) => loading, + }, + ], + sourceQuery: [ + { + kind: NodeKind.DataVisualizationNode, + source: { + kind: NodeKind.HogQLQuery, + query: '', + }, + } as DataVisualizationNode, + { + setSourceQuery: (_, { sourceQuery }) => sourceQuery, + }, + ], queryInput: [ '', { @@ -100,57 +142,37 @@ export const multitabEditorLogic = kea([ setTabs: (_, { tabs }) => tabs, }, ], - metadata: [ - null as null | [string, HogQLMetadataResponse], + error: [ + null as string | null, { - setMetadata: (_, { query, metadata }) => [query, metadata], + setError: (_, { error }) => error, }, ], - modelMarkers: [ - [] as ModelMarker[], + isValidView: [ + false, { - setMetadata: (_, { query, metadata }) => { - const model = props.editor?.getModel() - if (!model || !metadata) { - return [] - } - const markers: ModelMarker[] = [] - const metadataResponse = metadata - - function noticeToMarker(error: HogQLNotice, severity: MarkerSeverity): ModelMarker { - const start = model!.getPositionAt(error.start ?? 0) - const end = model!.getPositionAt(error.end ?? query.length) - return { - start: error.start ?? 
0, - startLineNumber: start.lineNumber, - startColumn: start.column, - end: error.end ?? query.length, - endLineNumber: end.lineNumber, - endColumn: end.column, - message: error.message ?? 'Unknown error', - severity: severity, - hogQLFix: error.fix, - } - } - - for (const notice of metadataResponse?.errors ?? []) { - markers.push(noticeToMarker(notice, 8 /* MarkerSeverity.Error */)) - } - for (const notice of metadataResponse?.warnings ?? []) { - markers.push(noticeToMarker(notice, 4 /* MarkerSeverity.Warning */)) - } - for (const notice of metadataResponse?.notices ?? []) { - markers.push(noticeToMarker(notice, 1 /* MarkerSeverity.Hint */)) - } - - props.monaco?.editor.setModelMarkers(model, 'hogql', markers) - return markers - }, + setIsValidView: (_, { isValidView }) => isValidView, + }, + ], + metadata: [ + null as HogQLMetadataResponse | null, + { + setMetadata: (_, { metadata }) => metadata, }, ], + editorKey: [props.key], })), listeners(({ values, props, actions, asyncActions }) => ({ + editView: ({ query, view }) => { + const maybeExistingTab = values.allTabs.find((tab) => tab.view?.id === view.id) + if (maybeExistingTab) { + actions.selectTab(maybeExistingTab) + } else { + actions.createTab(query, view) + } + }, createTab: ({ query = '', view }) => { + const mountedCodeEditorLogic = codeEditorLogic.findMounted() let currentModelCount = 1 const allNumbers = values.allTabs.map((tab) => parseInt(tab.uri.path.split('/').pop() || '0')) while (allNumbers.includes(currentModelCount)) { @@ -161,6 +183,11 @@ export const multitabEditorLogic = kea([ const uri = props.monaco.Uri.parse(currentModelCount.toString()) const model = props.monaco.editor.createModel(query, 'hogQL', uri) props.editor?.setModel(model) + + if (mountedCodeEditorLogic) { + initModel(model, mountedCodeEditorLogic) + } + actions.addTab({ uri, view, @@ -218,6 +245,13 @@ export const multitabEditorLogic = kea([ initialize: () => { const allModelQueries = localStorage.getItem(editorModelsStateKey(props.key)) const activeModelUri = localStorage.getItem(activemodelStateKey(props.key)) + const mountedCodeEditorLogic = + codeEditorLogic.findMounted() || + codeEditorLogic({ + key: props.key, + query: values.sourceQuery?.source.query ?? 
'', + language: 'hogQL', + }) if (allModelQueries) { // clear existing models @@ -237,6 +271,7 @@ export const multitabEditorLogic = kea([ uri, view: model.view, }) + mountedCodeEditorLogic && initModel(newModel, mountedCodeEditorLogic) } }) @@ -272,6 +307,7 @@ export const multitabEditorLogic = kea([ actions.createTab() } } + actions.setCacheLoading(false) }, setQueryInput: () => { actions.updateState() @@ -287,18 +323,24 @@ export const multitabEditorLogic = kea([ }) localStorage.setItem(editorModelsStateKey(props.key), JSON.stringify(queries)) }, - runQuery: ({ queryOverride }) => { - if (values.activeQuery === queryOverride || values.activeQuery === values.queryInput) { - dataNodeLogic({ - key: values.activeTabKey, - query: { - kind: NodeKind.HogQLQuery, - query: queryOverride || values.queryInput, - }, - alwaysRefresh: true, - }).actions.loadData(true) - } - actions.setActiveQuery(queryOverride || values.queryInput) + runQuery: ({ queryOverride, switchTab }) => { + const query = queryOverride || values.queryInput + + actions.setSourceQuery({ + ...values.sourceQuery, + source: { + ...values.sourceQuery.source, + query, + }, + }) + dataNodeLogic({ + key: dataNodeKey, + query: { + ...values.sourceQuery.source, + query, + }, + autoLoad: false, + }).actions.loadData(!switchTab) }, saveAsView: async () => { LemonDialog.openForm({ @@ -319,41 +361,53 @@ export const multitabEditorLogic = kea([ }) }, saveAsViewSubmit: async ({ name }) => { - const query: HogQLQuery = { - kind: NodeKind.HogQLQuery, - query: values.queryInput, - } + const query: HogQLQuery = values.sourceQuery.source const logic = dataNodeLogic({ - key: values.activeTabKey, - query: { - kind: NodeKind.HogQLQuery, - query: values.queryInput, - }, + key: dataNodeKey, + query, }) const types = logic.values.response?.types ?? [] await dataWarehouseViewsLogic.asyncActions.createDataWarehouseSavedQuery({ name, query, types }) }, - reloadMetadata: async (_, breakpoint) => { - const model = props.editor?.getModel() - if (!model || !props.monaco) { - return - } - await breakpoint(300) - const query = values.queryInput - if (query === '') { - return - } - - const response = await performQuery({ - kind: NodeKind.HogQLMetadata, - language: HogLanguage.hogQL, - query: query, + saveAsInsight: async () => { + LemonDialog.openForm({ + title: 'Save as new insight', + initialValues: { + name: '', + }, + content: ( + + + + ), + errors: { + name: (name) => (!name ? 'You must enter a name' : undefined), + }, + onSubmit: async ({ name }) => actions.saveAsInsightSubmit(name), }) - breakpoint() - actions.setMetadata(query, response) + }, + saveAsInsightSubmit: async ({ name }) => { + const insight = await insightsApi.create({ + name, + query: values.sourceQuery, + saved: true, + }) + + lemonToast.info(`You're now viewing ${insight.name || insight.derived_name || name}`) + + router.actions.push(urls.insightView(insight.short_id)) + }, + loadDataWarehouseSavedQueriesSuccess: ({ dataWarehouseSavedQueries }) => { + // keep tab views up to date + const newTabs = values.allTabs.map((tab) => ({ + ...tab, + view: dataWarehouseSavedQueries.find((v) => v.id === tab.view?.id), + })) + actions.setTabs(newTabs) + actions.updateState() }, deleteDataWarehouseSavedQuerySuccess: ({ payload: viewId }) => { const tabToRemove = values.allTabs.find((tab) => tab.view?.id === viewId) @@ -385,35 +439,32 @@ export const multitabEditorLogic = kea([ const _model = props.monaco.editor.getModel(activeModelUri.uri) const val = _model?.getValue() actions.setQueryInput(val ?? 
'') - actions.runQuery() - dataNodeLogic({ - key: values.activeTabKey, - query: { - kind: NodeKind.HogQLQuery, - query: val ?? '', - }, - doNotLoad: !val, - }).mount() + actions.runQuery(undefined, true) } }, - queryInput: () => { - actions.reloadMetadata() + allTabs: () => { + // keep selected tab up to date + const activeTab = values.allTabs.find((tab) => tab.uri.path === values.activeModelUri?.uri.path) + activeTab && actions.selectTab(activeTab) }, })), selectors({ - activeTabKey: [(s) => [s.activeModelUri], (activeModelUri) => `hogQLQueryEditor/${activeModelUri?.uri.path}`], - isValidView: [(s) => [s.metadata], (metadata) => !!(metadata && metadata[1]?.isValidView)], - hasErrors: [ - (s) => [s.modelMarkers], - (modelMarkers) => !!(modelMarkers ?? []).filter((e) => e.severity === 8 /* MarkerSeverity.Error */).length, + exportContext: [ + (s) => [s.sourceQuery], + (sourceQuery) => { + // TODO: use active tab at some point + const filename = 'export' + + return { + ...queryExportContext(sourceQuery.source, undefined, undefined), + filename, + } as ExportContext + }, ], - error: [ - (s) => [s.hasErrors, s.modelMarkers], - (hasErrors, modelMarkers) => { - const firstError = modelMarkers.find((e) => e.severity === 8 /* MarkerSeverity.Error */) - return hasErrors && firstError - ? `Error on line ${firstError.startLineNumber}, column ${firstError.startColumn}` - : null + isEditingMaterializedView: [ + (s) => [s.editingView], + (editingView) => { + return !!editingView?.status }, ], }), diff --git a/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts index 659c79b440635..4e06f611dc49d 100644 --- a/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts +++ b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts @@ -5,6 +5,7 @@ import type { outputPaneLogicType } from './outputPaneLogicType' export enum OutputTab { Results = 'results', Visualization = 'visualization', + Info = 'info', } export const outputPaneLogic = kea([ diff --git a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx index a924b0ba594b7..f8e1a5a131205 100644 --- a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx +++ b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx @@ -513,18 +513,60 @@ export const SOURCE_DETAILS: Record = { placeholder: 'COMPUTE_WAREHOUSE', }, { - name: 'user', - label: 'User', - type: 'text', - required: true, - placeholder: 'user', - }, - { - name: 'password', - label: 'Password', - type: 'password', + type: 'select', + name: 'auth_type', + label: 'Authentication type', required: true, - placeholder: '', + defaultValue: 'password', + options: [ + { + label: 'Password', + value: 'password', + fields: [ + { + name: 'username', + label: 'Username', + type: 'text', + required: true, + placeholder: 'User1', + }, + { + name: 'password', + label: 'Password', + type: 'password', + required: true, + placeholder: '', + }, + ], + }, + { + label: 'Key pair', + value: 'keypair', + fields: [ + { + name: 'username', + label: 'Username', + type: 'text', + required: true, + placeholder: 'User1', + }, + { + name: 'private_key', + label: 'Private key', + type: 'textarea', + required: true, + placeholder: '', + }, + { + name: 'passphrase', + label: 'Passphrase', + type: 'password', + required: false, + placeholder: '', + }, + ], + }, + ], }, { name: 'role', diff --git a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx 
b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx index d66a0285526ba..ae61570189150 100644 --- a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx +++ b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx @@ -70,8 +70,13 @@ export const dataWarehouseViewsLogic = kea([ actions.loadDatabase() }, runDataWarehouseSavedQuery: async ({ viewId }) => { - await api.dataWarehouseSavedQueries.run(viewId) - actions.loadDataWarehouseSavedQueries() + try { + await api.dataWarehouseSavedQueries.run(viewId) + lemonToast.success('Materialization started') + actions.loadDataWarehouseSavedQueries() + } catch (error) { + lemonToast.error(`Failed to run materialization`) + } }, })), selectors({ @@ -92,6 +97,17 @@ export const dataWarehouseViewsLogic = kea([ ) }, ], + dataWarehouseSavedQueryMap: [ + (s) => [s.dataWarehouseSavedQueries], + (dataWarehouseSavedQueries) => { + return ( + dataWarehouseSavedQueries?.reduce((acc, cur) => { + acc[cur.name] = cur + return acc + }, {} as Record) ?? {} + ) + }, + ], }), events(({ actions, cache }) => ({ afterMount: () => { diff --git a/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx b/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx new file mode 100644 index 0000000000000..3f9c7b2a50168 --- /dev/null +++ b/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx @@ -0,0 +1,40 @@ +import { LemonButton } from '@posthog/lemon-ui' +import { useActions } from 'kea' +import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' +import { supportLogic } from 'lib/components/Support/supportLogic' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' + +import { ProductKey } from '~/types' + +export const AlphaAccessScenePrompt = ({ children }: { children: React.ReactElement }): JSX.Element => { + const hasErrorTracking = useFeatureFlag('ERROR_TRACKING') + const { openSupportForm } = useActions(supportLogic) + + return hasErrorTracking ? ( + children + ) : ( + + openSupportForm({ + target_area: 'error_tracking', + isEmailFormOpen: true, + message: 'Hi\n\nI would like to request access to the error tracking product', + severity_level: 'low', + }) + } + > + Request access + + } + productKey={ProductKey.ERROR_TRACKING} + /> + ) +} diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx index d381d62640a0f..33c8f2af6779e 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx @@ -8,6 +8,7 @@ import { humanFriendlyDetailedTime } from 'lib/utils' import { useEffect, useState } from 'react' import { SceneExport } from 'scenes/sceneTypes' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { errorTrackingSymbolSetLogic } from './errorTrackingSymbolSetLogic' import { SymbolSetUploadModal } from './SymbolSetUploadModal' @@ -25,19 +26,24 @@ export function ErrorTrackingConfigurationScene(): JSX.Element { }, [loadSymbolSets]) return ( -
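In the `dataWarehouseViewsLogic` hunk above, the new `dataWarehouseSavedQueryMap` selector indexes saved queries by name so that `infoTabLogic` can resolve query dependencies with O(1) lookups. The reduce, extracted as a plain function with a stand-in type:

```ts
interface SavedQueryLike {
    name: string
    status?: string
}

// Mirrors the selector: tolerate a null list and always return a map.
function buildSavedQueryMap(savedQueries: SavedQueryLike[] | null): Record<string, SavedQueryLike> {
    return (
        savedQueries?.reduce((acc, cur) => {
            acc[cur.name] = cur
            return acc
        }, {} as Record<string, SavedQueryLike>) ?? {}
    )
}
```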
    -

    Symbol sets

    -

    - Source maps are required to demangle any minified code in your exception stack traces. PostHog - automatically retrieves source maps where possible. Cases where it was not possible are listed below. - Source maps can be uploaded retroactively but changes will only apply to all future exceptions ingested. -

    - {missingSymbolSets.length > 0 && ( - - )} - {validSymbolSets.length > 0 && } - -
    + +
    +

    Symbol sets

    +

+ Source maps are required to demangle any minified code in your exception stack traces. PostHog + automatically retrieves source maps where possible. Cases where this was not possible are listed + below. Source maps can be uploaded retroactively, but changes will only apply to + exceptions ingested after the upload.
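For background on what a symbol set enables (this is not PostHog's implementation): given a minified frame's line and column plus the matching source map, the original position can be recovered, for example with Mozilla's `source-map` package. The helper and positions below are illustrative:

```ts
import { SourceMapConsumer } from 'source-map'

// Map a minified stack frame back to its original source position.
async function demangleFrame(rawSourceMap: string, line: number, column: number): Promise<string> {
    const consumer = await new SourceMapConsumer(rawSourceMap)
    const original = consumer.originalPositionFor({ line, column })
    consumer.destroy()
    return `${original.source}:${original.line}:${original.column} (${original.name ?? 'anonymous'})`
}
```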

    + {missingSymbolSets.length > 0 && ( + + )} + {(validSymbolSets.length > 0 || missingSymbolSets.length === 0) && ( + + )} + +
    +
    ) } diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx index 7965e2563ae22..33549cf177fa2 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx @@ -8,6 +8,7 @@ import { SceneExport } from 'scenes/sceneTypes' import { ErrorTrackingIssue } from '~/queries/schema' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { AssigneeSelect } from './AssigneeSelect' import ErrorTrackingFilters from './ErrorTrackingFilters' import { errorTrackingIssueSceneLogic } from './errorTrackingIssueSceneLogic' @@ -40,47 +41,52 @@ export function ErrorTrackingIssueScene(): JSX.Element { }, []) return ( - <> - - updateIssue({ assignee })} - type="secondary" - showName - /> -
    - updateIssue({ status: 'archived' })}> - Archive - - updateIssue({ status: 'resolved' })}> - Resolve - + + <> + + updateIssue({ assignee })} + type="secondary" + showName + /> +
    + updateIssue({ status: 'archived' })} + > + Archive + + updateIssue({ status: 'resolved' })}> + Resolve + +
    -
    + ) : ( + updateIssue({ status: 'active' })} + tooltip="Mark as active" + > + {STATUS_LABEL[issue.status]} + + ) ) : ( - updateIssue({ status: 'active' })} - tooltip="Mark as active" - > - {STATUS_LABEL[issue.status]} - + false ) - ) : ( - false - ) - } - /> - - - - - - + } + /> + + + + + + + ) } diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx index 754cb398ec136..d2fb4ffc8ebb7 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx @@ -16,6 +16,7 @@ import { ErrorTrackingIssue } from '~/queries/schema' import { QueryContext, QueryContextColumnComponent, QueryContextColumnTitleComponent } from '~/queries/types' import { InsightLogicProps } from '~/types' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { AssigneeSelect } from './AssigneeSelect' import { errorTrackingDataNodeLogic } from './errorTrackingDataNodeLogic' import ErrorTrackingFilters from './ErrorTrackingFilters' @@ -52,14 +53,16 @@ export function ErrorTrackingScene(): JSX.Element { } return ( - -
    - - - - {selectedIssueIds.length === 0 ? : } - - + + +
    + + + + {selectedIssueIds.length === 0 ? : } + + + ) } diff --git a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts index c1a847a8ab647..260e4e4d08ccb 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts @@ -27,7 +27,9 @@ const customOptions: Record = { all: [lastYear, lastMonth, lastDay], } -const DEFAULT_FILTER_GROUP = { +export const DEFAULT_ERROR_TRACKING_DATE_RANGE = { date_from: '-7d', date_to: null } + +export const DEFAULT_ERROR_TRACKING_FILTER_GROUP = { type: FilterLogicalOperator.And, values: [{ type: FilterLogicalOperator.And, values: [] }], } @@ -42,15 +44,15 @@ export const errorTrackingLogic = kea([ actions({ setDateRange: (dateRange: DateRange) => ({ dateRange }), setAssignee: (assignee: number | null) => ({ assignee }), + setSearchQuery: (searchQuery: string) => ({ searchQuery }), setFilterGroup: (filterGroup: UniversalFiltersGroup) => ({ filterGroup }), setFilterTestAccounts: (filterTestAccounts: boolean) => ({ filterTestAccounts }), - setSearchQuery: (searchQuery: string) => ({ searchQuery }), setSparklineSelectedPeriod: (period: string | null) => ({ period }), _setSparklineOptions: (options: SparklineOption[]) => ({ options }), }), reducers({ dateRange: [ - { date_from: '-7d', date_to: null } as DateRange, + DEFAULT_ERROR_TRACKING_DATE_RANGE as DateRange, { persist: true }, { setDateRange: (_, { dateRange }) => dateRange, @@ -64,7 +66,7 @@ export const errorTrackingLogic = kea([ }, ], filterGroup: [ - DEFAULT_FILTER_GROUP as UniversalFiltersGroup, + DEFAULT_ERROR_TRACKING_FILTER_GROUP as UniversalFiltersGroup, { persist: true }, { setFilterGroup: (_, { filterGroup }) => filterGroup, diff --git a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts index bd36ead868256..92dbd6d61dcac 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts @@ -1,9 +1,17 @@ +import equal from 'fast-deep-equal' import { actions, connect, kea, path, reducers, selectors } from 'kea' +import { actionToUrl, router, urlToAction } from 'kea-router' import { subscriptions } from 'kea-subscriptions' +import { objectsEqual } from 'lib/utils' +import { Params } from 'scenes/sceneTypes' import { DataTableNode, ErrorTrackingQuery } from '~/queries/schema' -import { errorTrackingLogic } from './errorTrackingLogic' +import { + DEFAULT_ERROR_TRACKING_DATE_RANGE, + DEFAULT_ERROR_TRACKING_FILTER_GROUP, + errorTrackingLogic, +} from './errorTrackingLogic' import type { errorTrackingSceneLogicType } from './errorTrackingSceneLogicType' import { errorTrackingQuery } from './queries' @@ -23,6 +31,10 @@ export const errorTrackingSceneLogic = kea([ 'hasGroupActions', ], ], + actions: [ + errorTrackingLogic, + ['setAssignee', 'setDateRange', 'setFilterGroup', 'setSearchQuery', 'setFilterTestAccounts'], + ], }), actions({ @@ -86,4 +98,79 @@ export const errorTrackingSceneLogic = kea([ subscriptions(({ actions }) => ({ query: () => actions.setSelectedIssueIds([]), })), + + actionToUrl(({ values }) => { + const buildURL = (): [ + string, + Params, + Record, + { + replace: boolean + } + ] => { + const searchParams: Params = { + orderBy: values.orderBy, + filterTestAccounts: values.filterTestAccounts, + } + + if (values.assignee) { + searchParams.assignee = values.assignee + } + if (values.searchQuery) { + 
searchParams.searchQuery = values.searchQuery + } + if (!objectsEqual(values.filterGroup, DEFAULT_ERROR_TRACKING_FILTER_GROUP)) { + searchParams.filterGroup = values.filterGroup + } + if (!objectsEqual(values.dateRange, DEFAULT_ERROR_TRACKING_DATE_RANGE)) { + searchParams.dateRange = values.dateRange + } + + if (!objectsEqual(searchParams, router.values.searchParams)) { + return [router.values.location.pathname, searchParams, router.values.hashParams, { replace: true }] + } + + return [ + router.values.location.pathname, + router.values.searchParams, + router.values.hashParams, + { replace: false }, + ] + } + + return { + setOrderBy: () => buildURL(), + setAssignee: () => buildURL(), + setDateRange: () => buildURL(), + setFilterGroup: () => buildURL(), + setSearchQuery: () => buildURL(), + setFilterTestAccounts: () => buildURL(), + } + }), + + urlToAction(({ actions, values }) => { + const urlToAction = (_: any, params: Params): void => { + if (params.orderBy && !equal(params.orderBy, values.orderBy)) { + actions.setOrderBy(params.orderBy) + } + if (params.dateRange && !equal(params.dateRange, values.dateRange)) { + actions.setDateRange(params.dateRange) + } + if (params.filterGroup && !equal(params.filterGroup, values.filterGroup)) { + actions.setFilterGroup(params.filterGroup) + } + if (params.filterTestAccounts && !equal(params.filterTestAccounts, values.filterTestAccounts)) { + actions.setFilterTestAccounts(params.filterTestAccounts) + } + if (params.assignee && !equal(params.assignee, values.assignee)) { + actions.setAssignee(params.assignee) + } + if (params.searchQuery && !equal(params.searchQuery, values.searchQuery)) { + actions.setSearchQuery(params.searchQuery) + } + } + return { + '*': urlToAction, + } + }), ]) diff --git a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx index d5a2abca2306f..8d9aefd7269a9 100644 --- a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx +++ b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx @@ -188,10 +188,10 @@ export function FlutterSnippet({ flagKey, variant }: SnippetProps): JSX.Element <> {`if (${clientSuffix}${flagFunction}('${flagKey}')${variantSuffix}) { - // Do something differently for this user + // Do something differently for this user } else { - // It's a good idea to let control variant always be the default behaviour, - // so if something goes wrong with flag evaluation, you don't break your app. + // It's a good idea to let control variant always be the default behaviour, + // so if something goes wrong with flag evaluation, you don't break your app. 
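Back in the `errorTrackingSceneLogic` hunk above: `actionToUrl` mirrors the filter state into the search params, using `replace: true` when only the params changed so that filtering does not flood browser history, and `urlToAction` hydrates state on navigation, dispatching only on real differences to avoid update loops. An abstracted sketch with a hypothetical `exampleFiltersLogic` and a single synced field:

```ts
import equal from 'fast-deep-equal'
import { actions, kea, reducers } from 'kea'
import { actionToUrl, router, urlToAction } from 'kea-router'

const exampleFiltersLogic = kea([
    actions({ setSearchQuery: (searchQuery: string) => ({ searchQuery }) }),
    reducers({ searchQuery: ['', { setSearchQuery: (_, { searchQuery }) => searchQuery }] }),
    actionToUrl(({ values }) => ({
        setSearchQuery: () => [
            router.values.location.pathname,
            { ...router.values.searchParams, searchQuery: values.searchQuery || undefined },
            router.values.hashParams,
            { replace: true }, // replace, so each filter change doesn't add a history entry
        ],
    })),
    urlToAction(({ actions, values }) => ({
        '*': (_, searchParams) => {
            // Guard against dispatch loops: only act when the URL differs from state
            if (searchParams.searchQuery && !equal(searchParams.searchQuery, values.searchQuery)) {
                actions.setSearchQuery(searchParams.searchQuery)
            }
        },
    })),
])
```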
} `} diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index d1fd0b140bc4d..125fb2320ddab 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -6,6 +6,7 @@ import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' +import { groupsAccessLogic, GroupsAccessStatus } from 'lib/introductions/groupsAccessLogic' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonRadio } from 'lib/lemon-ui/LemonRadio' @@ -20,6 +21,7 @@ const ExperimentFormFields = (): JSX.Element => { const { addExperimentGroup, removeExperimentGroup, setExperiment, createExperiment, setExperimentType } = useActions(experimentLogic) const { webExperimentsAvailable } = useValues(experimentsLogic) + const { groupsAccessStatus } = useValues(groupsAccessLogic) return (
    @@ -103,37 +105,40 @@ const ExperimentFormFields = (): JSX.Element => { />
    )} -
    -

    Participant type

    -
    - The type on which to aggregate metrics. You can change this at any time during the experiment. -
    - - { - const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined + {groupsAccessStatus === GroupsAccessStatus.AlreadyUsing && ( +
    +

    Participant type

    +
    + The type on which to aggregate metrics. You can change this at any time during the + experiment. +
    + + { + const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined - setExperiment({ - parameters: { - ...experiment.parameters, - aggregation_group_type_index: groupTypeIndex ?? undefined, - }, - }) - }} - options={[ - { value: -1, label: 'Persons' }, - ...Array.from(groupTypes.values()).map((groupType) => ({ - value: groupType.group_type_index, - label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), - })), - ]} - /> -
    + setExperiment({ + parameters: { + ...experiment.parameters, + aggregation_group_type_index: groupTypeIndex ?? undefined, + }, + }) + }} + options={[ + { value: -1, label: 'Persons' }, + ...Array.from(groupTypes.values()).map((groupType) => ({ + value: groupType.group_type_index, + label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), + })), + ]} + /> +
    + )}
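The `ExperimentForm` hunk above hides the participant-type picker unless the organization already uses group analytics (`GroupsAccessStatus.AlreadyUsing`), so persons stay the implicit default. The gate, reduced to a hypothetical wrapper; the logic and enum names come from the hunk itself:

```ts
import { useValues } from 'kea'
import { groupsAccessLogic, GroupsAccessStatus } from 'lib/introductions/groupsAccessLogic'

function ParticipantTypeSection({ children }: { children: JSX.Element }): JSX.Element | null {
    const { groupsAccessStatus } = useValues(groupsAccessLogic)
    // Render nothing unless groups are actively used; a picker that cannot
    // aggregate by group would only mislead
    return groupsAccessStatus === GroupsAccessStatus.AlreadyUsing ? children : null
}
```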

    Variants

    Add up to 9 variants to test against your control.
    diff --git a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx index 575eb84c52708..7f4378a7ec5ef 100644 --- a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx @@ -1,28 +1,16 @@ import { IconInfo } from '@posthog/icons' import { Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { InsightEmptyState } from 'scenes/insights/EmptyStates' import { InsightViz } from '~/queries/nodes/InsightViz/InsightViz' -import { queryFromFilters } from '~/queries/nodes/InsightViz/utils' import { CachedExperimentTrendsQueryResponse, InsightQueryNode, InsightVizNode, NodeKind } from '~/queries/schema' -import { - _TrendsExperimentResults, - BaseMathType, - ChartDisplayType, - InsightType, - PropertyFilterType, - PropertyOperator, -} from '~/types' +import { BaseMathType, ChartDisplayType, InsightType, PropertyFilterType, PropertyOperator } from '~/types' import { experimentLogic } from '../experimentLogic' -import { transformResultFilters } from '../utils' export function CumulativeExposuresChart(): JSX.Element { const { experiment, experimentResults, getMetricType } = useValues(experimentLogic) - const { featureFlags } = useValues(featureFlagLogic) const metricIdx = 0 const metricType = getMetricType(metricIdx) @@ -32,99 +20,52 @@ export function CumulativeExposuresChart(): JSX.Element { variants.push(`holdout-${experiment.holdout.id}`) } - let query + let query: InsightVizNode - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - if (metricType === InsightType.TRENDS) { - query = { - kind: NodeKind.InsightVizNode, - source: (experimentResults as CachedExperimentTrendsQueryResponse).exposure_query, - } - } else { - query = { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: experiment.start_date, - date_to: experiment.end_date, - }, - interval: 'day', - trendsFilter: { - display: ChartDisplayType.ActionsLineGraphCumulative, - showLegend: false, - smoothingIntervals: 1, - }, - series: [ - { - kind: NodeKind.EventsNode, - event: experiment.filters?.events?.[0]?.name, - math: BaseMathType.UniqueUsers, - properties: [ - { - key: `$feature/${experiment.feature_flag_key}`, - value: variants, - operator: PropertyOperator.Exact, - type: PropertyFilterType.Event, - }, - ], - }, - ], - breakdownFilter: { - breakdown: `$feature/${experiment.feature_flag_key}`, - breakdown_type: 'event', - }, - }, - } + if (metricType === InsightType.TRENDS) { + query = { + kind: NodeKind.InsightVizNode, + source: (experimentResults as CachedExperimentTrendsQueryResponse)?.exposure_query || { + kind: NodeKind.TrendsQuery, + series: [], + interval: 'day', + }, } } else { - if (metricType === InsightType.TRENDS && experiment.parameters?.custom_exposure_filter) { - const trendResults = experimentResults as _TrendsExperimentResults - const queryFilters = { - ...trendResults.exposure_filters, - display: ChartDisplayType.ActionsLineGraphCumulative, - } as _TrendsExperimentResults['exposure_filters'] - query = queryFromFilters(transformResultFilters(queryFilters)) - } else { - query = { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: experiment.start_date, 
- date_to: experiment.end_date, - }, - interval: 'day', - trendsFilter: { - display: ChartDisplayType.ActionsLineGraphCumulative, - showLegend: false, - smoothingIntervals: 1, - }, - series: [ - { - kind: NodeKind.EventsNode, - event: - metricType === InsightType.TRENDS - ? '$feature_flag_called' - : experiment.filters?.events?.[0]?.name, - math: BaseMathType.UniqueUsers, - properties: [ - { - key: `$feature/${experiment.feature_flag_key}`, - value: variants, - operator: PropertyOperator.Exact, - type: PropertyFilterType.Event, - }, - ], - }, - ], - breakdownFilter: { - breakdown: `$feature/${experiment.feature_flag_key}`, - breakdown_type: 'event', + query = { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + dateRange: { + date_from: experiment.start_date, + date_to: experiment.end_date, + }, + interval: 'day', + trendsFilter: { + display: ChartDisplayType.ActionsLineGraphCumulative, + showLegend: false, + smoothingIntervals: 1, + }, + series: [ + { + kind: NodeKind.EventsNode, + event: experiment.filters?.events?.[0]?.name, + math: BaseMathType.UniqueUsers, + properties: [ + { + key: `$feature/${experiment.feature_flag_key}`, + value: variants, + operator: PropertyOperator.Exact, + type: PropertyFilterType.Event, + }, + ], }, + ], + breakdownFilter: { + breakdown: `$feature/${experiment.feature_flag_key}`, + breakdown_type: 'event', }, - } + }, } } @@ -139,7 +80,7 @@ export function CumulativeExposuresChart(): JSX.Element { {experiment.start_date ? ( ), + ...query, showTable: true, }} setQuery={() => {}} diff --git a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx index 95938242c143d..e7797f03de7ba 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx @@ -3,10 +3,10 @@ import { LemonBanner, LemonInput, Link, Tooltip } from '@posthog/lemon-ui' import { BindLogic, useActions, useValues } from 'kea' import { LemonSlider } from 'lib/lemon-ui/LemonSlider' import { humanFriendlyNumber } from 'lib/utils' -import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { insightLogic } from 'scenes/insights/insightLogic' import { Query } from '~/queries/Query/Query' +import { ExperimentFunnelsQuery, ExperimentTrendsQuery, NodeKind } from '~/queries/schema' import { ExperimentIdType, InsightType } from '~/types' import { MetricInsightId } from '../constants' @@ -120,7 +120,16 @@ export function DataCollectionCalculator({ experimentId }: ExperimentCalculatorP syncWithUrl: false, }) const { insightProps } = useValues(insightLogicInstance) - const { query } = useValues(insightDataLogic(insightProps)) + let query = null + if (experiment.metrics.length > 0) { + query = { + kind: NodeKind.InsightVizNode, + source: + metricType === InsightType.FUNNELS + ? 
(experiment.metrics[0] as ExperimentFunnelsQuery).funnels_query + : (experiment.metrics[0] as ExperimentTrendsQuery).count_query, + } + } const funnelConversionRate = conversionMetrics?.totalRate * 100 || 0 diff --git a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx index 30d6b93ea8278..5ebf192769a2d 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx @@ -64,7 +64,12 @@ export function DistributionModal({ experimentId }: { experimentId: Experiment[' { saveSidebarExperimentFeatureFlag(featureFlag) - updateExperiment({ holdout_id: experiment.holdout_id }) + updateExperiment({ + holdout_id: experiment.holdout_id, + parameters: { + feature_flag_variants: featureFlag?.filters?.multivariate?.variants ?? [], + }, + }) closeDistributionModal() }} type="primary" diff --git a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx index 867d82bc83a37..59766e480529f 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx @@ -250,6 +250,12 @@ export function Goal(): JSX.Element { const [isModalOpen, setIsModalOpen] = useState(false) const metricType = getMetricType(0) + // :FLAG: CLEAN UP AFTER MIGRATION + const isDataWarehouseMetric = + featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] && + metricType === InsightType.TRENDS && + (experiment.metrics[0] as ExperimentTrendsQuery).count_query?.series[0].kind === NodeKind.DataWarehouseNode + return (
    @@ -322,16 +328,18 @@ export function Goal(): JSX.Element { Change goal
    - {metricType === InsightType.TRENDS && !experimentMathAggregationForTrends() && ( - <> - -
    -
    - + {metricType === InsightType.TRENDS && + !experimentMathAggregationForTrends() && + !isDataWarehouseMetric && ( + <> + +
    +
    + +
    -
    - - )} + + )}
    )} = [ { @@ -149,16 +136,18 @@ export function SecondaryMetricsTable({ experimentId }: { experimentId: Experime ), render: function Key(_, item: TabularSecondaryMetricResults): JSX.Element { const { variant } = item - return
-                    return <div>{targetResults ? countDataForVariant(targetResults, variant) : '—'}</div>
+                    const count = targetResults ? countDataForVariant(targetResults, variant) : null
+                    return <div>{count === null ? '—' : humanFriendlyNumber(count)}</div>
                 },
             },
             {
                 title: 'Exposure',
                 render: function Key(_, item: TabularSecondaryMetricResults): JSX.Element {
                     const { variant } = item
-                    return (
-                        <div>{targetResults ? exposureCountDataForVariant(targetResults, variant) : '—'}</div>
-                    )
+                    const exposureCount = targetResults
+                        ? exposureCountDataForVariant(targetResults, variant)
+                        : null
+                    return <div>{exposureCount === null ? '—' : humanFriendlyNumber(exposureCount)}</div>
                 },
             },
             {
@@ -337,7 +326,7 @@ const AddSecondaryMetricButton = ({
     metrics: any
     openEditModal: (metricIdx: number) => void
 }): JSX.Element => {
-    const { experiment, featureFlags } = useValues(experimentLogic({ experimentId }))
+    const { experiment } = useValues(experimentLogic({ experimentId }))
     const { setExperiment } = useActions(experimentLogic({ experimentId }))
     return (
             {
-                // :FLAG: CLEAN UP AFTER MIGRATION
-                if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) {
-                    const newMetricsSecondary = [...experiment.metrics_secondary, getDefaultFunnelsMetric()]
-                    setExperiment({
-                        metrics_secondary: newMetricsSecondary,
-                    })
-                    openEditModal(newMetricsSecondary.length - 1)
-                } else {
-                    const newSecondaryMetrics = [
-                        ...experiment.secondary_metrics,
-                        {
-                            name: '',
-                            filters: getDefaultFilters(InsightType.FUNNELS, undefined),
-                        },
-                    ]
-                    setExperiment({
-                        secondary_metrics: newSecondaryMetrics,
-                    })
-                    openEditModal(newSecondaryMetrics.length - 1)
-                }
+                const newMetricsSecondary = [...experiment.metrics_secondary, getDefaultFunnelsMetric()]
+                setExperiment({
+                    metrics_secondary: newMetricsSecondary,
+                })
+                openEditModal(newMetricsSecondary.length - 1)
             }}
             disabledReason={
                 metrics.length >= MAX_SECONDARY_METRICS
diff --git a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx
index b859dae72e071..4ba16ded0e86c 100644
--- a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx
+++ b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx
@@ -323,6 +323,13 @@ export function SummaryTable(): JSX.Element {
                 ],
             },
         ]
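+        // For funnel experiments, also match on the funnel's first step, whether it
+        // is defined as an event or as an action.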
+        if (experiment.filters.insight === InsightType.FUNNELS) {
+            if (experiment.filters?.events?.[0]) {
+                filters.push(experiment.filters.events[0])
+            } else if (experiment.filters?.actions?.[0]) {
+                filters.push(experiment.filters.actions[0])
+            }
+        }
         const filterGroup: Partial = {
             filter_group: {
                 type: FilterLogicalOperator.And,
diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx
index c24376ac7e67c..df8580fee68dd 100644
--- a/frontend/src/scenes/experiments/ExperimentView/components.tsx
+++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx
@@ -493,44 +493,38 @@ export function PageHeaderCustom(): JSX.Element {
                     )}
                 {experiment && isExperimentRunning && (
-                    {!isExperimentStopped && !experiment.archived && (
-                        <>
-
-
-                                        exposureCohortId ? undefined : createExposureCohort()
-                                    }
-                                    fullWidth
-                                    data-attr={`${
-                                        exposureCohortId ? 'view' : 'create'
-                                    }-exposure-cohort`}
-                                    to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined}
-                                    targetBlank={!!exposureCohortId}
-                                >
-                                    {exposureCohortId ? 'View' : 'Create'} exposure cohort
-
-                                    loadExperimentResults(true)}
-                                    fullWidth
-                                    data-attr="refresh-experiment"
-                                >
-                                    Refresh experiment results
-
-                                    loadSecondaryMetricResults(true)}
-                                    fullWidth
-                                    data-attr="refresh-secondary-metrics"
-                                >
-                                    Refresh secondary metrics
-
-
-                            }
-                        />
-
-
-                    )}
+                    <>
+
+                            (exposureCohortId ? undefined : createExposureCohort())}
+                            fullWidth
+                            data-attr={`${exposureCohortId ? 'view' : 'create'}-exposure-cohort`}
+                            to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined}
+                            targetBlank={!!exposureCohortId}
+                        >
+                            {exposureCohortId ? 'View' : 'Create'} exposure cohort
+
+                            loadExperimentResults(true)}
+                            fullWidth
+                            data-attr="refresh-experiment"
+                        >
+                            Refresh experiment results
+
+                            loadSecondaryMetricResults(true)}
+                            fullWidth
+                            data-attr="refresh-secondary-metrics"
+                        >
+                            Refresh secondary metrics
+
+
+                    }
+                />
+
                 {!experiment.end_date && (
@@ -107,7 +114,7 @@ export function PrimaryGoalFunnels(): JSX.Element {
                 seriesIndicatorType="numeric"
                 sortable={true}
                 showNestedArrow={true}
-                {...commonActionFilterProps}
+                {...actionFilterProps}
             />
([
        'reportExperimentReleaseConditionsViewed',
        'reportExperimentHoldoutAssigned',
    ],
+            teamLogic,
+            ['addProductIntent'],
        ],
    })),
    actions({
@@ -534,7 +538,10 @@
                     },
                     ...(!draft && { start_date: dayjs() }),
                 })
-                response && actions.reportExperimentCreated(response)
+                if (response) {
+                    actions.reportExperimentCreated(response)
+                    actions.addProductIntent({ product_type: ProductKey.EXPERIMENTS })
+                }
             }
         } catch (error: any) {
             lemonToast.error(error.detail || 'Failed to create experiment')
@@ -822,6 +829,12 @@
                 ...values.experiment.metrics[0],
                 experiment_id: values.experimentId,
             }
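+            // Trends metrics opt into stats engine v2 when the EXPERIMENT_STATS_V2
+            // feature flag is enabled; funnel metrics keep the default engine.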
+            if (
+                queryWithExperimentId.kind === NodeKind.ExperimentTrendsQuery &&
+                values.featureFlags[FEATURE_FLAGS.EXPERIMENT_STATS_V2]
+            ) {
+                queryWithExperimentId.stats_version = 2
+            }
 
             const response = await performQuery(queryWithExperimentId, undefined, refresh)
diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx
index f673e27c4b401..d7e2ad01c9133 100644
--- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx
+++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx
@@ -70,6 +70,7 @@ import FeatureFlagProjects from './FeatureFlagProjects'
 import { FeatureFlagReleaseConditions } from './FeatureFlagReleaseConditions'
 import FeatureFlagSchedule from './FeatureFlagSchedule'
 import { featureFlagsLogic, FeatureFlagsTab } from './featureFlagsLogic'
+import { FeatureFlagStatusIndicator } from './FeatureFlagStatusIndicator'
 import { RecentFeatureFlagInsights } from './RecentFeatureFlagInsightsCard'
 
 export const scene: SceneExport = {
@@ -382,7 +383,8 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element {
                             If your feature flag is applied before identifying the user, use this to ensure
                             that the flag value remains consistent for the same user.
-                            Depending on your setup, this option might not always be suitable.{' '}
+                            Depending on your setup, this option might not always be suitable. This
+                            feature requires creating profiles for anonymous users.{' '}
                 ) : (
-                        {
-                            LemonDialog.open({
-                                title: `${newValue === true ? 'Enable' : 'Disable'} this flag?`,
-                                description: `This flag will be immediately ${
-                                    newValue === true ? 'rolled out to' : 'rolled back from'
-                                } the users matching the release conditions.`,
-                                primaryButton: {
-                                    children: 'Confirm',
-                                    type: 'primary',
-                                    onClick: () => {
-                                        const updatedFlag = { ...featureFlag, active: newValue }
-                                        setFeatureFlag(updatedFlag)
-                                        saveFeatureFlag(updatedFlag)
+
+                            {
+                                LemonDialog.open({
+                                    title: `${newValue === true ? 'Enable' : 'Disable'} this flag?`,
+                                    description: `This flag will be immediately ${
+                                        newValue === true ? 'rolled out to' : 'rolled back from'
+                                    } the users matching the release conditions.`,
+                                    primaryButton: {
+                                        children: 'Confirm',
+                                        type: 'primary',
+                                        onClick: () => {
+                                            const updatedFlag = { ...featureFlag, active: newValue }
+                                            setFeatureFlag(updatedFlag)
+                                            saveFeatureFlag(updatedFlag)
+                                        },
+                                        size: 'small',
                                     },
-                                size: 'small',
-                            },
-                            secondaryButton: {
-                                children: 'Cancel',
-                                type: 'tertiary',
-                                size: 'small',
-                            },
-                        })
-                    }}
-                    label="Enabled"
-                    disabledReason={
-                        !featureFlag.can_edit
-                            ? "You only have view access to this feature flag. To make changes, contact the flag's creator."
-                            : null
-                    }
-                    checked={featureFlag.active}
-                />
+                                    secondaryButton: {
+                                        children: 'Cancel',
+                                        type: 'tertiary',
+                                        size: 'small',
+                                    },
+                                })
+                            }}
+                            label="Enabled"
+                            disabledReason={
+                                !featureFlag.can_edit
+                                    ? "You only have view access to this feature flag. To make changes, contact the flag's creator."
+                                    : null
+                            }
+                            checked={featureFlag.active}
+                        />
+
+
             )}
diff --git a/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx b/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx
index 0c5bc5df8edff..2878e8887e4c8 100644
--- a/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx
+++ b/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx
@@ -29,6 +29,7 @@ const REGULAR_FEATURE_FLAG: FeatureFlagType = {
     rollback_conditions: [],
     performed_rollback: false,
     can_edit: true,
+    user_access_level: 'editor',
     tags: [],
     surveys: [],
 }
diff --git a/frontend/src/scenes/feature-flags/FeatureFlagSnippets.tsx b/frontend/src/scenes/feature-flags/FeatureFlagSnippets.tsx
index dd8ebd131cefb..384283704aa07 100644
--- a/frontend/src/scenes/feature-flags/FeatureFlagSnippets.tsx
+++ b/frontend/src/scenes/feature-flags/FeatureFlagSnippets.tsx
@@ -365,7 +365,7 @@ export function FlutterSnippet({ flagKey, multivariant, payload }: FeatureFlagSn
     return (
             {`if (${clientSuffix}${flagFunction}('${flagKey}')${variantSuffix}) {
-    // do something
+  // do something
 }
 `}
diff --git a/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx b/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx
new file mode 100644
index 0000000000000..7b2ee5fc7fa14
--- /dev/null
+++ b/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx
@@ -0,0 +1,45 @@
+import { LemonTag } from 'lib/lemon-ui/LemonTag'
+import { Tooltip } from 'lib/lemon-ui/Tooltip'
+
+import { FeatureFlagStatus, FeatureFlagStatusResponse } from '~/types'
+
+export function FeatureFlagStatusIndicator({
+    flagStatus,
+}: {
+    flagStatus: FeatureFlagStatusResponse | null
+}): JSX.Element | null {
+    if (
+        !flagStatus ||
+        [
+            FeatureFlagStatus.ACTIVE,
+            FeatureFlagStatus.INACTIVE,
+            FeatureFlagStatus.DELETED,
+            FeatureFlagStatus.UNKNOWN,
+        ].includes(flagStatus.status)
+    ) {
+        return null
+    }
+
+    return (
+
+
+                {flagStatus.reason}
+
+                    {flagStatus.status === FeatureFlagStatus.STALE &&
+                        'Make sure to remove any references to this flag in your code before deleting it.'}
+                    {flagStatus.status === FeatureFlagStatus.INACTIVE &&
+                        'It is probably not being used in your code, but be sure to remove any references to this flag before deleting it.'}
+
+
+            }
+            placement="right"
+        >
+
+
+                {flagStatus.status}
+
+
+    )
+}
diff --git a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx
index acf32b9788ed5..b929e2d203f19 100644
--- a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx
+++ b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx
@@ -9,6 +9,7 @@ import { mswDecorator } from '~/mocks/browser'
 import featureFlags from './__mocks__/feature_flags.json'
 
 const meta: Meta = {
+    tags: ['ff'],
     title: 'Scenes-App/Feature Flags',
     parameters: {
         layout: 'fullscreen',
@@ -33,6 +34,13 @@
                 200,
                 featureFlags.results.find((r) => r.id === Number(req.params['flagId'])),
             ],
+            '/api/projects/:team_id/feature_flags/:flagId/status': () => [
+                200,
+                {
+                    status: 'active',
+                    reason: 'Feature flag is active',
+                },
+            ],
         },
         post: {
             '/api/environments/:team_id/query': {},
diff --git a/frontend/src/scenes/feature-flags/FeatureManagement.tsx b/frontend/src/scenes/feature-flags/FeatureManagement.tsx
new file mode 100644
index 0000000000000..d2d67c7286886
--- /dev/null
+++ b/frontend/src/scenes/feature-flags/FeatureManagement.tsx
@@ -0,0 +1,38 @@
+import { useActions, useValues } from 'kea'
+import { LemonButton } from 'lib/lemon-ui/LemonButton'
+import { SceneExport } from 'scenes/sceneTypes'
+
+import { FeatureManagementDetail } from './FeatureManagementDetail'
+import { featureManagementLogic } from './featureManagementLogic'
+
+export const scene: SceneExport = {
+    component: FeatureManagement,
+    logic: featureManagementLogic,
+}
+
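+// The scene pairs a feature list with a detail pane: clicking a feature sets
+// activeFeatureId, and FeatureManagementDetail renders whatever is selected.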
+export function FeatureManagement(): JSX.Element {
+    const { activeFeatureId, features } = useValues(featureManagementLogic)
+    const { setActiveFeatureId } = useActions(featureManagementLogic)
+
+    return (
+
+
+            {features.results.map((feature) => (
+
+                    setActiveFeatureId(feature.id)}
+                    size="small"
+                    fullWidth
+                    active={activeFeatureId === feature.id}
+                >
+                    {feature.name}
+
+
+            ))}
+
+
+
+
+    )
+}
diff --git a/frontend/src/scenes/feature-flags/FeatureManagementDetail.tsx b/frontend/src/scenes/feature-flags/FeatureManagementDetail.tsx
new file mode 100644
index 0000000000000..73c5776094983
--- /dev/null
+++ b/frontend/src/scenes/feature-flags/FeatureManagementDetail.tsx
@@ -0,0 +1,86 @@
+import { LemonSkeleton } from '@posthog/lemon-ui'
+import { useValues } from 'kea'
+
+import { featureManagementDetailLogic } from './featureManagementDetailLogic'
+
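+// Each detail section below is a placeholder for now: a heading plus skeleton
+// rows (LemonSkeleton) until the real content lands.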
+function Metadata(): JSX.Element {
+    return (
+
+            Metadata
+
+
+
+    )
+}
+
+function Rollout(): JSX.Element {
+    return (
+
+            Rollout
+
+
+
+    )
+}
+
+function Usage(): JSX.Element {
+    return (
+
+            Usage
+
+
+
+    )
+}
+
+function Activity(): JSX.Element {
+    return (
+
+            Activity
+
+
+
+    )
+}
+
+function History(): JSX.Element {
+    return (
+
+            History
+
+
+
+    )
+}
+
+function Permissions(): JSX.Element {
+    return (
+
+            Permissions
+
+
+
+    )
+}
+
+export function FeatureManagementDetail(): JSX.Element {
+    const { activeFeature } = useValues(featureManagementDetailLogic)
+
+    return (
+
+            {activeFeature?.name}
+
+
+
+
+
+
+
+    )
+}
diff --git a/frontend/src/scenes/feature-flags/activityDescriptions.tsx b/frontend/src/scenes/feature-flags/activityDescriptions.tsx
index 93fec0692b0c3..a85f73cde21ad 100644
--- a/frontend/src/scenes/feature-flags/activityDescriptions.tsx
+++ b/frontend/src/scenes/feature-flags/activityDescriptions.tsx
@@ -252,6 +252,7 @@ const featureFlagActionsMapping: Record<
     analytics_dashboards: () => null,
     has_enriched_analytics: () => null,
     surveys: () => null,
+    user_access_level: () => null,
 }
 
 export function flagActivityDescriber(logItem: ActivityLogItem, asNotification?: boolean): HumanizedChange {
diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts
index 48889df0f3d63..978348e795149 100644
--- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts
+++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts
@@ -33,6 +33,7 @@ import {
     EarlyAccessFeatureType,
     FeatureFlagGroupType,
     FeatureFlagRollbackConditions,
+    FeatureFlagStatusResponse,
     FeatureFlagType,
     FilterLogicalOperator,
     FilterType,
@@ -43,6 +44,7 @@ import {
     MultivariateFlagVariant,
     NewEarlyAccessFeatureType,
    OrganizationFeatureFlag,
+    ProductKey,
     PropertyFilterType,
     PropertyOperator,
     QueryBasedInsightModel,
@@ -96,6 +98,7 @@ const NEW_FLAG: FeatureFlagType = {
     surveys: null,
     performed_rollback: false,
     can_edit: true,
+    user_access_level: 'editor',
     tags: [],
 }
 const NEW_VARIANT = {
@@ -268,6 +271,8 @@ export const featureFlagLogic = kea([
             ['updateFlag', 'deleteFlag'],
             sidePanelStateLogic,
             ['closeSidePanel'],
+            teamLogic,
+            ['addProductIntent'],
         ],
     })),
     actions({
@@ -561,6 +566,7 @@
                 if (values.roleBasedAccessEnabled && savedFlag.id) {
                     featureFlagPermissionsLogic({ flagId: null })?.actions.addAssociatedRoles(savedFlag.id)
                 }
+                actions.addProductIntent({ product_type: ProductKey.FEATURE_FLAGS })
             } else {
                 savedFlag = await api.update(
                     `api/projects/${values.currentProjectId}/feature_flags/${updatedFlag.id}`,
@@ -750,6 +756,18 @@
                 }
             },
         },
+        flagStatus: [
+            null as FeatureFlagStatusResponse | null,
+            {
+                loadFeatureFlagStatus: () => {
+                    const { currentTeamId } = values
+                    if (currentTeamId && props.id && props.id !== 'new' && props.id !== 'link') {
+                        return api.featureFlags.getStatus(currentTeamId, props.id)
+                    }
+                    return null
+                },
+            },
+        ],
     })),
     listeners(({ actions, values, props }) => ({
         submitNewDashboardSuccessWithResult: async ({ result }) => {
@@ -1035,8 +1053,10 @@
             actions.setFeatureFlag(formatPayloadsWithFlag)
             actions.loadRelatedInsights()
             actions.loadAllInsightsForFlag()
+            actions.loadFeatureFlagStatus()
         } else if (props.id !== 'new') {
             actions.loadFeatureFlag()
+            actions.loadFeatureFlagStatus()
         }
     }),
 ])
diff --git a/frontend/src/scenes/feature-flags/featureManagementDetailLogic.ts b/frontend/src/scenes/feature-flags/featureManagementDetailLogic.ts
new file mode 100644
index 0000000000000..7389893c32860
--- /dev/null
+++ b/frontend/src/scenes/feature-flags/featureManagementDetailLogic.ts
@@ -0,0 +1,13 @@
+import { connect, kea, path, props } from 'kea'
+import { teamLogic } from 'scenes/teamLogic'
+
+import type { featureManagementDetailLogicType } from './featureManagementDetailLogicType'
+import { featureManagementLogic } from './featureManagementLogic'
+
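+// Deliberately thin: this logic owns no state of its own and only composes
+// currentTeamId plus the active feature selection from featureManagementLogic.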
+export const featureManagementDetailLogic = kea<featureManagementDetailLogicType>([
+    props({}),
+    path(['scenes', 'features', 'featureManagementDetailLogic']),
+    connect({
+        values: [teamLogic, ['currentTeamId'], featureManagementLogic, ['activeFeatureId', 'activeFeature']],
+    }),
+])
diff --git a/frontend/src/scenes/feature-flags/featureManagementLogic.ts b/frontend/src/scenes/feature-flags/featureManagementLogic.ts
new file mode 100644
index 0000000000000..b6de6f8b79bcf
--- /dev/null
+++ b/frontend/src/scenes/feature-flags/featureManagementLogic.ts
@@ -0,0 +1,101 @@
+import { actions, afterMount, connect, kea, listeners, path, props, reducers, selectors } from 'kea'
+import { loaders } from 'kea-loaders'
+import { actionToUrl, urlToAction } from 'kea-router'
+import api from 'lib/api'
+import { Scene } from 'scenes/sceneTypes'
+import { teamLogic } from 'scenes/teamLogic'
+import { urls } from 'scenes/urls'
+
+import { Breadcrumb, Feature } from '~/types'
+
+import type { featureManagementLogicType } from './featureManagementLogicType'
+
+export interface FeatureManagementLogicProps {
+    id?: Feature['id']
+}
+export interface FeaturesResult {
+    results: Feature[]
+    count: number
+    next?: string | null
+    previous?: string | null
+}
+
+export const featureManagementLogic = kea<featureManagementLogicType>([
+    props({} as FeatureManagementLogicProps),
+    path(['scenes', 'features', 'featureManagementLogic']),
+    connect({
+        values: [teamLogic, ['currentTeamId']],
+    }),
+    actions({
+        setActiveFeatureId: (activeFeatureId: Feature['id']) => ({ activeFeatureId }),
+    }),
+    reducers({
+        activeFeatureId: [
+            null as Feature['id'] | null,
+            {
+                setActiveFeatureId: (_, { activeFeatureId }) => activeFeatureId,
+            },
+        ],
+    }),
+    loaders(({ values }) => ({
+        features: [
+            { results: [], count: 0, offset: 0 } as FeaturesResult,
+            {
+                loadFeatures: async () => {
+                    const response = await api.get(`api/projects/${values.currentTeamId}/features`)
+                    return response.data as FeaturesResult
+                },
+            },
+        ],
+    })),
+    selectors({
+        activeFeature: [
+            (s) => [s.activeFeatureId, s.features],
+            (activeFeatureId, features) => features.results.find((feature) => feature.id === activeFeatureId) || null,
+        ],
+        breadcrumbs: [
+            (s) => [s.activeFeatureId, s.activeFeature],
+            (activeFeatureId, activeFeature): Breadcrumb[] => {
+                const breadcrumbs: Breadcrumb[] = [
+                    {
+                        key: Scene.FeatureManagement,
+                        name: 'Features',
+                        path: urls.featureManagement(),
+                    },
+                ]
+
+                if (activeFeatureId) {
+                    breadcrumbs.push({
+                        key: [Scene.FeatureManagement, activeFeatureId],
+                        name: activeFeature?.name ?? 'Feature',
+                        path: urls.featureManagement(String(activeFeatureId)),
+                    })
+                }
+
+                return breadcrumbs
+            },
+        ],
+    }),
+    listeners(({ actions, values }) => ({
+        loadFeaturesSuccess: ({ features }) => {
+            if (values.activeFeatureId === null && features.results.length > 0) {
+                actions.setActiveFeatureId(features.results[0].id)
+            }
+        },
+    })),
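+    // URL sync sketch (illustrative): selecting a feature fires setActiveFeatureId,
+    // which actionToUrl below maps to /features/:id; loading that URL directly goes
+    // through urlToAction and re-selects the same feature.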
+    actionToUrl({
+        setActiveFeatureId: ({ activeFeatureId }) => {
+            return urls.featureManagement(activeFeatureId)
+        },
+    }),
+    urlToAction(({ actions, values }) => ({
+        '/features/:id': ({ id }) => {
+            if (id && String(values.activeFeatureId) !== id) {
+                actions.setActiveFeatureId(Number(id))
+            }
+        },
+    })),
+    afterMount(({ actions }) => {
+        actions.loadFeatures()
+    }),
+])
diff --git a/frontend/src/scenes/funnels/FunnelHistogram.tsx b/frontend/src/scenes/funnels/FunnelHistogram.tsx
index 1709b0eb514b4..39a157b7b878e 100644
--- a/frontend/src/scenes/funnels/FunnelHistogram.tsx
+++ b/frontend/src/scenes/funnels/FunnelHistogram.tsx
@@ -29,6 +29,7 @@ export function FunnelHistogram(): JSX.Element | null {
+
([
     if (
         // TODO: Ideally we don't check filters anymore, but tests are still using this
         insightData?.filters?.insight !== InsightType.FUNNELS &&
+        querySource &&
         querySource?.kind !== NodeKind.FunnelsQuery
     ) {
         return []
@@ -275,6 +276,7 @@ export const funnelDataLogic = kea([
         if (
             // TODO: Ideally we don't check filters anymore, but tests are still using this
             insightData?.filters?.insight !== InsightType.FUNNELS &&
+            querySource &&
             querySource?.kind !== NodeKind.FunnelsQuery
         ) {
             return false
diff --git a/frontend/src/scenes/groups/Group.tsx b/frontend/src/scenes/groups/Group.tsx
index ccd14aec76ce2..0aaf07f9a9cde 100644
--- a/frontend/src/scenes/groups/Group.tsx
+++ b/frontend/src/scenes/groups/Group.tsx
@@ -6,11 +6,13 @@
 import { PageHeader } from 'lib/components/PageHeader'
 import { PropertiesTable } from 'lib/components/PropertiesTable'
 import { TZLabel } from 'lib/components/TZLabel'
 import { isEventFilter } from 'lib/components/UniversalFilters/utils'
+import { FEATURE_FLAGS } from 'lib/constants'
 import { LemonBanner } from 'lib/lemon-ui/LemonBanner'
 import { LemonTabs } from 'lib/lemon-ui/LemonTabs'
 import { lemonToast } from 'lib/lemon-ui/LemonToast'
 import { Link } from 'lib/lemon-ui/Link'
 import { Spinner, SpinnerOverlay } from 'lib/lemon-ui/Spinner/Spinner'
+import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
 import { GroupDashboard } from 'scenes/groups/GroupDashboard'
 import { groupLogic, GroupLogicProps } from 'scenes/groups/groupLogic'
 import { RelatedGroups } from 'scenes/groups/RelatedGroups'
@@ -86,11 +88,14 @@ export function Group(): JSX.Element {
     const { groupKey, groupTypeIndex } = logicProps
     const { setGroupEventsQuery } = useActions(groupLogic)
     const { currentTeam } = useValues(teamLogic)
+    const { featureFlags } = useValues(featureFlagLogic)
 
     if (!groupData || !groupType) {
         return groupDataLoading ? :
     }
 
+    const settingLevel = featureFlags[FEATURE_FLAGS.ENVIRONMENTS] ? 'environment' : 'project'
+
     return (
         <>
-                    Session recordings are currently disabled for this project. To use this
-                    feature, please go to your{' '}
+                    Session recordings are currently disabled for this {settingLevel}. To use
+                    this feature, please go to your{' '}
                     project settings{' '}
                     and enable it.
diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss b/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss
index ddae1edbb3113..de2ede18a2fb8 100644
--- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss
+++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss
@@ -38,7 +38,9 @@
     }
 
     h2 {
+        width: 100%;
         text-align: center;
+        word-wrap: break-word;
     }
 
     ol {
diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx
index 06f0928dc54f3..da29f417bf1f3 100644
--- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx
+++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx
@@ -31,7 +31,7 @@ import { urls } from 'scenes/urls'
 
 import { actionsAndEventsToSeries } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode'
 import { seriesToActionsAndEvents } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter'
-import { FunnelsQuery, Node } from '~/queries/schema'
+import { FunnelsQuery, Node, QueryStatus } from '~/queries/schema'
 import { FilterType, InsightLogicProps, SavedInsightsTabs } from '~/types'
 
 import { samplingFilterLogic } from '../EditorFilters/samplingFilterLogic'
@@ -82,25 +82,22 @@ function humanFileSize(size: number): string {
     return (+(size / Math.pow(1024, i))).toFixed(2) + ' ' + ['B', 'kB', 'MB', 'GB', 'TB'][i]
 }
 
-export function InsightLoadingState({
+export function StatelessInsightLoadingState({
     queryId,
-    insightProps,
+    pollResponse,
+    suggestion,
 }: {
     queryId?: string | null
-    insightProps: InsightLogicProps
+    pollResponse?: Record<string, any> | null
+    suggestion?: JSX.Element
 }): JSX.Element {
-    const { suggestedSamplingPercentage, samplingPercentage } = useValues(samplingFilterLogic(insightProps))
-    const { insightPollResponse } = useValues(insightDataLogic(insightProps))
-
-    const { currentTeam } = useValues(teamLogic)
-
     const [rowsRead, setRowsRead] = useState(0)
     const [bytesRead, setBytesRead] = useState(0)
     const [secondsElapsed, setSecondsElapsed] = useState(0)
 
     useEffect(() => {
-        const status = insightPollResponse?.status?.query_progress
-        const previousStatus = insightPollResponse?.previousStatus?.query_progress
+        const status = pollResponse?.status?.query_progress
+        const previousStatus = pollResponse?.previousStatus?.query_progress
         setRowsRead(previousStatus?.rows_read || 0)
         setBytesRead(previousStatus?.bytes_read || 0)
         const interval = setInterval(() => {
@@ -113,21 +110,21 @@
                 return Math.min(bytesRead + diff / 30, status?.bytes_read || 0)
             })
             setSecondsElapsed(() => {
-                return dayjs().diff(dayjs(insightPollResponse?.status?.start_time), 'milliseconds')
+                return dayjs().diff(dayjs(pollResponse?.status?.start_time), 'milliseconds')
             })
         }, 100)
 
         return () => clearInterval(interval)
-    }, [insightPollResponse])
+    }, [pollResponse])
+
     const bytesPerSecond = (bytesRead / (secondsElapsed || 1)) * 1000
-    const estimatedRows = insightPollResponse?.status?.query_progress?.estimated_rows_total
+    const estimatedRows = pollResponse?.status?.query_progress?.estimated_rows_total
     const cpuUtilization =
-        (insightPollResponse?.status?.query_progress?.active_cpu_time || 0) /
-        (insightPollResponse?.status?.query_progress?.time_elapsed || 1) /
+        (pollResponse?.status?.query_progress?.active_cpu_time || 0) /
+        (pollResponse?.status?.query_progress?.time_elapsed || 1) /
         10000
-        currentTeam?.modifiers?.personsOnEventsMode ?? currentTeam?.default_modifiers?.personsOnEventsMode ?? 'disabled'
 
     return (
@@ -148,37 +145,14 @@ export function InsightLoadingState({
                 )}
 
-
-            {currentTeam?.modifiers?.personsOnEventsMode === 'person_id_override_properties_joined' ? (
-                <>
-
-
-                        You can speed this query up by changing the{' '}
-                        person properties mode setting.
-
-
-            ) : (
-                <>
-
-
-                    {suggestedSamplingPercentage && !samplingPercentage ? (
-
-                            Need to speed things up? Try reducing the date range, removing breakdowns, or
-                            turning on .
-
-                    ) : suggestedSamplingPercentage && samplingPercentage ? (
-                        <>
-                            Still waiting around? You must have lots of data! Kick it up a notch with{' '}
-                            . Or try reducing the date range and
-                            removing breakdowns.
-
-                    ) : (
-                        <>Need to speed things up? Try reducing the date range or removing breakdowns.
-                    )}
-
-
-            )}
+            {suggestion ? (
+                suggestion
+            ) : (
+
+
+                        Need to speed things up? Try reducing the date range.
+
+
+            )}
             {queryId ? (
                     Query ID: {queryId}
@@ -189,6 +163,66 @@
     )
 }
 
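+// Insight-aware wrapper around StatelessInsightLoadingState: it supplies the
+// insight's poll response and builds a speed-up suggestion from sampling state
+// and the team's persons-on-events mode.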
+export function InsightLoadingState({
+    queryId,
+    insightProps,
+}: {
+    queryId?: string | null
+    insightProps: InsightLogicProps
+}): JSX.Element {
+    const { suggestedSamplingPercentage, samplingPercentage } = useValues(samplingFilterLogic(insightProps))
+    const { insightPollResponse } = useValues(insightDataLogic(insightProps))
+    const { currentTeam } = useValues(teamLogic)
+
+    const personsOnEventsMode =
+        currentTeam?.modifiers?.personsOnEventsMode ?? currentTeam?.default_modifiers?.personsOnEventsMode ?? 'disabled'
+
+    return (
+
+
+            {personsOnEventsMode === 'person_id_override_properties_joined' ? (
+                <>
+
+
+                        You can speed this query up by changing the{' '}
+                        person properties mode{' '}
+                        setting.
+
+
+            ) : (
+                <>
+
+
+                    {suggestedSamplingPercentage && !samplingPercentage ? (
+
+                            Need to speed things up? Try reducing the date range, removing breakdowns,
+                            or turning on .
+
+                    ) : suggestedSamplingPercentage && samplingPercentage ? (
+                        <>
+                            Still waiting around? You must have lots of data! Kick it up a notch with{' '}
+                            . Or try reducing the date range
+                            and removing breakdowns.
+
+                    ) : (
+                        <>
+                            Need to speed things up? Try reducing the date range or removing breakdowns.
+
+                    )}
+
+
+            )}
+
+            }
+        />
+
+    )
+}
+
 export function InsightTimeoutState({ queryId }: { queryId?: string | null }): JSX.Element {
     const { openSupportForm } = useActions(supportLogic)
diff --git a/frontend/src/scenes/insights/insightVizDataLogic.ts b/frontend/src/scenes/insights/insightVizDataLogic.ts
index aec4a1eb32ed8..14b0b4cbd393d 100644
--- a/frontend/src/scenes/insights/insightVizDataLogic.ts
+++ b/frontend/src/scenes/insights/insightVizDataLogic.ts
@@ -100,6 +100,7 @@ export const insightVizDataLogic = kea([
         updateDisplay: (display: ChartDisplayType | undefined) => ({ display }),
         updateHiddenLegendIndexes: (hiddenLegendIndexes: number[] | undefined) => ({ hiddenLegendIndexes }),
         setTimedOutQueryId: (id: string | null) => ({ id }),
+        setIsIntervalManuallySet: (isIntervalManuallySet: boolean) => ({ isIntervalManuallySet }),
     }),
 
     reducers({
@@ -109,6 +110,18 @@
                 setTimedOutQueryId: (_, { id }) => id,
             },
         ],
+
+        // Whether the interval has been manually set by the user. If true, prevents auto-adjusting
+        // the interval when the date range changes. Reference: https://github.com/PostHog/posthog/issues/22785
+        isIntervalManuallySet: [
+            false,
+            {
+                updateQuerySource: (state, { querySource }) => {
+                    // If interval is explicitly included in the update, mark it as manually set
+                    return 'interval' in querySource ? true : state
+                },
+                setIsIntervalManuallySet: (_, { isIntervalManuallySet }) => isIntervalManuallySet,
+            },
+        ],
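+        // Example: a user who explicitly switches the interval to "hour" keeps that
+        // choice even when a later date-range change would normally reset it.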
     }),
 
     selectors({
@@ -332,7 +345,7 @@
             // We use 512 for query timeouts
             // Async queries put the error message on data.error_message, while synchronous ones use detail
             return insightDataError?.status === 400 || insightDataError?.status === 512
-                ? (insightDataError.detail || insightDataError.data?.error_message)?.replace('Try ', 'Try ') // Add unbreakable space for better line breaking
                 : null
         },
     ],
@@ -401,7 +414,11 @@
                 ...values.query,
                 source: {
                     ...values.querySource,
-                    ...handleQuerySourceUpdateSideEffects(querySource, values.querySource as InsightQueryNode),
+                    ...handleQuerySourceUpdateSideEffects(
+                        querySource,
+                        values.querySource as InsightQueryNode,
+                        values.isIntervalManuallySet
+                    ),
                 },
             } as Node)
         },
@@ -487,7 +504,8 @@ const getActiveUsersMath = (
 
 const handleQuerySourceUpdateSideEffects = (
     update: QuerySourceUpdate,
-    currentState: InsightQueryNode
+    currentState: InsightQueryNode,
+    isIntervalManuallySet: boolean
 ): QuerySourceUpdate => {
     const mergedUpdate = { ...update } as InsightQueryNode
 
@@ -536,7 +554,8 @@
         update.dateRange &&
         update.dateRange.date_from &&
         (update.dateRange.date_from !== currentState.dateRange?.date_from ||
-            update.dateRange.date_to !== currentState.dateRange?.date_to)
+            update.dateRange.date_to !== currentState.dateRange?.date_to) &&
+        !isIntervalManuallySet // Only auto-adjust interval if not manually set
     ) {
         const { date_from, date_to } = { ...currentState.dateRange, ...update.dateRange }
diff --git a/frontend/src/scenes/instance/SystemStatus/staffUsersLogic.ts b/frontend/src/scenes/instance/SystemStatus/staffUsersLogic.ts
index 51054ab2fa8ac..3ac34a80e733b 100644
--- a/frontend/src/scenes/instance/SystemStatus/staffUsersLogic.ts
+++ b/frontend/src/scenes/instance/SystemStatus/staffUsersLogic.ts
@@ -33,8 +33,7 @@
             actions.setStaffUsersToBeAdded([])
             const newStaffUsers = await Promise.all(
                 staffUsersToBeAdded.map(
-                    async (userUuid) =>
-                        (await api.update(`api/users/${userUuid}`, { is_staff: true })) as UserType
+                    async (userUuid) => await api.update<UserType>(`api/users/${userUuid}`, { is_staff: true })
                 )
             )
             const updatedAllUsers: UserType[] = [
@@ -45,7 +44,7 @@
             return updatedAllUsers
         },
         deleteStaffUser: async ({ userUuid }) => {
-            await api.update(`api/users/${userUuid}`, { is_staff: false })
+            await api.update<UserType>(`api/users/${userUuid}`, { is_staff: false })
             if (values.user?.uuid === userUuid) {
                 actions.loadUser() // Loads the main user object to properly reflect staff user changes
                 router.actions.push(urls.projectHomepage())
diff --git a/frontend/src/scenes/max/Intro.tsx b/frontend/src/scenes/max/Intro.tsx
index c43cd86b53d2a..97f4f9fbfdc56 100644
--- a/frontend/src/scenes/max/Intro.tsx
+++ b/frontend/src/scenes/max/Intro.tsx
@@ -3,6 +3,7 @@
 import { LemonButton, Popover } from '@posthog/lemon-ui'
 import { useActions, useValues } from 'kea'
 import { HedgehogBuddy } from 'lib/components/HedgehogBuddy/HedgehogBuddy'
 import { hedgehogBuddyLogic } from 'lib/components/HedgehogBuddy/hedgehogBuddyLogic'
+import { uuid } from 'lib/utils'
 import { useMemo, useState } from 'react'
 
 import { maxGlobalLogic } from './maxGlobalLogic'
@@ -19,13 +20,13 @@ export function Intro(): JSX.Element {
     const { hedgehogConfig } = useValues(hedgehogBuddyLogic)
     const { acceptDataProcessing } = useActions(maxGlobalLogic)
     const { dataProcessingAccepted } = useValues(maxGlobalLogic)
-    const { sessionId } = useValues(maxLogic)
+    const { conversation } = useValues(maxLogic)
 
     const [hedgehogDirection, setHedgehogDirection] = useState<'left' | 'right'>('right')
 
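+    // The welcome headline is picked deterministically from the conversation id's
+    // last UUID segment; before a conversation exists, a random uuid() varies the pick.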
     const headline = useMemo(() => {
-        return HEADLINES[parseInt(sessionId.split('-').at(-1) as string, 16) % HEADLINES.length]
-    }, [])
+        return HEADLINES[parseInt((conversation?.id || uuid()).split('-').at(-1) as string, 16) % HEADLINES.length]
+    }, [conversation?.id])
 
     return (
         <>
diff --git a/frontend/src/scenes/max/Max.stories.tsx b/frontend/src/scenes/max/Max.stories.tsx
index bec5a519de8e0..51dc03ab0cb5c 100644
--- a/frontend/src/scenes/max/Max.stories.tsx
+++ b/frontend/src/scenes/max/Max.stories.tsx
@@ -6,7 +6,13 @@ import { projectLogic } from 'scenes/projectLogic'
 
 import { mswDecorator, useStorybookMocks } from '~/mocks/browser'
 
-import { chatResponseChunk, failureChunk, generationFailureChunk } from './__mocks__/chatResponse.mocks'
+import {
+    chatResponseChunk,
+    CONVERSATION_ID,
+    failureChunk,
+    generationFailureChunk,
+    humanMessage,
+} from './__mocks__/chatResponse.mocks'
 import { MaxInstance } from './Max'
 import { maxGlobalLogic } from './maxGlobalLogic'
 import { maxLogic } from './maxLogic'
@@ -16,7 +22,7 @@ const meta: Meta = {
     decorators: [
         mswDecorator({
             post: {
-                '/api/environments/:team_id/query/chat/': (_, res, ctx) => res(ctx.text(chatResponseChunk)),
+                '/api/environments/:team_id/conversations/': (_, res, ctx) => res(ctx.text(chatResponseChunk)),
             },
         }),
     ],
@@ -28,10 +34,7 @@
 }
 export default meta
 
-// The session ID is hard-coded here, as it's used for randomizing the welcome headline
-const SESSION_ID = 'b1b4b3b4-1b3b-4b3b-1b3b4b3b4b3b'
-
-const Template = ({ sessionId: SESSION_ID }: { sessionId: string }): JSX.Element => {
+const Template = ({ conversationId: CONVERSATION_ID }: { conversationId: string }): JSX.Element => {
     const { acceptDataProcessing } = useActions(maxGlobalLogic)
 
     useEffect(() => {
        acceptDataProcessing()
    }, [])

    return (
-
+
@@ -69,7 +72,7 @@ export const Welcome: StoryFn = () => {
         acceptDataProcessing(false)
     }, [])
 
-    return