diff --git a/.eslintrc.js b/.eslintrc.js
index b66539acec105..59dae5ce57ff3 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -50,6 +50,7 @@ module.exports = {
         'posthog',
         'simple-import-sort',
         'import',
+        "unused-imports"
     ],
     rules: {
         // PyCharm always adds curly braces, I guess vscode doesn't, PR reviewers often complain they are present on props that don't need them
@@ -73,6 +74,7 @@ module.exports = {
                 html: true,
             },
         ],
+        "unused-imports/no-unused-imports": "error",
         'no-unused-vars': 'off',
         '@typescript-eslint/no-unused-vars': [
             'error',
@@ -290,6 +292,7 @@ module.exports = {
                 ],
             },
         ],
+        'no-else-return': 'warn'
    },
    overrides: [
        {
diff --git a/.github/actions/run-backend-tests/action.yml b/.github/actions/run-backend-tests/action.yml
index 3dc9ef416a095..3092caba27f82 100644
--- a/.github/actions/run-backend-tests/action.yml
+++ b/.github/actions/run-backend-tests/action.yml
@@ -125,7 +125,7 @@ runs:
            touch frontend/dist/index.html
            touch frontend/dist/layout.html
            touch frontend/dist/exporter.html
-            [ ! -f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" | brotli --decompress --output=./share/GeoLite2-City.mmdb )
+            [ ! -f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=./share/GeoLite2-City.mmdb )

    - name: Wait for Clickhouse & Kafka
      shell: bash
diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml
index bd49796749177..8da19852c721b 100644
--- a/.github/workflows/ci-backend.yml
+++ b/.github/workflows/ci-backend.yml
@@ -197,22 +197,33 @@ jobs:
          sudo apt-get update
          sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl

-      - name: Install python dependencies
-        run: |
-          uv pip install --system -r requirements.txt -r requirements-dev.txt
+      # First running migrations from master, to simulate the real-world scenario

-      - uses: actions/checkout@v3
+      - name: Checkout master
+        uses: actions/checkout@v3
        with:
          ref: master

-      - name: Run migrations up to master
+      - name: Install python dependencies for master
        run: |
-          # We need to ensure we have requirements for the master branch
-          # now also, so we can run migrations up to master.
uv pip install --system -r requirements.txt -r requirements-dev.txt + + - name: Run migrations up to master + run: | python manage.py migrate - - uses: actions/checkout@v3 + # Now we can consider this PR's migrations + + - name: Checkout this PR + uses: actions/checkout@v3 + + - name: Install python dependencies for this PR + run: | + uv pip install --system -r requirements.txt -r requirements-dev.txt + + - name: Run migrations for this PR + run: | + python manage.py migrate - name: Check migrations run: | @@ -406,6 +417,7 @@ jobs: echo running_time_run_id=${run_id} >> $GITHUB_ENV echo running_time_run_started_at=${run_started_at} >> $GITHUB_ENV - name: Capture running time to PostHog + if: github.repository == 'PostHog/posthog' uses: PostHog/posthog-github-action@v0.1 with: posthog-token: ${{secrets.POSTHOG_API_TOKEN}} diff --git a/.github/workflows/ci-e2e.yml b/.github/workflows/ci-e2e.yml index 4f4489437ec62..5a50db9f5c981 100644 --- a/.github/workflows/ci-e2e.yml +++ b/.github/workflows/ci-e2e.yml @@ -305,6 +305,7 @@ jobs: echo running_time_run_id=${run_id} >> $GITHUB_ENV echo running_time_run_started_at=${run_started_at} >> $GITHUB_ENV - name: Capture running time to PostHog + if: github.repository == 'PostHog/posthog' uses: PostHog/posthog-github-action@v0.1 with: posthog-token: ${{secrets.POSTHOG_API_TOKEN}} diff --git a/.github/workflows/customer-data-pipeline.yml b/.github/workflows/customer-data-pipeline.yml deleted file mode 100644 index 07bbb6d4346b5..0000000000000 --- a/.github/workflows/customer-data-pipeline.yml +++ /dev/null @@ -1,154 +0,0 @@ -# -# Build and test the Docker image for the CDP service found in the cdp/ -# directory. -# -# This job is triggered by pushes to the master branch and by pull requests that -# touch the cdp/ directory. -# -# Once built we run the functional tests against the running image. - -name: CDP CI - -on: - push: - branches: - - master - paths: - - cdp/** - - .github/workflows/customer-data-pipeline.yml - pull_request: - branches: - - master - paths: - - cdp/** - - .github/workflows/customer-data-pipeline.yml - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: docker/setup-buildx-action@v2 - - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Lowercase GITHUB_REPOSITORY - id: lowercase - run: | - echo "repository=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT" - - - uses: docker/metadata-action@v5 - id: meta - with: - images: ghcr.io/${{ steps.lowercase.outputs.repository }}/cdp - - # Make the image tags used for docker cache. We use this rather than - # ${{ github.repository }} directly because the repository - # organization name is has upper case characters, which are not - # allowed in docker image names. - - uses: docker/metadata-action@v5 - id: meta-cache - with: - images: ghcr.io/${{ steps.lowercase.outputs.repository }}/cdp - tags: | - type=raw,value=cache - - - uses: docker/build-push-action@v4 - with: - context: cdp - file: cdp/Dockerfile - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=registry,ref=${{ steps.meta-cache.outputs.tags }} - cache-to: type=registry,ref=${{ steps.meta-cache.outputs.tags }},mode=max - - # Output the image tags so that we can use them in the next job. - outputs: - tags: ${{ steps.meta.outputs.tags }} - - test: - # Run the functional tests against the CDP service. We pull the image - # from GHCR and run it locally. 
We need only the db service from the - # main docker-compose.yml file, so we use the --services flag to only - # start that service. - runs-on: ubuntu-latest - needs: build - steps: - - uses: actions/checkout@v3 - - uses: docker/setup-buildx-action@v2 - - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Install pnpm - uses: pnpm/action-setup@v2 - with: - version: 8.x.x - - - name: Setup node - uses: actions/setup-node@v4 - with: - node-version: '18' - cache: 'pnpm' - cache-dependency-path: cdp/pnpm-lock.yaml - - - name: Install test dependencies - working-directory: cdp - run: | - pnpm install --frozen-lockfile - - - name: Start CDP - working-directory: cdp - run: | - mkdir -p /tmp/logs - - docker compose -f ../docker-compose.dev.yml up -d db >> /tmp/logs/db.txt - - # Wait for the db service to be ready, up to 30 seconds. - SECONDS=0 - until docker compose -f ../docker-compose.dev.yml exec -T db pg_isready; do - if [ $SECONDS -gt 30 ]; then - echo "Timed out waiting for db service to be ready." - exit 1 - fi - sleep 1 - done - - # Create a shell alias for the docker image we just built, using the tags output. - export SECRET_KEY=$(openssl rand -hex 32) - CDP_RUN="docker run -e SECRET_KEY=$SECRET_KEY -e DATABASE_URL=postgres://posthog:posthog@localhost:5432/posthog --rm --network=host ${{ needs.build.outputs.tags }}" - - # Run the migrations. - $CDP_RUN sqlx migrate run - - # Start the CDP service. - $CDP_RUN &> /tmp/logs/cdp.txt & - - # Run the functional tests. - pnpm jest - - - name: Lowercase GITHUB_REPOSITORY - id: lowercase - run: | - echo "repository=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT" - - - name: Generate docker latest tag - if: github.ref == 'refs/heads/master' - uses: docker/metadata-action@v5 - id: meta - with: - images: ghcr.io/${{ steps.lowercase.outputs.repository }}/cdp - tags: | - type=raw,value=latest - - - name: Push image as latest on master - if: github.ref == 'refs/heads/master' - run: | - docker tag ${{ needs.build.outputs.tags }} ${{ steps.meta.outputs.tags }} - docker push ${{ steps.meta.outputs.tags }} diff --git a/.github/workflows/report-pr-age.yml b/.github/workflows/report-pr-age.yml index 1e20ccfc8b687..4bee112c25ba2 100644 --- a/.github/workflows/report-pr-age.yml +++ b/.github/workflows/report-pr-age.yml @@ -23,6 +23,7 @@ jobs: echo is_revert=false >> $GITHUB_ENV fi - name: Capture PR age to PostHog + if: github.repository == 'PostHog/posthog' uses: PostHog/posthog-github-action@v0.1 with: posthog-token: ${{secrets.POSTHOG_API_TOKEN}} diff --git a/.vscode/launch.json b/.vscode/launch.json index b4206a25f0009..f2e15ed84c0d8 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -20,7 +20,6 @@ "--scheduler", "redbeat.RedBeatScheduler", "--without-heartbeat", - "--without-gossip", "--without-mingle", "--pool=solo", "-Ofair", diff --git a/Procfile b/Procfile deleted file mode 100644 index 76a05d1dd982b..0000000000000 --- a/Procfile +++ /dev/null @@ -1,5 +0,0 @@ -release: REDIS_URL='redis://' python manage.py migrate -web: gunicorn posthog.wsgi --log-file - -worker: ./bin/docker-worker -celeryworker: ./bin/docker-worker-celery --with-scheduler # optional -pluginworker: ./bin/plugin-server # optional diff --git a/bin/migrate_kafka_data.py b/bin/migrate_kafka_data.py index 87eaf391657e2..3da55ed538c06 100755 --- a/bin/migrate_kafka_data.py +++ b/bin/migrate_kafka_data.py @@ -21,7 +21,6 @@ import argparse import sys -from typing import List from kafka import 
KafkaAdminClient, KafkaConsumer, KafkaProducer from kafka.errors import KafkaError @@ -192,7 +191,7 @@ def handle(**options): print("Polling for messages") # noqa: T201 messages_by_topic = consumer.poll(timeout_ms=timeout_ms) - futures: List[FutureRecordMetadata] = [] + futures: list[FutureRecordMetadata] = [] if not messages_by_topic: break diff --git a/bin/start b/bin/start index 4cb46f4ee7a22..38f83accb3968 100755 --- a/bin/start +++ b/bin/start @@ -15,7 +15,7 @@ service_warning() { nc -z localhost 9092 || ( service_warning 'Kafka'; bin/check_kafka_clickhouse_up ) curl -s 'http://localhost:8123/ping' || ( service_warning 'ClickHouse'; bin/check_kafka_clickhouse_up ) -[ ! -f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" | brotli --decompress --output=./share/GeoLite2-City.mmdb ) +[ ! -f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=./share/GeoLite2-City.mmdb ) ./bin/start-worker & ./bin/start-backend & diff --git a/bin/start-worker b/bin/start-worker index 8343e3652f5cf..c86615f746e47 100755 --- a/bin/start-worker +++ b/bin/start-worker @@ -7,7 +7,7 @@ trap 'kill $(jobs -p)' EXIT source ./bin/celery-queues.env # start celery worker with heartbeat (-B) -SKIP_ASYNC_MIGRATIONS_SETUP=0 CELERY_WORKER_QUEUES=$CELERY_WORKER_QUEUES celery -A posthog worker -B --scheduler redbeat.RedBeatScheduler --without-heartbeat --without-mingle -Ofair -n node@%h & +SKIP_ASYNC_MIGRATIONS_SETUP=0 CELERY_WORKER_QUEUES=$CELERY_WORKER_QUEUES celery -A posthog worker -B --scheduler redbeat.RedBeatScheduler --without-heartbeat --without-mingle --pool=solo -Ofair -n node@%h & if [[ "$PLUGIN_SERVER_IDLE" != "1" && "$PLUGIN_SERVER_IDLE" != "true" ]]; then ./bin/plugin-server diff --git a/cdp/.gitignore b/cdp/.gitignore deleted file mode 100644 index 849ddff3b7ec9..0000000000000 --- a/cdp/.gitignore +++ /dev/null @@ -1 +0,0 @@ -dist/ diff --git a/cdp/.swcrc b/cdp/.swcrc deleted file mode 100644 index 5c9c89a472916..0000000000000 --- a/cdp/.swcrc +++ /dev/null @@ -1,15 +0,0 @@ -{ - "jsc": { - "parser": { - "syntax": "typescript", - "tsx": false, - "decorators": false, - "dynamicImport": false - }, - "target": "es2020", - "baseUrl": "." - }, - "module": { - "type": "commonjs" - } -} diff --git a/cdp/Dockerfile b/cdp/Dockerfile deleted file mode 100644 index 0d3d0d87e7811..0000000000000 --- a/cdp/Dockerfile +++ /dev/null @@ -1,82 +0,0 @@ -# Build the CDP server image. We use a multi-stage build to first build the CDP -# node application, then copy the built files to the final image. -# -# Note: separtely we bundle the resulting dist folder into the -# production.Dockerfile image such that the main image can run the entire -# application without needing to build the CDP server. -# -# We also need to copy the migrations folder as the CDP server needs it to -# run the migrations. The migrations use the Rust application sqlx-cli to -# run the migrations, so we need to copy the compiled binary from the Rust -# image. I'm sure there's a better way to do this, but this works for now. - -FROM rust:1.68.2-slim-bullseye AS sqlx-cli-build - -WORKDIR /code -SHELL ["/bin/bash", "-o", "pipefail", "-c"] - -# Since we are using the slim image, we need to install `pkg-config` and -# `libssl-dev` so cargo install completes successfully. -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - "pkg-config" \ - "libssl-dev" \ - && \ - rm -rf /var/lib/apt/lists/* - -# Install SQLx CLI. 
-RUN cargo install --version 0.6.3 sqlx-cli --no-default-features --features native-tls,postgres - -FROM node:18.12.1-bullseye-slim AS cdp-build - -WORKDIR /code -SHELL ["/bin/bash", "-o", "pipefail", "-c"] - -# Install Node.js dependencies. -COPY package.json pnpm-lock.yaml ./ -RUN corepack enable && \ - mkdir /tmp/pnpm-store && \ - pnpm install --frozen-lockfile --store-dir /tmp/pnpm-store && \ - rm -rf /tmp/pnpm-store - -# Build the CDP server. -# -# Note: we run the build as a separate action to increase -# the cache hit ratio of the layers above. -COPY ./src/ ./src/ -COPY tsconfig.json .swcrc ./ -RUN pnpm build - -# As the CDP server is now built, let’s keep only prod dependencies in the -# node_module folder as we will copy it to the last image. We remove all -# dependencies first to ensure we end up with the smallest possible image. -RUN rm -rf node_modules && \ - corepack enable && \ - mkdir /tmp/pnpm-store && \ - pnpm install --frozen-lockfile --store-dir /tmp/pnpm-store --prod && \ - rm -rf /tmp/pnpm-store - -# Build the final image. -FROM node:18.12.1-bullseye-slim - -WORKDIR /code -SHELL ["/bin/bash", "-o", "pipefail", "-c"] - -# Install tini. -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - "tini" \ - && \ - rm -rf /var/lib/apt/lists/* - -# Copy the SQLx CLI binary from the previous stage. -COPY --from=sqlx-cli-build --link /usr/local/cargo/bin/sqlx /usr/local/bin/sqlx - -# Copy the built CDP server from the previous stage. -COPY --from=cdp-build --link /code/node_modules/ ./node_modules/ -COPY --from=cdp-build --link /code/dist/ ./dist/ -COPY --link ./migrations/ ./migrations/ - -# Set [Tini](https://github.com/krallin/tini) as the entrypoint. -ENTRYPOINT ["/usr/bin/tini", "--"] -CMD ["node", "dist/rest.js"] diff --git a/cdp/README.md b/cdp/README.md deleted file mode 100644 index 772414abc9778..0000000000000 --- a/cdp/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Customer Data Pipeline - -Handles delivering event streams to destinations. - -TODO: fill this in a bit more. Very much a work in progress at the moment. diff --git a/cdp/functional_tests/destination-api.test.ts b/cdp/functional_tests/destination-api.test.ts deleted file mode 100644 index 0bc2e496759cd..0000000000000 --- a/cdp/functional_tests/destination-api.test.ts +++ /dev/null @@ -1,498 +0,0 @@ -/* - -Tests for a basic CRUD API for destinations supporting GET, POST, PUT, and -DELETE, corresponding to creating, reading, updating, and deleting destinations -as well as other similar operations. - -We also have an API for the list of destination types, which provides a list of -types along with the schema for the configuration for each type. This is used -to validate the configuration for each destination. - -We do not attempt to handle e.g. idempotency of requests although that may be a -good idea if we hit issues with e.g. retry logic and concurrency. See for -example https://www.mscharhag.com/api-design/rest-making-post-patch-idempotent -for an example way to implement this. 
- -*/ - -import { describe, test, expect } from '@jest/globals' -import jwt from 'jsonwebtoken' - -describe('DestinationType API', () => { - describe('GET destination types', () => { - test.concurrent('should be able to retrieve a list of destination types', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destinationTypes = await listDestinationTypesOk(token, projectId) - expect(destinationTypes).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - type: 'webhook', - configSchema: expect.any(Object), - }), - ]) - ) - }) - - test.concurrent('project id must be a number', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const response = await listDestinationTypes(token, 'invalid') - expect(response.status).toEqual(400) - }) - - test.concurrent( - "should not be able to retrieve a list of destination types if you don't have access to the project", - async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [], userId: 1 }) - const response = await listDestinationTypes(token, projectId) - expect(response.status).toEqual(403) - } - ) - }) -}) - -describe('Destination API', () => { - describe('POST destination', () => { - test.concurrent('should be able to create a destination', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const response = await postDestination(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { - url: 'https://example.com', - }, - }) - expect(response.status).toEqual(201) - }) - - test.concurrent('should not be able to create a destination with an invalid config schema', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const response = await postDestination(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { - invalid: 'config', - }, - }) - expect(response.status).toEqual(400) - }) - - test.concurrent( - "should not be able to create a destination if you don't have access to the project", - async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [], userId: 1 }) - const response = await postDestination(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { - url: 'https://example.com', - }, - }) - expect(response.status).toEqual(403) - } - ) - }) - - describe('GET destination', () => { - test.concurrent('should be able to retrieve a destination', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - const retrievedDestination = await getDestinationOk(token, projectId, destinationId) - expect(retrievedDestination).toEqual(expect.objectContaining(destination)) - }) - - test.concurrent('should not be able to retrieve a destination from another project', async () => { - const projectId = (await 
createProjectOk()).id - const otherProjectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId, otherProjectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - - const response = await getDestination(token, otherProjectId, destinationId) - expect(response.status).toEqual(404) - }) - - test.concurrent( - "should not be able to retrieve a destination if you don't have access to the project", - async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - - const unauthorizedToken = await generateJwt({ projectIds: [], userId: 1 }) - const response = await getDestination(unauthorizedToken, projectId, destinationId) - expect(response.status).toEqual(403) - } - ) - }) - - describe('PUT destination', () => { - test.concurrent('should be able to update a destination', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - const updatedDestination = await putDestinationOk(token, projectId, destinationId, { - name: 'Updated Destination', - description: 'Updated Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - expect(updatedDestination).toEqual( - expect.objectContaining({ - id: destinationId, - name: 'Updated Destination', - description: 'Updated Description', - }) - ) - }) - - test.concurrent('should not be able to update a destination with an invalid config schema', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - const response = await putDestination(token, projectId, destinationId, { - name: 'Updated Destination', - description: 'Updated Description', - type: 'webhook', - config: { invalid: 'config' }, - }) - expect(response.status).toEqual(400) - }) - - test.concurrent('should not be able to change the destination type', async () => { - // For simplicity of handling e.g. the schema of `config` do not - // want to allow changing the destination type rather the user - // should delete and recreate a distination. 
- const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - const response = await putDestination(token, projectId, destinationId, { - name: 'Updated Destination', - description: 'Updated Description', - type: 'email', - config: { url: 'https://example.com' }, - }) - expect(response.status).toEqual(400) - }) - - test.concurrent('should not be able to update a destination with an invalid id', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const response = await putDestination(token, projectId, 'invalid', { - name: 'Updated Destination', - description: 'Updated Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - expect(response.status).toEqual(400) - }) - - test.concurrent('should not be able to update a destination from another project', async () => { - const projectId = (await createProjectOk()).id - const otherProjectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId, otherProjectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - - const response = await putDestination(token, otherProjectId, destinationId, { - name: 'Updated Destination', - description: 'Updated Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - expect(response.status).toEqual(404) - }) - - test.concurrent( - "should not be able to update a destination if you don't have access to the project", - async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - - const unauthorizedToken = await generateJwt({ projectIds: [], userId: 1 }) - const response = await putDestination(unauthorizedToken, projectId, destinationId, { - name: 'Updated Destination', - description: 'Updated Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - expect(response.status).toEqual(403) - } - ) - }) - - describe('DELETE destination', () => { - test.concurrent('should be able to delete a destination', async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - - const response = await deleteDestination(token, projectId, destinationId) - expect(response.status).toEqual(204) - - // Check that the destination is no longer retrievable - const getResponse = await 
getDestination(token, projectId, destinationId) - expect(getResponse.status).toEqual(404) - }) - - test.concurrent('should not be able to delete a destination with an invalid id', async () => { - const id = 'invalid' - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const response = await deleteDestination(token, projectId, id) - expect(response.status).toEqual(400) - }) - - test.concurrent('should not be able to delete a destination from another project', async () => { - const projectId = (await createProjectOk()).id - const otherProjectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId, otherProjectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - - const response = await deleteDestination(token, otherProjectId, destinationId) - expect(response.status).toEqual(404) - - // Check that the destination is still retrievable - const getResponse = await getDestination(token, projectId, destinationId) - expect(getResponse.status).toEqual(200) - }) - - test.concurrent( - "should not be able to delete a destination if you don't have access to the project", - async () => { - const projectId = (await createProjectOk()).id - const token = await generateJwt({ projectIds: [projectId], userId: 1 }) - const destination = await postDestinationOk(token, projectId, { - name: 'Test Destination', - description: 'Test Description', - type: 'webhook', - config: { url: 'https://example.com' }, - }) - const destinationId = destination.id - expect(destinationId).toBeDefined() - - const unauthorizedToken = await generateJwt({ projectIds: [], userId: 1 }) - const response = await deleteDestination(unauthorizedToken, projectId, destinationId) - expect(response.status).toEqual(403) - - // Check that the destination is still retrievable - const getResponse = await getDestination(token, projectId, destinationId) - expect(getResponse.status).toEqual(200) - } - ) - }) -}) - -const listDestinationTypes = async (token: string, projectId: any): Promise => { - return await fetch(`http://localhost:3000/api/projects/${projectId}/destination-types`, { - headers: { - Authorization: `Bearer ${token}`, - }, - }) -} - -const listDestinationTypesOk = async (token: string, projectId: number): Promise => { - const response = await listDestinationTypes(token, projectId) - if (!response.ok) { - throw new Error(`Failed to list destination types: ${response.statusText}`) - } - return await response.json() -} - -const postDestination = async ( - token: string, - projectId: number, - destinationData: DestinationCreate -): Promise => { - return await fetch(`http://localhost:3000/api/projects/${projectId}/destinations`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${token}`, - }, - body: JSON.stringify(destinationData), - }) -} - -const postDestinationOk = async ( - token: string, - projectId: number, - destinationData: DestinationCreate -): Promise => { - const response = await postDestination(token, projectId, destinationData) - if (!response.ok) { - throw new Error(`Failed to create destination: ${response.statusText}`) - } - return await response.json() -} - -const putDestination = async ( - token: string, - projectId: number, - id: 
string, - destinationData: DestinationUpdate -): Promise => { - return await fetch(`http://localhost:3000/api/projects/${projectId}/destinations/${id}`, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${token}`, - }, - body: JSON.stringify(destinationData), - }) -} - -const putDestinationOk = async ( - token: string, - projectId: number, - id: string, - destinationData: DestinationUpdate -): Promise => { - const response = await putDestination(token, projectId, id, destinationData) - if (!response.ok) { - throw new Error(`Failed to update destination: ${response.statusText}`) - } - return await response.json() -} - -const getDestination = async (token: string, projectId: number, id: string): Promise => { - return await fetch(`http://localhost:3000/api/projects/${projectId}/destinations/${id}`, { - headers: { - Authorization: `Bearer ${token}`, - }, - }) -} - -const getDestinationOk = async (token: string, projectId: number, id: string): Promise => { - const response = await getDestination(token, projectId, id) - if (!response.ok) { - throw new Error(`Failed to retrieve destination: ${response.statusText}`) - } - return await response.json() -} - -const deleteDestination = async (token: string, projectId: number, id: string): Promise => { - return await fetch(`http://localhost:3000/api/projects/${projectId}/destinations/${id}`, { - method: 'DELETE', - headers: { - Authorization: `Bearer ${token}`, - }, - }) -} - -const createProjectOk = async (): Promise<{ id: number }> => { - // This isn't really an API method but rather a helper method to create a - // projectId. - return { id: Math.floor(Math.random() * 100000) } -} - -const generateJwt = async (claims: Record): Promise => { - // Generate a token to use for HTTP requests, with the given claims using - // the jsonwebtoken library. We use the SECRET_KEY environment variable to - // sign the token. - const secret = process.env.SECRET_KEY - if (!secret) { - throw new Error('Missing SECRET_KEY environment variable') - } - - return jwt.sign(claims, secret, { algorithm: 'HS256' }) -} - -type DestinationType = { - type: string - name: string - description: string - schema: Record // A JSONSchema describing the configuration -} - -type DestinationCreate = { - name: string // Name displayed to the user - description: string // Description displayed to the user - type: string // Type of destination, e.g. webhook, email, Stripe etc. - config: Record // Configuration for the destination, e.g. webhook URL, email address, Stripe API key etc. -} - -type DestinationUpdate = DestinationCreate - -type Destination = DestinationCreate & { - id: string - created_at: string // ISO 8601 timestamp - updated_at: string // ISO 8601 timestamp -} diff --git a/cdp/jest.config.json b/cdp/jest.config.json deleted file mode 100644 index a6837fe95ec7e..0000000000000 --- a/cdp/jest.config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "roots": ["functional_tests"], - "preset": "ts-jest", - "testEnvironment": "node" -} diff --git a/cdp/migrations/20230406092618_create-destinations.sql b/cdp/migrations/20230406092618_create-destinations.sql deleted file mode 100644 index bc18918d9b453..0000000000000 --- a/cdp/migrations/20230406092618_create-destinations.sql +++ /dev/null @@ -1,23 +0,0 @@ -CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; -CREATE TABLE destinations ( - primary_key integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - -- A unique identifier for this destination that does not expose - -- cardinality. 
- id uuid NOT NULL DEFAULT uuid_generate_v4() UNIQUE, - -- NOTE: we use team_id here to be consistent with the rest of the app, - -- but this is the id of a project. - team_id bigint NOT NULL, - name text NOT NULL, - description text NOT NULL, - -- The type of destination. This is used to determine which - -- destination-specific configuration to use. - type text NOT NULL, - -- The destination-specific configuration. This is a JSON object - -- that is specific to the destination type. - config jsonb NOT NULL, - -- Metadata about the destination. - created_at timestamp NOT NULL DEFAULT now(), - created_by_id bigint NOT NULL, - updated_at timestamp NOT NULL DEFAULT now(), - is_deleted boolean NOT NULL DEFAULT false -); diff --git a/cdp/package.json b/cdp/package.json deleted file mode 100644 index 24a1f24897eb7..0000000000000 --- a/cdp/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "cdp", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": { - "dev": "NODE_ENV=dev nodemon -w src/ src/rest.ts | pino-pretty", - "test": "jest", - "build": "swc ./src/ --out-dir ./dist/", - "migrate": "sqlx migrate run" - }, - "keywords": [], - "author": "", - "license": "ISC", - "devDependencies": { - "@jest/globals": "^29.5.0", - "@swc/cli": "^0.1.62", - "@swc/core": "^1.3.46", - "@swc/helpers": "^0.5.0", - "@types/jest": "^29.5.0", - "@types/jsonwebtoken": "^9.0.1", - "@types/koa": "^2.13.6", - "@types/koa-bodyparser": "^4.3.10", - "@types/koa-pino-logger": "^3.0.1", - "@types/koa-router": "^7.4.4", - "@types/node": "^18.15.11", - "@types/pg": "^8.6.6", - "jest": "^29.5.0", - "nodemon": "^2.0.22", - "pino-pretty": "^10.0.0", - "swc-node": "^1.0.0", - "ts-jest": "^29.1.0", - "ts-node": "^10.9.1", - "typescript": "^5.0.3" - }, - "dependencies": { - "@opentelemetry/api": "^1.4.1", - "@opentelemetry/instrumentation-pg": "^0.35.0", - "@opentelemetry/instrumentation-pino": "^0.33.1", - "@opentelemetry/sdk-node": "^0.37.0", - "@opentelemetry/sdk-trace-base": "^1.11.0", - "@opentelemetry/sdk-trace-node": "^1.11.0", - "ajv": "^8.12.0", - "jsonwebtoken": "^9.0.0", - "koa": "^2.14.1", - "koa-bodyparser": "^4.4.0", - "koa-jwt": "^4.0.4", - "koa-pino-logger": "^4.0.0", - "koa-router": "^12.0.0", - "pg": "^8.10.0" - } -} diff --git a/cdp/pnpm-lock.yaml b/cdp/pnpm-lock.yaml deleted file mode 100644 index fa1f5199b2795..0000000000000 --- a/cdp/pnpm-lock.yaml +++ /dev/null @@ -1,4837 +0,0 @@ -lockfileVersion: '6.0' - -dependencies: - '@opentelemetry/api': - specifier: ^1.4.1 - version: 1.4.1 - '@opentelemetry/instrumentation-pg': - specifier: ^0.35.0 - version: 0.35.0(@opentelemetry/api@1.4.1) - '@opentelemetry/instrumentation-pino': - specifier: ^0.33.1 - version: 0.33.1(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-node': - specifier: ^0.37.0 - version: 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': - specifier: ^1.11.0 - version: 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-node': - specifier: ^1.11.0 - version: 1.11.0(@opentelemetry/api@1.4.1) - ajv: - specifier: ^8.12.0 - version: 8.12.0 - jsonwebtoken: - specifier: ^9.0.0 - version: 9.0.0 - koa: - specifier: ^2.14.1 - version: 2.14.1 - koa-bodyparser: - specifier: ^4.4.0 - version: 4.4.0 - koa-jwt: - specifier: ^4.0.4 - version: 4.0.4 - koa-pino-logger: - specifier: ^4.0.0 - version: 4.0.0 - koa-router: - specifier: ^12.0.0 - version: 12.0.0 - pg: - specifier: ^8.10.0 - version: 8.10.0 - -devDependencies: - '@jest/globals': - specifier: ^29.5.0 - version: 29.5.0 - '@swc/cli': - specifier: ^0.1.62 
- version: 0.1.62(@swc/core@1.3.46) - '@swc/core': - specifier: ^1.3.46 - version: 1.3.46(@swc/helpers@0.5.0) - '@swc/helpers': - specifier: ^0.5.0 - version: 0.5.0 - '@types/jest': - specifier: ^29.5.0 - version: 29.5.0 - '@types/jsonwebtoken': - specifier: ^9.0.1 - version: 9.0.1 - '@types/koa': - specifier: ^2.13.6 - version: 2.13.6 - '@types/koa-bodyparser': - specifier: ^4.3.10 - version: 4.3.10 - '@types/koa-pino-logger': - specifier: ^3.0.1 - version: 3.0.1 - '@types/koa-router': - specifier: ^7.4.4 - version: 7.4.4 - '@types/node': - specifier: ^18.15.11 - version: 18.15.11 - '@types/pg': - specifier: ^8.6.6 - version: 8.6.6 - jest: - specifier: ^29.5.0 - version: 29.5.0(@types/node@18.15.11)(ts-node@10.9.1) - nodemon: - specifier: ^2.0.22 - version: 2.0.22 - pino-pretty: - specifier: ^10.0.0 - version: 10.0.0 - swc-node: - specifier: ^1.0.0 - version: 1.0.0(@swc/core@1.3.46)(typescript@5.0.3) - ts-jest: - specifier: ^29.1.0 - version: 29.1.0(@babel/core@7.21.4)(jest@29.5.0)(typescript@5.0.3) - ts-node: - specifier: ^10.9.1 - version: 10.9.1(@swc/core@1.3.46)(@types/node@18.15.11)(typescript@5.0.3) - typescript: - specifier: ^5.0.3 - version: 5.0.3 - -packages: - - /@ampproject/remapping@2.2.0: - resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==} - engines: {node: '>=6.0.0'} - dependencies: - '@jridgewell/gen-mapping': 0.1.1 - '@jridgewell/trace-mapping': 0.3.17 - dev: true - - /@babel/code-frame@7.21.4: - resolution: {integrity: sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/highlight': 7.18.6 - dev: true - - /@babel/compat-data@7.21.4: - resolution: {integrity: sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g==} - engines: {node: '>=6.9.0'} - dev: true - - /@babel/core@7.21.4: - resolution: {integrity: sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA==} - engines: {node: '>=6.9.0'} - dependencies: - '@ampproject/remapping': 2.2.0 - '@babel/code-frame': 7.21.4 - '@babel/generator': 7.21.4 - '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) - '@babel/helper-module-transforms': 7.21.2 - '@babel/helpers': 7.21.0 - '@babel/parser': 7.21.4 - '@babel/template': 7.20.7 - '@babel/traverse': 7.21.4 - '@babel/types': 7.21.4 - convert-source-map: 1.9.0 - debug: 4.3.4 - gensync: 1.0.0-beta.2 - json5: 2.2.3 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/generator@7.21.4: - resolution: {integrity: sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.21.4 - '@jridgewell/gen-mapping': 0.3.2 - '@jridgewell/trace-mapping': 0.3.17 - jsesc: 2.5.2 - dev: true - - /@babel/helper-compilation-targets@7.21.4(@babel/core@7.21.4): - resolution: {integrity: sha512-Fa0tTuOXZ1iL8IeDFUWCzjZcn+sJGd9RZdH9esYVjEejGmzf+FFYQpMi/kZUk2kPy/q1H3/GPw7np8qar/stfg==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/compat-data': 7.21.4 - '@babel/core': 7.21.4 - '@babel/helper-validator-option': 7.21.0 - browserslist: 4.21.5 - lru-cache: 5.1.1 - semver: 6.3.0 - dev: true - - /@babel/helper-environment-visitor@7.18.9: - resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} - engines: 
{node: '>=6.9.0'} - dev: true - - /@babel/helper-function-name@7.21.0: - resolution: {integrity: sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/template': 7.20.7 - '@babel/types': 7.21.4 - dev: true - - /@babel/helper-hoist-variables@7.18.6: - resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.21.4 - dev: true - - /@babel/helper-module-imports@7.21.4: - resolution: {integrity: sha512-orajc5T2PsRYUN3ZryCEFeMDYwyw09c/pZeaQEZPH0MpKzSvn3e0uXsDBu3k03VI+9DBiRo+l22BfKTpKwa/Wg==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.21.4 - dev: true - - /@babel/helper-module-transforms@7.21.2: - resolution: {integrity: sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-module-imports': 7.21.4 - '@babel/helper-simple-access': 7.20.2 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/helper-validator-identifier': 7.19.1 - '@babel/template': 7.20.7 - '@babel/traverse': 7.21.4 - '@babel/types': 7.21.4 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/helper-plugin-utils@7.20.2: - resolution: {integrity: sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==} - engines: {node: '>=6.9.0'} - dev: true - - /@babel/helper-simple-access@7.20.2: - resolution: {integrity: sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.21.4 - dev: true - - /@babel/helper-split-export-declaration@7.18.6: - resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/types': 7.21.4 - dev: true - - /@babel/helper-string-parser@7.19.4: - resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==} - engines: {node: '>=6.9.0'} - dev: true - - /@babel/helper-validator-identifier@7.19.1: - resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} - engines: {node: '>=6.9.0'} - dev: true - - /@babel/helper-validator-option@7.21.0: - resolution: {integrity: sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==} - engines: {node: '>=6.9.0'} - dev: true - - /@babel/helpers@7.21.0: - resolution: {integrity: sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/template': 7.20.7 - '@babel/traverse': 7.21.4 - '@babel/types': 7.21.4 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/highlight@7.18.6: - resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-validator-identifier': 7.19.1 - chalk: 2.4.2 - js-tokens: 4.0.0 - dev: true - - /@babel/parser@7.21.4: - resolution: {integrity: sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw==} - engines: {node: '>=6.0.0'} - hasBin: true - dependencies: - 
'@babel/types': 7.21.4 - dev: true - - /@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.21.4): - resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.21.4): - resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.21.4): - resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.21.4): - resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.21.4): - resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-jsx@7.21.4(@babel/core@7.21.4): - resolution: {integrity: sha512-5hewiLct5OKyh6PLKEYaFclcqtIgCb6bmELouxjF6up5q3Sov7rOayW4RwhbaBL0dit8rA80GNfY+UuDp2mBbQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.21.4): - resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.21.4): - resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.21.4): - resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.21.4): - resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.21.4): - resolution: {integrity: 
sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.21.4): - resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.21.4): - resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/plugin-syntax-typescript@7.21.4(@babel/core@7.21.4): - resolution: {integrity: sha512-xz0D39NvhQn4t4RNsHmDnnsaQizIlUkdtYvLs8La1BlfjQ6JEwxkJGeqJMW2tAXx+q6H+WFuUTXNdYVpEya0YA==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - dependencies: - '@babel/core': 7.21.4 - '@babel/helper-plugin-utils': 7.20.2 - dev: true - - /@babel/template@7.20.7: - resolution: {integrity: sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/code-frame': 7.21.4 - '@babel/parser': 7.21.4 - '@babel/types': 7.21.4 - dev: true - - /@babel/traverse@7.21.4: - resolution: {integrity: sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/code-frame': 7.21.4 - '@babel/generator': 7.21.4 - '@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.21.0 - '@babel/helper-hoist-variables': 7.18.6 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/parser': 7.21.4 - '@babel/types': 7.21.4 - debug: 4.3.4 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@babel/types@7.21.4: - resolution: {integrity: sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==} - engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-string-parser': 7.19.4 - '@babel/helper-validator-identifier': 7.19.1 - to-fast-properties: 2.0.0 - dev: true - - /@bcoe/v8-coverage@0.2.3: - resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} - dev: true - - /@cspotcode/source-map-support@0.8.1: - resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} - engines: {node: '>=12'} - dependencies: - '@jridgewell/trace-mapping': 0.3.9 - dev: true - - /@grpc/grpc-js@1.8.13: - resolution: {integrity: sha512-iY3jsdfbc0ARoCLFvbvUB8optgyb0r1XLPb142u+QtgBcKJYkCIFt3Fd/881KqjLYWjsBJF57N3b8Eop9NDfUA==} - engines: {node: ^8.13.0 || >=10.10.0} - dependencies: - '@grpc/proto-loader': 0.7.6 - '@types/node': 18.15.11 - dev: false - - /@grpc/proto-loader@0.7.6: - resolution: {integrity: sha512-QyAXR8Hyh7uMDmveWxDSUcJr9NAWaZ2I6IXgAYvQmfflwouTM+rArE2eEaCtLlRqO81j7pRLCt81IefUei6Zbw==} - engines: {node: '>=6'} - hasBin: true - dependencies: - '@types/long': 4.0.2 - lodash.camelcase: 4.3.0 - long: 4.0.0 - protobufjs: 7.2.3 - yargs: 16.2.0 - dev: false - - /@istanbuljs/load-nyc-config@1.1.0: - resolution: {integrity: 
sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} - engines: {node: '>=8'} - dependencies: - camelcase: 5.3.1 - find-up: 4.1.0 - get-package-type: 0.1.0 - js-yaml: 3.14.1 - resolve-from: 5.0.0 - dev: true - - /@istanbuljs/schema@0.1.3: - resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} - engines: {node: '>=8'} - dev: true - - /@jest/console@29.5.0: - resolution: {integrity: sha512-NEpkObxPwyw/XxZVLPmAGKE89IQRp4puc6IQRPru6JKd1M3fW9v1xM1AnzIJE65hbCkzQAdnL8P47e9hzhiYLQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - chalk: 4.1.2 - jest-message-util: 29.5.0 - jest-util: 29.5.0 - slash: 3.0.0 - dev: true - - /@jest/core@29.5.0(ts-node@10.9.1): - resolution: {integrity: sha512-28UzQc7ulUrOQw1IsN/kv1QES3q2kkbl/wGslyhAclqZ/8cMdB5M68BffkIdSJgKBUt50d3hbwJ92XESlE7LiQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@jest/console': 29.5.0 - '@jest/reporters': 29.5.0 - '@jest/test-result': 29.5.0 - '@jest/transform': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - ci-info: 3.8.0 - exit: 0.1.2 - graceful-fs: 4.2.11 - jest-changed-files: 29.5.0 - jest-config: 29.5.0(@types/node@18.15.11)(ts-node@10.9.1) - jest-haste-map: 29.5.0 - jest-message-util: 29.5.0 - jest-regex-util: 29.4.3 - jest-resolve: 29.5.0 - jest-resolve-dependencies: 29.5.0 - jest-runner: 29.5.0 - jest-runtime: 29.5.0 - jest-snapshot: 29.5.0 - jest-util: 29.5.0 - jest-validate: 29.5.0 - jest-watcher: 29.5.0 - micromatch: 4.0.5 - pretty-format: 29.5.0 - slash: 3.0.0 - strip-ansi: 6.0.1 - transitivePeerDependencies: - - supports-color - - ts-node - dev: true - - /@jest/environment@29.5.0: - resolution: {integrity: sha512-5FXw2+wD29YU1d4I2htpRX7jYnAyTRjP2CsXQdo9SAM8g3ifxWPSV0HnClSn71xwctr0U3oZIIH+dtbfmnbXVQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/fake-timers': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - jest-mock: 29.5.0 - dev: true - - /@jest/expect-utils@29.5.0: - resolution: {integrity: sha512-fmKzsidoXQT2KwnrwE0SQq3uj8Z763vzR8LnLBwC2qYWEFpjX8daRsk6rHUM1QvNlEW/UJXNXm59ztmJJWs2Mg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-get-type: 29.4.3 - dev: true - - /@jest/expect@29.5.0: - resolution: {integrity: sha512-PueDR2HGihN3ciUNGr4uelropW7rqUfTiOn+8u0leg/42UhblPxHkfoh0Ruu3I9Y1962P3u2DY4+h7GVTSVU6g==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - expect: 29.5.0 - jest-snapshot: 29.5.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@jest/fake-timers@29.5.0: - resolution: {integrity: sha512-9ARvuAAQcBwDAqOnglWq2zwNIRUDtk/SCkp/ToGEhFv5r86K21l+VEs0qNTaXtyiY0lEePl3kylijSYJQqdbDg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.5.0 - '@sinonjs/fake-timers': 10.0.2 - '@types/node': 18.15.11 - jest-message-util: 29.5.0 - jest-mock: 29.5.0 - jest-util: 29.5.0 - dev: true - - /@jest/globals@29.5.0: - resolution: {integrity: sha512-S02y0qMWGihdzNbUiqSAiKSpSozSuHX5UYc7QbnHP+D9Lyw8DgGGCinrN9uSuHPeKgSSzvPom2q1nAtBvUsvPQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.5.0 - '@jest/expect': 29.5.0 - '@jest/types': 29.5.0 - jest-mock: 29.5.0 - 
transitivePeerDependencies: - - supports-color - dev: true - - /@jest/reporters@29.5.0: - resolution: {integrity: sha512-D05STXqj/M8bP9hQNSICtPqz97u7ffGzZu+9XLucXhkOFBqKcXe04JLZOgIekOxdb73MAoBUFnqvf7MCpKk5OA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@bcoe/v8-coverage': 0.2.3 - '@jest/console': 29.5.0 - '@jest/test-result': 29.5.0 - '@jest/transform': 29.5.0 - '@jest/types': 29.5.0 - '@jridgewell/trace-mapping': 0.3.17 - '@types/node': 18.15.11 - chalk: 4.1.2 - collect-v8-coverage: 1.0.1 - exit: 0.1.2 - glob: 7.2.3 - graceful-fs: 4.2.11 - istanbul-lib-coverage: 3.2.0 - istanbul-lib-instrument: 5.2.1 - istanbul-lib-report: 3.0.0 - istanbul-lib-source-maps: 4.0.1 - istanbul-reports: 3.1.5 - jest-message-util: 29.5.0 - jest-util: 29.5.0 - jest-worker: 29.5.0 - slash: 3.0.0 - string-length: 4.0.2 - strip-ansi: 6.0.1 - v8-to-istanbul: 9.1.0 - transitivePeerDependencies: - - supports-color - dev: true - - /@jest/schemas@29.4.3: - resolution: {integrity: sha512-VLYKXQmtmuEz6IxJsrZwzG9NvtkQsWNnWMsKxqWNu3+CnfzJQhp0WDDKWLVV9hLKr0l3SLLFRqcYHjhtyuDVxg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@sinclair/typebox': 0.25.24 - dev: true - - /@jest/source-map@29.4.3: - resolution: {integrity: sha512-qyt/mb6rLyd9j1jUts4EQncvS6Yy3PM9HghnNv86QBlV+zdL2inCdK1tuVlL+J+lpiw2BI67qXOrX3UurBqQ1w==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jridgewell/trace-mapping': 0.3.17 - callsites: 3.1.0 - graceful-fs: 4.2.11 - dev: true - - /@jest/test-result@29.5.0: - resolution: {integrity: sha512-fGl4rfitnbfLsrfx1uUpDEESS7zM8JdgZgOCQuxQvL1Sn/I6ijeAVQWGfXI9zb1i9Mzo495cIpVZhA0yr60PkQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/console': 29.5.0 - '@jest/types': 29.5.0 - '@types/istanbul-lib-coverage': 2.0.4 - collect-v8-coverage: 1.0.1 - dev: true - - /@jest/test-sequencer@29.5.0: - resolution: {integrity: sha512-yPafQEcKjkSfDXyvtgiV4pevSeyuA6MQr6ZIdVkWJly9vkqjnFfcfhRQqpD5whjoU8EORki752xQmjaqoFjzMQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/test-result': 29.5.0 - graceful-fs: 4.2.11 - jest-haste-map: 29.5.0 - slash: 3.0.0 - dev: true - - /@jest/transform@29.5.0: - resolution: {integrity: sha512-8vbeZWqLJOvHaDfeMuoHITGKSz5qWc9u04lnWrQE3VyuSw604PzQM824ZeX9XSjUCeDiE3GuxZe5UKa8J61NQw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/core': 7.21.4 - '@jest/types': 29.5.0 - '@jridgewell/trace-mapping': 0.3.17 - babel-plugin-istanbul: 6.1.1 - chalk: 4.1.2 - convert-source-map: 2.0.0 - fast-json-stable-stringify: 2.1.0 - graceful-fs: 4.2.11 - jest-haste-map: 29.5.0 - jest-regex-util: 29.4.3 - jest-util: 29.5.0 - micromatch: 4.0.5 - pirates: 4.0.5 - slash: 3.0.0 - write-file-atomic: 4.0.2 - transitivePeerDependencies: - - supports-color - dev: true - - /@jest/types@29.5.0: - resolution: {integrity: sha512-qbu7kN6czmVRc3xWFQcAN03RAUamgppVUdXrvl1Wr3jlNF93o9mJbGcDWrwGB6ht44u7efB1qCFgVQmca24Uog==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/schemas': 29.4.3 - '@types/istanbul-lib-coverage': 2.0.4 - '@types/istanbul-reports': 3.0.1 - '@types/node': 18.15.11 - '@types/yargs': 17.0.24 - chalk: 4.1.2 - dev: true - - /@jridgewell/gen-mapping@0.1.1: - resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} - engines: {node: '>=6.0.0'} - 
dependencies: - '@jridgewell/set-array': 1.1.2 - '@jridgewell/sourcemap-codec': 1.4.14 - dev: true - - /@jridgewell/gen-mapping@0.3.2: - resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} - engines: {node: '>=6.0.0'} - dependencies: - '@jridgewell/set-array': 1.1.2 - '@jridgewell/sourcemap-codec': 1.4.14 - '@jridgewell/trace-mapping': 0.3.17 - dev: true - - /@jridgewell/resolve-uri@3.1.0: - resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} - engines: {node: '>=6.0.0'} - dev: true - - /@jridgewell/set-array@1.1.2: - resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} - engines: {node: '>=6.0.0'} - dev: true - - /@jridgewell/sourcemap-codec@1.4.14: - resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} - dev: true - - /@jridgewell/trace-mapping@0.3.17: - resolution: {integrity: sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==} - dependencies: - '@jridgewell/resolve-uri': 3.1.0 - '@jridgewell/sourcemap-codec': 1.4.14 - dev: true - - /@jridgewell/trace-mapping@0.3.9: - resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - dependencies: - '@jridgewell/resolve-uri': 3.1.0 - '@jridgewell/sourcemap-codec': 1.4.14 - dev: true - - /@mole-inc/bin-wrapper@8.0.1: - resolution: {integrity: sha512-sTGoeZnjI8N4KS+sW2AN95gDBErhAguvkw/tWdCjeM8bvxpz5lqrnd0vOJABA1A+Ic3zED7PYoLP/RANLgVotA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - bin-check: 4.1.0 - bin-version-check: 5.0.0 - content-disposition: 0.5.4 - ext-name: 5.0.0 - file-type: 17.1.6 - filenamify: 5.1.1 - got: 11.8.6 - os-filter-obj: 2.0.0 - dev: true - - /@nodelib/fs.scandir@2.1.5: - resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} - engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - run-parallel: 1.2.0 - dev: true - - /@nodelib/fs.stat@2.0.5: - resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} - engines: {node: '>= 8'} - dev: true - - /@nodelib/fs.walk@1.2.8: - resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} - engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.scandir': 2.1.5 - fastq: 1.15.0 - dev: true - - /@opentelemetry/api@1.4.1: - resolution: {integrity: sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==} - engines: {node: '>=8.0.0'} - dev: false - - /@opentelemetry/context-async-hooks@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-Ao1z7p+Au7A10SvQ6NCo5h2dAb3cujy+1VUZrd6gZuqMTxADYEWw/yjDbkHM/NAAaBphDGhqNg2MxGYIdgQs8w==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - dev: false - - /@opentelemetry/core@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-aP1wHSb+YfU0pM63UAkizYPuS4lZxzavHHw5KJfFNN2oWQ79HSm6JR3CzwFKHwKhSzHN8RE9fgP1IdVJ8zmo1w==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/semantic-conventions': 1.11.0 - dev: false - 
- /@opentelemetry/exporter-jaeger@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-Q3zVjzgVTevgxX9sEf8dnIpbM9ymtiDx56wsQ/lVoWI2sEWZpq/JfyW9X8mMBYvqh000D3f32NZ1//wYz8Xppg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/semantic-conventions': 1.11.0 - jaeger-client: 3.19.0 - dev: false - - /@opentelemetry/exporter-trace-otlp-grpc@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-dI0uiH8rcInyCmiZcRErinnFKNUG8HX0QFcCzvUx+1mZ9k4rI/pu3FaZYPYCEXnNg5mbVET/bw1SGLw8/84lEA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@grpc/grpc-js': 1.8.13 - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-grpc-exporter-base': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-transformer': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 1.11.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/exporter-trace-otlp-http@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-+oVV/h6UTLMF4IRtCGkLk2kQImMgC0ARFCfz+XXGNksP+awh/NXsDtJ3mHrn8Gtudrf3+pKVe/FWptBRqicm5Q==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-exporter-base': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-transformer': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 1.11.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/exporter-trace-otlp-proto@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-xOXs28I8EhZpQhjIyJyfDUJUsM6GbsGmQRIwhB5LFsbs1sPKiTWlcXZLJN16Ipeqz9Am8Rgbk5HdL99aQ6SEBA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-exporter-base': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-proto-exporter-base': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-transformer': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 1.11.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/exporter-zipkin@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-iDRwWtOYu7CZ5zfGKJje6kOrz4l2Ylrh/M668pc6arOxD8ceamPq4uDHDdJBXf6y+2XoPVgZdHjLfanP4wVJFg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/semantic-conventions': 1.11.0 - dev: false - - /@opentelemetry/instrumentation-pg@0.35.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-vNcnILF9c+SJVZr0R0xKY9HzbATLwRVbKrrIbkD6Oj4uzfarlA6n2bF3LJAYGMMcDSdxUN+KaTMeW9byLKqqTg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 
1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/instrumentation': 0.35.1(@opentelemetry/api@1.4.1) - '@opentelemetry/semantic-conventions': 1.11.0 - '@types/pg': 8.6.1 - '@types/pg-pool': 2.0.3 - transitivePeerDependencies: - - supports-color - dev: false - - /@opentelemetry/instrumentation-pino@0.33.1(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-Rajum1USKFE3khFSg7JRqoI+2BK2BpC2SiB0mjXdQ5s31IxaNuc6qiXdNz6mRzbdzMb/ydsJchlQiSNwB2iVeQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/instrumentation': 0.35.1(@opentelemetry/api@1.4.1) - transitivePeerDependencies: - - supports-color - dev: false - - /@opentelemetry/instrumentation@0.35.1(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-EZsvXqxenbRTSNsft6LDcrT4pjAiyZOx3rkDNeqKpwZZe6GmZtsXaZZKuDkJtz9fTjOGjDHjZj9/h80Ya9iIJw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - dependencies: - '@opentelemetry/api': 1.4.1 - require-in-the-middle: 5.2.0 - semver: 7.3.8 - shimmer: 1.2.1 - transitivePeerDependencies: - - supports-color - dev: false - - /@opentelemetry/instrumentation@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-QAHIYTeVHcvP5NcI8r0WbvF5KCojZSzQLO9G73/OpiXLy/t8hIUXHq0nuuSB5zP5dKQ8h9sORi/3suGBNHnsjw==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.3.0 - dependencies: - '@opentelemetry/api': 1.4.1 - require-in-the-middle: 6.0.0 - semver: 7.3.8 - shimmer: 1.2.1 - transitivePeerDependencies: - - supports-color - dev: false - - /@opentelemetry/otlp-exporter-base@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-dLbv7nr7d14xrHzd+S1eW+RpXh7IC0onktc23pwzETh6J7Ytzf0+QwLV5iRatoNtwPU2hX1VGOipwEnC/BjXxg==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/otlp-grpc-exporter-base@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-S5mhcl+Cdf9skIuEO3amfn7WtzXKZ8PVzyptwD1II2x8kvtD5z1lejXeMxTcsrb1lCc9raYVolohjnKF1uwccQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@grpc/grpc-js': 1.8.13 - '@grpc/proto-loader': 0.7.6 - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-exporter-base': 0.37.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/otlp-proto-exporter-base@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-KU1aJetRLQWwtoiXiuJcsW2H8e6H3qn9gvoT2SLyA3f566C/i2z1aJRqJnO+ZEo87SBxDf/gk2R300PohDOBrQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': ^1.0.0 - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/otlp-exporter-base': 0.37.0(@opentelemetry/api@1.4.1) - protobufjs: 7.2.3 - dev: false - - /@opentelemetry/otlp-transformer@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-cIzV9x2DhJ5gN0mld8OqN+XM95sDiuAJJvXsRjVuz9vu8TSNbbao/QCKNfJLOXqe8l3Ge05nKzQ6Q2gDDEN36w==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.3.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-metrics': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 
1.11.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/propagator-b3@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-Nnlu2PUSMqB8Lc3OzjznqpwuUdej1LrYYNjHItk9g7jJ9SAjiLdkB1cDALO+xhISTON8VrELh1rh0XIMUts2og==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/propagator-jaeger@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-/9XDcBnB6Y2INMWACs0UniY+aV7LReMRzPN6Q0SI7SlXZLZPTUnaZt51Tb/TyixjzAkHsj86K27XSCv3ctB2UQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - dev: false - - /@opentelemetry/resources@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-y0z2YJTqk0ag+hGT4EXbxH/qPhDe8PfwltYb4tXIEsozgEFfut/bqW7H7pDvylmCjBRMG4NjtLp57V1Ev++brA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/semantic-conventions': 1.11.0 - dev: false - - /@opentelemetry/sdk-metrics@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-knuq3pwU0+46FEMdw9Ses+alXL9cbcLUUTdYBBBsaKkqKwoVMHfhBufW7u6YCu4i+47Wg6ZZTN/eGc4LbTbK5Q==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.3.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - lodash.merge: 4.6.2 - dev: false - - /@opentelemetry/sdk-node@0.37.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-Qh78JL28DLqQOgANVtrGtdvhWOO+3Ezi9uw7K1c2Yi/HiRKYNeITkYTA/G2GnyDPZvSB+weTGCuZYMcIbGWIgA==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.3.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/exporter-jaeger': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/exporter-trace-otlp-grpc': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/exporter-trace-otlp-http': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/exporter-trace-otlp-proto': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/exporter-zipkin': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/instrumentation': 0.37.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-metrics': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-node': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/semantic-conventions': 1.11.0 - transitivePeerDependencies: - - supports-color - dev: false - - /@opentelemetry/sdk-trace-base@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: sha512-DV8e5/Qo42V8FMBlQ0Y0Liv6Hl/Pp5bAZ73s7r1euX8w4bpRes1B7ACiA4yujADbWMJxBgSo4fGbi4yjmTMG2A==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/resources': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/semantic-conventions': 1.11.0 - dev: false - - /@opentelemetry/sdk-trace-node@1.11.0(@opentelemetry/api@1.4.1): - resolution: {integrity: 
sha512-BzcrLl6m10Gc9Za5hNCRAClgpetGd5di5bG4RDChhVfqrecl98ok+BKunWxWxrZUMpXpJqfXteHo0su3SEXvXQ==} - engines: {node: '>=14'} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.5.0' - dependencies: - '@opentelemetry/api': 1.4.1 - '@opentelemetry/context-async-hooks': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/core': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/propagator-b3': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/propagator-jaeger': 1.11.0(@opentelemetry/api@1.4.1) - '@opentelemetry/sdk-trace-base': 1.11.0(@opentelemetry/api@1.4.1) - semver: 7.3.8 - dev: false - - /@opentelemetry/semantic-conventions@1.11.0: - resolution: {integrity: sha512-fG4D0AktoHyHwGhFGv+PzKrZjxbKJfckJauTJdq2A+ej5cTazmNYjJVAODXXkYyrsI10muMl+B1iO2q1R6Lp+w==} - engines: {node: '>=14'} - dev: false - - /@protobufjs/aspromise@1.1.2: - resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} - dev: false - - /@protobufjs/base64@1.1.2: - resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} - dev: false - - /@protobufjs/codegen@2.0.4: - resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} - dev: false - - /@protobufjs/eventemitter@1.1.0: - resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} - dev: false - - /@protobufjs/fetch@1.1.0: - resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/inquire': 1.1.0 - dev: false - - /@protobufjs/float@1.0.2: - resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} - dev: false - - /@protobufjs/inquire@1.1.0: - resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} - dev: false - - /@protobufjs/path@1.1.2: - resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} - dev: false - - /@protobufjs/pool@1.1.0: - resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} - dev: false - - /@protobufjs/utf8@1.1.0: - resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} - dev: false - - /@sinclair/typebox@0.25.24: - resolution: {integrity: sha512-XJfwUVUKDHF5ugKwIcxEgc9k8b7HbznCp6eUfWgu710hMPNIO4aw4/zB5RogDQz8nd6gyCDpU9O/m6qYEWY6yQ==} - dev: true - - /@sindresorhus/is@4.6.0: - resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} - engines: {node: '>=10'} - dev: true - - /@sinonjs/commons@2.0.0: - resolution: {integrity: sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==} - dependencies: - type-detect: 4.0.8 - dev: true - - /@sinonjs/fake-timers@10.0.2: - resolution: {integrity: sha512-SwUDyjWnah1AaNl7kxsa7cfLhlTYoiyhDAIgyh+El30YvXs/o7OLXpYH88Zdhyx9JExKrmHDJ+10bwIcY80Jmw==} - dependencies: - '@sinonjs/commons': 2.0.0 - dev: true - - /@swc-node/core@1.10.2(@swc/core@1.3.46): - resolution: {integrity: sha512-3rx5Wid5BfsBLV6anUC78Dzc23gNDre4AbrH9+QlCg17SKqElNcDojU6MVwlB7knzu+3w8aRJO+4to1wzuCY3Q==} - engines: {node: '>= 10'} - peerDependencies: - 
'@swc/core': '>= 1.3' - dependencies: - '@swc/core': 1.3.46(@swc/helpers@0.5.0) - dev: true - - /@swc-node/register@1.6.3(@swc/core@1.3.46)(typescript@5.0.3): - resolution: {integrity: sha512-B690PKyws/c1aJWU2VfgOMyJ3OJ2fHi54keUOsaxUwNkT5YkuqK8nXqQI5B+glQTv05wuSSlrBzyCYgFzmw2Vg==} - peerDependencies: - '@swc/core': '>= 1.3' - typescript: '>= 4.3' - dependencies: - '@swc-node/core': 1.10.2(@swc/core@1.3.46) - '@swc-node/sourcemap-support': 0.3.0 - '@swc/core': 1.3.46(@swc/helpers@0.5.0) - colorette: 2.0.19 - debug: 4.3.4 - pirates: 4.0.5 - tslib: 2.5.0 - typescript: 5.0.3 - transitivePeerDependencies: - - supports-color - dev: true - - /@swc-node/sourcemap-support@0.3.0: - resolution: {integrity: sha512-gqBJSmJMWomZFxlppaKea7NeAqFrDrrS0RMt24No92M3nJWcyI9YKGEQKl+EyJqZ5gh6w1s0cTklMHMzRwA1NA==} - dependencies: - source-map-support: 0.5.21 - tslib: 2.5.0 - dev: true - - /@swc/cli@0.1.62(@swc/core@1.3.46): - resolution: {integrity: sha512-kOFLjKY3XH1DWLfXL1/B5MizeNorHR8wHKEi92S/Zi9Md/AK17KSqR8MgyRJ6C1fhKHvbBCl8wboyKAFXStkYw==} - engines: {node: '>= 12.13'} - hasBin: true - peerDependencies: - '@swc/core': ^1.2.66 - chokidar: ^3.5.1 - peerDependenciesMeta: - chokidar: - optional: true - dependencies: - '@mole-inc/bin-wrapper': 8.0.1 - '@swc/core': 1.3.46(@swc/helpers@0.5.0) - commander: 7.2.0 - fast-glob: 3.2.12 - semver: 7.3.8 - slash: 3.0.0 - source-map: 0.7.4 - dev: true - - /@swc/core-darwin-arm64@1.3.46: - resolution: {integrity: sha512-kY4ASe7SsntDw2B1T70H9K1CFmK8POi+LyIpeCyC96EB9wbH2Sax+ploBB/wZALbYzr/dMJzOCU8QXzdmVS4Rg==} - engines: {node: '>=10'} - cpu: [arm64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /@swc/core-darwin-x64@1.3.46: - resolution: {integrity: sha512-kE3PMk8xW+2BZ3oZiTxxsUU/GzrGwM+qS4frOBz9TYHZe+W1dTtj4F9vBit4PFJ+tv4O6DPt9neGobzdq0UmRw==} - engines: {node: '>=10'} - cpu: [x64] - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /@swc/core-linux-arm-gnueabihf@1.3.46: - resolution: {integrity: sha512-7TbiUr9MYxT+mC7sVrayag/isFoaZUG/ogkEK8B/ouA1pnIYqWh3N5ifqCzfcSRiOURt+vVqPyoO1puSiNzVuQ==} - engines: {node: '>=10'} - cpu: [arm] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@swc/core-linux-arm64-gnu@1.3.46: - resolution: {integrity: sha512-Ycw4LU/wsUK9R+Y/2qFOPQseZDfM5D5gbWGrrYj5RoTm57FbnUsSsO26QeZxUNvams1oAQDkZDuerCc9qBRzIQ==} - engines: {node: '>=10'} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@swc/core-linux-arm64-musl@1.3.46: - resolution: {integrity: sha512-cBclyr6IW1PBr8l9D4FkebgbqlkiIYnSJCbY84J/6PfTzQlD6w9a1TAoYxdGZpJ7SGHdmB0oDiZS1rhxCSCV/Q==} - engines: {node: '>=10'} - cpu: [arm64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@swc/core-linux-x64-gnu@1.3.46: - resolution: {integrity: sha512-amqMhTA2CXB6t11hVAZSSPKq4DZ9/sWbW3wYYQHxzqrMJML0726OJs4pt0XnlU7FzdP/9M9j2B/gWCRaCMxXVA==} - engines: {node: '>=10'} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@swc/core-linux-x64-musl@1.3.46: - resolution: {integrity: sha512-WOQZTIkJ9khIj5Z2unf6OTrWV9k8br+HZ93RvnamEmJBlLPUuT9IjB+agNhjaDgOpz9/ZldSGqV7vzl5FGQl1Q==} - engines: {node: '>=10'} - cpu: [x64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@swc/core-win32-arm64-msvc@1.3.46: - resolution: {integrity: sha512-4JSREbqaTRQ6QO0EeoiB6G5vuFT8zI8aTOLu5At7Cvlw+X7bOGNO+wJ3Tqw7O+68OL+0bPHzHGTXKL9kUccY1A==} - engines: {node: '>=10'} - cpu: [arm64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - 
/@swc/core-win32-ia32-msvc@1.3.46: - resolution: {integrity: sha512-kC8dIDzcArm1e85yHJsEZFxcNq5NztLkrqkP1nVOQ+9QXD9DKhjbZtWy2gnpclinii6KEGng8SieWiJiOA0CBQ==} - engines: {node: '>=10'} - cpu: [ia32] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@swc/core-win32-x64-msvc@1.3.46: - resolution: {integrity: sha512-rrSAfq+DvpJioBxUsnuH+sKl0eXid1DwkwNzkVGHEreN9GoP7GospWtFq7VDcO6DrS/s3HtR4/TzoIYFEBCRIg==} - engines: {node: '>=10'} - cpu: [x64] - os: [win32] - requiresBuild: true - dev: true - optional: true - - /@swc/core@1.3.46(@swc/helpers@0.5.0): - resolution: {integrity: sha512-WxzgJMWUBVJ95HsvEqlWzM3Qxp2FQrPa4QdAkQQuuvCMnfdctGUbhX/c3LiSRlWrl2LIkYAi4bLansTOol4QcQ==} - engines: {node: '>=10'} - requiresBuild: true - peerDependencies: - '@swc/helpers': ^0.5.0 - dependencies: - '@swc/helpers': 0.5.0 - optionalDependencies: - '@swc/core-darwin-arm64': 1.3.46 - '@swc/core-darwin-x64': 1.3.46 - '@swc/core-linux-arm-gnueabihf': 1.3.46 - '@swc/core-linux-arm64-gnu': 1.3.46 - '@swc/core-linux-arm64-musl': 1.3.46 - '@swc/core-linux-x64-gnu': 1.3.46 - '@swc/core-linux-x64-musl': 1.3.46 - '@swc/core-win32-arm64-msvc': 1.3.46 - '@swc/core-win32-ia32-msvc': 1.3.46 - '@swc/core-win32-x64-msvc': 1.3.46 - dev: true - - /@swc/helpers@0.5.0: - resolution: {integrity: sha512-SjY/p4MmECVVEWspzSRpQEM3sjR17sP8PbGxELWrT+YZMBfiUyt1MRUNjMV23zohwlG2HYtCQOsCwsTHguXkyg==} - dependencies: - tslib: 2.5.0 - dev: true - - /@szmarczak/http-timer@4.0.6: - resolution: {integrity: sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==} - engines: {node: '>=10'} - dependencies: - defer-to-connect: 2.0.1 - dev: true - - /@tokenizer/token@0.3.0: - resolution: {integrity: sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==} - dev: true - - /@tsconfig/node10@1.0.9: - resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} - dev: true - - /@tsconfig/node12@1.0.11: - resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} - dev: true - - /@tsconfig/node14@1.0.3: - resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} - dev: true - - /@tsconfig/node16@1.0.3: - resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==} - dev: true - - /@types/accepts@1.3.5: - resolution: {integrity: sha512-jOdnI/3qTpHABjM5cx1Hc0sKsPoYCp+DP/GJRGtDlPd7fiV9oXGGIcjW/ZOxLIvjGz8MA+uMZI9metHlgqbgwQ==} - dependencies: - '@types/node': 18.15.11 - dev: true - - /@types/babel__core@7.20.0: - resolution: {integrity: sha512-+n8dL/9GWblDO0iU6eZAwEIJVr5DWigtle+Q6HLOrh/pdbXOhOtqzq8VPPE2zvNJzSKY4vH/z3iT3tn0A3ypiQ==} - dependencies: - '@babel/parser': 7.21.4 - '@babel/types': 7.21.4 - '@types/babel__generator': 7.6.4 - '@types/babel__template': 7.4.1 - '@types/babel__traverse': 7.18.3 - dev: true - - /@types/babel__generator@7.6.4: - resolution: {integrity: sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==} - dependencies: - '@babel/types': 7.21.4 - dev: true - - /@types/babel__template@7.4.1: - resolution: {integrity: sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==} - dependencies: - '@babel/parser': 7.21.4 - '@babel/types': 7.21.4 - dev: true - - /@types/babel__traverse@7.18.3: - resolution: {integrity: 
sha512-1kbcJ40lLB7MHsj39U4Sh1uTd2E7rLEa79kmDpI6cy+XiXsteB3POdQomoq4FxszMrO3ZYchkhYJw7A2862b3w==} - dependencies: - '@babel/types': 7.21.4 - dev: true - - /@types/body-parser@1.19.2: - resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==} - dependencies: - '@types/connect': 3.4.35 - '@types/node': 18.15.11 - dev: true - - /@types/cacheable-request@6.0.3: - resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==} - dependencies: - '@types/http-cache-semantics': 4.0.1 - '@types/keyv': 3.1.4 - '@types/node': 18.15.11 - '@types/responselike': 1.0.0 - dev: true - - /@types/connect@3.4.35: - resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==} - dependencies: - '@types/node': 18.15.11 - dev: true - - /@types/content-disposition@0.5.5: - resolution: {integrity: sha512-v6LCdKfK6BwcqMo+wYW05rLS12S0ZO0Fl4w1h4aaZMD7bqT3gVUns6FvLJKGZHQmYn3SX55JWGpziwJRwVgutA==} - dev: true - - /@types/cookies@0.7.7: - resolution: {integrity: sha512-h7BcvPUogWbKCzBR2lY4oqaZbO3jXZksexYJVFvkrFeLgbZjQkU4x8pRq6eg2MHXQhY0McQdqmmsxRWlVAHooA==} - dependencies: - '@types/connect': 3.4.35 - '@types/express': 4.17.17 - '@types/keygrip': 1.0.2 - '@types/node': 18.15.11 - dev: true - - /@types/express-serve-static-core@4.17.33: - resolution: {integrity: sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==} - dependencies: - '@types/node': 18.15.11 - '@types/qs': 6.9.7 - '@types/range-parser': 1.2.4 - dev: true - - /@types/express@4.17.17: - resolution: {integrity: sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==} - dependencies: - '@types/body-parser': 1.19.2 - '@types/express-serve-static-core': 4.17.33 - '@types/qs': 6.9.7 - '@types/serve-static': 1.15.1 - dev: true - - /@types/graceful-fs@4.1.6: - resolution: {integrity: sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw==} - dependencies: - '@types/node': 18.15.11 - dev: true - - /@types/http-assert@1.5.3: - resolution: {integrity: sha512-FyAOrDuQmBi8/or3ns4rwPno7/9tJTijVW6aQQjK02+kOQ8zmoNg2XJtAuQhvQcy1ASJq38wirX5//9J1EqoUA==} - dev: true - - /@types/http-cache-semantics@4.0.1: - resolution: {integrity: sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==} - dev: true - - /@types/http-errors@2.0.1: - resolution: {integrity: sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ==} - dev: true - - /@types/istanbul-lib-coverage@2.0.4: - resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==} - dev: true - - /@types/istanbul-lib-report@3.0.0: - resolution: {integrity: sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==} - dependencies: - '@types/istanbul-lib-coverage': 2.0.4 - dev: true - - /@types/istanbul-reports@3.0.1: - resolution: {integrity: sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==} - dependencies: - '@types/istanbul-lib-report': 3.0.0 - dev: true - - /@types/jest@29.5.0: - resolution: {integrity: sha512-3Emr5VOl/aoBwnWcH/EFQvlSAmjV+XtV9GGu5mwdYew5vhQh0IUZx/60x0TzHDu09Bi7HMx10t/namdJw5QIcg==} - dependencies: - expect: 29.5.0 - pretty-format: 29.5.0 - dev: true - - /@types/jsonwebtoken@9.0.1: - resolution: 
{integrity: sha512-c5ltxazpWabia/4UzhIoaDcIza4KViOQhdbjRlfcIGVnsE3c3brkz9Z+F/EeJIECOQP7W7US2hNE930cWWkPiw==} - dependencies: - '@types/node': 18.15.11 - dev: true - - /@types/keygrip@1.0.2: - resolution: {integrity: sha512-GJhpTepz2udxGexqos8wgaBx4I/zWIDPh/KOGEwAqtuGDkOUJu5eFvwmdBX4AmB8Odsr+9pHCQqiAqDL/yKMKw==} - dev: true - - /@types/keyv@3.1.4: - resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==} - dependencies: - '@types/node': 18.15.11 - dev: true - - /@types/koa-bodyparser@4.3.10: - resolution: {integrity: sha512-6ae05pjhmrmGhUR8GYD5qr5p9LTEMEGfGXCsK8VaSL+totwigm8+H/7MHW7K4854CMeuwRAubT8qcc/EagaeIA==} - dependencies: - '@types/koa': 2.13.6 - dev: true - - /@types/koa-compose@3.2.5: - resolution: {integrity: sha512-B8nG/OoE1ORZqCkBVsup/AKcvjdgoHnfi4pZMn5UwAPCbhk/96xyv284eBYW8JlQbQ7zDmnpFr68I/40mFoIBQ==} - dependencies: - '@types/koa': 2.13.6 - dev: true - - /@types/koa-pino-logger@3.0.1: - resolution: {integrity: sha512-KqGej3mNnP94KA8RBiSr4w0rqcQVUvInG6PjnGPIjKPex6iUWiVOL4c6UeoiUQYDMb/WQVZRmaC7dzPsev1IKw==} - dependencies: - '@types/koa': 2.13.6 - '@types/pino': 6.3.12 - '@types/pino-http': 5.8.1 - dev: true - - /@types/koa-router@7.4.4: - resolution: {integrity: sha512-3dHlZ6CkhgcWeF6wafEUvyyqjWYfKmev3vy1PtOmr0mBc3wpXPU5E8fBBd4YQo5bRpHPfmwC5yDaX7s4jhIN6A==} - dependencies: - '@types/koa': 2.13.6 - dev: true - - /@types/koa@2.13.6: - resolution: {integrity: sha512-diYUfp/GqfWBAiwxHtYJ/FQYIXhlEhlyaU7lB/bWQrx4Il9lCET5UwpFy3StOAohfsxxvEQ11qIJgT1j2tfBvw==} - dependencies: - '@types/accepts': 1.3.5 - '@types/content-disposition': 0.5.5 - '@types/cookies': 0.7.7 - '@types/http-assert': 1.5.3 - '@types/http-errors': 2.0.1 - '@types/keygrip': 1.0.2 - '@types/koa-compose': 3.2.5 - '@types/node': 18.15.11 - dev: true - - /@types/long@4.0.2: - resolution: {integrity: sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==} - dev: false - - /@types/mime@3.0.1: - resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} - dev: true - - /@types/node@18.15.11: - resolution: {integrity: sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==} - - /@types/pg-pool@2.0.3: - resolution: {integrity: sha512-fwK5WtG42Yb5RxAwxm3Cc2dJ39FlgcaNiXKvtTLAwtCn642X7dgel+w1+cLWwpSOFImR3YjsZtbkfjxbHtFAeg==} - dependencies: - '@types/pg': 8.6.6 - dev: false - - /@types/pg@8.6.1: - resolution: {integrity: sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==} - dependencies: - '@types/node': 18.15.11 - pg-protocol: 1.6.0 - pg-types: 2.2.0 - dev: false - - /@types/pg@8.6.6: - resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} - dependencies: - '@types/node': 18.15.11 - pg-protocol: 1.6.0 - pg-types: 2.2.0 - - /@types/pino-http@5.8.1: - resolution: {integrity: sha512-A9MW6VCnx5ii7s+Fs5aFIw+aSZcBCpsZ/atpxamu8tTsvWFacxSf2Hrn1Ohn1jkVRB/LiPGOapRXcFawDBnDnA==} - dependencies: - '@types/pino': 6.3.12 - dev: true - - /@types/pino-pretty@5.0.0: - resolution: {integrity: sha512-N1uzqSzioqz8R3AkDbSJwcfDWeI3YMPNapSQQhnB2ISU4NYgUIcAh+hYT5ygqBM+klX4htpEhXMmoJv3J7GrdA==} - deprecated: This is a stub types definition. pino-pretty provides its own type definitions, so you do not need this installed. 
- dependencies: - pino-pretty: 10.0.0 - dev: true - - /@types/pino-std-serializers@4.0.0: - resolution: {integrity: sha512-gXfUZx2xIBbFYozGms53fT0nvkacx/+62c8iTxrEqH5PkIGAQvDbXg2774VWOycMPbqn5YJBQ3BMsg4Li3dWbg==} - deprecated: This is a stub types definition. pino-std-serializers provides its own type definitions, so you do not need this installed. - dependencies: - pino-std-serializers: 5.6.0 - dev: true - - /@types/pino@6.3.12: - resolution: {integrity: sha512-dsLRTq8/4UtVSpJgl9aeqHvbh6pzdmjYD3C092SYgLD2TyoCqHpTJk6vp8DvCTGGc7iowZ2MoiYiVUUCcu7muw==} - dependencies: - '@types/node': 18.15.11 - '@types/pino-pretty': 5.0.0 - '@types/pino-std-serializers': 4.0.0 - sonic-boom: 2.8.0 - dev: true - - /@types/prettier@2.7.2: - resolution: {integrity: sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg==} - dev: true - - /@types/qs@6.9.7: - resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==} - dev: true - - /@types/range-parser@1.2.4: - resolution: {integrity: sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==} - dev: true - - /@types/responselike@1.0.0: - resolution: {integrity: sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==} - dependencies: - '@types/node': 18.15.11 - dev: true - - /@types/serve-static@1.15.1: - resolution: {integrity: sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==} - dependencies: - '@types/mime': 3.0.1 - '@types/node': 18.15.11 - dev: true - - /@types/stack-utils@2.0.1: - resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==} - dev: true - - /@types/yargs-parser@21.0.0: - resolution: {integrity: sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==} - dev: true - - /@types/yargs@17.0.24: - resolution: {integrity: sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw==} - dependencies: - '@types/yargs-parser': 21.0.0 - dev: true - - /abbrev@1.1.1: - resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==} - dev: true - - /abort-controller@3.0.0: - resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} - engines: {node: '>=6.5'} - dependencies: - event-target-shim: 5.0.1 - dev: true - - /accepts@1.3.8: - resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} - engines: {node: '>= 0.6'} - dependencies: - mime-types: 2.1.35 - negotiator: 0.6.3 - dev: false - - /acorn-walk@8.2.0: - resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} - engines: {node: '>=0.4.0'} - dev: true - - /acorn@8.8.2: - resolution: {integrity: sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==} - engines: {node: '>=0.4.0'} - hasBin: true - dev: true - - /aggregate-error@3.1.0: - resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} - engines: {node: '>=8'} - dependencies: - clean-stack: 2.2.0 - indent-string: 4.0.0 - dev: false - - /ajv@8.12.0: - resolution: {integrity: 
sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==} - dependencies: - fast-deep-equal: 3.1.3 - json-schema-traverse: 1.0.0 - require-from-string: 2.0.2 - uri-js: 4.4.1 - dev: false - - /ansi-color@0.2.1: - resolution: {integrity: sha512-bF6xLaZBLpOQzgYUtYEhJx090nPSZk1BQ/q2oyBK9aMMcJHzx9uXGCjI2Y+LebsN4Jwoykr0V9whbPiogdyHoQ==} - dev: false - - /ansi-escapes@4.3.2: - resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} - engines: {node: '>=8'} - dependencies: - type-fest: 0.21.3 - dev: true - - /ansi-regex@5.0.1: - resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} - engines: {node: '>=8'} - - /ansi-styles@3.2.1: - resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} - engines: {node: '>=4'} - dependencies: - color-convert: 1.9.3 - dev: true - - /ansi-styles@4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} - dependencies: - color-convert: 2.0.1 - - /ansi-styles@5.2.0: - resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} - engines: {node: '>=10'} - dev: true - - /anymatch@3.1.3: - resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} - engines: {node: '>= 8'} - dependencies: - normalize-path: 3.0.0 - picomatch: 2.3.1 - dev: true - - /arch@2.2.0: - resolution: {integrity: sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==} - dev: true - - /arg@4.1.3: - resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - dev: true - - /argparse@1.0.10: - resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} - dependencies: - sprintf-js: 1.0.3 - dev: true - - /atomic-sleep@1.0.0: - resolution: {integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==} - engines: {node: '>=8.0.0'} - - /babel-jest@29.5.0(@babel/core@7.21.4): - resolution: {integrity: sha512-mA4eCDh5mSo2EcA9xQjVTpmbbNk32Zb3Q3QFQsNhaK56Q+yoXowzFodLux30HRgyOho5rsQ6B0P9QpMkvvnJ0Q==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - '@babel/core': ^7.8.0 - dependencies: - '@babel/core': 7.21.4 - '@jest/transform': 29.5.0 - '@types/babel__core': 7.20.0 - babel-plugin-istanbul: 6.1.1 - babel-preset-jest: 29.5.0(@babel/core@7.21.4) - chalk: 4.1.2 - graceful-fs: 4.2.11 - slash: 3.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-istanbul@6.1.1: - resolution: {integrity: sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==} - engines: {node: '>=8'} - dependencies: - '@babel/helper-plugin-utils': 7.20.2 - '@istanbuljs/load-nyc-config': 1.1.0 - '@istanbuljs/schema': 0.1.3 - istanbul-lib-instrument: 5.2.1 - test-exclude: 6.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /babel-plugin-jest-hoist@29.5.0: - resolution: {integrity: sha512-zSuuuAlTMT4mzLj2nPnUm6fsE6270vdOfnpbJ+RmruU75UhLFvL0N2NgI7xpeS7NaB6hGqmd5pVpGTDYvi4Q3w==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/template': 7.20.7 - 
'@babel/types': 7.21.4 - '@types/babel__core': 7.20.0 - '@types/babel__traverse': 7.18.3 - dev: true - - /babel-preset-current-node-syntax@1.0.1(@babel/core@7.21.4): - resolution: {integrity: sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.21.4 - '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.21.4) - '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.21.4) - '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.21.4) - '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.21.4) - '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.21.4) - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.21.4) - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.21.4) - '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.21.4) - '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.21.4) - '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.21.4) - '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.21.4) - '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.21.4) - dev: true - - /babel-preset-jest@29.5.0(@babel/core@7.21.4): - resolution: {integrity: sha512-JOMloxOqdiBSxMAzjRaH023/vvcaSaec49zvg+2LmNsktC7ei39LTJGw02J+9uUtTZUq6xbLyJ4dxe9sSmIuAg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - '@babel/core': ^7.0.0 - dependencies: - '@babel/core': 7.21.4 - babel-plugin-jest-hoist: 29.5.0 - babel-preset-current-node-syntax: 1.0.1(@babel/core@7.21.4) - dev: true - - /balanced-match@1.0.2: - resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - dev: true - - /base64-js@1.5.1: - resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - dev: true - - /bin-check@4.1.0: - resolution: {integrity: sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==} - engines: {node: '>=4'} - dependencies: - execa: 0.7.0 - executable: 4.1.1 - dev: true - - /bin-version-check@5.0.0: - resolution: {integrity: sha512-Q3FMQnS5eZmrBGqmDXLs4dbAn/f+52voP6ykJYmweSA60t6DyH4UTSwZhtbK5UH+LBoWvDljILUQMLRUtsynsA==} - engines: {node: '>=12'} - dependencies: - bin-version: 6.0.0 - semver: 7.3.8 - semver-truncate: 2.0.0 - dev: true - - /bin-version@6.0.0: - resolution: {integrity: sha512-nk5wEsP4RiKjG+vF+uG8lFsEn4d7Y6FVDamzzftSunXOoOcOOkzcWdKVlGgFFwlUQCj63SgnUkLLGF8v7lufhw==} - engines: {node: '>=12'} - dependencies: - execa: 5.1.1 - find-versions: 5.1.0 - dev: true - - /binary-extensions@2.2.0: - resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} - engines: {node: '>=8'} - dev: true - - /brace-expansion@1.1.11: - resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} - dependencies: - balanced-match: 1.0.2 - concat-map: 0.0.1 - dev: true - - /brace-expansion@2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} - dependencies: - balanced-match: 1.0.2 - dev: true - - /braces@3.0.2: - resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} - engines: {node: '>=8'} - dependencies: - fill-range: 7.0.1 - dev: true - - 
/browserslist@4.21.5: - resolution: {integrity: sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w==} - engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} - hasBin: true - dependencies: - caniuse-lite: 1.0.30001474 - electron-to-chromium: 1.4.350 - node-releases: 2.0.10 - update-browserslist-db: 1.0.10(browserslist@4.21.5) - dev: true - - /bs-logger@0.2.6: - resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} - engines: {node: '>= 6'} - dependencies: - fast-json-stable-stringify: 2.1.0 - dev: true - - /bser@2.1.1: - resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} - dependencies: - node-int64: 0.4.0 - dev: true - - /buffer-equal-constant-time@1.0.1: - resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} - dev: false - - /buffer-from@1.1.2: - resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - dev: true - - /buffer-writer@2.0.0: - resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} - engines: {node: '>=4'} - dev: false - - /buffer@6.0.3: - resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - dev: true - - /bufrw@1.3.0: - resolution: {integrity: sha512-jzQnSbdJqhIltU9O5KUiTtljP9ccw2u5ix59McQy4pV2xGhVLhRZIndY8GIrgh5HjXa6+QJ9AQhOd2QWQizJFQ==} - engines: {node: '>= 0.10.x'} - dependencies: - ansi-color: 0.2.1 - error: 7.0.2 - hexer: 1.5.0 - xtend: 4.0.2 - dev: false - - /bytes@3.1.2: - resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} - engines: {node: '>= 0.8'} - dev: false - - /cache-content-type@1.0.1: - resolution: {integrity: sha512-IKufZ1o4Ut42YUrZSo8+qnMTrFuKkvyoLXUywKz9GJ5BrhOFGhLdkx9sG4KAnVvbY6kEcSFjLQul+DVmBm2bgA==} - engines: {node: '>= 6.0.0'} - dependencies: - mime-types: 2.1.35 - ylru: 1.3.2 - dev: false - - /cacheable-lookup@5.0.4: - resolution: {integrity: sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==} - engines: {node: '>=10.6.0'} - dev: true - - /cacheable-request@7.0.2: - resolution: {integrity: sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==} - engines: {node: '>=8'} - dependencies: - clone-response: 1.0.3 - get-stream: 5.2.0 - http-cache-semantics: 4.1.1 - keyv: 4.5.2 - lowercase-keys: 2.0.0 - normalize-url: 6.1.0 - responselike: 2.0.1 - dev: true - - /call-bind@1.0.2: - resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} - dependencies: - function-bind: 1.1.1 - get-intrinsic: 1.2.0 - dev: false - - /callsites@3.1.0: - resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} - engines: {node: '>=6'} - dev: true - - /camelcase@5.3.1: - resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} - engines: {node: '>=6'} - dev: true - - /camelcase@6.3.0: - resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} - engines: {node: 
'>=10'} - dev: true - - /caniuse-lite@1.0.30001474: - resolution: {integrity: sha512-iaIZ8gVrWfemh5DG3T9/YqarVZoYf0r188IjaGwx68j4Pf0SGY6CQkmJUIE+NZHkkecQGohzXmBGEwWDr9aM3Q==} - dev: true - - /chalk@2.4.2: - resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} - engines: {node: '>=4'} - dependencies: - ansi-styles: 3.2.1 - escape-string-regexp: 1.0.5 - supports-color: 5.5.0 - dev: true - - /chalk@4.1.2: - resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} - engines: {node: '>=10'} - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - dev: true - - /char-regex@1.0.2: - resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} - engines: {node: '>=10'} - dev: true - - /chokidar@3.5.3: - resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} - engines: {node: '>= 8.10.0'} - dependencies: - anymatch: 3.1.3 - braces: 3.0.2 - glob-parent: 5.1.2 - is-binary-path: 2.1.0 - is-glob: 4.0.3 - normalize-path: 3.0.0 - readdirp: 3.6.0 - optionalDependencies: - fsevents: 2.3.2 - dev: true - - /ci-info@3.8.0: - resolution: {integrity: sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==} - engines: {node: '>=8'} - dev: true - - /cjs-module-lexer@1.2.2: - resolution: {integrity: sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==} - dev: true - - /clean-stack@2.2.0: - resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} - engines: {node: '>=6'} - dev: false - - /cliui@7.0.4: - resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 - dev: false - - /cliui@8.0.1: - resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} - engines: {node: '>=12'} - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 - dev: true - - /clone-response@1.0.3: - resolution: {integrity: sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==} - dependencies: - mimic-response: 1.0.1 - dev: true - - /co-body@6.1.0: - resolution: {integrity: sha512-m7pOT6CdLN7FuXUcpuz/8lfQ/L77x8SchHCF4G0RBTJO20Wzmhn5Sp4/5WsKy8OSpifBSUrmg83qEqaDHdyFuQ==} - dependencies: - inflation: 2.0.0 - qs: 6.11.1 - raw-body: 2.5.2 - type-is: 1.6.18 - dev: false - - /co@4.6.0: - resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} - engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} - - /collect-v8-coverage@1.0.1: - resolution: {integrity: sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==} - dev: true - - /color-convert@1.9.3: - resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} - dependencies: - color-name: 1.1.3 - dev: true - - /color-convert@2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} - dependencies: - color-name: 1.1.4 - - /color-name@1.1.3: - resolution: {integrity: 
sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} - dev: true - - /color-name@1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - - /colorette@2.0.19: - resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==} - dev: true - - /commander@7.2.0: - resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} - engines: {node: '>= 10'} - dev: true - - /concat-map@0.0.1: - resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} - dev: true - - /content-disposition@0.5.4: - resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} - engines: {node: '>= 0.6'} - dependencies: - safe-buffer: 5.2.1 - - /content-type@1.0.5: - resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} - engines: {node: '>= 0.6'} - dev: false - - /convert-source-map@1.9.0: - resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==} - dev: true - - /convert-source-map@2.0.0: - resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} - dev: true - - /cookies@0.8.0: - resolution: {integrity: sha512-8aPsApQfebXnuI+537McwYsDtjVxGm8gTIzQI3FDW6t5t/DAhERxtnbEPN/8RX+uZthoz4eCOgloXaE5cYyNow==} - engines: {node: '>= 0.8'} - dependencies: - depd: 2.0.0 - keygrip: 1.1.0 - dev: false - - /copy-to@2.0.1: - resolution: {integrity: sha512-3DdaFaU/Zf1AnpLiFDeNCD4TOWe3Zl2RZaTzUvWiIk5ERzcCodOE20Vqq4fzCbNoHURFHT4/us/Lfq+S2zyY4w==} - dev: false - - /create-require@1.1.1: - resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - dev: true - - /cross-spawn@5.1.0: - resolution: {integrity: sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==} - dependencies: - lru-cache: 4.1.5 - shebang-command: 1.2.0 - which: 1.3.1 - dev: true - - /cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 - dev: true - - /dateformat@4.6.3: - resolution: {integrity: sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==} - dev: true - - /debug@3.2.7(supports-color@5.5.0): - resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - supports-color: 5.5.0 - dev: true - - /debug@4.3.4: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.2 - - /decompress-response@6.0.0: - resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} - engines: {node: '>=10'} - dependencies: - mimic-response: 3.1.0 - dev: true 
- - /dedent@0.7.0: - resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} - dev: true - - /deep-equal@1.0.1: - resolution: {integrity: sha512-bHtC0iYvWhyaTzvV3CZgPeZQqCOBGyGsVV7v4eevpdkLHfiSrXUdBG+qAuSz4RI70sszvjQ1QSZ98An1yNwpSw==} - dev: false - - /deepmerge@4.3.1: - resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} - engines: {node: '>=0.10.0'} - dev: true - - /defer-to-connect@2.0.1: - resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==} - engines: {node: '>=10'} - dev: true - - /delegates@1.0.0: - resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==} - dev: false - - /depd@1.1.2: - resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==} - engines: {node: '>= 0.6'} - dev: false - - /depd@2.0.0: - resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} - engines: {node: '>= 0.8'} - dev: false - - /destroy@1.2.0: - resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} - engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - dev: false - - /detect-newline@3.1.0: - resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} - engines: {node: '>=8'} - dev: true - - /diff-sequences@29.4.3: - resolution: {integrity: sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /diff@4.0.2: - resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} - engines: {node: '>=0.3.1'} - dev: true - - /duplexify@4.1.2: - resolution: {integrity: sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==} - dependencies: - end-of-stream: 1.4.4 - inherits: 2.0.4 - readable-stream: 3.6.2 - stream-shift: 1.0.1 - dev: false - - /ecdsa-sig-formatter@1.0.11: - resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} - dependencies: - safe-buffer: 5.2.1 - dev: false - - /ee-first@1.1.1: - resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - dev: false - - /electron-to-chromium@1.4.350: - resolution: {integrity: sha512-XnXcWpVnOfHZ4C3NPiL+SubeoGV8zc/pg8GEubRtc1dPA/9jKS2vsOPmtClJHhWxUb2RSGC1OBLCbgNUJMtZPw==} - dev: true - - /emittery@0.13.1: - resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==} - engines: {node: '>=12'} - dev: true - - /emoji-regex@8.0.0: - resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - - /encodeurl@1.0.2: - resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} - engines: {node: '>= 0.8'} - dev: false - - /end-of-stream@1.4.4: - resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} - dependencies: - once: 1.4.0 - - /error-ex@1.3.2: - resolution: {integrity: 
sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} - dependencies: - is-arrayish: 0.2.1 - dev: true - - /error@7.0.2: - resolution: {integrity: sha512-UtVv4l5MhijsYUxPJo4390gzfZvAnTHreNnDjnTZaKIiZ/SemXxAhBkYSKtWa5RtBXbLP8tMgn/n0RUa/H7jXw==} - dependencies: - string-template: 0.2.1 - xtend: 4.0.2 - dev: false - - /escalade@3.1.1: - resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} - engines: {node: '>=6'} - - /escape-html@1.0.3: - resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} - dev: false - - /escape-string-regexp@1.0.5: - resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} - engines: {node: '>=0.8.0'} - dev: true - - /escape-string-regexp@2.0.0: - resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} - engines: {node: '>=8'} - dev: true - - /escape-string-regexp@5.0.0: - resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} - engines: {node: '>=12'} - dev: true - - /esprima@4.0.1: - resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} - engines: {node: '>=4'} - hasBin: true - dev: true - - /event-target-shim@5.0.1: - resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} - engines: {node: '>=6'} - dev: true - - /events@3.3.0: - resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} - engines: {node: '>=0.8.x'} - dev: true - - /execa@0.7.0: - resolution: {integrity: sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==} - engines: {node: '>=4'} - dependencies: - cross-spawn: 5.1.0 - get-stream: 3.0.0 - is-stream: 1.1.0 - npm-run-path: 2.0.2 - p-finally: 1.0.0 - signal-exit: 3.0.7 - strip-eof: 1.0.0 - dev: true - - /execa@5.1.1: - resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} - engines: {node: '>=10'} - dependencies: - cross-spawn: 7.0.3 - get-stream: 6.0.1 - human-signals: 2.1.0 - is-stream: 2.0.1 - merge-stream: 2.0.0 - npm-run-path: 4.0.1 - onetime: 5.1.2 - signal-exit: 3.0.7 - strip-final-newline: 2.0.0 - dev: true - - /executable@4.1.1: - resolution: {integrity: sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==} - engines: {node: '>=4'} - dependencies: - pify: 2.3.0 - dev: true - - /exit@0.1.2: - resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} - engines: {node: '>= 0.8.0'} - dev: true - - /expect@29.5.0: - resolution: {integrity: sha512-yM7xqUrCO2JdpFo4XpM82t+PJBFybdqoQuJLDGeDX2ij8NZzqRHyu3Hp188/JX7SWqud+7t4MUdvcgGBICMHZg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/expect-utils': 29.5.0 - jest-get-type: 29.4.3 - jest-matcher-utils: 29.5.0 - jest-message-util: 29.5.0 - jest-util: 29.5.0 - dev: true - - /ext-list@2.2.2: - resolution: {integrity: sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==} - engines: {node: '>=0.10.0'} - dependencies: - mime-db: 1.52.0 - dev: true - - /ext-name@5.0.0: - 
resolution: {integrity: sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==} - engines: {node: '>=4'} - dependencies: - ext-list: 2.2.2 - sort-keys-length: 1.0.1 - dev: true - - /fast-copy@3.0.1: - resolution: {integrity: sha512-Knr7NOtK3HWRYGtHoJrjkaWepqT8thIVGAwt0p0aUs1zqkAzXZV4vo9fFNwyb5fcqK1GKYFYxldQdIDVKhUAfA==} - dev: true - - /fast-deep-equal@3.1.3: - resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} - dev: false - - /fast-glob@3.2.12: - resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} - engines: {node: '>=8.6.0'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - '@nodelib/fs.walk': 1.2.8 - glob-parent: 5.1.2 - merge2: 1.4.1 - micromatch: 4.0.5 - dev: true - - /fast-json-stable-stringify@2.1.0: - resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} - dev: true - - /fast-redact@3.1.2: - resolution: {integrity: sha512-+0em+Iya9fKGfEQGcd62Yv6onjBmmhV1uh86XVfOU8VwAe6kaFdQCWI9s0/Nnugx5Vd9tdbZ7e6gE2tR9dzXdw==} - engines: {node: '>=6'} - dev: false - - /fast-safe-stringify@2.1.1: - resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} - dev: true - - /fast-url-parser@1.1.3: - resolution: {integrity: sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==} - dependencies: - punycode: 1.4.1 - dev: false - - /fastq@1.15.0: - resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} - dependencies: - reusify: 1.0.4 - dev: true - - /fb-watchman@2.0.2: - resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} - dependencies: - bser: 2.1.1 - dev: true - - /file-type@17.1.6: - resolution: {integrity: sha512-hlDw5Ev+9e883s0pwUsuuYNu4tD7GgpUnOvykjv1Gya0ZIjuKumthDRua90VUn6/nlRKAjcxLUnHNTIUWwWIiw==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - readable-web-to-node-stream: 3.0.2 - strtok3: 7.0.0 - token-types: 5.0.1 - dev: true - - /filename-reserved-regex@3.0.0: - resolution: {integrity: sha512-hn4cQfU6GOT/7cFHXBqeBg2TbrMBgdD0kcjLhvSQYYwm3s4B6cjvBfb7nBALJLAXqmU5xajSa7X2NnUud/VCdw==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dev: true - - /filenamify@5.1.1: - resolution: {integrity: sha512-M45CbrJLGACfrPOkrTp3j2EcO9OBkKUYME0eiqOCa7i2poaklU0jhlIaMlr8ijLorT0uLAzrn3qXOp5684CkfA==} - engines: {node: '>=12.20'} - dependencies: - filename-reserved-regex: 3.0.0 - strip-outer: 2.0.0 - trim-repeated: 2.0.0 - dev: true - - /fill-range@7.0.1: - resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} - engines: {node: '>=8'} - dependencies: - to-regex-range: 5.0.1 - dev: true - - /find-up@4.1.0: - resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} - engines: {node: '>=8'} - dependencies: - locate-path: 5.0.0 - path-exists: 4.0.0 - dev: true - - /find-versions@5.1.0: - resolution: {integrity: sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==} - engines: {node: '>=12'} - dependencies: - semver-regex: 4.0.5 - dev: true - - /fresh@0.5.2: - resolution: {integrity: 
sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} - engines: {node: '>= 0.6'} - dev: false - - /fs.realpath@1.0.0: - resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} - dev: true - - /fsevents@2.3.2: - resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - requiresBuild: true - dev: true - optional: true - - /function-bind@1.1.1: - resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} - - /gensync@1.0.0-beta.2: - resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} - engines: {node: '>=6.9.0'} - dev: true - - /get-caller-file@2.0.5: - resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} - engines: {node: 6.* || 8.* || >= 10.*} - - /get-intrinsic@1.2.0: - resolution: {integrity: sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==} - dependencies: - function-bind: 1.1.1 - has: 1.0.3 - has-symbols: 1.0.3 - dev: false - - /get-package-type@0.1.0: - resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} - engines: {node: '>=8.0.0'} - dev: true - - /get-stream@3.0.0: - resolution: {integrity: sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==} - engines: {node: '>=4'} - dev: true - - /get-stream@5.2.0: - resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==} - engines: {node: '>=8'} - dependencies: - pump: 3.0.0 - dev: true - - /get-stream@6.0.1: - resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} - engines: {node: '>=10'} - dev: true - - /glob-parent@5.1.2: - resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} - engines: {node: '>= 6'} - dependencies: - is-glob: 4.0.3 - dev: true - - /glob@7.2.3: - resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 3.1.2 - once: 1.4.0 - path-is-absolute: 1.0.1 - dev: true - - /glob@8.1.0: - resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==} - engines: {node: '>=12'} - dependencies: - fs.realpath: 1.0.0 - inflight: 1.0.6 - inherits: 2.0.4 - minimatch: 5.1.6 - once: 1.4.0 - dev: true - - /globals@11.12.0: - resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} - engines: {node: '>=4'} - dev: true - - /got@11.8.6: - resolution: {integrity: sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==} - engines: {node: '>=10.19.0'} - dependencies: - '@sindresorhus/is': 4.6.0 - '@szmarczak/http-timer': 4.0.6 - '@types/cacheable-request': 6.0.3 - '@types/responselike': 1.0.0 - cacheable-lookup: 5.0.4 - cacheable-request: 7.0.2 - decompress-response: 6.0.0 - http2-wrapper: 1.0.3 - lowercase-keys: 2.0.0 - p-cancelable: 2.1.1 - responselike: 2.0.1 - 
dev: true - - /graceful-fs@4.2.11: - resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - dev: true - - /has-flag@3.0.0: - resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} - engines: {node: '>=4'} - dev: true - - /has-flag@4.0.0: - resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} - engines: {node: '>=8'} - dev: true - - /has-symbols@1.0.3: - resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} - engines: {node: '>= 0.4'} - dev: false - - /has-tostringtag@1.0.0: - resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} - engines: {node: '>= 0.4'} - dependencies: - has-symbols: 1.0.3 - dev: false - - /has@1.0.3: - resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} - engines: {node: '>= 0.4.0'} - dependencies: - function-bind: 1.1.1 - - /help-me@4.2.0: - resolution: {integrity: sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==} - dependencies: - glob: 8.1.0 - readable-stream: 3.6.2 - dev: true - - /hexer@1.5.0: - resolution: {integrity: sha512-dyrPC8KzBzUJ19QTIo1gXNqIISRXQ0NwteW6OeQHRN4ZuZeHkdODfj0zHBdOlHbRY8GqbqK57C9oWSvQZizFsg==} - engines: {node: '>= 0.10.x'} - hasBin: true - dependencies: - ansi-color: 0.2.1 - minimist: 1.2.8 - process: 0.10.1 - xtend: 4.0.2 - dev: false - - /html-escaper@2.0.2: - resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} - dev: true - - /http-assert@1.5.0: - resolution: {integrity: sha512-uPpH7OKX4H25hBmU6G1jWNaqJGpTXxey+YOUizJUAgu0AjLUeC8D73hTrhvDS5D+GJN1DN1+hhc/eF/wpxtp0w==} - engines: {node: '>= 0.8'} - dependencies: - deep-equal: 1.0.1 - http-errors: 1.8.1 - dev: false - - /http-cache-semantics@4.1.1: - resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==} - dev: true - - /http-errors@1.8.1: - resolution: {integrity: sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==} - engines: {node: '>= 0.6'} - dependencies: - depd: 1.1.2 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 1.5.0 - toidentifier: 1.0.1 - dev: false - - /http-errors@2.0.0: - resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} - engines: {node: '>= 0.8'} - dependencies: - depd: 2.0.0 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 2.0.1 - toidentifier: 1.0.1 - dev: false - - /http2-wrapper@1.0.3: - resolution: {integrity: sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==} - engines: {node: '>=10.19.0'} - dependencies: - quick-lru: 5.1.1 - resolve-alpn: 1.2.1 - dev: true - - /human-signals@2.1.0: - resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} - engines: {node: '>=10.17.0'} - dev: true - - /iconv-lite@0.4.24: - resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} - engines: {node: '>=0.10.0'} - dependencies: - safer-buffer: 2.1.2 - dev: false - - /ieee754@1.2.1: - resolution: {integrity: 
sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - dev: true - - /ignore-by-default@1.0.1: - resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==} - dev: true - - /import-local@3.1.0: - resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} - engines: {node: '>=8'} - hasBin: true - dependencies: - pkg-dir: 4.2.0 - resolve-cwd: 3.0.0 - dev: true - - /imurmurhash@0.1.4: - resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} - engines: {node: '>=0.8.19'} - dev: true - - /indent-string@4.0.0: - resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} - engines: {node: '>=8'} - dev: false - - /inflation@2.0.0: - resolution: {integrity: sha512-m3xv4hJYR2oXw4o4Y5l6P5P16WYmazYof+el6Al3f+YlggGj6qT9kImBAnzDelRALnP5d3h4jGBPKzYCizjZZw==} - engines: {node: '>= 0.8.0'} - dev: false - - /inflight@1.0.6: - resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} - dependencies: - once: 1.4.0 - wrappy: 1.0.2 - dev: true - - /inherits@2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - - /is-arrayish@0.2.1: - resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} - dev: true - - /is-binary-path@2.1.0: - resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} - engines: {node: '>=8'} - dependencies: - binary-extensions: 2.2.0 - dev: true - - /is-core-module@2.11.0: - resolution: {integrity: sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==} - dependencies: - has: 1.0.3 - - /is-extglob@2.1.1: - resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} - engines: {node: '>=0.10.0'} - dev: true - - /is-fullwidth-code-point@3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} - - /is-generator-fn@2.1.0: - resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} - engines: {node: '>=6'} - dev: true - - /is-generator-function@1.0.10: - resolution: {integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==} - engines: {node: '>= 0.4'} - dependencies: - has-tostringtag: 1.0.0 - dev: false - - /is-glob@4.0.3: - resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} - engines: {node: '>=0.10.0'} - dependencies: - is-extglob: 2.1.1 - dev: true - - /is-number@7.0.0: - resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} - engines: {node: '>=0.12.0'} - dev: true - - /is-plain-obj@1.1.0: - resolution: {integrity: sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==} - engines: {node: '>=0.10.0'} - dev: true - - /is-stream@1.1.0: - resolution: {integrity: sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==} - 
engines: {node: '>=0.10.0'} - dev: true - - /is-stream@2.0.1: - resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} - engines: {node: '>=8'} - dev: true - - /isexe@2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - dev: true - - /istanbul-lib-coverage@3.2.0: - resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} - engines: {node: '>=8'} - dev: true - - /istanbul-lib-instrument@5.2.1: - resolution: {integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==} - engines: {node: '>=8'} - dependencies: - '@babel/core': 7.21.4 - '@babel/parser': 7.21.4 - '@istanbuljs/schema': 0.1.3 - istanbul-lib-coverage: 3.2.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: true - - /istanbul-lib-report@3.0.0: - resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} - engines: {node: '>=8'} - dependencies: - istanbul-lib-coverage: 3.2.0 - make-dir: 3.1.0 - supports-color: 7.2.0 - dev: true - - /istanbul-lib-source-maps@4.0.1: - resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==} - engines: {node: '>=10'} - dependencies: - debug: 4.3.4 - istanbul-lib-coverage: 3.2.0 - source-map: 0.6.1 - transitivePeerDependencies: - - supports-color - dev: true - - /istanbul-reports@3.1.5: - resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} - engines: {node: '>=8'} - dependencies: - html-escaper: 2.0.2 - istanbul-lib-report: 3.0.0 - dev: true - - /jaeger-client@3.19.0: - resolution: {integrity: sha512-M0c7cKHmdyEUtjemnJyx/y9uX16XHocL46yQvyqDlPdvAcwPDbHrIbKjQdBqtiE4apQ/9dmr+ZLJYYPGnurgpw==} - engines: {node: '>=10'} - dependencies: - node-int64: 0.4.0 - opentracing: 0.14.7 - thriftrw: 3.12.0 - uuid: 8.3.2 - xorshift: 1.2.0 - dev: false - - /jest-changed-files@29.5.0: - resolution: {integrity: sha512-IFG34IUMUaNBIxjQXF/iu7g6EcdMrGRRxaUSw92I/2g2YC6vCdTltl4nHvt7Ci5nSJwXIkCu8Ka1DKF+X7Z1Ag==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - execa: 5.1.1 - p-limit: 3.1.0 - dev: true - - /jest-circus@29.5.0: - resolution: {integrity: sha512-gq/ongqeQKAplVxqJmbeUOJJKkW3dDNPY8PjhJ5G0lBRvu0e3EWGxGy5cI4LAGA7gV2UHCtWBI4EMXK8c9nQKA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.5.0 - '@jest/expect': 29.5.0 - '@jest/test-result': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - chalk: 4.1.2 - co: 4.6.0 - dedent: 0.7.0 - is-generator-fn: 2.1.0 - jest-each: 29.5.0 - jest-matcher-utils: 29.5.0 - jest-message-util: 29.5.0 - jest-runtime: 29.5.0 - jest-snapshot: 29.5.0 - jest-util: 29.5.0 - p-limit: 3.1.0 - pretty-format: 29.5.0 - pure-rand: 6.0.1 - slash: 3.0.0 - stack-utils: 2.0.6 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-cli@29.5.0(@types/node@18.15.11)(ts-node@10.9.1): - resolution: {integrity: sha512-L1KcP1l4HtfwdxXNFCL5bmUbLQiKrakMUriBEcc1Vfz6gx31ORKdreuWvmQVBit+1ss9NNR3yxjwfwzZNdQXJw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@jest/core': 29.5.0(ts-node@10.9.1) - 
'@jest/test-result': 29.5.0 - '@jest/types': 29.5.0 - chalk: 4.1.2 - exit: 0.1.2 - graceful-fs: 4.2.11 - import-local: 3.1.0 - jest-config: 29.5.0(@types/node@18.15.11)(ts-node@10.9.1) - jest-util: 29.5.0 - jest-validate: 29.5.0 - prompts: 2.4.2 - yargs: 17.7.1 - transitivePeerDependencies: - - '@types/node' - - supports-color - - ts-node - dev: true - - /jest-config@29.5.0(@types/node@18.15.11)(ts-node@10.9.1): - resolution: {integrity: sha512-kvDUKBnNJPNBmFFOhDbm59iu1Fii1Q6SxyhXfvylq3UTHbg6o7j/g8k2dZyXWLvfdKB1vAPxNZnMgtKJcmu3kA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - peerDependencies: - '@types/node': '*' - ts-node: '>=9.0.0' - peerDependenciesMeta: - '@types/node': - optional: true - ts-node: - optional: true - dependencies: - '@babel/core': 7.21.4 - '@jest/test-sequencer': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - babel-jest: 29.5.0(@babel/core@7.21.4) - chalk: 4.1.2 - ci-info: 3.8.0 - deepmerge: 4.3.1 - glob: 7.2.3 - graceful-fs: 4.2.11 - jest-circus: 29.5.0 - jest-environment-node: 29.5.0 - jest-get-type: 29.4.3 - jest-regex-util: 29.4.3 - jest-resolve: 29.5.0 - jest-runner: 29.5.0 - jest-util: 29.5.0 - jest-validate: 29.5.0 - micromatch: 4.0.5 - parse-json: 5.2.0 - pretty-format: 29.5.0 - slash: 3.0.0 - strip-json-comments: 3.1.1 - ts-node: 10.9.1(@swc/core@1.3.46)(@types/node@18.15.11)(typescript@5.0.3) - transitivePeerDependencies: - - supports-color - dev: true - - /jest-diff@29.5.0: - resolution: {integrity: sha512-LtxijLLZBduXnHSniy0WMdaHjmQnt3g5sa16W4p0HqukYTTsyTW3GD1q41TyGl5YFXj/5B2U6dlh5FM1LIMgxw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - diff-sequences: 29.4.3 - jest-get-type: 29.4.3 - pretty-format: 29.5.0 - dev: true - - /jest-docblock@29.4.3: - resolution: {integrity: sha512-fzdTftThczeSD9nZ3fzA/4KkHtnmllawWrXO69vtI+L9WjEIuXWs4AmyME7lN5hU7dB0sHhuPfcKofRsUb/2Fg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - detect-newline: 3.1.0 - dev: true - - /jest-each@29.5.0: - resolution: {integrity: sha512-HM5kIJ1BTnVt+DQZ2ALp3rzXEl+g726csObrW/jpEGl+CDSSQpOJJX2KE/vEg8cxcMXdyEPu6U4QX5eruQv5hA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.5.0 - chalk: 4.1.2 - jest-get-type: 29.4.3 - jest-util: 29.5.0 - pretty-format: 29.5.0 - dev: true - - /jest-environment-node@29.5.0: - resolution: {integrity: sha512-ExxuIK/+yQ+6PRGaHkKewYtg6hto2uGCgvKdb2nfJfKXgZ17DfXjvbZ+jA1Qt9A8EQSfPnt5FKIfnOO3u1h9qw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.5.0 - '@jest/fake-timers': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - jest-mock: 29.5.0 - jest-util: 29.5.0 - dev: true - - /jest-get-type@29.4.3: - resolution: {integrity: sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /jest-haste-map@29.5.0: - resolution: {integrity: sha512-IspOPnnBro8YfVYSw6yDRKh/TiCdRngjxeacCps1cQ9cgVN6+10JUcuJ1EabrgYLOATsIAigxA0rLR9x/YlrSA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.5.0 - '@types/graceful-fs': 4.1.6 - '@types/node': 18.15.11 - anymatch: 3.1.3 - fb-watchman: 2.0.2 - graceful-fs: 4.2.11 - jest-regex-util: 29.4.3 - jest-util: 29.5.0 - jest-worker: 29.5.0 - micromatch: 4.0.5 - walker: 1.0.8 - optionalDependencies: - fsevents: 2.3.2 - dev: true - - /jest-leak-detector@29.5.0: - resolution: {integrity: 
sha512-u9YdeeVnghBUtpN5mVxjID7KbkKE1QU4f6uUwuxiY0vYRi9BUCLKlPEZfDGR67ofdFmDz9oPAy2G92Ujrntmow==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-get-type: 29.4.3 - pretty-format: 29.5.0 - dev: true - - /jest-matcher-utils@29.5.0: - resolution: {integrity: sha512-lecRtgm/rjIK0CQ7LPQwzCs2VwW6WAahA55YBuI+xqmhm7LAaxokSB8C97yJeYyT+HvQkH741StzpU41wohhWw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - jest-diff: 29.5.0 - jest-get-type: 29.4.3 - pretty-format: 29.5.0 - dev: true - - /jest-message-util@29.5.0: - resolution: {integrity: sha512-Kijeg9Dag6CKtIDA7O21zNTACqD5MD/8HfIV8pdD94vFyFuer52SigdC3IQMhab3vACxXMiFk+yMHNdbqtyTGA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/code-frame': 7.21.4 - '@jest/types': 29.5.0 - '@types/stack-utils': 2.0.1 - chalk: 4.1.2 - graceful-fs: 4.2.11 - micromatch: 4.0.5 - pretty-format: 29.5.0 - slash: 3.0.0 - stack-utils: 2.0.6 - dev: true - - /jest-mock@29.5.0: - resolution: {integrity: sha512-GqOzvdWDE4fAV2bWQLQCkujxYWL7RxjCnj71b5VhDAGOevB3qj3Ovg26A5NI84ZpODxyzaozXLOh2NCgkbvyaw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - jest-util: 29.5.0 - dev: true - - /jest-pnp-resolver@1.2.3(jest-resolve@29.5.0): - resolution: {integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==} - engines: {node: '>=6'} - peerDependencies: - jest-resolve: '*' - peerDependenciesMeta: - jest-resolve: - optional: true - dependencies: - jest-resolve: 29.5.0 - dev: true - - /jest-regex-util@29.4.3: - resolution: {integrity: sha512-O4FglZaMmWXbGHSQInfXewIsd1LMn9p3ZXB/6r4FOkyhX2/iP/soMG98jGvk/A3HAN78+5VWcBGO0BJAPRh4kg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dev: true - - /jest-resolve-dependencies@29.5.0: - resolution: {integrity: sha512-sjV3GFr0hDJMBpYeUuGduP+YeCRbd7S/ck6IvL3kQ9cpySYKqcqhdLLC2rFwrcL7tz5vYibomBrsFYWkIGGjOg==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - jest-regex-util: 29.4.3 - jest-snapshot: 29.5.0 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-resolve@29.5.0: - resolution: {integrity: sha512-1TzxJ37FQq7J10jPtQjcc+MkCkE3GBpBecsSUWJ0qZNJpmg6m0D9/7II03yJulm3H/fvVjgqLh/k2eYg+ui52w==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - chalk: 4.1.2 - graceful-fs: 4.2.11 - jest-haste-map: 29.5.0 - jest-pnp-resolver: 1.2.3(jest-resolve@29.5.0) - jest-util: 29.5.0 - jest-validate: 29.5.0 - resolve: 1.22.1 - resolve.exports: 2.0.2 - slash: 3.0.0 - dev: true - - /jest-runner@29.5.0: - resolution: {integrity: sha512-m7b6ypERhFghJsslMLhydaXBiLf7+jXy8FwGRHO3BGV1mcQpPbwiqiKUR2zU2NJuNeMenJmlFZCsIqzJCTeGLQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/console': 29.5.0 - '@jest/environment': 29.5.0 - '@jest/test-result': 29.5.0 - '@jest/transform': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - chalk: 4.1.2 - emittery: 0.13.1 - graceful-fs: 4.2.11 - jest-docblock: 29.4.3 - jest-environment-node: 29.5.0 - jest-haste-map: 29.5.0 - jest-leak-detector: 29.5.0 - jest-message-util: 29.5.0 - jest-resolve: 29.5.0 - jest-runtime: 29.5.0 - jest-util: 29.5.0 - jest-watcher: 29.5.0 - jest-worker: 29.5.0 - p-limit: 3.1.0 - source-map-support: 0.5.13 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-runtime@29.5.0: - resolution: {integrity: 
sha512-1Hr6Hh7bAgXQP+pln3homOiEZtCDZFqwmle7Ew2j8OlbkIu6uE3Y/etJQG8MLQs3Zy90xrp2C0BRrtPHG4zryw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/environment': 29.5.0 - '@jest/fake-timers': 29.5.0 - '@jest/globals': 29.5.0 - '@jest/source-map': 29.4.3 - '@jest/test-result': 29.5.0 - '@jest/transform': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - chalk: 4.1.2 - cjs-module-lexer: 1.2.2 - collect-v8-coverage: 1.0.1 - glob: 7.2.3 - graceful-fs: 4.2.11 - jest-haste-map: 29.5.0 - jest-message-util: 29.5.0 - jest-mock: 29.5.0 - jest-regex-util: 29.4.3 - jest-resolve: 29.5.0 - jest-snapshot: 29.5.0 - jest-util: 29.5.0 - slash: 3.0.0 - strip-bom: 4.0.0 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-snapshot@29.5.0: - resolution: {integrity: sha512-x7Wolra5V0tt3wRs3/ts3S6ciSQVypgGQlJpz2rsdQYoUKxMxPNaoHMGJN6qAuPJqS+2iQ1ZUn5kl7HCyls84g==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@babel/core': 7.21.4 - '@babel/generator': 7.21.4 - '@babel/plugin-syntax-jsx': 7.21.4(@babel/core@7.21.4) - '@babel/plugin-syntax-typescript': 7.21.4(@babel/core@7.21.4) - '@babel/traverse': 7.21.4 - '@babel/types': 7.21.4 - '@jest/expect-utils': 29.5.0 - '@jest/transform': 29.5.0 - '@jest/types': 29.5.0 - '@types/babel__traverse': 7.18.3 - '@types/prettier': 2.7.2 - babel-preset-current-node-syntax: 1.0.1(@babel/core@7.21.4) - chalk: 4.1.2 - expect: 29.5.0 - graceful-fs: 4.2.11 - jest-diff: 29.5.0 - jest-get-type: 29.4.3 - jest-matcher-utils: 29.5.0 - jest-message-util: 29.5.0 - jest-util: 29.5.0 - natural-compare: 1.4.0 - pretty-format: 29.5.0 - semver: 7.3.8 - transitivePeerDependencies: - - supports-color - dev: true - - /jest-util@29.5.0: - resolution: {integrity: sha512-RYMgG/MTadOr5t8KdhejfvUU82MxsCu5MF6KuDUHl+NuwzUt+Sm6jJWxTJVrDR1j5M/gJVCPKQEpWXY+yIQ6lQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - chalk: 4.1.2 - ci-info: 3.8.0 - graceful-fs: 4.2.11 - picomatch: 2.3.1 - dev: true - - /jest-validate@29.5.0: - resolution: {integrity: sha512-pC26etNIi+y3HV8A+tUGr/lph9B18GnzSRAkPaaZJIE1eFdiYm6/CewuiJQ8/RlfHd1u/8Ioi8/sJ+CmbA+zAQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/types': 29.5.0 - camelcase: 6.3.0 - chalk: 4.1.2 - jest-get-type: 29.4.3 - leven: 3.1.0 - pretty-format: 29.5.0 - dev: true - - /jest-watcher@29.5.0: - resolution: {integrity: sha512-KmTojKcapuqYrKDpRwfqcQ3zjMlwu27SYext9pt4GlF5FUgB+7XE1mcCnSm6a4uUpFyQIkb6ZhzZvHl+jiBCiA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/test-result': 29.5.0 - '@jest/types': 29.5.0 - '@types/node': 18.15.11 - ansi-escapes: 4.3.2 - chalk: 4.1.2 - emittery: 0.13.1 - jest-util: 29.5.0 - string-length: 4.0.2 - dev: true - - /jest-worker@29.5.0: - resolution: {integrity: sha512-NcrQnevGoSp4b5kg+akIpthoAFHxPBcb5P6mYPY0fUNT+sSvmtu6jlkEle3anczUKIKEbMxFimk9oTP/tpIPgA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@types/node': 18.15.11 - jest-util: 29.5.0 - merge-stream: 2.0.0 - supports-color: 8.1.1 - dev: true - - /jest@29.5.0(@types/node@18.15.11)(ts-node@10.9.1): - resolution: {integrity: sha512-juMg3he2uru1QoXX078zTa7pO85QyB9xajZc6bU+d9yEGwrKX6+vGmJQ3UdVZsvTEUARIdObzH68QItim6OSSQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 - peerDependenciesMeta: - node-notifier: - optional: true - dependencies: - '@jest/core': 
29.5.0(ts-node@10.9.1) - '@jest/types': 29.5.0 - import-local: 3.1.0 - jest-cli: 29.5.0(@types/node@18.15.11)(ts-node@10.9.1) - transitivePeerDependencies: - - '@types/node' - - supports-color - - ts-node - dev: true - - /joycon@3.1.1: - resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} - engines: {node: '>=10'} - dev: true - - /js-tokens@4.0.0: - resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - dev: true - - /js-yaml@3.14.1: - resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} - hasBin: true - dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - dev: true - - /jsesc@2.5.2: - resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} - engines: {node: '>=4'} - hasBin: true - dev: true - - /json-buffer@3.0.1: - resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} - dev: true - - /json-parse-even-better-errors@2.3.1: - resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - dev: true - - /json-schema-traverse@1.0.0: - resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} - dev: false - - /json5@2.2.3: - resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} - engines: {node: '>=6'} - hasBin: true - dev: true - - /jsonwebtoken@9.0.0: - resolution: {integrity: sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==} - engines: {node: '>=12', npm: '>=6'} - dependencies: - jws: 3.2.2 - lodash: 4.17.21 - ms: 2.1.2 - semver: 7.3.8 - dev: false - - /jwa@1.4.1: - resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==} - dependencies: - buffer-equal-constant-time: 1.0.1 - ecdsa-sig-formatter: 1.0.11 - safe-buffer: 5.2.1 - dev: false - - /jws@3.2.2: - resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} - dependencies: - jwa: 1.4.1 - safe-buffer: 5.2.1 - dev: false - - /keygrip@1.1.0: - resolution: {integrity: sha512-iYSchDJ+liQ8iwbSI2QqsQOvqv58eJCEanyJPJi+Khyu8smkcKSFUCbPwzFcL7YVtZ6eONjqRX/38caJ7QjRAQ==} - engines: {node: '>= 0.6'} - dependencies: - tsscmp: 1.0.6 - dev: false - - /keyv@4.5.2: - resolution: {integrity: sha512-5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g==} - dependencies: - json-buffer: 3.0.1 - dev: true - - /kleur@3.0.3: - resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} - engines: {node: '>=6'} - dev: true - - /koa-bodyparser@4.4.0: - resolution: {integrity: sha512-AXPY7wwKZUmbgb8VkTEUFoRNOlx6aWRJwEnQD+zfNf33/7KSAkN4Oo9BqlIk80D+5TvuqlhpQT5dPVcyxl5Zsw==} - engines: {node: '>=8.0.0'} - dependencies: - co-body: 6.1.0 - copy-to: 2.0.1 - dev: false - - /koa-compose@4.1.0: - resolution: {integrity: sha512-8ODW8TrDuMYvXRwra/Kh7/rJo9BtOfPc6qO8eAfC80CnCvSjSl0bkRM24X6/XBBEyj0v1nRUQ1LyOy3dbqOWXw==} - dev: false - - /koa-convert@2.0.0: - resolution: {integrity: sha512-asOvN6bFlSnxewce2e/DK3p4tltyfC4VM7ZwuTuepI7dEQVcvpyFuBcEARu1+Hxg8DIwytce2n7jrZtRlPrARA==} - engines: {node: '>= 
10'} - dependencies: - co: 4.6.0 - koa-compose: 4.1.0 - dev: false - - /koa-jwt@4.0.4: - resolution: {integrity: sha512-Tid9BQfpVtUG/8YZV38a+hDKll0pfVhfl7A/2cNaYThS1cxMFXylZzfARqHQqvNhHy9qM+qkxd4/z6EaIV4SAQ==} - engines: {node: '>= 8'} - dependencies: - jsonwebtoken: 9.0.0 - koa-unless: 1.0.7 - p-any: 2.1.0 - dev: false - - /koa-pino-logger@4.0.0: - resolution: {integrity: sha512-YI/LB9ajyLPpjvf6e+7Ewmn+OQkCJpu/Y9eI1n7fnipu5Y1NchuNlke0mqh3/2z+5oDYr7pijjOWruEDIfua2A==} - dependencies: - pino-http: 6.6.0 - dev: false - - /koa-router@12.0.0: - resolution: {integrity: sha512-zGrdiXygGYW8WvrzeGsHZvKnHs4DzyGoqJ9a8iHlRkiwuEAOAPyI27//OlhoWdgFAEIM3qbUgr0KCuRaP/TCag==} - engines: {node: '>= 12'} - dependencies: - http-errors: 2.0.0 - koa-compose: 4.1.0 - methods: 1.1.2 - path-to-regexp: 6.2.1 - dev: false - - /koa-unless@1.0.7: - resolution: {integrity: sha512-NKiz+nk4KxSJFskiJMuJvxeA41Lcnx3d8Zy+8QETgifm4ab4aOeGD3RgR6bIz0FGNWwo3Fz0DtnK77mEIqHWxA==} - dev: false - - /koa@2.14.1: - resolution: {integrity: sha512-USJFyZgi2l0wDgqkfD27gL4YGno7TfUkcmOe6UOLFOVuN+J7FwnNu4Dydl4CUQzraM1lBAiGed0M9OVJoT0Kqw==} - engines: {node: ^4.8.4 || ^6.10.1 || ^7.10.1 || >= 8.1.4} - dependencies: - accepts: 1.3.8 - cache-content-type: 1.0.1 - content-disposition: 0.5.4 - content-type: 1.0.5 - cookies: 0.8.0 - debug: 4.3.4 - delegates: 1.0.0 - depd: 2.0.0 - destroy: 1.2.0 - encodeurl: 1.0.2 - escape-html: 1.0.3 - fresh: 0.5.2 - http-assert: 1.5.0 - http-errors: 1.8.1 - is-generator-function: 1.0.10 - koa-compose: 4.1.0 - koa-convert: 2.0.0 - on-finished: 2.4.1 - only: 0.0.2 - parseurl: 1.3.3 - statuses: 1.5.0 - type-is: 1.6.18 - vary: 1.1.2 - transitivePeerDependencies: - - supports-color - dev: false - - /leven@3.1.0: - resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} - engines: {node: '>=6'} - dev: true - - /lines-and-columns@1.2.4: - resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - dev: true - - /locate-path@5.0.0: - resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} - engines: {node: '>=8'} - dependencies: - p-locate: 4.1.0 - dev: true - - /lodash.camelcase@4.3.0: - resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} - dev: false - - /lodash.memoize@4.1.2: - resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} - dev: true - - /lodash.merge@4.6.2: - resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} - dev: false - - /lodash@4.17.21: - resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} - dev: false - - /long@2.4.0: - resolution: {integrity: sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ==} - engines: {node: '>=0.6'} - dev: false - - /long@4.0.0: - resolution: {integrity: sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==} - dev: false - - /long@5.2.1: - resolution: {integrity: sha512-GKSNGeNAtw8IryjjkhZxuKB3JzlcLTwjtiQCHKvqQet81I93kXslhDQruGI/QsddO83mcDToBVy7GqGS/zYf/A==} - dev: false - - /lowercase-keys@2.0.0: - resolution: {integrity: sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==} - engines: 
{node: '>=8'} - dev: true - - /lru-cache@4.1.5: - resolution: {integrity: sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==} - dependencies: - pseudomap: 1.0.2 - yallist: 2.1.2 - dev: true - - /lru-cache@5.1.1: - resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} - dependencies: - yallist: 3.1.1 - dev: true - - /lru-cache@6.0.0: - resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} - engines: {node: '>=10'} - dependencies: - yallist: 4.0.0 - - /make-dir@3.1.0: - resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} - engines: {node: '>=8'} - dependencies: - semver: 6.3.0 - dev: true - - /make-error@1.3.6: - resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - dev: true - - /makeerror@1.0.12: - resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} - dependencies: - tmpl: 1.0.5 - dev: true - - /media-typer@0.3.0: - resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} - engines: {node: '>= 0.6'} - dev: false - - /merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true - - /merge2@1.4.1: - resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} - engines: {node: '>= 8'} - dev: true - - /methods@1.1.2: - resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} - engines: {node: '>= 0.6'} - dev: false - - /micromatch@4.0.5: - resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} - engines: {node: '>=8.6'} - dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - dev: true - - /mime-db@1.52.0: - resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} - engines: {node: '>= 0.6'} - - /mime-types@2.1.35: - resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.52.0 - dev: false - - /mimic-fn@2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} - dev: true - - /mimic-response@1.0.1: - resolution: {integrity: sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==} - engines: {node: '>=4'} - dev: true - - /mimic-response@3.1.0: - resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} - engines: {node: '>=10'} - dev: true - - /minimatch@3.1.2: - resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} - dependencies: - brace-expansion: 1.1.11 - dev: true - - /minimatch@5.1.6: - resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} - engines: {node: '>=10'} - dependencies: - brace-expansion: 2.0.1 - dev: true - - /minimist@1.2.8: - resolution: 
{integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - - /module-details-from-path@1.0.3: - resolution: {integrity: sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A==} - dev: false - - /ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - - /natural-compare@1.4.0: - resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} - dev: true - - /negotiator@0.6.3: - resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} - engines: {node: '>= 0.6'} - dev: false - - /node-int64@0.4.0: - resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} - - /node-releases@2.0.10: - resolution: {integrity: sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==} - dev: true - - /nodemon@2.0.22: - resolution: {integrity: sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==} - engines: {node: '>=8.10.0'} - hasBin: true - dependencies: - chokidar: 3.5.3 - debug: 3.2.7(supports-color@5.5.0) - ignore-by-default: 1.0.1 - minimatch: 3.1.2 - pstree.remy: 1.1.8 - semver: 5.7.1 - simple-update-notifier: 1.1.0 - supports-color: 5.5.0 - touch: 3.1.0 - undefsafe: 2.0.5 - dev: true - - /nopt@1.0.10: - resolution: {integrity: sha512-NWmpvLSqUrgrAC9HCuxEvb+PSloHpqVu+FqcO4eeF2h5qYRhA7ev6KvelyQAKtegUbC6RypJnlEOhd8vloNKYg==} - hasBin: true - dependencies: - abbrev: 1.1.1 - dev: true - - /normalize-path@3.0.0: - resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} - engines: {node: '>=0.10.0'} - dev: true - - /normalize-url@6.1.0: - resolution: {integrity: sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==} - engines: {node: '>=10'} - dev: true - - /npm-run-path@2.0.2: - resolution: {integrity: sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==} - engines: {node: '>=4'} - dependencies: - path-key: 2.0.1 - dev: true - - /npm-run-path@4.0.1: - resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} - engines: {node: '>=8'} - dependencies: - path-key: 3.1.1 - dev: true - - /object-inspect@1.12.3: - resolution: {integrity: sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==} - dev: false - - /on-exit-leak-free@0.2.0: - resolution: {integrity: sha512-dqaz3u44QbRXQooZLTUKU41ZrzYrcvLISVgbrzbyCMxpmSLJvZ3ZamIJIZ29P6OhZIkNIQKosdeM6t1LYbA9hg==} - dev: false - - /on-exit-leak-free@2.1.0: - resolution: {integrity: sha512-VuCaZZAjReZ3vUwgOB8LxAosIurDiAW0s13rI1YwmaP++jvcxP77AWoQvenZebpCA2m8WC1/EosPYPMjnRAp/w==} - dev: true - - /on-finished@2.4.1: - resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} - engines: {node: '>= 0.8'} - dependencies: - ee-first: 1.1.1 - dev: false - - /once@1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - dependencies: - wrappy: 1.0.2 - - /onetime@5.1.2: - resolution: {integrity: 
sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} - dependencies: - mimic-fn: 2.1.0 - dev: true - - /only@0.0.2: - resolution: {integrity: sha512-Fvw+Jemq5fjjyWz6CpKx6w9s7xxqo3+JCyM0WXWeCSOboZ8ABkyvP8ID4CZuChA/wxSx+XSJmdOm8rGVyJ1hdQ==} - dev: false - - /opentracing@0.14.7: - resolution: {integrity: sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==} - engines: {node: '>=0.10'} - dev: false - - /os-filter-obj@2.0.0: - resolution: {integrity: sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==} - engines: {node: '>=4'} - dependencies: - arch: 2.2.0 - dev: true - - /p-any@2.1.0: - resolution: {integrity: sha512-JAERcaMBLYKMq+voYw36+x5Dgh47+/o7yuv2oQYuSSUml4YeqJEFznBrY2UeEkoSHqBua6hz518n/PsowTYLLg==} - engines: {node: '>=8'} - dependencies: - p-cancelable: 2.1.1 - p-some: 4.1.0 - type-fest: 0.3.1 - dev: false - - /p-cancelable@2.1.1: - resolution: {integrity: sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==} - engines: {node: '>=8'} - - /p-finally@1.0.0: - resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} - engines: {node: '>=4'} - dev: true - - /p-limit@2.3.0: - resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} - engines: {node: '>=6'} - dependencies: - p-try: 2.2.0 - dev: true - - /p-limit@3.1.0: - resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} - engines: {node: '>=10'} - dependencies: - yocto-queue: 0.1.0 - dev: true - - /p-locate@4.1.0: - resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} - engines: {node: '>=8'} - dependencies: - p-limit: 2.3.0 - dev: true - - /p-some@4.1.0: - resolution: {integrity: sha512-MF/HIbq6GeBqTrTIl5OJubzkGU+qfFhAFi0gnTAK6rgEIJIknEiABHOTtQu4e6JiXjIwuMPMUFQzyHh5QjCl1g==} - engines: {node: '>=8'} - dependencies: - aggregate-error: 3.1.0 - p-cancelable: 2.1.1 - dev: false - - /p-try@2.2.0: - resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} - engines: {node: '>=6'} - dev: true - - /packet-reader@1.0.0: - resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} - dev: false - - /parse-json@5.2.0: - resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} - engines: {node: '>=8'} - dependencies: - '@babel/code-frame': 7.21.4 - error-ex: 1.3.2 - json-parse-even-better-errors: 2.3.1 - lines-and-columns: 1.2.4 - dev: true - - /parseurl@1.3.3: - resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} - engines: {node: '>= 0.8'} - dev: false - - /path-exists@4.0.0: - resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} - engines: {node: '>=8'} - dev: true - - /path-is-absolute@1.0.1: - resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} - engines: {node: '>=0.10.0'} - dev: true - - /path-key@2.0.1: - resolution: {integrity: 
sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==} - engines: {node: '>=4'} - dev: true - - /path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - dev: true - - /path-parse@1.0.7: - resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - - /path-to-regexp@6.2.1: - resolution: {integrity: sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==} - dev: false - - /peek-readable@5.0.0: - resolution: {integrity: sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A==} - engines: {node: '>=14.16'} - dev: true - - /pg-connection-string@2.5.0: - resolution: {integrity: sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==} - dev: false - - /pg-int8@1.0.1: - resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} - engines: {node: '>=4.0.0'} - - /pg-pool@3.6.0(pg@8.10.0): - resolution: {integrity: sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ==} - peerDependencies: - pg: '>=8.0' - dependencies: - pg: 8.10.0 - dev: false - - /pg-protocol@1.6.0: - resolution: {integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==} - - /pg-types@2.2.0: - resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} - engines: {node: '>=4'} - dependencies: - pg-int8: 1.0.1 - postgres-array: 2.0.0 - postgres-bytea: 1.0.0 - postgres-date: 1.0.7 - postgres-interval: 1.2.0 - - /pg@8.10.0: - resolution: {integrity: sha512-ke7o7qSTMb47iwzOSaZMfeR7xToFdkE71ifIipOAAaLIM0DYzfOAXlgFFmYUIE2BcJtvnVlGCID84ZzCegE8CQ==} - engines: {node: '>= 8.0.0'} - peerDependencies: - pg-native: '>=3.0.1' - peerDependenciesMeta: - pg-native: - optional: true - dependencies: - buffer-writer: 2.0.0 - packet-reader: 1.0.0 - pg-connection-string: 2.5.0 - pg-pool: 3.6.0(pg@8.10.0) - pg-protocol: 1.6.0 - pg-types: 2.2.0 - pgpass: 1.0.5 - dev: false - - /pgpass@1.0.5: - resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} - dependencies: - split2: 4.2.0 - dev: false - - /picocolors@1.0.0: - resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - dev: true - - /picomatch@2.3.1: - resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} - engines: {node: '>=8.6'} - dev: true - - /pify@2.3.0: - resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==} - engines: {node: '>=0.10.0'} - dev: true - - /pino-abstract-transport@0.5.0: - resolution: {integrity: sha512-+KAgmVeqXYbTtU2FScx1XS3kNyfZ5TrXY07V96QnUSFqo2gAqlvmaxH67Lj7SWazqsMabf+58ctdTcBgnOLUOQ==} - dependencies: - duplexify: 4.1.2 - split2: 4.2.0 - dev: false - - /pino-abstract-transport@1.0.0: - resolution: {integrity: sha512-c7vo5OpW4wIS42hUVcT5REsL8ZljsUfBjqV/e2sFxmFEFZiq1XLUp5EYLtuDH6PEHq9W1egWqRbnLUP5FuZmOA==} - dependencies: - readable-stream: 4.3.0 - split2: 4.2.0 - dev: true - - /pino-http@6.6.0: - resolution: {integrity: 
sha512-PlItaK2MLpoIMLEcClhfb1VQk/o6fKppINl5s6sPE/4rvufkdO3kCSs/92EwrBsB1yssRCQqDV+w1xpYuPVnjg==} - dependencies: - fast-url-parser: 1.1.3 - get-caller-file: 2.0.5 - pino: 7.11.0 - pino-std-serializers: 5.6.0 - dev: false - - /pino-pretty@10.0.0: - resolution: {integrity: sha512-zKFjYXBzLaLTEAN1ayKpHXtL5UeRQC7R3lvhKe7fWs7hIVEjKGG/qIXwQt9HmeUp71ogUd/YcW+LmMwRp4KT6Q==} - hasBin: true - dependencies: - colorette: 2.0.19 - dateformat: 4.6.3 - fast-copy: 3.0.1 - fast-safe-stringify: 2.1.1 - help-me: 4.2.0 - joycon: 3.1.1 - minimist: 1.2.8 - on-exit-leak-free: 2.1.0 - pino-abstract-transport: 1.0.0 - pump: 3.0.0 - readable-stream: 4.3.0 - secure-json-parse: 2.7.0 - sonic-boom: 3.3.0 - strip-json-comments: 3.1.1 - dev: true - - /pino-std-serializers@4.0.0: - resolution: {integrity: sha512-cK0pekc1Kjy5w9V2/n+8MkZwusa6EyyxfeQCB799CQRhRt/CqYKiWs5adeu8Shve2ZNffvfC/7J64A2PJo1W/Q==} - dev: false - - /pino-std-serializers@5.6.0: - resolution: {integrity: sha512-VdUXCw8gO+xhir7sFuoYSjTnzB+TMDGxhAC/ph3YS3sdHnXNdsK0wMtADNUltfeGkn2KDxEM21fnjF3RwXyC8A==} - - /pino@7.11.0: - resolution: {integrity: sha512-dMACeu63HtRLmCG8VKdy4cShCPKaYDR4youZqoSWLxl5Gu99HUw8bw75thbPv9Nip+H+QYX8o3ZJbTdVZZ2TVg==} - hasBin: true - dependencies: - atomic-sleep: 1.0.0 - fast-redact: 3.1.2 - on-exit-leak-free: 0.2.0 - pino-abstract-transport: 0.5.0 - pino-std-serializers: 4.0.0 - process-warning: 1.0.0 - quick-format-unescaped: 4.0.4 - real-require: 0.1.0 - safe-stable-stringify: 2.4.3 - sonic-boom: 2.8.0 - thread-stream: 0.15.2 - dev: false - - /pirates@4.0.5: - resolution: {integrity: sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==} - engines: {node: '>= 6'} - dev: true - - /pkg-dir@4.2.0: - resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} - engines: {node: '>=8'} - dependencies: - find-up: 4.1.0 - dev: true - - /postgres-array@2.0.0: - resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} - engines: {node: '>=4'} - - /postgres-bytea@1.0.0: - resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} - engines: {node: '>=0.10.0'} - - /postgres-date@1.0.7: - resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} - engines: {node: '>=0.10.0'} - - /postgres-interval@1.2.0: - resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} - engines: {node: '>=0.10.0'} - dependencies: - xtend: 4.0.2 - - /pretty-format@29.5.0: - resolution: {integrity: sha512-V2mGkI31qdttvTFX7Mt4efOqHXqJWMu4/r66Xh3Z3BwZaPfPJgp6/gbwoujRpPUtfEF6AUUWx3Jim3GCw5g/Qw==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - dependencies: - '@jest/schemas': 29.4.3 - ansi-styles: 5.2.0 - react-is: 18.2.0 - dev: true - - /process-warning@1.0.0: - resolution: {integrity: sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q==} - dev: false - - /process@0.10.1: - resolution: {integrity: sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA==} - engines: {node: '>= 0.6.0'} - dev: false - - /process@0.11.10: - resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} - engines: {node: '>= 0.6.0'} - dev: true - - /prompts@2.4.2: - resolution: {integrity: 
sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} - engines: {node: '>= 6'} - dependencies: - kleur: 3.0.3 - sisteransi: 1.0.5 - dev: true - - /protobufjs@7.2.3: - resolution: {integrity: sha512-TtpvOqwB5Gdz/PQmOjgsrGH1nHjAQVCN7JG4A6r1sXRWESL5rNMAiRcBQlCAdKxZcAbstExQePYG8xof/JVRgg==} - engines: {node: '>=12.0.0'} - requiresBuild: true - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/base64': 1.1.2 - '@protobufjs/codegen': 2.0.4 - '@protobufjs/eventemitter': 1.1.0 - '@protobufjs/fetch': 1.1.0 - '@protobufjs/float': 1.0.2 - '@protobufjs/inquire': 1.1.0 - '@protobufjs/path': 1.1.2 - '@protobufjs/pool': 1.1.0 - '@protobufjs/utf8': 1.1.0 - '@types/node': 18.15.11 - long: 5.2.1 - dev: false - - /pseudomap@1.0.2: - resolution: {integrity: sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==} - dev: true - - /pstree.remy@1.1.8: - resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==} - dev: true - - /pump@3.0.0: - resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} - dependencies: - end-of-stream: 1.4.4 - once: 1.4.0 - dev: true - - /punycode@1.4.1: - resolution: {integrity: sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==} - dev: false - - /punycode@2.3.0: - resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} - engines: {node: '>=6'} - dev: false - - /pure-rand@6.0.1: - resolution: {integrity: sha512-t+x1zEHDjBwkDGY5v5ApnZ/utcd4XYDiJsaQQoptTXgUXX95sDg1elCdJghzicm7n2mbCBJ3uYWr6M22SO19rg==} - dev: true - - /qs@6.11.1: - resolution: {integrity: sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ==} - engines: {node: '>=0.6'} - dependencies: - side-channel: 1.0.4 - dev: false - - /queue-microtask@1.2.3: - resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: true - - /quick-format-unescaped@4.0.4: - resolution: {integrity: sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==} - dev: false - - /quick-lru@5.1.1: - resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} - engines: {node: '>=10'} - dev: true - - /raw-body@2.5.2: - resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==} - engines: {node: '>= 0.8'} - dependencies: - bytes: 3.1.2 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - unpipe: 1.0.0 - dev: false - - /react-is@18.2.0: - resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} - dev: true - - /readable-stream@3.6.2: - resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} - engines: {node: '>= 6'} - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - - /readable-stream@4.3.0: - resolution: {integrity: sha512-MuEnA0lbSi7JS8XM+WNJlWZkHAAdm7gETHdFK//Q/mChGyj2akEFtdLZh32jSdkWGbRwCW9pn6g3LWDdDeZnBQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - abort-controller: 3.0.0 - buffer: 6.0.3 - events: 3.3.0 - process: 0.11.10 - dev: true - - /readable-web-to-node-stream@3.0.2: - 
resolution: {integrity: sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==} - engines: {node: '>=8'} - dependencies: - readable-stream: 3.6.2 - dev: true - - /readdirp@3.6.0: - resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} - engines: {node: '>=8.10.0'} - dependencies: - picomatch: 2.3.1 - dev: true - - /real-require@0.1.0: - resolution: {integrity: sha512-r/H9MzAWtrv8aSVjPCMFpDMl5q66GqtmmRkRjpHTsp4zBAa+snZyiQNlMONiUmEJcsnaw0wCauJ2GWODr/aFkg==} - engines: {node: '>= 12.13.0'} - dev: false - - /require-directory@2.1.1: - resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} - engines: {node: '>=0.10.0'} - - /require-from-string@2.0.2: - resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} - engines: {node: '>=0.10.0'} - dev: false - - /require-in-the-middle@5.2.0: - resolution: {integrity: sha512-efCx3b+0Z69/LGJmm9Yvi4cqEdxnoGnxYxGxBghkkTTFeXRtTCmmhO0AnAfHz59k957uTSuy8WaHqOs8wbYUWg==} - engines: {node: '>=6'} - dependencies: - debug: 4.3.4 - module-details-from-path: 1.0.3 - resolve: 1.22.1 - transitivePeerDependencies: - - supports-color - dev: false - - /require-in-the-middle@6.0.0: - resolution: {integrity: sha512-+dtWQ7l2lqQDxheaG3jjyN1QI37gEwvzACSgjYi4/C2y+ZTUMeRW8BIOm+9NBKvwaMBUSZfPXVOt1skB0vBkRw==} - engines: {node: '>=8.6.0'} - dependencies: - debug: 4.3.4 - module-details-from-path: 1.0.3 - resolve: 1.22.1 - transitivePeerDependencies: - - supports-color - dev: false - - /resolve-alpn@1.2.1: - resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} - dev: true - - /resolve-cwd@3.0.0: - resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} - engines: {node: '>=8'} - dependencies: - resolve-from: 5.0.0 - dev: true - - /resolve-from@5.0.0: - resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} - engines: {node: '>=8'} - dev: true - - /resolve.exports@2.0.2: - resolution: {integrity: sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==} - engines: {node: '>=10'} - dev: true - - /resolve@1.22.1: - resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} - hasBin: true - dependencies: - is-core-module: 2.11.0 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - - /responselike@2.0.1: - resolution: {integrity: sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==} - dependencies: - lowercase-keys: 2.0.0 - dev: true - - /reusify@1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: true - - /run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - dependencies: - queue-microtask: 1.2.3 - dev: true - - /safe-buffer@5.2.1: - resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - - /safe-stable-stringify@2.4.3: - resolution: {integrity: 
sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==} - engines: {node: '>=10'} - dev: false - - /safer-buffer@2.1.2: - resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - dev: false - - /secure-json-parse@2.7.0: - resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} - dev: true - - /semver-regex@4.0.5: - resolution: {integrity: sha512-hunMQrEy1T6Jr2uEVjrAIqjwWcQTgOAcIM52C8MY1EZSD3DDNft04XzvYKPqjED65bNVVko0YI38nYeEHCX3yw==} - engines: {node: '>=12'} - dev: true - - /semver-truncate@2.0.0: - resolution: {integrity: sha512-Rh266MLDYNeML5h90ttdMwfXe1+Nc4LAWd9X1KdJe8pPHP4kFmvLZALtsMNHNdvTyQygbEC0D59sIz47DIaq8w==} - engines: {node: '>=8'} - dependencies: - semver: 6.3.0 - dev: true - - /semver@5.7.1: - resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} - hasBin: true - dev: true - - /semver@6.3.0: - resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} - hasBin: true - dev: true - - /semver@7.0.0: - resolution: {integrity: sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==} - hasBin: true - dev: true - - /semver@7.3.8: - resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==} - engines: {node: '>=10'} - hasBin: true - dependencies: - lru-cache: 6.0.0 - - /setprototypeof@1.2.0: - resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} - dev: false - - /shebang-command@1.2.0: - resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==} - engines: {node: '>=0.10.0'} - dependencies: - shebang-regex: 1.0.0 - dev: true - - /shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} - dependencies: - shebang-regex: 3.0.0 - dev: true - - /shebang-regex@1.0.0: - resolution: {integrity: sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==} - engines: {node: '>=0.10.0'} - dev: true - - /shebang-regex@3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - dev: true - - /shimmer@1.2.1: - resolution: {integrity: sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==} - dev: false - - /side-channel@1.0.4: - resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} - dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.2.0 - object-inspect: 1.12.3 - dev: false - - /signal-exit@3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true - - /simple-update-notifier@1.1.0: - resolution: {integrity: sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==} - engines: {node: '>=8.10.0'} - dependencies: - semver: 7.0.0 - dev: true - - /sisteransi@1.0.5: - resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} - dev: true - - 
/slash@3.0.0: - resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} - engines: {node: '>=8'} - dev: true - - /sonic-boom@2.8.0: - resolution: {integrity: sha512-kuonw1YOYYNOve5iHdSahXPOK49GqwA+LZhI6Wz/l0rP57iKyXXIHaRagOBHAPmGwJC6od2Z9zgvZ5loSgMlVg==} - dependencies: - atomic-sleep: 1.0.0 - - /sonic-boom@3.3.0: - resolution: {integrity: sha512-LYxp34KlZ1a2Jb8ZQgFCK3niIHzibdwtwNUWKg0qQRzsDoJ3Gfgkf8KdBTFU3SkejDEIlWwnSnpVdOZIhFMl/g==} - dependencies: - atomic-sleep: 1.0.0 - dev: true - - /sort-keys-length@1.0.1: - resolution: {integrity: sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==} - engines: {node: '>=0.10.0'} - dependencies: - sort-keys: 1.1.2 - dev: true - - /sort-keys@1.1.2: - resolution: {integrity: sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==} - engines: {node: '>=0.10.0'} - dependencies: - is-plain-obj: 1.1.0 - dev: true - - /source-map-support@0.5.13: - resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} - dependencies: - buffer-from: 1.1.2 - source-map: 0.6.1 - dev: true - - /source-map-support@0.5.21: - resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} - dependencies: - buffer-from: 1.1.2 - source-map: 0.6.1 - dev: true - - /source-map@0.6.1: - resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} - engines: {node: '>=0.10.0'} - dev: true - - /source-map@0.7.4: - resolution: {integrity: sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==} - engines: {node: '>= 8'} - dev: true - - /split2@4.2.0: - resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} - engines: {node: '>= 10.x'} - - /sprintf-js@1.0.3: - resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - dev: true - - /stack-utils@2.0.6: - resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} - engines: {node: '>=10'} - dependencies: - escape-string-regexp: 2.0.0 - dev: true - - /statuses@1.5.0: - resolution: {integrity: sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==} - engines: {node: '>= 0.6'} - dev: false - - /statuses@2.0.1: - resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} - engines: {node: '>= 0.8'} - dev: false - - /stream-shift@1.0.1: - resolution: {integrity: sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==} - dev: false - - /string-length@4.0.2: - resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} - engines: {node: '>=10'} - dependencies: - char-regex: 1.0.2 - strip-ansi: 6.0.1 - dev: true - - /string-template@0.2.1: - resolution: {integrity: sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw==} - dev: false - - /string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - dependencies: - emoji-regex: 8.0.0 - 
is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - - /string_decoder@1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} - dependencies: - safe-buffer: 5.2.1 - - /strip-ansi@6.0.1: - resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} - engines: {node: '>=8'} - dependencies: - ansi-regex: 5.0.1 - - /strip-bom@4.0.0: - resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} - engines: {node: '>=8'} - dev: true - - /strip-eof@1.0.0: - resolution: {integrity: sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==} - engines: {node: '>=0.10.0'} - dev: true - - /strip-final-newline@2.0.0: - resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} - engines: {node: '>=6'} - dev: true - - /strip-json-comments@3.1.1: - resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} - engines: {node: '>=8'} - dev: true - - /strip-outer@2.0.0: - resolution: {integrity: sha512-A21Xsm1XzUkK0qK1ZrytDUvqsQWict2Cykhvi0fBQntGG5JSprESasEyV1EZ/4CiR5WB5KjzLTrP/bO37B0wPg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dev: true - - /strtok3@7.0.0: - resolution: {integrity: sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==} - engines: {node: '>=14.16'} - dependencies: - '@tokenizer/token': 0.3.0 - peek-readable: 5.0.0 - dev: true - - /supports-color@5.5.0: - resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} - engines: {node: '>=4'} - dependencies: - has-flag: 3.0.0 - dev: true - - /supports-color@7.2.0: - resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} - engines: {node: '>=8'} - dependencies: - has-flag: 4.0.0 - dev: true - - /supports-color@8.1.1: - resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} - engines: {node: '>=10'} - dependencies: - has-flag: 4.0.0 - dev: true - - /supports-preserve-symlinks-flag@1.0.0: - resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} - engines: {node: '>= 0.4'} - - /swc-node@1.0.0(@swc/core@1.3.46)(typescript@5.0.3): - resolution: {integrity: sha512-4+kibROq06E7Yj3kEcCRN1Ki2C/5i+5P1B7An9iS9PO1BQY5VzWFuLhV7y7xNxkZjc8FEaLCh97NGzs/XBfOMQ==} - hasBin: true - dependencies: - '@swc-node/register': 1.6.3(@swc/core@1.3.46)(typescript@5.0.3) - transitivePeerDependencies: - - '@swc/core' - - supports-color - - typescript - dev: true - - /test-exclude@6.0.0: - resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} - engines: {node: '>=8'} - dependencies: - '@istanbuljs/schema': 0.1.3 - glob: 7.2.3 - minimatch: 3.1.2 - dev: true - - /thread-stream@0.15.2: - resolution: {integrity: sha512-UkEhKIg2pD+fjkHQKyJO3yoIvAP3N6RlNFt2dUhcS1FGvCD1cQa1M/PGknCLFIyZdtJOWQjejp7bdNqmN7zwdA==} - dependencies: - real-require: 0.1.0 - dev: false - - /thriftrw@3.12.0: - resolution: {integrity: sha512-4YZvR4DPEI41n4Opwr4jmrLGG4hndxr7387kzRFIIzxHQjarPusH4lGXrugvgb7TtPrfZVTpZCVe44/xUxowEw==} - engines: {node: '>= 0.10.x'} - hasBin: true - 
dependencies: - bufrw: 1.3.0 - error: 7.0.2 - long: 2.4.0 - dev: false - - /tmpl@1.0.5: - resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} - dev: true - - /to-fast-properties@2.0.0: - resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} - engines: {node: '>=4'} - dev: true - - /to-regex-range@5.0.1: - resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} - engines: {node: '>=8.0'} - dependencies: - is-number: 7.0.0 - dev: true - - /toidentifier@1.0.1: - resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} - engines: {node: '>=0.6'} - dev: false - - /token-types@5.0.1: - resolution: {integrity: sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==} - engines: {node: '>=14.16'} - dependencies: - '@tokenizer/token': 0.3.0 - ieee754: 1.2.1 - dev: true - - /touch@3.1.0: - resolution: {integrity: sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==} - hasBin: true - dependencies: - nopt: 1.0.10 - dev: true - - /trim-repeated@2.0.0: - resolution: {integrity: sha512-QUHBFTJGdOwmp0tbOG505xAgOp/YliZP/6UgafFXYZ26WT1bvQmSMJUvkeVSASuJJHbqsFbynTvkd5W8RBTipg==} - engines: {node: '>=12'} - dependencies: - escape-string-regexp: 5.0.0 - dev: true - - /ts-jest@29.1.0(@babel/core@7.21.4)(jest@29.5.0)(typescript@5.0.3): - resolution: {integrity: sha512-ZhNr7Z4PcYa+JjMl62ir+zPiNJfXJN6E8hSLnaUKhOgqcn8vb3e537cpkd0FuAfRK3sR1LSqM1MOhliXNgOFPA==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true - peerDependencies: - '@babel/core': '>=7.0.0-beta.0 <8' - '@jest/types': ^29.0.0 - babel-jest: ^29.0.0 - esbuild: '*' - jest: ^29.0.0 - typescript: '>=4.3 <6' - peerDependenciesMeta: - '@babel/core': - optional: true - '@jest/types': - optional: true - babel-jest: - optional: true - esbuild: - optional: true - dependencies: - '@babel/core': 7.21.4 - bs-logger: 0.2.6 - fast-json-stable-stringify: 2.1.0 - jest: 29.5.0(@types/node@18.15.11)(ts-node@10.9.1) - jest-util: 29.5.0 - json5: 2.2.3 - lodash.memoize: 4.1.2 - make-error: 1.3.6 - semver: 7.3.8 - typescript: 5.0.3 - yargs-parser: 21.1.1 - dev: true - - /ts-node@10.9.1(@swc/core@1.3.46)(@types/node@18.15.11)(typescript@5.0.3): - resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} - hasBin: true - peerDependencies: - '@swc/core': '>=1.2.50' - '@swc/wasm': '>=1.2.50' - '@types/node': '*' - typescript: '>=2.7' - peerDependenciesMeta: - '@swc/core': - optional: true - '@swc/wasm': - optional: true - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@swc/core': 1.3.46(@swc/helpers@0.5.0) - '@tsconfig/node10': 1.0.9 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.3 - '@types/node': 18.15.11 - acorn: 8.8.2 - acorn-walk: 8.2.0 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.0.3 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - dev: true - - /tslib@2.5.0: - resolution: {integrity: sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==} - dev: true - - /tsscmp@1.0.6: - resolution: {integrity: sha512-LxhtAkPDTkVCMQjt2h6eBVY28KCjikZqZfMcC15YBeNjkgUpdCfBu5HoiOTDu86v6smE8yOjyEktJ8hlbANHQA==} - engines: {node: '>=0.6.x'} - dev: false - 
- /type-detect@4.0.8: - resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} - engines: {node: '>=4'} - dev: true - - /type-fest@0.21.3: - resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} - engines: {node: '>=10'} - dev: true - - /type-fest@0.3.1: - resolution: {integrity: sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==} - engines: {node: '>=6'} - dev: false - - /type-is@1.6.18: - resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} - engines: {node: '>= 0.6'} - dependencies: - media-typer: 0.3.0 - mime-types: 2.1.35 - dev: false - - /typescript@5.0.3: - resolution: {integrity: sha512-xv8mOEDnigb/tN9PSMTwSEqAnUvkoXMQlicOb0IUVDBSQCgBSaAAROUZYy2IcUy5qU6XajK5jjjO7TMWqBTKZA==} - engines: {node: '>=12.20'} - hasBin: true - dev: true - - /undefsafe@2.0.5: - resolution: {integrity: sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==} - dev: true - - /unpipe@1.0.0: - resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} - engines: {node: '>= 0.8'} - dev: false - - /update-browserslist-db@1.0.10(browserslist@4.21.5): - resolution: {integrity: sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' - dependencies: - browserslist: 4.21.5 - escalade: 3.1.1 - picocolors: 1.0.0 - dev: true - - /uri-js@4.4.1: - resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - dependencies: - punycode: 2.3.0 - dev: false - - /util-deprecate@1.0.2: - resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - - /uuid@8.3.2: - resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} - hasBin: true - dev: false - - /v8-compile-cache-lib@3.0.1: - resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - dev: true - - /v8-to-istanbul@9.1.0: - resolution: {integrity: sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA==} - engines: {node: '>=10.12.0'} - dependencies: - '@jridgewell/trace-mapping': 0.3.17 - '@types/istanbul-lib-coverage': 2.0.4 - convert-source-map: 1.9.0 - dev: true - - /vary@1.1.2: - resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} - engines: {node: '>= 0.8'} - dev: false - - /walker@1.0.8: - resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} - dependencies: - makeerror: 1.0.12 - dev: true - - /which@1.3.1: - resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} - hasBin: true - dependencies: - isexe: 2.0.0 - dev: true - - /which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true - dependencies: - isexe: 2.0.0 - dev: true - - /wrap-ansi@7.0.0: - resolution: {integrity: 
sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} - engines: {node: '>=10'} - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - /wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - - /write-file-atomic@4.0.2: - resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - imurmurhash: 0.1.4 - signal-exit: 3.0.7 - dev: true - - /xorshift@1.2.0: - resolution: {integrity: sha512-iYgNnGyeeJ4t6U11NpA/QiKy+PXn5Aa3Azg5qkwIFz1tBLllQrjjsk9yzD7IAK0naNU4JxdeDgqW9ov4u/hc4g==} - dev: false - - /xtend@4.0.2: - resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} - engines: {node: '>=0.4'} - - /y18n@5.0.8: - resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} - engines: {node: '>=10'} - - /yallist@2.1.2: - resolution: {integrity: sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==} - dev: true - - /yallist@3.1.1: - resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} - dev: true - - /yallist@4.0.0: - resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} - - /yargs-parser@20.2.9: - resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} - engines: {node: '>=10'} - dev: false - - /yargs-parser@21.1.1: - resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} - engines: {node: '>=12'} - dev: true - - /yargs@16.2.0: - resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} - engines: {node: '>=10'} - dependencies: - cliui: 7.0.4 - escalade: 3.1.1 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 20.2.9 - dev: false - - /yargs@17.7.1: - resolution: {integrity: sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==} - engines: {node: '>=12'} - dependencies: - cliui: 8.0.1 - escalade: 3.1.1 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 21.1.1 - dev: true - - /ylru@1.3.2: - resolution: {integrity: sha512-RXRJzMiK6U2ye0BlGGZnmpwJDPgakn6aNQ0A7gHRbD4I0uvK4TW6UqkK1V0pp9jskjJBAXd3dRrbzWkqJ+6cxA==} - engines: {node: '>= 4.0.0'} - dev: false - - /yn@3.1.1: - resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} - engines: {node: '>=6'} - dev: true - - /yocto-queue@0.1.0: - resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} - engines: {node: '>=10'} - dev: true diff --git a/cdp/src/destination-types/handlers.ts b/cdp/src/destination-types/handlers.ts deleted file mode 100644 index 57770f16a182e..0000000000000 --- a/cdp/src/destination-types/handlers.ts +++ /dev/null @@ -1,42 +0,0 @@ -// This file is responsible for handling the destination types API. It's a -// simple API that returns a list of all the available destination types. 
-// -// The destination types are defined in code for now, but it's possible that we -// will want to move them to the database in the future to allow dynamic -// addition of new destination types. - -import Koa from 'koa' - -type DestinationType = { - type: string - name: string - description: string - configSchema: Record // A JSONSchema describing the configuration -} - -const destinationTypes: { [type: string]: DestinationType } = { - webhook: { - type: 'webhook', - name: 'Webhook', - description: 'Send events to a webhook', - configSchema: { - type: 'object', - properties: { - url: { - type: 'string', - description: 'The URL to send the webhook to', - }, - }, - required: ['url'], - }, - }, -} - -export const listDestinationTypes = async (): Promise => { - return Object.values(destinationTypes) -} - -export const listDestinationTypesHandler = async (ctx: Koa.Context): Promise => { - ctx.status = 200 - ctx.body = await listDestinationTypes() -} diff --git a/cdp/src/destinations/handlers.ts b/cdp/src/destinations/handlers.ts deleted file mode 100644 index f967c0d949d17..0000000000000 --- a/cdp/src/destinations/handlers.ts +++ /dev/null @@ -1,245 +0,0 @@ -/* - * - * This file is responsible for handling the destination API. It provides - * handlers for creating, updating, and deleting destinations, as well as - * listing destinations. - * - * Note that we do not delete destinations, but instead mark them as deleted. This - * is to ensure that we can keep a history of destinations that have been used - * in the past. - * - */ - -import { randomUUID } from 'crypto' -import Koa from 'koa' -import pg from 'pg' -import Ajv, { JSONSchemaType } from 'ajv' -import { SQL } from '../sql-template-string' -import { listDestinationTypes } from '../destination-types/handlers' - -type DestinationData = { - name: string // Name displayed to the user - description: string // Description displayed to the user - type: string // Type of destination, e.g. webhook, email, Stripe etc. - config: Record // Configuration for the destination, e.g. webhook URL, email address, Stripe API key etc. -} - -type DestinationCreateRequest = DestinationData - -const ajv = new Ajv() - -const createDestinationRequestSchema: JSONSchemaType = { - type: 'object', - properties: { - name: { - type: 'string', - description: 'Name displayed to the user', - }, - description: { - type: 'string', - description: 'Description displayed to the user', - }, - type: { - type: 'string', - description: 'Type of destination, e.g. webhook, email, Stripe etc.', - }, - config: { - type: 'object', - description: 'Configuration for the destination, e.g. 
webhook URL, email address, Stripe API key etc.', - }, - }, - required: ['name', 'description', 'type', 'config'], -} - -const createDestinationRequestValidator = ajv.compile(createDestinationRequestSchema) - -export const createDestinationHandler = - (database: pg.Client) => - async (ctx: Koa.Context): Promise => { - const destination = ctx.request.body - // Validate the request body using Ajv - const requestValid = createDestinationRequestValidator(destination) - if (!requestValid) { - ctx.status = 400 - ctx.body = createDestinationRequestValidator.errors - return - } - - // Validate the config against the destination type schema - const config = destination.config - const destinationType = (await listDestinationTypes()).find( - (destinationType) => destinationType.type === destination.type - ) - // If the destination type doesn't exist, return a 400 - if (!destinationType) { - ctx.status = 400 - return - } - - // If the config doesn't match the schema, return a 400. We use AJV to - // perform validation. - const typeValidator = ajv.compile(destinationType.configSchema) - const typeValid = typeValidator(config) - if (!typeValid) { - ctx.status = 400 - ctx.body = typeValidator.errors - return - } - - const id = randomUUID() - const result = await database.query( - SQL` - INSERT INTO destinations ( - id, - team_id, - name, - description, - type, - config, - created_by_id - ) VALUES ( - ${id}, - ${ctx.params.projectId}, - ${destination.name}, - ${destination.description}, - ${destination.type}, - ${destination.config}, - ${ctx.state.jwtData.userId} - ) RETURNING * - ` - ) - - ctx.status = 201 - ctx.body = result.rows[0] - } - -export const getDestinationHandler = - (database: pg.Client) => - async (ctx: Koa.Context): Promise => { - const id = ctx.params.destinationId - // Validate id is a uuid - if (!id.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i)) { - ctx.status = 400 - return - } - - const result = await database.query( - SQL` - SELECT * - FROM destinations - WHERE - id = ${id} AND - team_id = ${ctx.params.projectId} AND - is_deleted = false - ` - ) - - if (result.rowCount === 0) { - ctx.status = 404 - return - } - - ctx.status = 200 - ctx.body = result.rows[0] - } - -type DestinationUpdateRequest = DestinationData - -const updateDestinationRequestSchema: JSONSchemaType = createDestinationRequestSchema - -const updateDestinationRequestValidator = ajv.compile(updateDestinationRequestSchema) - -export const updateDestinationHandler = - (database: pg.Client) => - async (ctx: Koa.Context): Promise => { - const destination = ctx.request.body - // Validate the request body using Ajv - const requestValid = updateDestinationRequestValidator(destination) - if (!requestValid) { - ctx.status = 400 - ctx.body = updateDestinationRequestValidator.errors - return - } - - const id = ctx.params.destinationId - // Validate id is a uuid - if (!id.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i)) { - ctx.status = 400 - return - } - - // Validate the config against the destination type schema - const config = destination.config - const destinationType = (await listDestinationTypes()).find( - (destinationType) => destinationType.type === destination.type - ) - // If the destination type doesn't exist, return a 400 - if (!destinationType) { - ctx.status = 400 - return - } - - // If the config doesn't match the schema, return a 400. We use AJV to - // perform validation. 
- const typeValidator = ajv.compile(destinationType.configSchema) - const typeValid = typeValidator(config) - if (!typeValid) { - ctx.status = 400 - ctx.body = typeValidator.errors - return - } - - // NOTE: you cannot update a deleted destination. In the case that you - // try to update a deleted destination, we will return a 404. This is - // detected by the update row count being 0. - const result = await database.query( - SQL` - UPDATE destinations - SET - name = ${destination.name}, - description = ${destination.description}, - type = ${destination.type}, - config = ${destination.config} - WHERE - id = ${id} AND - team_id = ${ctx.params.projectId} AND - is_deleted = false - RETURNING * - ` - ) - - if (result.rowCount === 0) { - ctx.status = 404 - return - } - - ctx.status = 200 - ctx.body = result.rows[0] - } - -export const deleteDestinationHandler = - (database: pg.Client) => - async (ctx: Koa.Context): Promise => { - // NOTE: we do not delete the destination, but instead mark it as deleted - const id = ctx.params.destinationId - // Validate id is a uuid - if (!id.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i)) { - ctx.status = 400 - return - } - - const result = await database.query(SQL` - UPDATE destinations - SET is_deleted = true - WHERE - id = ${id} AND - team_id = ${ctx.params.projectId} - `) - - if (result.rowCount === 0) { - ctx.status = 404 - return - } - - ctx.status = 204 - } diff --git a/cdp/src/rest.ts b/cdp/src/rest.ts deleted file mode 100644 index b5cf20f9b36ec..0000000000000 --- a/cdp/src/rest.ts +++ /dev/null @@ -1,121 +0,0 @@ -/* - -REST API for adding, updating, removing, and listing Destination. A Destination -represents a remote location that events should be sent to. The REST API tries -to be as simple as possible, and only supports JSON. It also tries to be of the -same response style as the API from the Django part of the application which -uses Django REST Framework. `Destination`s are stored in a separate logical -PostgreSQL database to the main application database to provide a clear -separation of concerns and limit the impact of e.g. heavy usage of the database -from the main application. - -We also provide a read only DestinationType resource, which is used to list -the available DestinationTypes. This is used to retrieve the available -DestinationTypes for use as `Destination.type` as well as the schema for the -`Destination.config` field. These types are defined in code for now, but it's -possible that we will want to move them to the database in the future to allow -dynamic addition of new `DestinationType`s. - -The implementation is based on Koajs, which is a popular Node.js web -application framework. Below we define the Koa application and the routes for -the REST API, using handlers defined in the `handlers.ts` files. - -We do not at this point separate out the implementation -into Services, Repositories, and Controllers, but instead keep it all in one -file, although that could be an improvement in the future if we find ourselves -using the destinations API in other parts of the CDP application. 
- -*/ - -import assert from 'assert' -import Koa from 'koa' -import Router from 'koa-router' -import bodyParser from 'koa-bodyparser' -import logger from 'koa-pino-logger' -import pg from 'pg' -import jwt from 'koa-jwt' -import { NodeSDK } from '@opentelemetry/sdk-node' - -import { ConsoleSpanExporter } from '@opentelemetry/sdk-trace-base' -import { PgInstrumentation } from '@opentelemetry/instrumentation-pg' -import { PinoInstrumentation } from '@opentelemetry/instrumentation-pino' - -import { listDestinationTypesHandler } from './destination-types/handlers' -import { - createDestinationHandler, - deleteDestinationHandler, - getDestinationHandler, - updateDestinationHandler, -} from './destinations/handlers' - -const getApp = async (config: NodeJS.ProcessEnv): Promise => { - const app = new Koa() - const router = new Router() - - assert(config.DATABASE_URL, 'DATABASE_URL environment variable must be set') - assert(config.SECRET_KEY, 'SECRET_KEY environment variable must be set') - - const database = new pg.Client({ - connectionString: config.DATABASE_URL, - statement_timeout: 1000, - connectionTimeoutMillis: 1000, - }) - await database.connect() - - const opentelemetry = new NodeSDK({ - traceExporter: new ConsoleSpanExporter(), - instrumentations: [new PgInstrumentation(), new PinoInstrumentation()], - }) - opentelemetry.start() - - app.use(jwt({ secret: config.SECRET_KEY, key: 'jwtData' })) - - // For any route matching /api/projects/:projectId/... we want to make sure - // that the JWT token contains the projectId as a claim. If it doesn't we - // return a 403 Forbidden response. - router.use('/api/projects/:projectId', async (ctx, next) => { - const projectId = Number.parseInt(ctx.params.projectId) - const jwtData = ctx.state.jwtData - if (jwtData.projectIds.indexOf(projectId) === -1) { - ctx.status = 403 - ctx.body = { - detail: 'You do not have permission to perform this action.', - } - return - } - await next() - }) - - router.param('projectId', (projectId, ctx, next) => { - if (projectId.match(/^[0-9]+$/)) { - return next() - } - ctx.status = 400 - ctx.body = { - detail: 'Invalid project ID.', - } - }) - - router.get('/api/projects/:projectId/destination-types', listDestinationTypesHandler) - router.post('/api/projects/:projectId/destinations', createDestinationHandler(database)) - router.get('/api/projects/:projectId/destinations/:destinationId', getDestinationHandler(database)) - router.put('/api/projects/:projectId/destinations/:destinationId', updateDestinationHandler(database)) - router.delete('/api/projects/:projectId/destinations/:destinationId', deleteDestinationHandler(database)) - - app.use(logger()) - app.use(bodyParser()) - app.use(router.routes()) - app.use(router.allowedMethods()) - - return app -} - -const config = { - DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/cdp', - SECRET_KEY: '', - ...process.env, -} - -getApp(config).then((app) => { - app.listen(3000) -}) diff --git a/cdp/src/sql-template-string.ts b/cdp/src/sql-template-string.ts deleted file mode 100644 index 70a31e5735b7b..0000000000000 --- a/cdp/src/sql-template-string.ts +++ /dev/null @@ -1,9 +0,0 @@ -export const SQL = (sqlParts: TemplateStringsArray, ...args: any[]): { text: string; values: any[] } => { - // Generates a node-pq compatible query object given a tagged - // template literal. The intention is to remove the need to match up - // the positional arguments with the $1, $2, etc. placeholders in - // the query string. 
- const text = sqlParts.reduce((acc, part, i) => acc + '$' + i + part) - const values = args - return { text, values } -} diff --git a/cdp/tsconfig.json b/cdp/tsconfig.json deleted file mode 100644 index 6fe76cb8bf3ef..0000000000000 --- a/cdp/tsconfig.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "ts-node": { - "transpileOnly": true, - "transpiler": "ts-node/transpilers/swc-experimental" - }, - "compilerOptions": { - "module": "commonjs", - "esModuleInterop": true, - "target": "es6", - "noImplicitAny": true, - "moduleResolution": "node", - "sourceMap": true, - "outDir": "dist", - "baseUrl": ".", - "paths": { - "*": ["node_modules/*"] - } - }, - "include": ["src/**/*"] -} diff --git a/cypress/e2e/dashboard.cy.ts b/cypress/e2e/dashboard.cy.ts index ed3a92a465e04..959aa5118fb80 100644 --- a/cypress/e2e/dashboard.cy.ts +++ b/cypress/e2e/dashboard.cy.ts @@ -100,6 +100,40 @@ describe('Dashboard', () => { cy.get('[data-attr^="breadcrumb-Dashboard:"]').should('have.text', TEST_DASHBOARD_NAME + 'UnnamedCancelSave') }) + const assertVariablesConfigurationScreenIsShown = (): void => { + cy.get('[data-attr="new-dashboard-chooser"]').contains('Unique variable name').should('exist') + } + + it('Allow reselecting a dashboard after pressing back', () => { + cy.intercept('GET', /\/api\/projects\/\d+\/dashboard_templates/, (req) => { + req.reply((response) => { + response.body.results[0].variables = [ + { + id: 'id', + name: 'Unique variable name', + type: 'event', + default: {}, + required: true, + description: 'description', + }, + ] + return response + }) + }) + + // Request templates again. + cy.clickNavMenu('dashboards') + + cy.get('[data-attr="new-dashboard"]').click() + cy.get('[data-attr="create-dashboard-from-template"]').click() + assertVariablesConfigurationScreenIsShown() + + cy.contains('.LemonButton', 'Back').click() + + cy.get('[data-attr="create-dashboard-from-template"]').click() + assertVariablesConfigurationScreenIsShown() + }) + it('Click on a dashboard item dropdown and view graph', () => { cy.get('[data-attr=dashboard-name]').contains('Web Analytics').click() cy.get('.InsightCard [data-attr=more-button]').first().click() diff --git a/cypress/e2e/dashboardPremium.cy.ts b/cypress/e2e/dashboardPremium.cy.ts deleted file mode 100644 index 9ef3d3bf1d1d1..0000000000000 --- a/cypress/e2e/dashboardPremium.cy.ts +++ /dev/null @@ -1,38 +0,0 @@ -describe('Dashboards Premium Features', () => { - beforeEach(() => { - cy.clickNavMenu('dashboards') - cy.location('pathname').should('include', '/dashboard') - }) - - // Taggables are an enterprise feature. Cypress isn't setup with a scale license so these - // tests should fail now that we make that license check in the backend and return a 402. 
- xit('Tag dashboard', () => { - const newTag = `test-${Math.floor(Math.random() * 10000)}` - cy.get('[data-attr=dashboard-name]').contains('App Analytics').click() - cy.get('[data-attr=button-add-tag]').click() - cy.focused().type(newTag) - cy.get('[data-attr=new-tag-option]').click() - cy.get('.ant-tag').should('contain', newTag) - - cy.wait(300) - cy.get('.new-tag-input').should('not.exist') // Input should disappear - - cy.clickNavMenu('dashboards') - cy.get('.ant-tag').should('contain', newTag) // Tag is shown in dashboard list too - }) - - xit('Cannot add duplicate tags', () => { - const newTag = `test2-${Math.floor(Math.random() * 10000)}` - cy.get('[data-attr=dashboard-name]').contains('App Analytics').click() - cy.get('[data-attr=button-add-tag]').click() - cy.focused().type(newTag) - cy.get('[data-attr=new-tag-option]').click() - cy.get('.ant-tag').should('contain', newTag) - cy.get('[data-attr=button-add-tag]').click() - cy.focused().type(newTag) - cy.get('[data-attr=new-tag-option]').click() - cy.get('.Toastify__toast--error').should('be.visible') - - cy.get('.dashboard').find('.ant-tag').contains(newTag).should('have.length', 1) - }) -}) diff --git a/cypress/e2e/insights.cy.ts b/cypress/e2e/insights.cy.ts index 01344c5719ed1..0ea7fc52d0f25 100644 --- a/cypress/e2e/insights.cy.ts +++ b/cypress/e2e/insights.cy.ts @@ -78,12 +78,6 @@ describe('Insights', () => { savedInsights.checkInsightIsInListView(insightName) }) - it('Shows not found error with invalid short URL', () => { - cy.visit('/i/i_dont_exist') - cy.location('pathname').should('contain', '/insights/i_dont_exist') - cy.get('.LemonSkeleton').should('exist') - }) - it('Stickiness graph', () => { cy.get('[role=tab]').contains('Stickiness').click() cy.get('[data-attr=add-action-event-button]').click() diff --git a/cypress/e2e/insightsPremium.cy.ts b/cypress/e2e/insightsPremium.cy.ts deleted file mode 100644 index 4b54dc22742f0..0000000000000 --- a/cypress/e2e/insightsPremium.cy.ts +++ /dev/null @@ -1,31 +0,0 @@ -describe('Insights Premium Features', () => { - beforeEach(() => { - cy.clickNavMenu('insight') - cy.location('pathname').should('include', '/insights') - }) - - xit('Tag insight', () => { - const newTag = `test-${Math.floor(Math.random() * 10000)}` - cy.get('[data-attr=button-add-tag]').click() - cy.focused().type(newTag) - cy.get('[data-attr=new-tag-option]').click() - cy.get('.ant-tag').should('contain', newTag) - - cy.wait(300) - cy.get('.new-tag-input').should('not.exist') // Input should disappear - }) - - xit('Cannot add duplicate tags', () => { - const newTag = `test2-${Math.floor(Math.random() * 10000)}` - cy.get('[data-attr=button-add-tag]').click() - cy.focused().type(newTag) - cy.get('[data-attr=new-tag-option]').click() - cy.get('.ant-tag').should('contain', newTag) - - cy.wait(300) - cy.get('[data-attr=button-add-tag]').click() - cy.focused().type(newTag) - cy.get('[data-attr=new-tag-option]').click() - cy.get('.Toastify__toast--error').should('be.visible') - }) -}) diff --git a/cypress/e2e/surveys.cy.ts b/cypress/e2e/surveys.cy.ts index e3efcd4c48a0f..fd3a60ca6bc1a 100644 --- a/cypress/e2e/surveys.cy.ts +++ b/cypress/e2e/surveys.cy.ts @@ -221,4 +221,64 @@ describe('Surveys', () => { cy.get('[data-attr=surveys-table]').should('contain', name) cy.get(`[data-row-key="${name}"]`).contains(name).click() }) + + it('duplicates a survey', () => { + // create survey + cy.get('[data-attr=new-survey]').click() + cy.get('[data-attr=new-blank-survey]').click() + 
cy.get('[data-attr=survey-name]').focus().type(name).should('have.value', name) + + // Add user targetting criteria + cy.get('.LemonCollapsePanel').contains('Targeting').click() + cy.contains('All users').click() + cy.get('.Popover__content').contains('Users who match').click() + cy.contains('Add user targeting').click() + cy.get('[data-attr="property-select-toggle-0"]').click() + cy.get('[data-attr="prop-filter-person_properties-0"]').click() + cy.get('[data-attr=prop-val] .LemonInput').click({ force: true }) + cy.get('[data-attr=prop-val-0]').click({ force: true }) + cy.get('[data-attr="rollout-percentage"]').type('100') + + cy.get('[data-attr=save-survey]').first().click() + + // Launch the survey first, the duplicated one should be in draft + cy.get('[data-attr="launch-survey"]').click() + + // try to duplicate survey + cy.get('[data-attr=more-button]').click() + cy.get('[data-attr=duplicate-survey]').click() + + // if the survey is duplicated, try to view it & verify a copy is created + cy.get('[data-attr=success-toast]').contains('duplicated').should('exist').siblings('button').click() + cy.get('[data-attr=top-bar-name]').contains(`${name} (copy)`).should('exist') + + // check if it launched in a draft state + cy.get('button[data-attr="launch-survey"]').should('have.text', 'Launch') + + // check if targetting criteria is copied + cy.contains('Release conditions summary').should('exist') + cy.get('.FeatureConditionCard').should('exist').should('contain.text', 'is_demo equals true') + cy.get('.FeatureConditionCard').should('contain.text', 'Rolled out to 100% of users in this set.') + + // delete the duplicated survey + cy.get('[data-attr=more-button]').click() + cy.get('[data-attr=delete-survey]').click() + + // Archive the original survey + cy.clickNavMenu('surveys') + cy.get('[data-attr=surveys-table]').find(`[data-row-key="${name}"]`).find('a').click() + cy.get('[data-attr=stop-survey]').click() + cy.get('[data-attr=more-button]').click() + cy.get('[data-attr=archive-survey]').click() + + // check if the duplicated survey is created with draft state + cy.get('[data-attr=more-button]').click() + cy.get('[data-attr=duplicate-survey]').click() + cy.clickNavMenu('surveys') + cy.get('[data-attr=surveys-table]') + .find(`[data-row-key="${name} (copy)"]`) + .find('[data-attr=status]') + .contains('DRAFT') + .should('exist') + }) }) diff --git a/cypress/e2e/trends.cy.ts b/cypress/e2e/trends.cy.ts index 36809958d7c25..5f464714c3eb9 100644 --- a/cypress/e2e/trends.cy.ts +++ b/cypress/e2e/trends.cy.ts @@ -1,4 +1,4 @@ -import { insight, interceptInsightLoad } from '../productAnalytics' +import { insight } from '../productAnalytics' describe('Trends', () => { beforeEach(() => { @@ -6,14 +6,14 @@ describe('Trends', () => { }) it('Can load a graph from a URL directly', () => { - const networkInterceptAlias = interceptInsightLoad('TRENDS') + cy.intercept('POST', /api\/projects\/\d+\/query\//).as('loadNewQueryInsight') // regression test, the graph wouldn't load when going directly to a URL cy.visit( '/insights/new?insight=TRENDS&interval=day&display=ActionsLineGraph&events=%5B%7B"id"%3A"%24pageview"%2C"name"%3A"%24pageview"%2C"type"%3A"events"%2C"order"%3A0%7D%5D&filter_test_accounts=false&breakdown=%24referrer&breakdown_type=event&properties=%5B%7B"key"%3A"%24current_url"%2C"value"%3A"http%3A%2F%2Fhogflix.com"%2C"operator"%3A"icontains"%2C"type"%3A"event"%7D%5D' ) - cy.wait(`@${networkInterceptAlias}`) + cy.wait(`@loadNewQueryInsight`) cy.get('[data-attr=trend-line-graph]').should('exist') }) diff 
--git a/cypress/fixtures/api/billing-v2/billing-v2-subscribed-all.json b/cypress/fixtures/api/billing-v2/billing-v2-subscribed-all.json index a452ef9f5a3ac..fdb94e7e82eb9 100644 --- a/cypress/fixtures/api/billing-v2/billing-v2-subscribed-all.json +++ b/cypress/fixtures/api/billing-v2/billing-v2-subscribed-all.json @@ -204,16 +204,16 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "unit": null, "limit": null, "note": null @@ -2978,7 +2978,7 @@ "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", "screenshot_url": null, "icon_key": "IconBolt", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "subscribed": null, "plans": [ { @@ -2987,7 +2987,7 @@ "name": "Integrations", "description": "Connect PostHog to your favorite tools.", "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "note": null, "unit": null, "free_allocation": null, @@ -3026,8 +3026,8 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null @@ -3043,7 +3043,7 @@ "name": "Integrations", "description": "Connect PostHog to your favorite tools.", "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "note": null, "unit": null, "free_allocation": null, @@ -3082,16 +3082,16 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "unit": null, "limit": null, "note": null @@ -3122,8 +3122,8 @@ "features": [ { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. 
(Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "images": null, "icon_key": null, "type": null @@ -3162,8 +3162,8 @@ }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "images": null, "icon_key": null, "type": null diff --git a/cypress/fixtures/api/billing-v2/billing-v2-unsubscribed.json b/cypress/fixtures/api/billing-v2/billing-v2-unsubscribed.json index 56ef139985e30..0395ee7864845 100644 --- a/cypress/fixtures/api/billing-v2/billing-v2-unsubscribed.json +++ b/cypress/fixtures/api/billing-v2/billing-v2-unsubscribed.json @@ -164,8 +164,8 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null @@ -2660,7 +2660,7 @@ "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", "screenshot_url": null, "icon_key": "IconBolt", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "subscribed": null, "plans": [ { @@ -2669,7 +2669,7 @@ "name": "Free", "description": "Connect PostHog to your favorite tools.", "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "note": null, "unit": null, "free_allocation": null, @@ -2708,8 +2708,8 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null @@ -2727,7 +2727,7 @@ "name": "Paid", "description": "Connect PostHog to your favorite tools.", "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "note": null, "unit": null, "free_allocation": null, @@ -2766,16 +2766,16 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. 
(Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "unit": null, "limit": null, "note": null @@ -2808,8 +2808,8 @@ "features": [ { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "images": null, "icon_key": null, "type": null @@ -2848,8 +2848,8 @@ }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "images": null, "icon_key": null, "type": null diff --git a/cypress/fixtures/api/billing-v2/billing-v2.json b/cypress/fixtures/api/billing-v2/billing-v2.json index 0274d6e004276..242f3c2f1b77b 100644 --- a/cypress/fixtures/api/billing-v2/billing-v2.json +++ b/cypress/fixtures/api/billing-v2/billing-v2.json @@ -171,16 +171,16 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "unit": null, "limit": null, "note": null @@ -2787,7 +2787,7 @@ "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", "screenshot_url": null, "icon_key": "IconBolt", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "subscribed": null, "plans": [ { @@ -2796,7 +2796,7 @@ "name": "Free", "description": "Connect PostHog to your favorite tools.", "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "note": null, "unit": null, "free_allocation": null, @@ -2835,8 +2835,8 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. 
(Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null @@ -2854,7 +2854,7 @@ "name": "Paid", "description": "Connect PostHog to your favorite tools.", "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", - "docs_url": "https://posthog.com/docs/apps", + "docs_url": "https://posthog.com/docs/cdp", "note": null, "unit": null, "free_allocation": null, @@ -2893,16 +2893,16 @@ }, { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "unit": null, "limit": null, "note": null }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "unit": null, "limit": null, "note": null @@ -2935,8 +2935,8 @@ "features": [ { "key": "apps", - "name": "Apps", - "description": "Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)", + "name": "Transformations", + "description": "Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)", "images": null, "icon_key": null, "type": null @@ -2975,8 +2975,8 @@ }, { "key": "app_metrics", - "name": "App metrics", - "description": "Get metrics on your apps to see their usage, reliability, and more.", + "name": "Transformation & destination metrics", + "description": "Get metrics on your transformation and destination metrics to see their usage, reliability, and more.", "images": null, "icon_key": null, "type": null diff --git a/cypress/productAnalytics/index.ts b/cypress/productAnalytics/index.ts index 6fd8a5c521de5..584972cc63968 100644 --- a/cypress/productAnalytics/index.ts +++ b/cypress/productAnalytics/index.ts @@ -16,43 +16,6 @@ export const savedInsights = { }, } -export function interceptInsightLoad(insightType: string): string { - cy.intercept('POST', /api\/projects\/\d+\/insights\/trend\//).as('loadNewTrendsInsight') - cy.intercept('POST', /api\/projects\/\d+\/insights\/funnel\//).as('loadNewFunnelInsight') - cy.intercept('POST', /api\/projects\/\d+\/insights\/retention\//).as('loadNewRetentionInsight') - cy.intercept('POST', /api\/projects\/\d+\/insights\/path\//).as('loadNewPathsInsight') - cy.intercept('POST', /api\/projects\/\d+\/query\//).as('loadNewQueryInsight') - - let networkInterceptAlias: string = '' - switch (insightType) { - case 'TRENDS': - case 'STICKINESS': - case 'LIFECYCLE': - networkInterceptAlias = 'loadNewTrendsInsight' - break - case 'FUNNELS': - networkInterceptAlias = 'loadNewFunnelInsight' - break - case 'RETENTION': - networkInterceptAlias = 'loadNewRetentionInsight' - break - case 'PATH': - case 'PATHS': - networkInterceptAlias = 'loadNewPathsInsight' - break - case 'SQL': - case 'JSON': - networkInterceptAlias = 'loadNewQueryInsight' - break - } - - if (networkInterceptAlias === '') { - throw new Error('Unknown insight type: ' + insightType) - } - - return networkInterceptAlias -} - export 
const insight = { applyFilter: (): void => { cy.get('[data-attr$=add-filter-group]').click() @@ -75,16 +38,16 @@ export const insight = { cy.url().should('not.include', '/new') }, clickTab: (tabName: string): void => { - const networkInterceptAlias = interceptInsightLoad(tabName) + cy.intercept('POST', /api\/projects\/\d+\/query\//).as('loadNewQueryInsight') cy.get(`[data-attr="insight-${(tabName === 'PATHS' ? 'PATH' : tabName).toLowerCase()}-tab"]`).click() if (tabName !== 'FUNNELS') { // funnel insights require two steps before making an api call - cy.wait(`@${networkInterceptAlias}`) + cy.wait(`@loadNewQueryInsight`) } }, newInsight: (insightType: string = 'TRENDS'): void => { - const networkInterceptAlias = interceptInsightLoad(insightType) + cy.intercept('POST', /api\/projects\/\d+\/query\//).as('loadNewQueryInsight') if (insightType === 'JSON') { cy.clickNavMenu('savedinsights') @@ -99,7 +62,7 @@ export const insight = { if (insightType !== 'FUNNELS') { // funnel insights require two steps before making an api call - cy.wait(`@${networkInterceptAlias}`) + cy.wait(`@loadNewQueryInsight`) } }, visitInsight: (insightName: string): void => { diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts index bfb16863482ec..d7fba5c6cc063 100644 --- a/cypress/support/e2e.ts +++ b/cypress/support/e2e.ts @@ -29,13 +29,9 @@ beforeEach(() => { cy.intercept('**/decide/*', (req) => req.reply( decideResponse({ - // set feature flags here e.g. + // Feature flag to be treated as rolled out in E2E tests, e.g.: // 'toolbar-launch-side-action': true, - 'surveys-new-creation-flow': true, - 'auto-redirect': true, - hogql: true, - 'data-exploration-insights': true, - notebooks: true, + 'hogql-insights-preview': true, }) ) ) @@ -46,30 +42,20 @@ beforeEach(() => { req.reply({ statusCode: 404, body: 'Cypress forced 404' }) ) - if (Cypress.spec.name.includes('Premium')) { - cy.intercept('/api/users/@me/', { fixture: 'api/user-enterprise' }) + cy.intercept('GET', /\/api\/projects\/\d+\/insights\/?\?/).as('getInsights') - cy.request('POST', '/api/login/', { - email: 'test@posthog.com', - password: '12345678', - }) + cy.request('POST', '/api/login/', { + email: 'test@posthog.com', + password: '12345678', + }) + + if (Cypress.spec.name.includes('before-onboarding')) { cy.visit('/?no-preloaded-app-context=true') } else { - cy.intercept('GET', /\/api\/projects\/\d+\/insights\/?\?/).as('getInsights') - - cy.request('POST', '/api/login/', { - email: 'test@posthog.com', - password: '12345678', + cy.visit('/insights') + cy.wait('@getInsights').then(() => { + cy.get('.saved-insights tr').should('exist') }) - - if (Cypress.spec.name.includes('before-onboarding')) { - cy.visit('/?no-preloaded-app-context=true') - } else { - cy.visit('/insights') - cy.wait('@getInsights').then(() => { - cy.get('.saved-insights tr').should('exist') - }) - } } }) diff --git a/docker-compose.hobby.yml b/docker-compose.hobby.yml index 690da88e0e135..fa91737a4a8a7 100644 --- a/docker-compose.hobby.yml +++ b/docker-compose.hobby.yml @@ -139,6 +139,10 @@ services: - 7233:7233 volumes: - ./posthog/docker/temporal/dynamicconfig:/etc/temporal/config/dynamicconfig + elasticsearch: + extends: + file: docker-compose.base.yml + service: elasticsearch temporal-admin-tools: extends: file: docker-compose.base.yml diff --git a/ee/api/authentication.py b/ee/api/authentication.py index 2dfb6c7b9f053..f2850bcfb5f61 100644 --- a/ee/api/authentication.py +++ b/ee/api/authentication.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Union +from typing import 
Any, Union from django.core.exceptions import ValidationError as DjangoValidationError from django.http.response import HttpResponse @@ -91,8 +91,8 @@ def auth_url(self): def _get_attr( self, - response_attributes: Dict[str, Any], - attribute_names: List[str], + response_attributes: dict[str, Any], + attribute_names: list[str], optional: bool = False, ) -> str: """ diff --git a/ee/api/billing.py b/ee/api/billing.py index 6761f5ecd604e..1666541ffecd3 100644 --- a/ee/api/billing.py +++ b/ee/api/billing.py @@ -17,8 +17,8 @@ from ee.settings import BILLING_SERVICE_URL from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.cloud_utils import get_cached_instance_license -from posthog.models import Organization from posthog.event_usage import groups +from posthog.models import Organization logger = structlog.get_logger(__name__) @@ -75,9 +75,9 @@ def patch(self, request: Request, *args: Any, **kwargs: Any) -> Response: distinct_id, "billing limits updated", properties={**custom_limits_usd}, - groups=groups(org, self.request.user.team) - if hasattr(self.request.user, "team") - else groups(org), + groups=( + groups(org, self.request.user.team) if hasattr(self.request.user, "team") else groups(org) + ), ) posthoganalytics.group_identify( "organization", @@ -125,8 +125,17 @@ def deactivate(self, request: Request, *args: Any, **kwargs: Any) -> HttpRespons product = request.GET.get("products", None) if not product: raise ValidationError("Products must be specified") - - BillingManager(license).deactivate_products(organization, product) + try: + BillingManager(license).deactivate_products(organization, product) + except Exception as e: + if len(e.args) > 2: + detail_object = e.args[2] + return Response( + {"statusText": e.args[0], "detail": detail_object.get("error_message", detail_object)}, + status=status.HTTP_400_BAD_REQUEST, + ) + else: + raise e return self.list(request, *args, **kwargs) @action(methods=["PATCH"], detail=False) diff --git a/ee/api/dashboard_collaborator.py b/ee/api/dashboard_collaborator.py index 998eeba8238f9..6a004215d96a3 100644 --- a/ee/api/dashboard_collaborator.py +++ b/ee/api/dashboard_collaborator.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, cast +from typing import Any, cast from django.db import IntegrityError from rest_framework import exceptions, mixins, serializers, viewsets @@ -45,7 +45,7 @@ class Meta: ] read_only_fields = ["id", "dashboard_id", "user", "user"] - def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]: + def validate(self, attrs: dict[str, Any]) -> dict[str, Any]: dashboard: Dashboard = self.context["dashboard"] dashboard_permissions = self.user_permissions.dashboard(dashboard) if dashboard_permissions.effective_restriction_level <= Dashboard.RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT: @@ -96,7 +96,7 @@ class DashboardCollaboratorViewSet( serializer_class = DashboardCollaboratorSerializer filter_rewrite_rules = {"team_id": "dashboard__team_id"} - def get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: context = super().get_serializer_context() try: context["dashboard"] = Dashboard.objects.get(id=context["dashboard_id"]) diff --git a/ee/api/role.py b/ee/api/role.py index 44909f504eece..0c4894c2779ce 100644 --- a/ee/api/role.py +++ b/ee/api/role.py @@ -1,4 +1,4 @@ -from typing import List, cast +from typing import cast from django.db import IntegrityError from rest_framework import mixins, serializers, viewsets @@ -76,7 +76,7 @@ def get_members(self, role: Role): return 
RoleMembershipSerializer(members, many=True).data def get_associated_flags(self, role: Role): - associated_flags: List[dict] = [] + associated_flags: list[dict] = [] role_access_objects = FeatureFlagRoleAccess.objects.filter(role=role).values_list("feature_flag_id") flags = FeatureFlag.objects.filter(id__in=role_access_objects) diff --git a/ee/api/sentry_stats.py b/ee/api/sentry_stats.py index 52b16647c2cbf..06b4e53b1bd59 100644 --- a/ee/api/sentry_stats.py +++ b/ee/api/sentry_stats.py @@ -1,5 +1,5 @@ from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union import requests from django.http import HttpRequest, JsonResponse @@ -9,8 +9,8 @@ from posthog.models.instance_setting import get_instance_settings -def get_sentry_stats(start_time: str, end_time: str) -> Tuple[dict, int]: - sentry_config: Dict[str, str] = get_instance_settings(["SENTRY_AUTH_TOKEN", "SENTRY_ORGANIZATION"]) +def get_sentry_stats(start_time: str, end_time: str) -> tuple[dict, int]: + sentry_config: dict[str, str] = get_instance_settings(["SENTRY_AUTH_TOKEN", "SENTRY_ORGANIZATION"]) org_slug = sentry_config.get("SENTRY_ORGANIZATION") token = sentry_config.get("SENTRY_AUTH_TOKEN") @@ -41,9 +41,9 @@ def get_sentry_stats(start_time: str, end_time: str) -> Tuple[dict, int]: def get_tagged_issues_stats( - start_time: str, end_time: str, tags: Dict[str, str], target_issues: List[str] -) -> Dict[str, Any]: - sentry_config: Dict[str, str] = get_instance_settings(["SENTRY_AUTH_TOKEN", "SENTRY_ORGANIZATION"]) + start_time: str, end_time: str, tags: dict[str, str], target_issues: list[str] +) -> dict[str, Any]: + sentry_config: dict[str, str] = get_instance_settings(["SENTRY_AUTH_TOKEN", "SENTRY_ORGANIZATION"]) org_slug = sentry_config.get("SENTRY_ORGANIZATION") token = sentry_config.get("SENTRY_AUTH_TOKEN") @@ -58,7 +58,7 @@ def get_tagged_issues_stats( for tag, value in tags.items(): query += f" {tag}:{value}" - params: Dict[str, Union[list, str]] = { + params: dict[str, Union[list, str]] = { "start": start_time, "end": end_time, "sort": "freq", @@ -89,8 +89,8 @@ def get_stats_for_timerange( base_end_time: str, target_start_time: str, target_end_time: str, - tags: Optional[Dict[str, str]] = None, -) -> Tuple[int, int]: + tags: Optional[dict[str, str]] = None, +) -> tuple[int, int]: base_counts, base_total_count = get_sentry_stats(base_start_time, base_end_time) target_counts, target_total_count = get_sentry_stats(target_start_time, target_end_time) diff --git a/ee/api/subscription.py b/ee/api/subscription.py index 412ddc5cfaff3..9f8881026fbdb 100644 --- a/ee/api/subscription.py +++ b/ee/api/subscription.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any import jwt from django.db.models import QuerySet @@ -67,7 +67,7 @@ def validate(self, attrs): return attrs - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Subscription: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Subscription: request = self.context["request"] validated_data["team_id"] = self.context["team_id"] validated_data["created_by"] = request.user diff --git a/ee/api/test/base.py b/ee/api/test/base.py index 55e7930bfadf1..066dcc373d6d5 100644 --- a/ee/api/test/base.py +++ b/ee/api/test/base.py @@ -1,5 +1,5 @@ import datetime -from typing import Dict, Optional, cast +from typing import Optional, cast from zoneinfo import ZoneInfo @@ -20,7 +20,7 @@ class LicensedTestMixin: def license_required_response( self, message: str = 
"This feature is part of the premium PostHog offering. Self-hosted licenses are no longer available for purchase. Please contact sales@posthog.com to discuss options.", - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: return { "type": "server_error", "code": "payment_required", diff --git a/ee/api/test/fixtures/available_product_features.py b/ee/api/test/fixtures/available_product_features.py index 5be816a169ba3..8cc5413754db1 100644 --- a/ee/api/test/fixtures/available_product_features.py +++ b/ee/api/test/fixtures/available_product_features.py @@ -1,6 +1,6 @@ -from typing import Any, Dict, List +from typing import Any -AVAILABLE_PRODUCT_FEATURES: List[Dict[str, Any]] = [ +AVAILABLE_PRODUCT_FEATURES: list[dict[str, Any]] = [ { "description": "Create playlists of certain session recordings to easily find and watch them again in the future.", "key": "recordings_playlists", diff --git a/ee/api/test/test_authentication.py b/ee/api/test/test_authentication.py index 00fca66b2914b..451efdd3d3777 100644 --- a/ee/api/test/test_authentication.py +++ b/ee/api/test/test_authentication.py @@ -364,7 +364,6 @@ def test_can_login_with_saml(self): with open( os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), - "r", encoding="utf_8", ) as f: saml_response = f.read() @@ -407,7 +406,6 @@ def test_saml_jit_provisioning_and_assertion_with_different_attribute_names(self with open( os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response_alt_attribute_names"), - "r", encoding="utf_8", ) as f: saml_response = f.read() @@ -474,7 +472,6 @@ def test_cannot_login_with_improperly_signed_payload(self): with open( os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), - "r", encoding="utf_8", ) as f: saml_response = f.read() @@ -514,7 +511,6 @@ def test_cannot_signup_with_saml_if_jit_provisioning_is_disabled(self): with open( os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), - "r", encoding="utf_8", ) as f: saml_response = f.read() @@ -552,7 +548,6 @@ def test_cannot_create_account_without_first_name_in_payload(self): with open( os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response_no_first_name"), - "r", encoding="utf_8", ) as f: saml_response = f.read() @@ -594,7 +589,6 @@ def test_cannot_login_with_saml_on_unverified_domain(self): with open( os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), - "r", encoding="utf_8", ) as f: saml_response = f.read() @@ -683,7 +677,6 @@ def test_cannot_use_saml_without_enterprise_license(self): with open( os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), - "r", encoding="utf_8", ) as f: saml_response = f.read() diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index c1698bd1cae7f..94eed34d29d79 100644 --- a/ee/api/test/test_billing.py +++ b/ee/api/test/test_billing.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Any, Dict, List +from typing import Any from unittest.mock import MagicMock, patch from uuid import uuid4 from zoneinfo import ZoneInfo @@ -22,7 +22,7 @@ from posthog.test.base import APIBaseTest, _create_event, flush_persons_and_events -def create_billing_response(**kwargs) -> Dict[str, Any]: +def create_billing_response(**kwargs) -> dict[str, Any]: data: Any = {"license": {"type": "cloud"}} data.update(kwargs) return data @@ -106,7 +106,7 @@ def create_billing_customer(**kwargs) -> CustomerInfo: return data -def create_billing_products_response(**kwargs) -> Dict[str, List[CustomerProduct]]: +def create_billing_products_response(**kwargs) -> dict[str, 
list[CustomerProduct]]: data: Any = { "products": [ CustomerProduct( diff --git a/ee/api/test/test_capture.py b/ee/api/test/test_capture.py index 891a9759a80c5..4f716d785098c 100644 --- a/ee/api/test/test_capture.py +++ b/ee/api/test/test_capture.py @@ -68,26 +68,26 @@ def test_produce_to_kafka(self, kafka_produce): self.assertEqual(event2_data["properties"]["distinct_id"], "id2") # Make sure we're producing data correctly in the way the plugin server expects - self.assertEquals(type(kafka_produce_call1["data"]["distinct_id"]), str) - self.assertEquals(type(kafka_produce_call2["data"]["distinct_id"]), str) + self.assertEqual(type(kafka_produce_call1["data"]["distinct_id"]), str) + self.assertEqual(type(kafka_produce_call2["data"]["distinct_id"]), str) self.assertIn(type(kafka_produce_call1["data"]["ip"]), [str, type(None)]) self.assertIn(type(kafka_produce_call2["data"]["ip"]), [str, type(None)]) - self.assertEquals(type(kafka_produce_call1["data"]["site_url"]), str) - self.assertEquals(type(kafka_produce_call2["data"]["site_url"]), str) + self.assertEqual(type(kafka_produce_call1["data"]["site_url"]), str) + self.assertEqual(type(kafka_produce_call2["data"]["site_url"]), str) - self.assertEquals(type(kafka_produce_call1["data"]["token"]), str) - self.assertEquals(type(kafka_produce_call2["data"]["token"]), str) + self.assertEqual(type(kafka_produce_call1["data"]["token"]), str) + self.assertEqual(type(kafka_produce_call2["data"]["token"]), str) - self.assertEquals(type(kafka_produce_call1["data"]["sent_at"]), str) - self.assertEquals(type(kafka_produce_call2["data"]["sent_at"]), str) + self.assertEqual(type(kafka_produce_call1["data"]["sent_at"]), str) + self.assertEqual(type(kafka_produce_call2["data"]["sent_at"]), str) - self.assertEquals(type(event1_data["properties"]), dict) - self.assertEquals(type(event2_data["properties"]), dict) + self.assertEqual(type(event1_data["properties"]), dict) + self.assertEqual(type(event2_data["properties"]), dict) - self.assertEquals(type(kafka_produce_call1["data"]["uuid"]), str) - self.assertEquals(type(kafka_produce_call2["data"]["uuid"]), str) + self.assertEqual(type(kafka_produce_call1["data"]["uuid"]), str) + self.assertEqual(type(kafka_produce_call2["data"]["uuid"]), str) @patch("posthog.kafka_client.client._KafkaProducer.produce") def test_capture_event_with_uuid_in_payload(self, kafka_produce): diff --git a/ee/api/test/test_dashboard.py b/ee/api/test/test_dashboard.py index 8c39a17135db0..e494dfbce7a44 100644 --- a/ee/api/test/test_dashboard.py +++ b/ee/api/test/test_dashboard.py @@ -106,7 +106,7 @@ def test_cannot_set_dashboard_to_restrict_editing_as_other_user_who_is_project_m response_data = response.json() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEquals( + self.assertEqual( response_data, self.permission_denied_response( "Only the dashboard owner and project admins have the restriction rights required to change the dashboard's restriction level." 
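The test updates above swap the long-deprecated `assertEquals` alias for `assertEqual`. For context, a minimal standalone sketch of the same assertion style — the payload and test names here are illustrative only, not taken from the PostHog test suite:

```python
# Minimal sketch (not part of the patch): why the rename matters.
# `assertEquals` has been a deprecated alias of `assertEqual` since Python 3.2
# and is removed in Python 3.12, so tests that still use it break on newer runtimes.
import unittest


class KafkaPayloadShapeExample(unittest.TestCase):
    def test_distinct_id_is_a_string(self):
        payload = {"distinct_id": "id1", "token": "phc_example"}  # hypothetical payload
        # Preferred, non-deprecated assertion:
        self.assertEqual(type(payload["distinct_id"]), str)
        # Equivalent but clearer for pure type checks:
        self.assertIsInstance(payload["distinct_id"], str)


if __name__ == "__main__":
    unittest.main()
```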
@@ -178,7 +178,7 @@ def test_cannot_edit_restricted_dashboard_as_other_user_who_is_project_member(se response_data = response.json() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEquals( + self.assertEqual( response_data, self.permission_denied_response("You don't have edit permissions for this dashboard."), ) @@ -262,7 +262,7 @@ def test_sharing_edits_limited_to_collaborators(self): response_data = response.json() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEquals( + self.assertEqual( response_data, self.permission_denied_response("You don't have edit permissions for this dashboard."), ) diff --git a/ee/api/test/test_event_definition.py b/ee/api/test/test_event_definition.py index 6e3cbb8775fb9..2aa87e63e2e65 100644 --- a/ee/api/test/test_event_definition.py +++ b/ee/api/test/test_event_definition.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import cast, Optional, List, Dict, Any +from typing import cast, Optional, Any import dateutil.parser from django.utils import timezone @@ -26,7 +26,7 @@ class TestEventDefinitionEnterpriseAPI(APIBaseTest): Ignoring the verified field we'd expect ordering purchase, watched_movie, entered_free_trial, $pageview With it we expect watched_movie, entered_free_trial, purchase, $pageview """ - EXPECTED_EVENT_DEFINITIONS: List[Dict[str, Any]] = [ + EXPECTED_EVENT_DEFINITIONS: list[dict[str, Any]] = [ {"name": "purchase", "verified": None}, {"name": "entered_free_trial", "verified": True}, {"name": "watched_movie", "verified": True}, diff --git a/ee/api/test/test_insight.py b/ee/api/test/test_insight.py index 00863551500ee..7db46bf79dea1 100644 --- a/ee/api/test/test_insight.py +++ b/ee/api/test/test_insight.py @@ -1,6 +1,6 @@ import json from datetime import timedelta -from typing import cast, Optional, List, Dict +from typing import cast, Optional from django.test import override_settings from django.utils import timezone from freezegun import freeze_time @@ -305,7 +305,7 @@ def test_cannot_update_restricted_insight_as_other_user_who_is_project_member(se dashboard.refresh_from_db() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEquals( + self.assertEqual( response_data, self.permission_denied_response( "This insight is on a dashboard that can only be edited by its owner, team members invited to editing the dashboard, and project admins." 
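The import changes in these test modules follow the typing migration applied throughout the diff: builtin generics (PEP 585) replace `typing.Dict`/`List`/`Tuple`, abstract containers move to `collections.abc`, and only names like `Any`, `Optional`, and `cast` stay imported from `typing`. A small self-contained sketch of the before/after pattern, with illustrative function names:

```python
# Sketch of the migration pattern (illustrative names, not from the codebase).
# On Python 3.9+ the builtin containers are subscriptable, so typing.Dict/List/Tuple
# are unnecessary, and Mapping/Sequence/Generator come from collections.abc.
from collections.abc import Mapping
from typing import Any, Optional


# Before (pre-3.9 style):
#   def summarize(rows: List[Dict[str, Any]]) -> Tuple[int, Optional[str]]: ...
def summarize(rows: list[dict[str, Any]]) -> tuple[int, Optional[str]]:
    """Return the row count and the first 'name' value, if any."""
    first_name = next((row.get("name") for row in rows if "name" in row), None)
    return len(rows), first_name


def first_key(mapping: Mapping[str, int]) -> Optional[str]:
    # Mapping is now imported from collections.abc rather than typing.
    return next(iter(mapping), None)


print(summarize([{"name": "purchase"}, {"verified": True}]))  # (2, 'purchase')
print(first_key({"events": 3}))  # 'events'
```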
@@ -547,7 +547,7 @@ def test_an_insight_on_restricted_dashboard_does_not_restrict_admin(self) -> Non @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) @snapshot_postgres_queries def test_listing_insights_does_not_nplus1(self) -> None: - query_counts: List[int] = [] + query_counts: list[int] = [] queries = [] for i in range(5): @@ -587,10 +587,10 @@ def test_listing_insights_does_not_nplus1(self) -> None: f"received query counts\n\n{query_counts}", ) - def assert_insight_activity(self, insight_id: Optional[int], expected: List[Dict]): + def assert_insight_activity(self, insight_id: Optional[int], expected: list[dict]): activity_response = self.dashboard_api.get_insight_activity(insight_id) - activity: List[Dict] = activity_response["results"] + activity: list[dict] = activity_response["results"] self.maxDiff = None assert activity == expected diff --git a/ee/api/test/test_integration.py b/ee/api/test/test_integration.py index d675415e4bd81..7f30635b5afdf 100644 --- a/ee/api/test/test_integration.py +++ b/ee/api/test/test_integration.py @@ -25,7 +25,7 @@ def _headers_for_payload(self, payload: Any): signature = ( "v0=" + hmac.new( - "not-so-secret".encode("utf-8"), + b"not-so-secret", sig_basestring.encode("utf-8"), digestmod=hashlib.sha256, ).hexdigest() diff --git a/ee/api/test/test_property_definition.py b/ee/api/test/test_property_definition.py index ef8d4dd928540..effa43a9f4b18 100644 --- a/ee/api/test/test_property_definition.py +++ b/ee/api/test/test_property_definition.py @@ -1,4 +1,4 @@ -from typing import cast, Optional, List, Dict +from typing import cast, Optional from freezegun import freeze_time import pytest from django.db.utils import IntegrityError @@ -450,7 +450,7 @@ def test_list_property_definitions(self): plan="enterprise", valid_until=timezone.datetime(2500, 1, 19, 3, 14, 7) ) - properties: List[Dict] = [ + properties: list[dict] = [ {"name": "1_when_verified", "verified": True}, {"name": "2_when_verified", "verified": True}, {"name": "3_when_verified", "verified": True}, diff --git a/ee/api/test/test_time_to_see_data.py b/ee/api/test/test_time_to_see_data.py index 4c5a50d51e58f..1ad6b4b08135b 100644 --- a/ee/api/test/test_time_to_see_data.py +++ b/ee/api/test/test_time_to_see_data.py @@ -1,6 +1,6 @@ import json from dataclasses import asdict, dataclass, field -from typing import Any, List +from typing import Any from unittest import mock import pytest @@ -64,7 +64,7 @@ def test_sessions_api(self): ) response = self.client.post("/api/time_to_see_data/sessions").json() - self.assertEquals( + self.assertEqual( response, [ { @@ -209,18 +209,18 @@ class QueryLogRow: query_time_range_days: int = 1 has_joins: int = 0 has_json_operations: int = 0 - filter_by_type: List[str] = field(default_factory=list) - breakdown_by: List[str] = field(default_factory=list) - entity_math: List[str] = field(default_factory=list) + filter_by_type: list[str] = field(default_factory=list) + breakdown_by: list[str] = field(default_factory=list) + entity_math: list[str] = field(default_factory=list) filter: str = "" ProfileEvents: dict = field(default_factory=dict) - tables: List[str] = field(default_factory=list) - columns: List[str] = field(default_factory=list) + tables: list[str] = field(default_factory=list) + columns: list[str] = field(default_factory=list) query: str = "" log_comment = "" -def insert(table: str, rows: List): +def insert(table: str, rows: list): columns = asdict(rows[0]).keys() all_values, params = [], {} diff --git 
a/ee/benchmarks/benchmarks.py b/ee/benchmarks/benchmarks.py index 83e82df068f9b..d999467779a4c 100644 --- a/ee/benchmarks/benchmarks.py +++ b/ee/benchmarks/benchmarks.py @@ -2,7 +2,6 @@ # Needs to be first to set up django environment from .helpers import benchmark_clickhouse, no_materialized_columns, now from datetime import timedelta -from typing import List, Tuple from ee.clickhouse.materialized_columns.analyze import ( backfill_materialized_columns, get_materialized_columns, @@ -29,7 +28,7 @@ from posthog.models.property import PropertyName, TableWithProperties from posthog.constants import FunnelCorrelationType -MATERIALIZED_PROPERTIES: List[Tuple[TableWithProperties, PropertyName]] = [ +MATERIALIZED_PROPERTIES: list[tuple[TableWithProperties, PropertyName]] = [ ("events", "$host"), ("events", "$current_url"), ("events", "$event_type"), diff --git a/ee/billing/billing_manager.py b/ee/billing/billing_manager.py index da95c0871f55a..53b625dff4256 100644 --- a/ee/billing/billing_manager.py +++ b/ee/billing/billing_manager.py @@ -1,5 +1,5 @@ from datetime import datetime, timedelta -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast import jwt import requests @@ -44,7 +44,7 @@ def build_billing_token(license: License, organization: Organization): def handle_billing_service_error(res: requests.Response, valid_codes=(200, 404, 401)) -> None: if res.status_code not in valid_codes: logger.error(f"Billing service returned bad status code: {res.status_code}, body: {res.text}") - raise Exception(f"Billing service returned bad status code: {res.status_code}, body: {res.text}") + raise Exception(f"Billing service returned bad status code: {res.status_code}", f"body:", res.json()) class BillingManager: @@ -53,7 +53,7 @@ class BillingManager: def __init__(self, license): self.license = license or get_cached_instance_license() - def get_billing(self, organization: Optional[Organization], plan_keys: Optional[str]) -> Dict[str, Any]: + def get_billing(self, organization: Optional[Organization], plan_keys: Optional[str]) -> dict[str, Any]: if organization and self.license and self.license.is_v2_license: billing_service_response = self._get_billing(organization) @@ -63,7 +63,7 @@ def get_billing(self, organization: Optional[Organization], plan_keys: Optional[ if organization and billing_service_response: self.update_org_details(organization, billing_service_response) - response: Dict[str, Any] = {"available_features": []} + response: dict[str, Any] = {"available_features": []} response["license"] = {"plan": self.license.plan} @@ -102,7 +102,7 @@ def get_billing(self, organization: Optional[Organization], plan_keys: Optional[ return response - def update_billing(self, organization: Organization, data: Dict[str, Any]) -> None: + def update_billing(self, organization: Organization, data: dict[str, Any]) -> None: res = requests.patch( f"{BILLING_SERVICE_URL}/api/billing/", headers=self.get_auth_headers(organization), diff --git a/ee/billing/billing_types.py b/ee/billing/billing_types.py index 6151ad3288051..0761e02e807ef 100644 --- a/ee/billing/billing_types.py +++ b/ee/billing/billing_types.py @@ -1,5 +1,5 @@ from decimal import Decimal -from typing import Dict, List, Optional, TypedDict +from typing import Optional, TypedDict from posthog.constants import AvailableFeature @@ -18,7 +18,7 @@ class CustomerProduct(TypedDict): image_url: Optional[str] type: str free_allocation: int - tiers: List[Tier] + tiers: list[Tier] tiered: bool unit_amount_usd: Optional[Decimal] 
current_amount_usd: Decimal @@ -51,16 +51,16 @@ class CustomerInfo(TypedDict): deactivated: bool has_active_subscription: bool billing_period: BillingPeriod - available_features: List[AvailableFeature] + available_features: list[AvailableFeature] current_total_amount_usd: Optional[str] current_total_amount_usd_after_discount: Optional[str] - products: Optional[List[CustomerProduct]] - custom_limits_usd: Optional[Dict[str, str]] - usage_summary: Optional[Dict[str, Dict[str, Optional[int]]]] + products: Optional[list[CustomerProduct]] + custom_limits_usd: Optional[dict[str, str]] + usage_summary: Optional[dict[str, dict[str, Optional[int]]]] free_trial_until: Optional[str] discount_percent: Optional[int] discount_amount_usd: Optional[str] - customer_trust_scores: Dict[str, int] + customer_trust_scores: dict[str, int] class BillingStatus(TypedDict): diff --git a/ee/billing/quota_limiting.py b/ee/billing/quota_limiting.py index 1c50b69803a57..8f5864c3ed513 100644 --- a/ee/billing/quota_limiting.py +++ b/ee/billing/quota_limiting.py @@ -1,7 +1,8 @@ import copy from datetime import datetime, timedelta from enum import Enum -from typing import Dict, List, Mapping, Optional, Sequence, Tuple, TypedDict, cast +from typing import Optional, TypedDict, cast +from collections.abc import Mapping, Sequence import dateutil.parser import posthoganalytics @@ -66,13 +67,13 @@ def add_limited_team_tokens(resource: QuotaResource, tokens: Mapping[str, int], redis_client.zadd(f"{cache_key}{resource.value}", tokens) # type: ignore # (zadd takes a Mapping[str, int] but the derived Union type is wrong) -def remove_limited_team_tokens(resource: QuotaResource, tokens: List[str], cache_key: QuotaLimitingCaches) -> None: +def remove_limited_team_tokens(resource: QuotaResource, tokens: list[str], cache_key: QuotaLimitingCaches) -> None: redis_client = get_client() redis_client.zrem(f"{cache_key}{resource.value}", *tokens) @cache_for(timedelta(seconds=30), background_refresh=True) -def list_limited_team_attributes(resource: QuotaResource, cache_key: QuotaLimitingCaches) -> List[str]: +def list_limited_team_attributes(resource: QuotaResource, cache_key: QuotaLimitingCaches) -> list[str]: now = timezone.now() redis_client = get_client() results = redis_client.zrangebyscore(f"{cache_key}{resource.value}", min=now.timestamp(), max="+inf") @@ -86,7 +87,7 @@ class UsageCounters(TypedDict): def org_quota_limited_until( - organization: Organization, resource: QuotaResource, previously_quota_limited_team_tokens: List[str] + organization: Organization, resource: QuotaResource, previously_quota_limited_team_tokens: list[str] ) -> Optional[OrgQuotaLimitingInformation]: if not organization.usage: return None @@ -265,7 +266,7 @@ def sync_org_quota_limits(organization: Organization): def get_team_attribute_by_quota_resource(organization: Organization, resource: QuotaResource): if resource in [QuotaResource.EVENTS, QuotaResource.RECORDINGS]: - team_tokens: List[str] = [x for x in list(organization.teams.values_list("api_token", flat=True)) if x] + team_tokens: list[str] = [x for x in list(organization.teams.values_list("api_token", flat=True)) if x] if not team_tokens: capture_exception(Exception(f"quota_limiting: No team tokens found for organization: {organization.id}")) @@ -274,7 +275,7 @@ def get_team_attribute_by_quota_resource(organization: Organization, resource: Q return team_tokens if resource == QuotaResource.ROWS_SYNCED: - team_ids: List[str] = [x for x in list(organization.teams.values_list("id", flat=True)) if x] + team_ids: 
list[str] = [x for x in list(organization.teams.values_list("id", flat=True)) if x] if not team_ids: capture_exception(Exception(f"quota_limiting: No team ids found for organization: {organization.id}")) @@ -322,7 +323,7 @@ def set_org_usage_summary( def update_all_org_billing_quotas( dry_run: bool = False, -) -> Tuple[Dict[str, Dict[str, int]], Dict[str, Dict[str, int]]]: +) -> tuple[dict[str, dict[str, int]], dict[str, dict[str, int]]]: period = get_current_day() period_start, period_end = period @@ -352,8 +353,8 @@ def update_all_org_billing_quotas( ) ) - todays_usage_report: Dict[str, UsageCounters] = {} - orgs_by_id: Dict[str, Organization] = {} + todays_usage_report: dict[str, UsageCounters] = {} + orgs_by_id: dict[str, Organization] = {} # we iterate through all teams, and add their usage to the organization they belong to for team in teams: @@ -373,12 +374,12 @@ def update_all_org_billing_quotas( for field in team_report: org_report[field] += team_report[field] # type: ignore - quota_limited_orgs: Dict[str, Dict[str, int]] = {x.value: {} for x in QuotaResource} - quota_limiting_suspended_orgs: Dict[str, Dict[str, int]] = {x.value: {} for x in QuotaResource} + quota_limited_orgs: dict[str, dict[str, int]] = {x.value: {} for x in QuotaResource} + quota_limiting_suspended_orgs: dict[str, dict[str, int]] = {x.value: {} for x in QuotaResource} # Get the current quota limits so we can track to poshog if it changes orgs_with_changes = set() - previously_quota_limited_team_tokens: Dict[str, List[str]] = {x.value: [] for x in QuotaResource} + previously_quota_limited_team_tokens: dict[str, list[str]] = {x.value: [] for x in QuotaResource} for field in quota_limited_orgs: previously_quota_limited_team_tokens[field] = list_limited_team_attributes( @@ -405,8 +406,8 @@ def update_all_org_billing_quotas( elif quota_limited_until: quota_limited_orgs[field][org_id] = quota_limited_until - quota_limited_teams: Dict[str, Dict[str, int]] = {x.value: {} for x in QuotaResource} - quota_limiting_suspended_teams: Dict[str, Dict[str, int]] = {x.value: {} for x in QuotaResource} + quota_limited_teams: dict[str, dict[str, int]] = {x.value: {} for x in QuotaResource} + quota_limiting_suspended_teams: dict[str, dict[str, int]] = {x.value: {} for x in QuotaResource} # Convert the org ids to team tokens for team in teams: diff --git a/ee/clickhouse/materialized_columns/analyze.py b/ee/clickhouse/materialized_columns/analyze.py index dac1aa6abc0f4..e8801fe17f606 100644 --- a/ee/clickhouse/materialized_columns/analyze.py +++ b/ee/clickhouse/materialized_columns/analyze.py @@ -1,6 +1,7 @@ import re from datetime import timedelta -from typing import Dict, Generator, List, Optional, Set, Tuple +from typing import Optional +from collections.abc import Generator import structlog @@ -27,18 +28,18 @@ from posthog.models.property_definition import PropertyDefinition from posthog.models.team import Team -Suggestion = Tuple[TableWithProperties, TableColumn, PropertyName] +Suggestion = tuple[TableWithProperties, TableColumn, PropertyName] logger = structlog.get_logger(__name__) class TeamManager: @instance_memoize - def person_properties(self, team_id: str) -> Set[str]: + def person_properties(self, team_id: str) -> set[str]: return self._get_properties(GET_PERSON_PROPERTIES_COUNT, team_id) @instance_memoize - def event_properties(self, team_id: str) -> Set[str]: + def event_properties(self, team_id: str) -> set[str]: return set( PropertyDefinition.objects.filter(team_id=team_id, 
type=PropertyDefinition.Type.EVENT).values_list( "name", flat=True @@ -46,17 +47,17 @@ def event_properties(self, team_id: str) -> Set[str]: ) @instance_memoize - def person_on_events_properties(self, team_id: str) -> Set[str]: + def person_on_events_properties(self, team_id: str) -> set[str]: return self._get_properties(GET_EVENT_PROPERTIES_COUNT.format(column_name="person_properties"), team_id) @instance_memoize - def group_on_events_properties(self, group_type_index: int, team_id: str) -> Set[str]: + def group_on_events_properties(self, group_type_index: int, team_id: str) -> set[str]: return self._get_properties( GET_EVENT_PROPERTIES_COUNT.format(column_name=f"group{group_type_index}_properties"), team_id, ) - def _get_properties(self, query, team_id) -> Set[str]: + def _get_properties(self, query, team_id) -> set[str]: rows = sync_execute(query, {"team_id": team_id}) return {name for name, _ in rows} @@ -86,12 +87,12 @@ def team_id(self) -> Optional[str]: return matches[0] if matches else None @cached_property - def _all_properties(self) -> List[Tuple[str, PropertyName]]: + def _all_properties(self) -> list[tuple[str, PropertyName]]: return re.findall(r"JSONExtract\w+\((\S+), '([^']+)'\)", self.query_string) def properties( self, team_manager: TeamManager - ) -> Generator[Tuple[TableWithProperties, TableColumn, PropertyName], None, None]: + ) -> Generator[tuple[TableWithProperties, TableColumn, PropertyName], None, None]: # Reverse-engineer whether a property is an "event" or "person" property by getting their event definitions. # :KLUDGE: Note that the same property will be found on both tables if both are used. # We try to hone in on the right column by looking at the column from which the property is extracted. @@ -124,7 +125,7 @@ def properties( yield "events", "group4_properties", property -def _analyze(since_hours_ago: int, min_query_time: int) -> List[Suggestion]: +def _analyze(since_hours_ago: int, min_query_time: int) -> list[Suggestion]: "Finds columns that should be materialized" raw_queries = sync_execute( @@ -179,7 +180,7 @@ def _analyze(since_hours_ago: int, min_query_time: int) -> List[Suggestion]: def materialize_properties_task( - columns_to_materialize: Optional[List[Suggestion]] = None, + columns_to_materialize: Optional[list[Suggestion]] = None, time_to_analyze_hours: int = MATERIALIZE_COLUMNS_ANALYSIS_PERIOD_HOURS, maximum: int = MATERIALIZE_COLUMNS_MAX_AT_ONCE, min_query_time: int = MATERIALIZE_COLUMNS_MINIMUM_QUERY_TIME, @@ -203,7 +204,7 @@ def materialize_properties_task( else: logger.info("Found no columns to materialize.") - properties: Dict[TableWithProperties, List[Tuple[PropertyName, TableColumn]]] = { + properties: dict[TableWithProperties, list[tuple[PropertyName, TableColumn]]] = { "events": [], "person": [], } diff --git a/ee/clickhouse/materialized_columns/columns.py b/ee/clickhouse/materialized_columns/columns.py index 71bfd5adcc751..1340abde0a682 100644 --- a/ee/clickhouse/materialized_columns/columns.py +++ b/ee/clickhouse/materialized_columns/columns.py @@ -1,6 +1,6 @@ import re from datetime import timedelta -from typing import Dict, List, Literal, Tuple, Union, cast +from typing import Literal, Union, cast from clickhouse_driver.errors import ServerException from django.utils.timezone import now @@ -36,7 +36,7 @@ @cache_for(timedelta(minutes=15)) def get_materialized_columns( table: TablesWithMaterializedColumns, -) -> Dict[Tuple[PropertyName, TableColumn], ColumnName]: +) -> dict[tuple[PropertyName, TableColumn], ColumnName]: rows = 
sync_execute( """ SELECT comment, name @@ -141,7 +141,7 @@ def add_minmax_index(table: TablesWithMaterializedColumns, column_name: str): def backfill_materialized_columns( table: TableWithProperties, - properties: List[Tuple[PropertyName, TableColumn]], + properties: list[tuple[PropertyName, TableColumn]], backfill_period: timedelta, test_settings=None, ) -> None: @@ -215,7 +215,7 @@ def _materialized_column_name( return f"{prefix}{property_str}{suffix}" -def _extract_property(comment: str) -> Tuple[PropertyName, TableColumn]: +def _extract_property(comment: str) -> tuple[PropertyName, TableColumn]: # Old style comments have the format "column_materializer::property", dealing with the default table column. # Otherwise, it's "column_materializer::table_column::property" split_column = comment.split("::", 2) diff --git a/ee/clickhouse/models/test/test_action.py b/ee/clickhouse/models/test/test_action.py index 692844e55c1e4..4f06b3e871a88 100644 --- a/ee/clickhouse/models/test/test_action.py +++ b/ee/clickhouse/models/test/test_action.py @@ -1,5 +1,4 @@ import dataclasses -from typing import List from posthog.client import sync_execute from posthog.hogql.hogql import HogQLContext @@ -22,7 +21,7 @@ class MockEvent: distinct_id: str -def _get_events_for_action(action: Action) -> List[MockEvent]: +def _get_events_for_action(action: Action) -> list[MockEvent]: hogql_context = HogQLContext(team_id=action.team_id) formatted_query, params = format_action_filter( team_id=action.team_id, action=action, prepend="", hogql_context=hogql_context diff --git a/ee/clickhouse/models/test/test_property.py b/ee/clickhouse/models/test/test_property.py index 913058d4ae1bd..6348697d84435 100644 --- a/ee/clickhouse/models/test/test_property.py +++ b/ee/clickhouse/models/test/test_property.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Literal, Union, cast +from typing import Literal, Union, cast from uuid import UUID import pytest @@ -43,7 +43,7 @@ class TestPropFormat(ClickhouseTestMixin, BaseTest): CLASS_DATA_LEVEL_SETUP = False - def _run_query(self, filter: Filter, **kwargs) -> List: + def _run_query(self, filter: Filter, **kwargs) -> list: query, params = parse_prop_grouped_clauses( property_group=filter.property_groups, allow_denormalized_props=True, @@ -776,7 +776,7 @@ def test_parse_groups_persons(self): class TestPropDenormalized(ClickhouseTestMixin, BaseTest): CLASS_DATA_LEVEL_SETUP = False - def _run_query(self, filter: Filter, join_person_tables=False) -> List: + def _run_query(self, filter: Filter, join_person_tables=False) -> list: outer_properties = PropertyOptimizer().parse_property_groups(filter.property_groups).outer query, params = parse_prop_grouped_clauses( team_id=self.team.pk, @@ -1232,7 +1232,7 @@ def test_parse_groups_persons_edge_case_with_single_filter(snapshot): @pytest.mark.parametrize("breakdown, table, query_alias, column, expected", TEST_BREAKDOWN_PROCESSING) def test_breakdown_query_expression( clean_up_materialised_columns, - breakdown: Union[str, List[str]], + breakdown: Union[str, list[str]], table: TableWithProperties, query_alias: Literal["prop", "value"], column: str, @@ -1281,7 +1281,7 @@ def test_breakdown_query_expression( ) def test_breakdown_query_expression_materialised( clean_up_materialised_columns, - breakdown: Union[str, List[str]], + breakdown: Union[str, list[str]], table: TableWithProperties, query_alias: Literal["prop", "value"], column: str, @@ -1317,7 +1317,7 @@ def test_breakdown_query_expression_materialised( @pytest.fixture -def 
test_events(db, team) -> List[UUID]: +def test_events(db, team) -> list[UUID]: return [ _create_event( event="$pageview", @@ -1958,7 +1958,7 @@ def test_combine_group_properties(): ], } - combined_group = PropertyGroup(PropertyOperatorType.AND, cast(List[Property], [])).combine_properties( + combined_group = PropertyGroup(PropertyOperatorType.AND, cast(list[Property], [])).combine_properties( PropertyOperatorType.OR, [propertyC, propertyD] ) assert combined_group.to_dict() == { diff --git a/ee/clickhouse/queries/column_optimizer.py b/ee/clickhouse/queries/column_optimizer.py index dd62154dd2037..b1bf142aa3d1e 100644 --- a/ee/clickhouse/queries/column_optimizer.py +++ b/ee/clickhouse/queries/column_optimizer.py @@ -1,5 +1,5 @@ -from typing import Counter as TCounter -from typing import Set, cast +from collections import Counter as TCounter +from typing import cast from posthog.clickhouse.materialized_columns.column import ColumnName from posthog.constants import TREND_FILTER_TYPE_ACTIONS, FunnelCorrelationType @@ -20,16 +20,16 @@ class EnterpriseColumnOptimizer(FOSSColumnOptimizer): @cached_property - def group_types_to_query(self) -> Set[GroupTypeIndex]: + def group_types_to_query(self) -> set[GroupTypeIndex]: used_properties = self.used_properties_with_type("group") return {cast(GroupTypeIndex, group_type_index) for _, _, group_type_index in used_properties} @cached_property - def group_on_event_columns_to_query(self) -> Set[ColumnName]: + def group_on_event_columns_to_query(self) -> set[ColumnName]: "Returns a list of event table group columns containing materialized properties that this query needs" used_properties = self.used_properties_with_type("group") - columns_to_query: Set[ColumnName] = set() + columns_to_query: set[ColumnName] = set() for group_type_index in range(5): columns_to_query = columns_to_query.union( @@ -120,7 +120,7 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]: counter += get_action_tables_and_properties(entity.get_action()) if ( - not isinstance(self.filter, (StickinessFilter, PropertiesTimelineFilter)) + not isinstance(self.filter, StickinessFilter | PropertiesTimelineFilter) and self.filter.correlation_type == FunnelCorrelationType.PROPERTIES and self.filter.correlation_property_names ): diff --git a/ee/clickhouse/queries/enterprise_cohort_query.py b/ee/clickhouse/queries/enterprise_cohort_query.py index a748a64adf06a..814b61e9a8bf5 100644 --- a/ee/clickhouse/queries/enterprise_cohort_query.py +++ b/ee/clickhouse/queries/enterprise_cohort_query.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Tuple, cast +from typing import Any, cast from posthog.constants import PropertyOperatorType from posthog.models.cohort.util import get_count_operator @@ -15,18 +15,18 @@ from posthog.schema import PersonsOnEventsMode -def check_negation_clause(prop: PropertyGroup) -> Tuple[bool, bool]: +def check_negation_clause(prop: PropertyGroup) -> tuple[bool, bool]: has_negation_clause = False has_primary_clase = False if len(prop.values): if isinstance(prop.values[0], PropertyGroup): - for p in cast(List[PropertyGroup], prop.values): + for p in cast(list[PropertyGroup], prop.values): has_neg, has_primary = check_negation_clause(p) has_negation_clause = has_negation_clause or has_neg has_primary_clase = has_primary_clase or has_primary else: - for property in cast(List[Property], prop.values): + for property in cast(list[Property], prop.values): if property.negation: has_negation_clause = True else: @@ -42,7 +42,7 @@ def check_negation_clause(prop: 
PropertyGroup) -> Tuple[bool, bool]: class EnterpriseCohortQuery(FOSSCohortQuery): - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: if not self._outer_property_groups: # everything is pushed down, no behavioral stuff to do # thus, use personQuery directly @@ -87,9 +87,9 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: return final_query, self.params - def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: res: str = "" - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if prop.type == "behavioral": if prop.value == "performed_event": @@ -117,7 +117,7 @@ def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> return res, params - def get_stopped_performing_event(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_stopped_performing_event(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: event = (prop.event_type, prop.key) column_name = f"stopped_event_condition_{prepend}_{idx}" @@ -152,7 +152,7 @@ def get_stopped_performing_event(self, prop: Property, prepend: str, idx: int) - }, ) - def get_restarted_performing_event(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_restarted_performing_event(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: event = (prop.event_type, prop.key) column_name = f"restarted_event_condition_{prepend}_{idx}" @@ -191,7 +191,7 @@ def get_restarted_performing_event(self, prop: Property, prepend: str, idx: int) }, ) - def get_performed_event_first_time(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_performed_event_first_time(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: event = (prop.event_type, prop.key) entity_query, entity_params = self._get_entity(event, prepend, idx) @@ -212,7 +212,7 @@ def get_performed_event_first_time(self, prop: Property, prepend: str, idx: int) {f"{date_param}": date_value, **entity_params}, ) - def get_performed_event_regularly(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_performed_event_regularly(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: event = (prop.event_type, prop.key) entity_query, entity_params = self._get_entity(event, prepend, idx) @@ -266,7 +266,7 @@ def get_performed_event_regularly(self, prop: Property, prepend: str, idx: int) ) @cached_property - def sequence_filters_to_query(self) -> List[Property]: + def sequence_filters_to_query(self) -> list[Property]: props = [] for prop in self._filter.property_groups.flat: if prop.value == "performed_event_sequence": @@ -274,13 +274,13 @@ def sequence_filters_to_query(self) -> List[Property]: return props @cached_property - def sequence_filters_lookup(self) -> Dict[str, str]: + def sequence_filters_lookup(self) -> dict[str, str]: lookup = {} for idx, prop in enumerate(self.sequence_filters_to_query): lookup[str(prop.to_dict())] = f"{idx}" return lookup - def _get_sequence_query(self) -> Tuple[str, Dict[str, Any], str]: + def _get_sequence_query(self) -> tuple[str, dict[str, Any], str]: params = {} materialized_columns = list(self._column_optimizer.event_columns_to_query) @@ -356,7 +356,7 @@ def _get_sequence_query(self) -> Tuple[str, Dict[str, Any], str]: 
self.FUNNEL_QUERY_ALIAS, ) - def _get_sequence_filter(self, prop: Property, idx: int) -> Tuple[List[str], List[str], List[str], Dict[str, Any]]: + def _get_sequence_filter(self, prop: Property, idx: int) -> tuple[list[str], list[str], list[str], dict[str, Any]]: event = validate_entity((prop.event_type, prop.key)) entity_query, entity_params = self._get_entity(event, f"event_sequence_{self._cohort_pk}", idx) seq_event = validate_entity((prop.seq_event_type, prop.seq_event)) @@ -405,7 +405,7 @@ def _get_sequence_filter(self, prop: Property, idx: int) -> Tuple[List[str], Lis }, ) - def get_performed_event_sequence(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_performed_event_sequence(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: return ( f"{self.SEQUENCE_FIELD_ALIAS}_{self.sequence_filters_lookup[str(prop.to_dict())]}", {}, diff --git a/ee/clickhouse/queries/event_query.py b/ee/clickhouse/queries/event_query.py index b1b4dbb695e63..0e16abc780049 100644 --- a/ee/clickhouse/queries/event_query.py +++ b/ee/clickhouse/queries/event_query.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Tuple, Union +from typing import Optional, Union from ee.clickhouse.materialized_columns.columns import ColumnName from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer @@ -33,9 +33,9 @@ def __init__( should_join_distinct_ids=False, should_join_persons=False, # Extra events/person table columns to fetch since parent query needs them - extra_fields: Optional[List[ColumnName]] = None, - extra_event_properties: Optional[List[PropertyName]] = None, - extra_person_fields: Optional[List[ColumnName]] = None, + extra_fields: Optional[list[ColumnName]] = None, + extra_event_properties: Optional[list[PropertyName]] = None, + extra_person_fields: Optional[list[ColumnName]] = None, override_aggregate_users_by_distinct_id: Optional[bool] = None, person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled, **kwargs, @@ -62,7 +62,7 @@ def __init__( self._column_optimizer = EnterpriseColumnOptimizer(self._filter, self._team_id) - def _get_groups_query(self) -> Tuple[str, Dict]: + def _get_groups_query(self) -> tuple[str, dict]: if isinstance(self._filter, PropertiesTimelineFilter): raise Exception("Properties Timeline never needs groups query") return GroupsJoinQuery( diff --git a/ee/clickhouse/queries/experiments/funnel_experiment_result.py b/ee/clickhouse/queries/experiments/funnel_experiment_result.py index ab117b07c69e2..845cce75d505c 100644 --- a/ee/clickhouse/queries/experiments/funnel_experiment_result.py +++ b/ee/clickhouse/queries/experiments/funnel_experiment_result.py @@ -1,7 +1,7 @@ from dataclasses import asdict, dataclass from datetime import datetime import json -from typing import List, Optional, Tuple, Type +from typing import Optional from zoneinfo import ZoneInfo from numpy.random import default_rng @@ -56,7 +56,7 @@ def __init__( feature_flag: FeatureFlag, experiment_start_date: datetime, experiment_end_date: Optional[datetime] = None, - funnel_class: Type[ClickhouseFunnel] = ClickhouseFunnel, + funnel_class: type[ClickhouseFunnel] = ClickhouseFunnel, ): breakdown_key = f"$feature/{feature_flag.key}" self.variants = [variant["key"] for variant in feature_flag.variants] @@ -148,9 +148,9 @@ def get_variants(self, funnel_results): @staticmethod def calculate_results( control_variant: Variant, - test_variants: List[Variant], - priors: Tuple[int, int] = (1, 1), - ) -> List[Probability]: + 
test_variants: list[Variant], + priors: tuple[int, int] = (1, 1), + ) -> list[Probability]: """ Calculates probability that A is better than B. First variant is control, rest are test variants. @@ -186,9 +186,9 @@ def calculate_results( @staticmethod def are_results_significant( control_variant: Variant, - test_variants: List[Variant], - probabilities: List[Probability], - ) -> Tuple[ExperimentSignificanceCode, Probability]: + test_variants: list[Variant], + probabilities: list[Probability], + ) -> tuple[ExperimentSignificanceCode, Probability]: def get_conversion_rate(variant: Variant): return variant.success_count / (variant.success_count + variant.failure_count) @@ -226,7 +226,7 @@ def get_conversion_rate(variant: Variant): return ExperimentSignificanceCode.SIGNIFICANT, expected_loss -def calculate_expected_loss(target_variant: Variant, variants: List[Variant]) -> float: +def calculate_expected_loss(target_variant: Variant, variants: list[Variant]) -> float: """ Calculates expected loss in conversion rate for a given variant. Loss calculation comes from VWO's SmartStats technical paper: @@ -268,7 +268,7 @@ def calculate_expected_loss(target_variant: Variant, variants: List[Variant]) -> return loss / simulations_count -def simulate_winning_variant_for_conversion(target_variant: Variant, variants: List[Variant]) -> Probability: +def simulate_winning_variant_for_conversion(target_variant: Variant, variants: list[Variant]) -> Probability: random_sampler = default_rng() prior_success = 1 prior_failure = 1 @@ -300,7 +300,7 @@ def simulate_winning_variant_for_conversion(target_variant: Variant, variants: L return winnings / simulations_count -def calculate_probability_of_winning_for_each(variants: List[Variant]) -> List[Probability]: +def calculate_probability_of_winning_for_each(variants: list[Variant]) -> list[Probability]: """ Calculates the probability of winning for each variant. 
""" diff --git a/ee/clickhouse/queries/experiments/secondary_experiment_result.py b/ee/clickhouse/queries/experiments/secondary_experiment_result.py index 4926d2920afbd..bd485c43622bf 100644 --- a/ee/clickhouse/queries/experiments/secondary_experiment_result.py +++ b/ee/clickhouse/queries/experiments/secondary_experiment_result.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, Optional +from typing import Optional from rest_framework.exceptions import ValidationError from ee.clickhouse.queries.experiments.funnel_experiment_result import ClickhouseFunnelExperimentResult @@ -55,7 +55,7 @@ def get_results(self): return {"result": variants, **significance_results} - def get_funnel_conversion_rate_for_variants(self, insight_results) -> Dict[str, float]: + def get_funnel_conversion_rate_for_variants(self, insight_results) -> dict[str, float]: variants = {} for result in insight_results: total = result[0]["count"] @@ -67,7 +67,7 @@ def get_funnel_conversion_rate_for_variants(self, insight_results) -> Dict[str, return variants - def get_trend_count_data_for_variants(self, insight_results) -> Dict[str, float]: + def get_trend_count_data_for_variants(self, insight_results) -> dict[str, float]: # this assumes the Trend insight is Cumulative, unless using count per user variants = {} diff --git a/ee/clickhouse/queries/experiments/test_experiment_result.py b/ee/clickhouse/queries/experiments/test_experiment_result.py index 20b737efa1767..18eb673bf9ac8 100644 --- a/ee/clickhouse/queries/experiments/test_experiment_result.py +++ b/ee/clickhouse/queries/experiments/test_experiment_result.py @@ -1,7 +1,6 @@ import unittest from functools import lru_cache from math import exp, lgamma, log -from typing import List from flaky import flaky @@ -31,7 +30,7 @@ def logbeta(x: int, y: int) -> float: # calculation: https://www.evanmiller.org/bayesian-ab-testing.html#binary_ab -def calculate_probability_of_winning_for_target(target_variant: Variant, other_variants: List[Variant]) -> Probability: +def calculate_probability_of_winning_for_target(target_variant: Variant, other_variants: list[Variant]) -> Probability: """ Calculates the probability of winning for target variant. """ @@ -455,7 +454,7 @@ def test_calculate_results_many_variants_control_is_significant(self): # calculation: https://www.evanmiller.org/bayesian-ab-testing.html#count_ab def calculate_probability_of_winning_for_target_count_data( - target_variant: CountVariant, other_variants: List[CountVariant] + target_variant: CountVariant, other_variants: list[CountVariant] ) -> Probability: """ Calculates the probability of winning for target variant. 
diff --git a/ee/clickhouse/queries/experiments/trend_experiment_result.py b/ee/clickhouse/queries/experiments/trend_experiment_result.py index 02974d8bd8252..0370e0a684a88 100644 --- a/ee/clickhouse/queries/experiments/trend_experiment_result.py +++ b/ee/clickhouse/queries/experiments/trend_experiment_result.py @@ -3,7 +3,7 @@ from datetime import datetime from functools import lru_cache from math import exp, lgamma, log -from typing import List, Optional, Tuple, Type +from typing import Optional from zoneinfo import ZoneInfo from numpy.random import default_rng @@ -78,7 +78,7 @@ def __init__( feature_flag: FeatureFlag, experiment_start_date: datetime, experiment_end_date: Optional[datetime] = None, - trend_class: Type[Trends] = Trends, + trend_class: type[Trends] = Trends, custom_exposure_filter: Optional[Filter] = None, ): breakdown_key = f"$feature/{feature_flag.key}" @@ -316,7 +316,7 @@ def get_variants(self, insight_results, exposure_results): return control_variant, test_variants @staticmethod - def calculate_results(control_variant: Variant, test_variants: List[Variant]) -> List[Probability]: + def calculate_results(control_variant: Variant, test_variants: list[Variant]) -> list[Probability]: """ Calculates probability that A is better than B. First variant is control, rest are test variants. @@ -346,9 +346,9 @@ def calculate_results(control_variant: Variant, test_variants: List[Variant]) -> @staticmethod def are_results_significant( control_variant: Variant, - test_variants: List[Variant], - probabilities: List[Probability], - ) -> Tuple[ExperimentSignificanceCode, Probability]: + test_variants: list[Variant], + probabilities: list[Probability], + ) -> tuple[ExperimentSignificanceCode, Probability]: # TODO: Experiment with Expected Loss calculations for trend experiments for variant in test_variants: @@ -375,7 +375,7 @@ def are_results_significant( return ExperimentSignificanceCode.SIGNIFICANT, p_value -def simulate_winning_variant_for_arrival_rates(target_variant: Variant, variants: List[Variant]) -> float: +def simulate_winning_variant_for_arrival_rates(target_variant: Variant, variants: list[Variant]) -> float: random_sampler = default_rng() simulations_count = 100_000 @@ -399,7 +399,7 @@ def simulate_winning_variant_for_arrival_rates(target_variant: Variant, variants return winnings / simulations_count -def calculate_probability_of_winning_for_each(variants: List[Variant]) -> List[Probability]: +def calculate_probability_of_winning_for_each(variants: list[Variant]) -> list[Probability]: """ Calculates the probability of winning for each variant. 
""" @@ -458,7 +458,7 @@ def poisson_p_value(control_count, control_exposure, test_count, test_exposure): return min(1, 2 * min(low_p_value, high_p_value)) -def calculate_p_value(control_variant: Variant, test_variants: List[Variant]) -> Probability: +def calculate_p_value(control_variant: Variant, test_variants: list[Variant]) -> Probability: best_test_variant = max(test_variants, key=lambda variant: variant.count) return poisson_p_value( diff --git a/ee/clickhouse/queries/experiments/utils.py b/ee/clickhouse/queries/experiments/utils.py index 88418e3e354d2..c0211e4c9de24 100644 --- a/ee/clickhouse/queries/experiments/utils.py +++ b/ee/clickhouse/queries/experiments/utils.py @@ -1,4 +1,4 @@ -from typing import Set, Union +from typing import Union from posthog.client import sync_execute from posthog.constants import TREND_FILTER_TYPE_ACTIONS @@ -20,7 +20,7 @@ def requires_flag_warning(filter: Filter, team: Team) -> bool: {parsed_date_to} """ - events: Set[Union[int, str]] = set() + events: set[Union[int, str]] = set() entities_to_use = filter.entities for entity in entities_to_use: diff --git a/ee/clickhouse/queries/funnels/funnel_correlation.py b/ee/clickhouse/queries/funnels/funnel_correlation.py index ed3995968a001..c25763167f2bf 100644 --- a/ee/clickhouse/queries/funnels/funnel_correlation.py +++ b/ee/clickhouse/queries/funnels/funnel_correlation.py @@ -2,12 +2,8 @@ import urllib.parse from typing import ( Any, - Dict, - List, Literal, Optional, - Set, - Tuple, TypedDict, Union, cast, @@ -40,7 +36,7 @@ class EventDefinition(TypedDict): event: str - properties: Dict[str, Any] + properties: dict[str, Any] elements: list @@ -74,7 +70,7 @@ class FunnelCorrelationResponse(TypedDict): queries, but we could use, for example, a dataclass """ - events: List[EventOddsRatioSerialized] + events: list[EventOddsRatioSerialized] skewed: bool @@ -153,7 +149,7 @@ def __init__( ) @property - def properties_to_include(self) -> List[str]: + def properties_to_include(self) -> list[str]: props_to_include = [] if ( self._team.person_on_events_mode != PersonsOnEventsMode.disabled @@ -203,7 +199,7 @@ def support_autocapture_elements(self) -> bool: return True return False - def get_contingency_table_query(self) -> Tuple[str, Dict[str, Any]]: + def get_contingency_table_query(self) -> tuple[str, dict[str, Any]]: """ Returns a query string and params, which are used to generate the contingency table. The query returns success and failure count for event / property values, along with total success and failure counts. 
@@ -216,7 +212,7 @@ def get_contingency_table_query(self) -> Tuple[str, Dict[str, Any]]: return self.get_event_query() - def get_event_query(self) -> Tuple[str, Dict[str, Any]]: + def get_event_query(self) -> tuple[str, dict[str, Any]]: funnel_persons_query, funnel_persons_params = self.get_funnel_actors_cte() event_join_query = self._get_events_join_query() @@ -279,7 +275,7 @@ def get_event_query(self) -> Tuple[str, Dict[str, Any]]: return query, params - def get_event_property_query(self) -> Tuple[str, Dict[str, Any]]: + def get_event_property_query(self) -> tuple[str, dict[str, Any]]: if not self._filter.correlation_event_names: raise ValidationError("Event Property Correlation expects atleast one event name to run correlation on") @@ -359,7 +355,7 @@ def get_event_property_query(self) -> Tuple[str, Dict[str, Any]]: return query, params - def get_properties_query(self) -> Tuple[str, Dict[str, Any]]: + def get_properties_query(self) -> tuple[str, dict[str, Any]]: if not self._filter.correlation_property_names: raise ValidationError("Property Correlation expects atleast one Property to run correlation on") @@ -580,7 +576,7 @@ def _get_properties_prop_clause(self): ) def _get_funnel_step_names(self): - events: Set[Union[int, str]] = set() + events: set[Union[int, str]] = set() for entity in self._filter.entities: if entity.type == TREND_FILTER_TYPE_ACTIONS: action = entity.get_action() @@ -590,7 +586,7 @@ def _get_funnel_step_names(self): return sorted(events) - def _run(self) -> Tuple[List[EventOddsRatio], bool]: + def _run(self) -> tuple[list[EventOddsRatio], bool]: """ Run the diagnose query. @@ -834,7 +830,7 @@ def construct_person_properties_people_url( ).to_params() return f"{self._base_uri}api/person/funnel/correlation?{urllib.parse.urlencode(params)}&cache_invalidation_key={cache_invalidation_key}" - def format_results(self, results: Tuple[List[EventOddsRatio], bool]) -> FunnelCorrelationResponse: + def format_results(self, results: tuple[list[EventOddsRatio], bool]) -> FunnelCorrelationResponse: odds_ratios, skewed_totals = results return { "events": [self.serialize_event_odds_ratio(odds_ratio=odds_ratio) for odds_ratio in odds_ratios], @@ -847,7 +843,7 @@ def run(self) -> FunnelCorrelationResponse: return self.format_results(self._run()) - def get_partial_event_contingency_tables(self) -> Tuple[List[EventContingencyTable], int, int]: + def get_partial_event_contingency_tables(self) -> tuple[list[EventContingencyTable], int, int]: """ For each event a person that started going through the funnel, gets stats for how many of these users are sucessful and how many are unsuccessful. 
@@ -888,7 +884,7 @@ def get_partial_event_contingency_tables(self) -> Tuple[List[EventContingencyTab failure_total, ) - def get_funnel_actors_cte(self) -> Tuple[str, Dict[str, Any]]: + def get_funnel_actors_cte(self) -> tuple[str, dict[str, Any]]: extra_fields = ["steps", "final_timestamp", "first_timestamp"] for prop in self.properties_to_include: @@ -975,12 +971,12 @@ def get_entity_odds_ratio(event_contingency_table: EventContingencyTable, prior_ ) -def build_selector(elements: List[Dict[str, Any]]) -> str: +def build_selector(elements: list[dict[str, Any]]) -> str: # build a CSS select given an "elements_chain" # NOTE: my source of what this should be doing is # https://github.com/PostHog/posthog/blob/cc054930a47fb59940531e99a856add49a348ee5/frontend/src/scenes/events/createActionFromEvent.tsx#L36:L36 # - def element_to_selector(element: Dict[str, Any]) -> str: + def element_to_selector(element: dict[str, Any]) -> str: if attr_id := element.get("attr_id"): return f'[id="{attr_id}"]' diff --git a/ee/clickhouse/queries/funnels/funnel_correlation_persons.py b/ee/clickhouse/queries/funnels/funnel_correlation_persons.py index 6a0cfe3655103..b02a8b8e9b6cb 100644 --- a/ee/clickhouse/queries/funnels/funnel_correlation_persons.py +++ b/ee/clickhouse/queries/funnels/funnel_correlation_persons.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Tuple, Union +from typing import Optional, Union from django.db.models.query import QuerySet from rest_framework.exceptions import ValidationError @@ -52,9 +52,9 @@ def actor_query(self, limit_actors: Optional[bool] = True): def get_actors( self, - ) -> Tuple[ + ) -> tuple[ Union[QuerySet[Person], QuerySet[Group]], - Union[List[SerializedGroup], List[SerializedPerson]], + Union[list[SerializedGroup], list[SerializedPerson]], int, ]: if self._filter.correlation_type == FunnelCorrelationType.PROPERTIES: @@ -167,7 +167,7 @@ def aggregation_group_type_index(self): def actor_query( self, limit_actors: Optional[bool] = True, - extra_fields: Optional[List[str]] = None, + extra_fields: Optional[list[str]] = None, ): if not self._filter.correlation_property_values: raise ValidationError("Property Correlation expects atleast one Property to get persons for") diff --git a/ee/clickhouse/queries/funnels/test/breakdown_cases.py b/ee/clickhouse/queries/funnels/test/breakdown_cases.py index f4fb2689d87b7..7a1b2076776d0 100644 --- a/ee/clickhouse/queries/funnels/test/breakdown_cases.py +++ b/ee/clickhouse/queries/funnels/test/breakdown_cases.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Any, Dict, List +from typing import Any from posthog.constants import INSIGHT_FUNNELS from posthog.models.filters import Filter @@ -51,8 +51,8 @@ def _create_groups(self): properties={"industry": "random"}, ) - def _assert_funnel_breakdown_result_is_correct(self, result, steps: List[FunnelStepResult]): - def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: + def _assert_funnel_breakdown_result_is_correct(self, result, steps: list[FunnelStepResult]): + def funnel_result(step: FunnelStepResult, order: int) -> dict[str, Any]: return { "action_id": step.name if step.type == "events" else step.action_id, "name": step.name, diff --git a/ee/clickhouse/queries/groups_join_query.py b/ee/clickhouse/queries/groups_join_query.py index db1d12a3c6c46..7a3dc46daf993 100644 --- a/ee/clickhouse/queries/groups_join_query.py +++ b/ee/clickhouse/queries/groups_join_query.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, Tuple, Union +from typing import 
Optional, Union from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer from posthog.models import Filter @@ -35,7 +35,7 @@ def __init__( self._join_key = join_key self._person_on_events_mode = person_on_events_mode - def get_join_query(self) -> Tuple[str, Dict]: + def get_join_query(self) -> tuple[str, dict]: join_queries, params = [], {} if self._person_on_events_mode != PersonsOnEventsMode.disabled and groups_on_events_querying_enabled(): @@ -63,7 +63,7 @@ def get_join_query(self) -> Tuple[str, Dict]: return "\n".join(join_queries), params - def get_filter_query(self, group_type_index: GroupTypeIndex) -> Tuple[str, Dict]: + def get_filter_query(self, group_type_index: GroupTypeIndex) -> tuple[str, dict]: var = f"group_index_{group_type_index}" params = { "team_id": self._team_id, diff --git a/ee/clickhouse/queries/paths/paths.py b/ee/clickhouse/queries/paths/paths.py index a5b9968da589e..f20744ee6729e 100644 --- a/ee/clickhouse/queries/paths/paths.py +++ b/ee/clickhouse/queries/paths/paths.py @@ -1,5 +1,5 @@ from re import escape -from typing import Dict, Literal, Optional, Tuple, Union, cast +from typing import Literal, Optional, Union, cast from jsonschema import ValidationError @@ -34,8 +34,8 @@ def __init__(self, filter: PathFilter, team: Team, funnel_filter: Optional[Filte ): raise ValidationError("Max Edge weight can't be lower than min edge weight") - def get_edge_weight_clause(self) -> Tuple[str, Dict]: - params: Dict[str, int] = {} + def get_edge_weight_clause(self) -> tuple[str, dict]: + params: dict[str, int] = {} conditions = [] @@ -60,8 +60,8 @@ def get_target_point_filter(self) -> str: else: return "" - def get_target_clause(self) -> Tuple[str, Dict]: - params: Dict[str, Union[str, None]] = { + def get_target_clause(self) -> tuple[str, dict]: + params: dict[str, Union[str, None]] = { "target_point": None, "secondary_target_point": None, } @@ -152,7 +152,7 @@ def get_array_compacting_function(self) -> Literal["arrayResize", "arraySlice"]: else: return "arraySlice" - def get_filtered_path_ordering(self) -> Tuple[str, ...]: + def get_filtered_path_ordering(self) -> tuple[str, ...]: fields_to_include = ["filtered_path", "filtered_timings"] + [ f"filtered_{field}s" for field in self.extra_event_fields_and_properties ] diff --git a/ee/clickhouse/queries/related_actors_query.py b/ee/clickhouse/queries/related_actors_query.py index 9c031a3b66221..e4cd462ace4f4 100644 --- a/ee/clickhouse/queries/related_actors_query.py +++ b/ee/clickhouse/queries/related_actors_query.py @@ -1,6 +1,6 @@ from datetime import timedelta from functools import cached_property -from typing import List, Optional, Union +from typing import Optional, Union from django.utils.timezone import now @@ -38,8 +38,8 @@ def __init__( self.group_type_index = validate_group_type_index("group_type_index", group_type_index) self.id = id - def run(self) -> List[SerializedActor]: - results: List[SerializedActor] = [] + def run(self) -> list[SerializedActor]: + results: list[SerializedActor] = [] results.extend(self._query_related_people()) for group_type_mapping in GroupTypeMapping.objects.filter(team_id=self.team.pk): results.extend(self._query_related_groups(group_type_mapping.group_type_index)) @@ -49,7 +49,7 @@ def run(self) -> List[SerializedActor]: def is_aggregating_by_groups(self) -> bool: return self.group_type_index is not None - def _query_related_people(self) -> List[SerializedPerson]: + def _query_related_people(self) -> list[SerializedPerson]: if not self.is_aggregating_by_groups: return 
[] @@ -72,7 +72,7 @@ def _query_related_people(self) -> List[SerializedPerson]: _, serialized_people = get_people(self.team, person_ids) return serialized_people - def _query_related_groups(self, group_type_index: GroupTypeIndex) -> List[SerializedGroup]: + def _query_related_groups(self, group_type_index: GroupTypeIndex) -> list[SerializedGroup]: if group_type_index == self.group_type_index: return [] @@ -102,7 +102,7 @@ def _query_related_groups(self, group_type_index: GroupTypeIndex) -> List[Serial _, serialize_groups = get_groups(self.team.pk, group_type_index, group_ids) return serialize_groups - def _take_first(self, rows: List) -> List: + def _take_first(self, rows: list) -> list: return [row[0] for row in rows] @property diff --git a/ee/clickhouse/queries/test/test_paths.py b/ee/clickhouse/queries/test/test_paths.py index fdaf25a043a6d..69f673e4489ca 100644 --- a/ee/clickhouse/queries/test/test_paths.py +++ b/ee/clickhouse/queries/test/test_paths.py @@ -1,5 +1,4 @@ from datetime import timedelta -from typing import Tuple from unittest.mock import MagicMock from uuid import UUID @@ -2905,7 +2904,7 @@ def test_start_and_end(self): @snapshot_clickhouse_queries def test_properties_queried_using_path_filter(self): - def should_query_list(filter) -> Tuple[bool, bool]: + def should_query_list(filter) -> tuple[bool, bool]: path_query = PathEventQuery(filter, self.team) return (path_query._should_query_url(), path_query._should_query_screen()) diff --git a/ee/clickhouse/views/experiments.py b/ee/clickhouse/views/experiments.py index f50b9921c926c..b37d4e4d765df 100644 --- a/ee/clickhouse/views/experiments.py +++ b/ee/clickhouse/views/experiments.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Optional +from typing import Any, Optional +from collections.abc import Callable from django.utils.timezone import now from rest_framework import serializers, viewsets diff --git a/ee/clickhouse/views/groups.py b/ee/clickhouse/views/groups.py index e539de4673d60..4c67072b11de2 100644 --- a/ee/clickhouse/views/groups.py +++ b/ee/clickhouse/views/groups.py @@ -1,5 +1,5 @@ from collections import defaultdict -from typing import Dict, List, cast +from typing import cast from django.db.models import Q from drf_spectacular.types import OpenApiTypes @@ -34,7 +34,7 @@ class ClickhouseGroupsTypesView(TeamAndOrgViewSetMixin, mixins.ListModelMixin, v @action(detail=False, methods=["PATCH"], name="Update group types metadata") def update_metadata(self, request: request.Request, *args, **kwargs): - for row in cast(List[Dict], request.data): + for row in cast(list[dict], request.data): instance = GroupTypeMapping.objects.get(team=self.team, group_type_index=row["group_type_index"]) serializer = self.get_serializer(instance, data=row) serializer.is_valid(raise_exception=True) diff --git a/ee/clickhouse/views/insights.py b/ee/clickhouse/views/insights.py index ff772b71aaef8..e6adf49e7ff9e 100644 --- a/ee/clickhouse/views/insights.py +++ b/ee/clickhouse/views/insights.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from rest_framework.decorators import action from rest_framework.permissions import SAFE_METHODS, BasePermission @@ -47,7 +47,7 @@ def funnel_correlation(self, request: Request, *args: Any, **kwargs: Any) -> Res return Response(result) @cached_by_filters - def calculate_funnel_correlation(self, request: Request) -> Dict[str, Any]: + def calculate_funnel_correlation(self, request: Request) -> dict[str, Any]: team = self.team filter = Filter(request=request, team=team) diff --git 
a/ee/clickhouse/views/person.py b/ee/clickhouse/views/person.py index d01dba65da928..f3f8432ad6871 100644 --- a/ee/clickhouse/views/person.py +++ b/ee/clickhouse/views/person.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Tuple +from typing import Optional from rest_framework import request, response from rest_framework.decorators import action @@ -28,7 +28,7 @@ def funnel_correlation(self, request: request.Request, **kwargs) -> response.Res @cached_by_filters def calculate_funnel_correlation_persons( self, request: request.Request - ) -> Dict[str, Tuple[List, Optional[str], Optional[str], int]]: + ) -> dict[str, tuple[list, Optional[str], Optional[str], int]]: filter = Filter(request=request, data={"insight": INSIGHT_FUNNELS}, team=self.team) if not filter.correlation_person_limit: filter = filter.shallow_clone({FUNNEL_CORRELATION_PERSON_LIMIT: 100}) diff --git a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py index 829232d1bd94f..f5ff3722008b8 100644 --- a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py +++ b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py @@ -552,15 +552,15 @@ def test_properties_correlation_endpoint_provides_people_drill_down_urls(self): ), ) - (browser_correlation,) = [ + (browser_correlation,) = ( correlation for correlation in odds["result"]["events"] if correlation["event"]["event"] == "$browser::1" - ] + ) - (notset_correlation,) = [ + (notset_correlation,) = ( correlation for correlation in odds["result"]["events"] if correlation["event"]["event"] == "$browser::" - ] + ) assert get_people_for_correlation_ok(client=self.client, correlation=browser_correlation) == { "success": ["Person 2"], diff --git a/ee/clickhouse/views/test/funnel/util.py b/ee/clickhouse/views/test/funnel/util.py index 8d2c304cb8b4c..45984ee41ba29 100644 --- a/ee/clickhouse/views/test/funnel/util.py +++ b/ee/clickhouse/views/test/funnel/util.py @@ -1,5 +1,5 @@ import dataclasses -from typing import Any, Dict, Literal, Optional, TypedDict, Union +from typing import Any, Literal, Optional, TypedDict, Union from django.test.client import Client @@ -12,7 +12,7 @@ class EventPattern(TypedDict, total=False): id: str type: Union[Literal["events"], Literal["actions"]] order: int - properties: Dict[str, Any] + properties: dict[str, Any] @dataclasses.dataclass @@ -46,7 +46,7 @@ def get_funnel(client: Client, team_id: int, request: FunnelRequest): ) -def get_funnel_ok(client: Client, team_id: int, request: FunnelRequest) -> Dict[str, Any]: +def get_funnel_ok(client: Client, team_id: int, request: FunnelRequest) -> dict[str, Any]: response = get_funnel(client=client, team_id=team_id, request=request) assert response.status_code == 200, response.content @@ -73,14 +73,14 @@ def get_funnel_correlation(client: Client, team_id: int, request: FunnelCorrelat ) -def get_funnel_correlation_ok(client: Client, team_id: int, request: FunnelCorrelationRequest) -> Dict[str, Any]: +def get_funnel_correlation_ok(client: Client, team_id: int, request: FunnelCorrelationRequest) -> dict[str, Any]: response = get_funnel_correlation(client=client, team_id=team_id, request=request) assert response.status_code == 200, response.content return response.json() -def get_people_for_correlation_ok(client: Client, correlation: EventOddsRatioSerialized) -> Dict[str, Any]: +def get_people_for_correlation_ok(client: Client, correlation: EventOddsRatioSerialized) -> dict[str, Any]: """ 
Helper for getting people for a correlation. Note we keep checking to just inclusion of name, to make the stable to changes in other people props. diff --git a/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py b/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py index 232312ec6449f..e7f9ebf7e2c3e 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any from flaky import flaky @@ -7,7 +7,7 @@ from posthog.test.base import ClickhouseTestMixin, snapshot_clickhouse_queries from posthog.test.test_journeys import journeys_for -DEFAULT_JOURNEYS_FOR_PAYLOAD: Dict[str, List[Dict[str, Any]]] = { +DEFAULT_JOURNEYS_FOR_PAYLOAD: dict[str, list[dict[str, Any]]] = { # For a trend pageview metric "person1": [ { diff --git a/ee/clickhouse/views/test/test_clickhouse_retention.py b/ee/clickhouse/views/test/test_clickhouse_retention.py index 0e5a8ad0fafdf..5deff716a2658 100644 --- a/ee/clickhouse/views/test/test_clickhouse_retention.py +++ b/ee/clickhouse/views/test/test_clickhouse_retention.py @@ -1,5 +1,5 @@ from dataclasses import asdict, dataclass -from typing import List, Literal, Optional, TypedDict, Union +from typing import Literal, Optional, TypedDict, Union from django.test.client import Client @@ -719,10 +719,10 @@ class RetentionRequest: period: Union[Literal["Hour"], Literal["Day"], Literal["Week"], Literal["Month"]] retention_type: Literal["retention_first_time", "retention"] # probably not an exhaustive list - breakdowns: Optional[List[Breakdown]] = None + breakdowns: Optional[list[Breakdown]] = None breakdown_type: Optional[Literal["person", "event"]] = None - properties: Optional[List[PropertyFilter]] = None + properties: Optional[list[PropertyFilter]] = None filter_test_accounts: Optional[str] = None limit: Optional[int] = None @@ -734,26 +734,26 @@ class Value(TypedDict): class Cohort(TypedDict): - values: List[Value] + values: list[Value] date: str label: str class RetentionResponse(TypedDict): - result: List[Cohort] + result: list[Cohort] class Person(TypedDict): - distinct_ids: List[str] + distinct_ids: list[str] class RetentionTableAppearance(TypedDict): person: Person - appearances: List[int] + appearances: list[int] class RetentionTablePeopleResponse(TypedDict): - result: List[RetentionTableAppearance] + result: list[RetentionTableAppearance] def get_retention_ok(client: Client, team_id: int, request: RetentionRequest) -> RetentionResponse: diff --git a/ee/clickhouse/views/test/test_clickhouse_trends.py b/ee/clickhouse/views/test/test_clickhouse_trends.py index 8ce3809263a4b..4de1f00e53401 100644 --- a/ee/clickhouse/views/test/test_clickhouse_trends.py +++ b/ee/clickhouse/views/test/test_clickhouse_trends.py @@ -1,7 +1,7 @@ import json from dataclasses import dataclass, field from datetime import datetime -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from unittest.case import skip from unittest.mock import ANY @@ -420,20 +420,20 @@ class TrendsRequest: insight: Optional[str] = None display: Optional[str] = None compare: Optional[bool] = None - events: List[Dict[str, Any]] = field(default_factory=list) - properties: List[Dict[str, Any]] = field(default_factory=list) + events: list[dict[str, Any]] = field(default_factory=list) + properties: list[dict[str, Any]] = field(default_factory=list) smoothing_intervals: 
Optional[int] = 1 refresh: Optional[bool] = False @dataclass class TrendsRequestBreakdown(TrendsRequest): - breakdown: Optional[Union[List[int], str]] = None + breakdown: Optional[Union[list[int], str]] = None breakdown_type: Optional[str] = None def get_trends(client, request: Union[TrendsRequestBreakdown, TrendsRequest], team: Team): - data: Dict[str, Any] = { + data: dict[str, Any] = { "date_from": request.date_from, "date_to": request.date_to, "interval": request.interval, @@ -471,7 +471,7 @@ class NormalizedTrendResult: def get_trends_time_series_ok( client: Client, request: TrendsRequest, team: Team, with_order: bool = False -) -> Dict[str, Dict[str, NormalizedTrendResult]]: +) -> dict[str, dict[str, NormalizedTrendResult]]: data = get_trends_ok(client=client, request=request, team=team) res = {} for item in data["result"]: @@ -491,7 +491,7 @@ def get_trends_time_series_ok( return res -def get_trends_aggregate_ok(client: Client, request: TrendsRequest, team: Team) -> Dict[str, NormalizedTrendResult]: +def get_trends_aggregate_ok(client: Client, request: TrendsRequest, team: Team) -> dict[str, NormalizedTrendResult]: data = get_trends_ok(client=client, request=request, team=team) res = {} for item in data["result"]: diff --git a/ee/frontend/mobile-replay/schema/web/rr-web-schema.json b/ee/frontend/mobile-replay/schema/web/rr-web-schema.json index 1e0d7032e4534..4f6d3bb709302 100644 --- a/ee/frontend/mobile-replay/schema/web/rr-web-schema.json +++ b/ee/frontend/mobile-replay/schema/web/rr-web-schema.json @@ -617,6 +617,9 @@ "id": { "type": "number" }, + "loop": { + "type": "boolean" + }, "muted": { "type": "boolean" }, diff --git a/ee/migrations/0001_initial.py b/ee/migrations/0001_initial.py index fd3cad3892708..5b668bc772b6a 100644 --- a/ee/migrations/0001_initial.py +++ b/ee/migrations/0001_initial.py @@ -1,6 +1,5 @@ # Generated by Django 3.0.7 on 2020-08-07 09:15 -from typing import List from django.db import migrations, models @@ -8,7 +7,7 @@ class Migration(migrations.Migration): initial = True - dependencies: List = [] + dependencies: list = [] operations = [ migrations.CreateModel( diff --git a/ee/migrations/0012_migrate_tags_v2.py b/ee/migrations/0012_migrate_tags_v2.py index 9a2cf8e3d39c4..540cd281338d4 100644 --- a/ee/migrations/0012_migrate_tags_v2.py +++ b/ee/migrations/0012_migrate_tags_v2.py @@ -1,5 +1,5 @@ # Generated by Django 3.2.5 on 2022-03-02 22:44 -from typing import Any, List, Tuple +from typing import Any from django.core.paginator import Paginator from django.db import migrations @@ -19,7 +19,7 @@ def forwards(apps, schema_editor): EnterpriseEventDefinition = apps.get_model("ee", "EnterpriseEventDefinition") EnterprisePropertyDefinition = apps.get_model("ee", "EnterprisePropertyDefinition") - createables: List[Tuple[Any, Any]] = [] + createables: list[tuple[Any, Any]] = [] batch_size = 1_000 # Collect event definition tags and taggeditems diff --git a/ee/models/license.py b/ee/models/license.py index f0e12d3d2f440..35530b89687ac 100644 --- a/ee/models/license.py +++ b/ee/models/license.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from django.contrib.auth import get_user_model from django.db import models @@ -85,7 +85,7 @@ class License(models.Model): PLAN_TO_SORTING_VALUE = {SCALE_PLAN: 10, ENTERPRISE_PLAN: 20} @property - def available_features(self) -> List[AvailableFeature]: + def available_features(self) -> list[AvailableFeature]: return self.PLANS.get(self.plan, []) @property diff --git 
a/ee/session_recordings/ai/embeddings_queries.py b/ee/session_recordings/ai/embeddings_queries.py index 6d657d111096d..2034a9f190152 100644 --- a/ee/session_recordings/ai/embeddings_queries.py +++ b/ee/session_recordings/ai/embeddings_queries.py @@ -1,6 +1,5 @@ from django.conf import settings -from typing import List from posthog.models import Team from posthog.clickhouse.client import sync_execute @@ -9,7 +8,7 @@ MIN_DURATION_INCLUDE_SECONDS = settings.REPLAY_EMBEDDINGS_MIN_DURATION_SECONDS -def fetch_errors_by_session_without_embeddings(team_id: int, offset=0) -> List[str]: +def fetch_errors_by_session_without_embeddings(team_id: int, offset=0) -> list[str]: query = """ WITH embedded_sessions AS ( SELECT @@ -47,7 +46,7 @@ def fetch_errors_by_session_without_embeddings(team_id: int, offset=0) -> List[s ) -def fetch_recordings_without_embeddings(team_id: int, offset=0) -> List[str]: +def fetch_recordings_without_embeddings(team_id: int, offset=0) -> list[str]: team = Team.objects.get(id=team_id) query = """ diff --git a/ee/session_recordings/ai/embeddings_runner.py b/ee/session_recordings/ai/embeddings_runner.py index 101c7175acb61..413e9f45368fe 100644 --- a/ee/session_recordings/ai/embeddings_runner.py +++ b/ee/session_recordings/ai/embeddings_runner.py @@ -3,7 +3,7 @@ import datetime import pytz -from typing import Dict, Any, List, Tuple +from typing import Any from abc import ABC, abstractmethod from prometheus_client import Histogram, Counter @@ -88,7 +88,7 @@ class EmbeddingPreparation(ABC): @staticmethod @abstractmethod - def prepare(item, team) -> Tuple[str, str]: + def prepare(item, team) -> tuple[str, str]: raise NotImplementedError() @@ -100,7 +100,7 @@ def __init__(self, team: Team): self.team = team self.openai_client = OpenAI() - def run(self, items: List[Any], embeddings_preparation: type[EmbeddingPreparation]) -> None: + def run(self, items: list[Any], embeddings_preparation: type[EmbeddingPreparation]) -> None: source_type = embeddings_preparation.source_type try: @@ -196,7 +196,7 @@ def _num_tokens_for_input(self, string: str) -> int: """Returns the number of tokens in a text string.""" return len(encoding.encode(string)) - def _flush_embeddings_to_clickhouse(self, embeddings: List[Dict[str, Any]], source_type: str) -> None: + def _flush_embeddings_to_clickhouse(self, embeddings: list[dict[str, Any]], source_type: str) -> None: try: sync_execute( "INSERT INTO session_replay_embeddings (session_id, team_id, embeddings, source_type, input) VALUES", @@ -213,7 +213,7 @@ class ErrorEmbeddingsPreparation(EmbeddingPreparation): source_type = "error" @staticmethod - def prepare(item: Tuple[str, str], _): + def prepare(item: tuple[str, str], _): session_id = item[0] error_message = item[1] return session_id, error_message @@ -286,7 +286,7 @@ def prepare(session_id: str, team: Team): return session_id, input @staticmethod - def _compact_result(event_name: str, current_url: int, elements_chain: Dict[str, str] | str) -> str: + def _compact_result(event_name: str, current_url: int, elements_chain: dict[str, str] | str) -> str: elements_string = ( elements_chain if isinstance(elements_chain, str) else ", ".join(str(e) for e in elements_chain) ) diff --git a/ee/session_recordings/ai/utils.py b/ee/session_recordings/ai/utils.py index a1d5f31460de0..1b7770a136128 100644 --- a/ee/session_recordings/ai/utils.py +++ b/ee/session_recordings/ai/utils.py @@ -1,7 +1,7 @@ import dataclasses from datetime import datetime -from typing import List, Dict, Any +from typing import Any from 
posthog.models.element import chain_to_elements from hashlib import shake_256 @@ -12,11 +12,11 @@ class SessionSummaryPromptData: # we may allow customisation of columns included in the future, # and we alter the columns present as we process the data # so want to stay as loose as possible here - columns: List[str] = dataclasses.field(default_factory=list) - results: List[List[Any]] = dataclasses.field(default_factory=list) + columns: list[str] = dataclasses.field(default_factory=list) + results: list[list[Any]] = dataclasses.field(default_factory=list) # in order to reduce the number of tokens in the prompt # we replace URLs with a placeholder and then pass this mapping of placeholder to URL into the prompt - url_mapping: Dict[str, str] = dataclasses.field(default_factory=dict) + url_mapping: dict[str, str] = dataclasses.field(default_factory=dict) def is_empty(self) -> bool: return not self.columns or not self.results @@ -63,7 +63,7 @@ def simplify_window_id(session_events: SessionSummaryPromptData) -> SessionSumma # find window_id column index window_id_index = session_events.column_index("$window_id") - window_id_mapping: Dict[str, int] = {} + window_id_mapping: dict[str, int] = {} simplified_results = [] for result in session_events.results: if window_id_index is None: @@ -128,7 +128,7 @@ def deduplicate_urls(session_events: SessionSummaryPromptData) -> SessionSummary # find url column index url_index = session_events.column_index("$current_url") - url_mapping: Dict[str, str] = {} + url_mapping: dict[str, str] = {} deduplicated_results = [] for result in session_events.results: if url_index is None: diff --git a/ee/session_recordings/queries/test/test_session_recording_list_from_session_replay.py b/ee/session_recordings/queries/test/test_session_recording_list_from_session_replay.py index 71196ec0ecadc..797ac453e69e0 100644 --- a/ee/session_recordings/queries/test/test_session_recording_list_from_session_replay.py +++ b/ee/session_recordings/queries/test/test_session_recording_list_from_session_replay.py @@ -1,5 +1,4 @@ from itertools import product -from typing import Dict from unittest import mock from uuid import uuid4 @@ -131,7 +130,7 @@ def test_effect_of_poe_settings_on_query_generated( poe_v2: bool, allow_denormalized_props: bool, expected_poe_mode: PersonsOnEventsMode, - expected_query_params: Dict, + expected_query_params: dict, unmaterialized_person_column_used: bool, materialized_event_column_used: bool, ) -> None: diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py index a54f8e38a6bdd..7d2b9fe0b0cb2 100644 --- a/ee/session_recordings/session_recording_playlist.py +++ b/ee/session_recordings/session_recording_playlist.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict, List, Optional +from typing import Any, Optional import structlog from django.db.models import Q, QuerySet @@ -49,7 +49,7 @@ def log_playlist_activity( team_id: int, user: User, was_impersonated: bool, - changes: Optional[List[Change]] = None, + changes: Optional[list[Change]] = None, ) -> None: """ Insight id and short_id are passed separately as some activities (like delete) alter the Insight instance @@ -101,7 +101,7 @@ class Meta: created_by = UserBasicSerializer(read_only=True) last_modified_by = UserBasicSerializer(read_only=True) - def create(self, validated_data: Dict, *args, **kwargs) -> SessionRecordingPlaylist: + def create(self, validated_data: dict, *args, **kwargs) -> SessionRecordingPlaylist: request = 
self.context["request"] team = self.context["get_team"]() @@ -128,7 +128,7 @@ def create(self, validated_data: Dict, *args, **kwargs) -> SessionRecordingPlayl return playlist - def update(self, instance: SessionRecordingPlaylist, validated_data: Dict, **kwargs) -> SessionRecordingPlaylist: + def update(self, instance: SessionRecordingPlaylist, validated_data: dict, **kwargs) -> SessionRecordingPlaylist: try: before_update = SessionRecordingPlaylist.objects.get(pk=instance.id) except SessionRecordingPlaylist.DoesNotExist: diff --git a/ee/session_recordings/test/test_session_recording_extensions.py b/ee/session_recordings/test/test_session_recording_extensions.py index 35fd5d2bc8b7a..ad545e5cec33f 100644 --- a/ee/session_recordings/test/test_session_recording_extensions.py +++ b/ee/session_recordings/test/test_session_recording_extensions.py @@ -103,7 +103,7 @@ def test_persists_recording_from_blob_ingested_storage(self): for file in ["a", "b", "c"]: blob_path = f"{TEST_BUCKET}/team_id/{self.team.pk}/session_id/{session_id}/data" file_name = f"{blob_path}/{file}" - write(file_name, f"my content-{file}".encode("utf-8")) + write(file_name, f"my content-{file}".encode()) recording: SessionRecording = SessionRecording.objects.create(team=self.team, session_id=session_id) @@ -164,7 +164,7 @@ def test_can_save_content_to_new_location(self, mock_write: MagicMock): mock_write.assert_called_with( f"{expected_path}/12345000-12346000", - gzip.compress("the new content".encode("utf-8")), + gzip.compress(b"the new content"), extras={ "ContentEncoding": "gzip", "ContentType": "application/json", diff --git a/ee/settings.py b/ee/settings.py index 7342bdf98f987..d9a863c3f816b 100644 --- a/ee/settings.py +++ b/ee/settings.py @@ -3,14 +3,13 @@ """ import os -from typing import Dict, List from posthog.settings import AUTHENTICATION_BACKENDS, DEMO, SITE_URL, DEBUG from posthog.settings.utils import get_from_env from posthog.utils import str_to_bool # Zapier REST hooks -HOOK_EVENTS: Dict[str, str] = { +HOOK_EVENTS: dict[str, str] = { # "event_name": "App.Model.Action" (created/updated/deleted) "action_performed": "posthog.Action.performed", } @@ -43,7 +42,7 @@ SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.getenv("SOCIAL_AUTH_GOOGLE_OAUTH2_KEY") SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.getenv("SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET") if "SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS" in os.environ: - SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: List[str] = os.environ[ + SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: list[str] = os.environ[ "SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS" ].split(",") elif DEMO: diff --git a/ee/tasks/auto_rollback_feature_flag.py b/ee/tasks/auto_rollback_feature_flag.py index d1b7e606976a6..f676f91d0c4bf 100644 --- a/ee/tasks/auto_rollback_feature_flag.py +++ b/ee/tasks/auto_rollback_feature_flag.py @@ -1,5 +1,4 @@ from datetime import datetime, timedelta -from typing import Dict from zoneinfo import ZoneInfo from celery import shared_task @@ -30,7 +29,7 @@ def check_feature_flag_rollback_conditions(feature_flag_id: int) -> None: flag.save() -def calculate_rolling_average(threshold_metric: Dict, team: Team, timezone: str) -> float: +def calculate_rolling_average(threshold_metric: dict, team: Team, timezone: str) -> float: curr = datetime.now(tz=ZoneInfo(timezone)) rolling_average_days = 7 @@ -54,7 +53,7 @@ def calculate_rolling_average(threshold_metric: Dict, team: Team, timezone: str) return sum(data) / rolling_average_days -def check_condition(rollback_condition: Dict, feature_flag: FeatureFlag) 
-> bool: +def check_condition(rollback_condition: dict, feature_flag: FeatureFlag) -> bool: if rollback_condition["threshold_type"] == "sentry": created_date = feature_flag.created_at base_start_date = created_date.strftime("%Y-%m-%dT%H:%M:%S") diff --git a/ee/tasks/replay.py b/ee/tasks/replay.py index 036925b279a91..fcf57196c2dc5 100644 --- a/ee/tasks/replay.py +++ b/ee/tasks/replay.py @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any import structlog from celery import shared_task @@ -25,7 +25,7 @@ # we currently are allowed 500 calls per minute, so let's rate limit each worker # to much less than that @shared_task(ignore_result=False, queue=CeleryQueue.SESSION_REPLAY_EMBEDDINGS.value, rate_limit="75/m") -def embed_batch_of_recordings_task(recordings: List[Any], team_id: int) -> None: +def embed_batch_of_recordings_task(recordings: list[Any], team_id: int) -> None: try: team = Team.objects.get(id=team_id) runner = SessionEmbeddingsRunner(team=team) diff --git a/ee/tasks/slack.py b/ee/tasks/slack.py index 0137089b08bab..251e9fd26138b 100644 --- a/ee/tasks/slack.py +++ b/ee/tasks/slack.py @@ -1,5 +1,5 @@ import re -from typing import Any, Dict +from typing import Any from urllib.parse import urlparse import structlog @@ -16,7 +16,7 @@ SHARED_LINK_REGEX = r"\/(?:shared_dashboard|shared|embedded)\/(.+)" -def _block_for_asset(asset: ExportedAsset) -> Dict: +def _block_for_asset(asset: ExportedAsset) -> dict: image_url = asset.get_public_content_url() alt_text = None if asset.insight: diff --git a/ee/tasks/subscriptions/email_subscriptions.py b/ee/tasks/subscriptions/email_subscriptions.py index aa62b7d83a4e0..39e342bcec1dd 100644 --- a/ee/tasks/subscriptions/email_subscriptions.py +++ b/ee/tasks/subscriptions/email_subscriptions.py @@ -1,5 +1,5 @@ import uuid -from typing import List, Optional +from typing import Optional import structlog @@ -15,7 +15,7 @@ def send_email_subscription_report( email: str, subscription: Subscription, - assets: List[ExportedAsset], + assets: list[ExportedAsset], invite_message: Optional[str] = None, total_asset_count: Optional[int] = None, ) -> None: diff --git a/ee/tasks/subscriptions/slack_subscriptions.py b/ee/tasks/subscriptions/slack_subscriptions.py index 1d35259a6f3c4..73643c7a97bbd 100644 --- a/ee/tasks/subscriptions/slack_subscriptions.py +++ b/ee/tasks/subscriptions/slack_subscriptions.py @@ -1,5 +1,3 @@ -from typing import Dict, List - import structlog from django.conf import settings @@ -12,7 +10,7 @@ UTM_TAGS_BASE = "utm_source=posthog&utm_campaign=subscription_report" -def _block_for_asset(asset: ExportedAsset) -> Dict: +def _block_for_asset(asset: ExportedAsset) -> dict: image_url = asset.get_public_content_url() alt_text = None if asset.insight: @@ -26,7 +24,7 @@ def _block_for_asset(asset: ExportedAsset) -> Dict: def send_slack_subscription_report( subscription: Subscription, - assets: List[ExportedAsset], + assets: list[ExportedAsset], total_asset_count: int, is_new_subscription: bool = False, ) -> None: diff --git a/ee/tasks/subscriptions/subscription_utils.py b/ee/tasks/subscriptions/subscription_utils.py index d89d73d4a3b40..6fa4b63960fc2 100644 --- a/ee/tasks/subscriptions/subscription_utils.py +++ b/ee/tasks/subscriptions/subscription_utils.py @@ -1,5 +1,5 @@ import datetime -from typing import List, Tuple, Union +from typing import Union from django.conf import settings import structlog from celery import chain @@ -28,7 +28,7 @@ def generate_assets( resource: Union[Subscription, SharingConfiguration], max_asset_count: 
int = DEFAULT_MAX_ASSET_COUNT, -) -> Tuple[List[Insight], List[ExportedAsset]]: +) -> tuple[list[Insight], list[ExportedAsset]]: with SUBSCRIPTION_ASSET_GENERATION_TIMER.time(): if resource.dashboard: tiles = get_tiles_ordered_by_position(resource.dashboard) diff --git a/ee/tasks/test/subscriptions/test_subscriptions.py b/ee/tasks/test/subscriptions/test_subscriptions.py index d6afe50b68f7f..c814b2a4ebc18 100644 --- a/ee/tasks/test/subscriptions/test_subscriptions.py +++ b/ee/tasks/test/subscriptions/test_subscriptions.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import List from unittest.mock import MagicMock, call, patch from zoneinfo import ZoneInfo @@ -25,10 +24,10 @@ @patch("ee.tasks.subscriptions.generate_assets") @freeze_time("2022-02-02T08:55:00.000Z") class TestSubscriptionsTasks(APIBaseTest): - subscriptions: List[Subscription] = None # type: ignore + subscriptions: list[Subscription] = None # type: ignore dashboard: Dashboard insight: Insight - tiles: List[DashboardTile] = None # type: ignore + tiles: list[DashboardTile] = None # type: ignore asset: ExportedAsset def setUp(self) -> None: diff --git a/ee/tasks/test/subscriptions/test_subscriptions_utils.py b/ee/tasks/test/subscriptions/test_subscriptions_utils.py index c8ff89adcea65..edab23bbfb9ed 100644 --- a/ee/tasks/test/subscriptions/test_subscriptions_utils.py +++ b/ee/tasks/test/subscriptions/test_subscriptions_utils.py @@ -1,4 +1,3 @@ -from typing import List from unittest.mock import MagicMock, patch import pytest @@ -21,7 +20,7 @@ class TestSubscriptionsTasksUtils(APIBaseTest): dashboard: Dashboard insight: Insight asset: ExportedAsset - tiles: List[DashboardTile] + tiles: list[DashboardTile] def setUp(self) -> None: self.dashboard = Dashboard.objects.create(team=self.team, name="private dashboard", created_by=self.user) diff --git a/ee/tasks/test/test_slack.py b/ee/tasks/test/test_slack.py index 03b28b8155cfe..64b227d7d1e64 100644 --- a/ee/tasks/test/test_slack.py +++ b/ee/tasks/test/test_slack.py @@ -1,4 +1,3 @@ -from typing import List from unittest.mock import MagicMock, patch from freezegun import freeze_time @@ -14,7 +13,7 @@ from posthog.test.base import APIBaseTest -def create_mock_unfurl_event(team_id: str, links: List[str]): +def create_mock_unfurl_event(team_id: str, links: list[str]): return { "token": "XXYYZZ", "team_id": team_id, diff --git a/ee/urls.py b/ee/urls.py index a3851a2807583..2ee3f7d3a8fc0 100644 --- a/ee/urls.py +++ b/ee/urls.py @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any from django.conf import settings from django.contrib import admin @@ -92,7 +92,7 @@ def extend_api_router( ) -urlpatterns: List[Any] = [ +urlpatterns: list[Any] = [ path("api/saml/metadata/", authentication.saml_metadata_view), path("api/sentry_stats/", sentry_stats.sentry_stats), *admin_urlpatterns, diff --git a/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--dark.png b/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--dark.png index a820c399ecbf2..f427261f0c801 100644 Binary files a/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--light.png b/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--light.png index 1276a2564f958..fa79b1b313e1d 100644 Binary files 
a/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--light.png and b/frontend/__snapshots__/exporter-exporter--funnel-historical-trends-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--lifecycle-insight--dark.png b/frontend/__snapshots__/exporter-exporter--lifecycle-insight--dark.png index 018ad3409a7dd..4bcaf72e57489 100644 Binary files a/frontend/__snapshots__/exporter-exporter--lifecycle-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--lifecycle-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--lifecycle-insight--light.png b/frontend/__snapshots__/exporter-exporter--lifecycle-insight--light.png index c04fa37aa98b0..09e1fc225e528 100644 Binary files a/frontend/__snapshots__/exporter-exporter--lifecycle-insight--light.png and b/frontend/__snapshots__/exporter-exporter--lifecycle-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--stickiness-insight--dark.png b/frontend/__snapshots__/exporter-exporter--stickiness-insight--dark.png index f4cbf9995b82f..6d713310d5c2a 100644 Binary files a/frontend/__snapshots__/exporter-exporter--stickiness-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--stickiness-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--stickiness-insight--light.png b/frontend/__snapshots__/exporter-exporter--stickiness-insight--light.png index dc435f2bfd38a..6e6eacea85cc2 100644 Binary files a/frontend/__snapshots__/exporter-exporter--stickiness-insight--light.png and b/frontend/__snapshots__/exporter-exporter--stickiness-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--dark.png index ab7c91b7a4d47..ea56d0f8417e4 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--light.png index 31b3bd7c73118..48b922e0e9508 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-area-breakdown-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-area-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-area-insight--dark.png index 95eb72a15a0d8..2fe7512609975 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-area-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-area-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-area-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-area-insight--light.png index 825c9393c2b22..fe83d8ef48dac 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-area-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-area-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--dark.png index 583ea3c112f90..43cd39dcf89c1 100644 Binary files 
a/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--light.png index fbdde64ab4043..2ecf596d36987 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-bar-breakdown-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-bar-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-bar-insight--dark.png index 95eb72a15a0d8..2fe7512609975 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-bar-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-bar-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-bar-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-bar-insight--light.png index 825c9393c2b22..fe83d8ef48dac 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-bar-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-bar-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--dark.png index 583ea3c112f90..43cd39dcf89c1 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--light.png index fbdde64ab4043..2ecf596d36987 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-line-breakdown-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-line-insight--dark.png index 95eb72a15a0d8..2fe7512609975 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-line-insight--light.png index 825c9393c2b22..fe83d8ef48dac 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--dark.png index ffd8771048ebc..5f4585a4936c4 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--light.png index a1a007fc01ea6..7c7ebbeb0b391 100644 Binary files 
a/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-line-multi-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--dark.png index 7a9b1e69e025b..509277fbeaeeb 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--light.png index cec2a1255413a..30b972e4c0943 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-value-breakdown-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-value-insight--dark.png b/frontend/__snapshots__/exporter-exporter--trends-value-insight--dark.png index 95eb72a15a0d8..2fe7512609975 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-value-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-value-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-value-insight--light.png b/frontend/__snapshots__/exporter-exporter--trends-value-insight--light.png index 825c9393c2b22..fe83d8ef48dac 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-value-insight--light.png and b/frontend/__snapshots__/exporter-exporter--trends-value-insight--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--from-today--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--from-today--dark.png new file mode 100644 index 0000000000000..56fb681c1295e Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--from-today--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--from-today--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--from-today--light.png new file mode 100644 index 0000000000000..732914a5c71ea Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--from-today--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--only-allow-upcoming--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--only-allow-upcoming--dark.png new file mode 100644 index 0000000000000..56fb681c1295e Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--only-allow-upcoming--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--only-allow-upcoming--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--only-allow-upcoming--light.png new file mode 100644 index 0000000000000..732914a5c71ea Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--only-allow-upcoming--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--show-time--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--show-time--dark.png new file mode 100644 index 0000000000000..92619d1b50c02 Binary files /dev/null and 
b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--show-time--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--show-time--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--show-time--light.png new file mode 100644 index 0000000000000..b2f945f37289c Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--show-time--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--dark.png index 86e2d8c764b82..280afb86c352c 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--light.png index 4a6a3820eeb07..03ac0e2a2ffdc 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--lemon-calendar-select--light.png differ diff --git a/frontend/__snapshots__/replay-components-propertyicons--android-recording--dark.png b/frontend/__snapshots__/replay-components-propertyicons--android-recording--dark.png index 1e89f87950222..330d3bd5de11d 100644 Binary files a/frontend/__snapshots__/replay-components-propertyicons--android-recording--dark.png and b/frontend/__snapshots__/replay-components-propertyicons--android-recording--dark.png differ diff --git a/frontend/__snapshots__/replay-components-propertyicons--android-recording--light.png b/frontend/__snapshots__/replay-components-propertyicons--android-recording--light.png index e714c6f371d7f..d964d3cf036f8 100644 Binary files a/frontend/__snapshots__/replay-components-propertyicons--android-recording--light.png and b/frontend/__snapshots__/replay-components-propertyicons--android-recording--light.png differ diff --git a/frontend/__snapshots__/replay-components-propertyicons--loading--dark.png b/frontend/__snapshots__/replay-components-propertyicons--loading--dark.png index 8a404e2a4678f..2e57ae5e23fcb 100644 Binary files a/frontend/__snapshots__/replay-components-propertyicons--loading--dark.png and b/frontend/__snapshots__/replay-components-propertyicons--loading--dark.png differ diff --git a/frontend/__snapshots__/replay-components-propertyicons--loading--light.png b/frontend/__snapshots__/replay-components-propertyicons--loading--light.png index 272b11050ae7b..47664e292068d 100644 Binary files a/frontend/__snapshots__/replay-components-propertyicons--loading--light.png and b/frontend/__snapshots__/replay-components-propertyicons--loading--light.png differ diff --git a/frontend/__snapshots__/replay-components-propertyicons--web-recording--dark.png b/frontend/__snapshots__/replay-components-propertyicons--web-recording--dark.png index 16805c539b8be..8c5c98645af78 100644 Binary files a/frontend/__snapshots__/replay-components-propertyicons--web-recording--dark.png and b/frontend/__snapshots__/replay-components-propertyicons--web-recording--dark.png differ diff --git a/frontend/__snapshots__/replay-components-propertyicons--web-recording--light.png 
b/frontend/__snapshots__/replay-components-propertyicons--web-recording--light.png index f84bfb91ffc82..2b58e688291d9 100644 Binary files a/frontend/__snapshots__/replay-components-propertyicons--web-recording--light.png and b/frontend/__snapshots__/replay-components-propertyicons--web-recording--light.png differ diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png index 47bf7ab05f463..2fe69211c78a0 100644 Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png differ diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png index b8636e803a367..96602a314f7aa 100644 Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--dark.png b/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--dark.png index 7e63dcc4450bd..2f638baa35bac 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--dark.png and b/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--light.png b/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--light.png index ad59b116fccc7..46e10c41687f5 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--light.png and b/frontend/__snapshots__/scenes-app-feature-flags--edit-feature-flag--light.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--dark.png b/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--dark.png index 97fb2a7698f4c..413bb10e14a0d 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--dark.png and b/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--light.png b/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--light.png index f005ed0bf07c5..b6fe07b5c0eab 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--light.png and b/frontend/__snapshots__/scenes-app-feature-flags--edit-multi-variate-feature-flag--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png index 729faab2b5537..6fd194e19ee1c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png index afb4a88922ef0..54556b7f7d7e4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png and 
b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png index 3bef2b27e6bd8..885f2a5702c0a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png index 52ffe7a338bca..169ad1e1093be 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png index f575e7615f929..fcc49da4ac9f8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png index d60541174dfda..a899188e78de2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png index 9c94b509734fc..2628564c68138 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png index 11c5cd31a9701..fe530f50f66ff 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png index db33b25c6b98d..54b5fe58b05f5 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png index 95c52c2f024ca..6588de25fb805 100644 Binary files 
a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-destination--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-destination--dark.png new file mode 100644 index 0000000000000..f426a5b73a22f Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-destination--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-destination--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-destination--light.png new file mode 100644 index 0000000000000..62ba2ef78de87 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-destination--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-transformation--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-transformation--dark.png new file mode 100644 index 0000000000000..577feadb27baa Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-transformation--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-transformation--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-transformation--light.png new file mode 100644 index 0000000000000..00395effc0840 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-transformation--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-v-2--light.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-v-2--light.png index f23ba81faf042..54eef32ed150f 100644 Binary files a/frontend/__snapshots__/scenes-other-billing-v2--billing-v-2--light.png and b/frontend/__snapshots__/scenes-other-billing-v2--billing-v-2--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png index 7e50137a13381..02b1fdf3d65c3 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png index fca36f690b5b4..ae4d45535e16d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ diff --git a/frontend/__snapshots__/scenes-other-signup--cloud--dark.png b/frontend/__snapshots__/scenes-other-signup--cloud--dark.png index 3812da12f2360..0551ac7e4e34f 100644 Binary files a/frontend/__snapshots__/scenes-other-signup--cloud--dark.png and b/frontend/__snapshots__/scenes-other-signup--cloud--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-signup--cloud--light.png b/frontend/__snapshots__/scenes-other-signup--cloud--light.png index a16b63058f6fd..3c8a578331453 100644 Binary files a/frontend/__snapshots__/scenes-other-signup--cloud--light.png and b/frontend/__snapshots__/scenes-other-signup--cloud--light.png differ diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png b/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png 
index 4070b44e89d74..f5fb1a77065fc 100644 Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png b/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png index 603977e87add8..2f0d1306f40e2 100644 Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png differ diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png index cdb034b88aba3..9ebb88b1f5c0c 100644 Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png index 5ecd2c576c64d..aa455d3b66c3c 100644 Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png index 0724cce928ed6..ca9cb5bc87861 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png index 72c8963740f72..1cefef9bc27f8 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png index 53dd61dfec0b7..dc8141139bfb4 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png index 1480c76289b9d..a02e8534103ef 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png differ diff --git a/frontend/src/exporter/Exporter.tsx b/frontend/src/exporter/Exporter.tsx index 339f0a4ee6a2d..4c50137babefa 100644 --- a/frontend/src/exporter/Exporter.tsx +++ b/frontend/src/exporter/Exporter.tsx @@ -34,7 +34,7 @@ export function Exporter(props: ExportedData): JSX.Element { window.parent?.postMessage({ event: 'posthog:dimensions', name: window.name, height, width }, '*') }, [height, width]) - useThemedHtml() + useThemedHtml(false) return (
{ ) : null}
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
--- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
+++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
-                    {hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT) ? (
+                    {hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT) ||
+                    window.location.href.includes(urls.organizationBilling()) ? (
                        <>
                            Can't find what you need in the docs?
@@ -309,7 +310,8 @@ export const SidePanelSupport = (): JSX.Element => {
-                {!hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT) ? (
+                {hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT) ||
+                    window.location.href.includes(urls.organizationBilling()) ? null : (
                            Due to our large userbase, we're unable to offer email support to organizations
@@ -355,7 +357,7 @@ export const SidePanelSupport = (): JSX.Element => {
-                ) : null}
+                )}
            )}
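The two hunks above gate the email-support block on either the paid feature or the current URL, so billing issues can still be reported by users without an email-support plan. A minimal sketch of that check pulled out as a standalone helper is below; the `canShowEmailSupport` name and the literal `/organization/billing` fallback are illustrative assumptions, not part of this PR:

```ts
// Sketch only: mirrors the gating condition added in SidePanelSupport.tsx.
// `hasEmailSupportFeature` stands in for hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT),
// and `billingPath` for urls.organizationBilling(); both are assumptions for this example.
function canShowEmailSupport(
    hasEmailSupportFeature: boolean,
    currentHref: string,
    billingPath: string = '/organization/billing'
): boolean {
    // Paid plans always get the email form; everyone else only sees it while on the billing page.
    return hasEmailSupportFeature || currentHref.includes(billingPath)
}

// Example usage:
// canShowEmailSupport(false, 'https://app.posthog.com/organization/billing') === true
// canShowEmailSupport(false, 'https://app.posthog.com/insights') === false
```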
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts index 22d997bd673f5..180461c465996 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts @@ -50,9 +50,8 @@ export const activityForSceneLogic = kea([ state, activeLoadedScene?.paramsToProps?.(activeLoadedScene?.sceneParams) || props ) - } else { - return activityFiltersForScene(sceneConfig) } + return activityFiltersForScene(sceneConfig) }, ], (filters): ActivityFilters | null => filters, diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/exports/SidePanelExports.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/exports/SidePanelExports.tsx index dab152562be01..9fd6c1bf9cb17 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/exports/SidePanelExports.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/exports/SidePanelExports.tsx @@ -1,5 +1,5 @@ import { IconDownload } from '@posthog/icons' -import { LemonButton, LemonTag, Spinner, Tooltip } from '@posthog/lemon-ui' +import { LemonButton, Spinner } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { downloadExportedAsset } from 'lib/components/ExportButton/exporter' import { dayjs } from 'lib/dayjs' @@ -78,9 +78,6 @@ export const SidePanelExports = (): JSX.Element => { title={
                            Exports
-
-                            Experimental
-
} /> diff --git a/frontend/src/lib/api.mock.ts b/frontend/src/lib/api.mock.ts index b8536c139e434..0306f5a1530c3 100644 --- a/frontend/src/lib/api.mock.ts +++ b/frontend/src/lib/api.mock.ts @@ -76,6 +76,7 @@ export const MOCK_DEFAULT_TEAM: TeamType = { session_replay_config: null, capture_console_log_opt_in: true, capture_performance_opt_in: true, + heatmaps_opt_in: true, autocapture_exceptions_opt_in: false, autocapture_exceptions_errors_to_ignore: [], effective_membership_level: OrganizationMembershipLevel.Admin, diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 89e262e18f923..09e3924e21deb 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -388,6 +388,10 @@ class ApiRequest { .withQueryString(queryParams) } + public sessionPropertyDefinitions(teamId?: TeamType['id']): ApiRequest { + return this.projectsDetail(teamId).addPathComponent('sessions').addPathComponent('property_definitions') + } + public dataManagementActivity(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('data_management').addPathComponent('activity') } @@ -1212,6 +1216,23 @@ const api = { }, }, + sessions: { + async propertyDefinitions({ + teamId = ApiConfig.getCurrentTeamId(), + search, + properties, + }: { + teamId?: TeamType['id'] + search?: string + properties?: string[] + }): Promise> { + return new ApiRequest() + .sessionPropertyDefinitions(teamId) + .withQueryString(toParams({ search, ...(properties ? { properties: properties.join(',') } : {}) })) + .get() + }, + }, + cohorts: { async get(cohortId: CohortType['id']): Promise { return await new ApiRequest().cohortsDetail(cohortId).get() @@ -1801,22 +1822,18 @@ const api = { ): Promise { return await new ApiRequest().batchExport(id).update({ data }) }, - async create(data?: Partial): Promise { return await new ApiRequest().batchExports().create({ data }) }, async delete(id: BatchExportConfiguration['id']): Promise { return await new ApiRequest().batchExport(id).delete() }, - async pause(id: BatchExportConfiguration['id']): Promise { return await new ApiRequest().batchExport(id).withAction('pause').create() }, - async unpause(id: BatchExportConfiguration['id']): Promise { return await new ApiRequest().batchExport(id).withAction('unpause').create() }, - async listRuns( id: BatchExportConfiguration['id'], params: Record = {} @@ -1955,6 +1972,12 @@ const api = { ): Promise { return await new ApiRequest().externalDataSourceSchema(schemaId).update({ data }) }, + async reload(schemaId: ExternalDataSourceSchema['id']): Promise { + await new ApiRequest().externalDataSourceSchema(schemaId).withAction('reload').create() + }, + async resync(schemaId: ExternalDataSourceSchema['id']): Promise { + await new ApiRequest().externalDataSourceSchema(schemaId).withAction('resync').create() + }, }, dataWarehouseViewLinks: { diff --git a/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts b/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts index a65a0d6947997..2a2432b8d940c 100644 --- a/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts +++ b/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts @@ -31,11 +31,10 @@ export function useAnnotationsPositioning( tickIntervalPx: (lastTickLeftPx - firstTickLeftPx) / (tickCount - 1), firstTickLeftPx, } - } else { - return { - tickIntervalPx: 0, - firstTickLeftPx: 0, - } + } + return { + tickIntervalPx: 0, + firstTickLeftPx: 0, } }, [chart, chartWidth, chartHeight]) } diff 
--git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx index 084371830fa99..96619d094f892 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx @@ -215,13 +215,13 @@ export function FilterBasedCardContent({ {tooFewFunnelSteps ? ( ) : validationError ? ( - + ) : empty ? ( ) : !loading && timedOut ? ( ) : apiErrored && !loading ? ( - + ) : ( !apiErrored && )} @@ -348,6 +348,7 @@ function InsightCardInternal( ) : (
diff --git a/frontend/src/lib/components/CommandBar/utils.ts b/frontend/src/lib/components/CommandBar/utils.ts index cb7a621c6d423..00de3bd6b5f12 100644 --- a/frontend/src/lib/components/CommandBar/utils.ts +++ b/frontend/src/lib/components/CommandBar/utils.ts @@ -3,7 +3,6 @@ import { actionScopeToName } from './constants' export const getNameFromActionScope = (scope: string): string => { if (scope in actionScopeToName) { return actionScopeToName[scope] - } else { - return scope } + return scope } diff --git a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx index b8c9597972f71..ebc06a7b489fc 100644 --- a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx +++ b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx @@ -618,7 +618,7 @@ export const commandPaletteLogic = kea([ }, { icon: IconDecisionTree, - display: 'Go to Apps', + display: 'Go to Data pipelines', synonyms: ['integrations'], executor: () => { push(urls.projectApps()) diff --git a/frontend/src/lib/components/DefinitionPopover/DefinitionPopover.tsx b/frontend/src/lib/components/DefinitionPopover/DefinitionPopover.tsx index 4d93fd38f2ae9..807f1097e45c2 100644 --- a/frontend/src/lib/components/DefinitionPopover/DefinitionPopover.tsx +++ b/frontend/src/lib/components/DefinitionPopover/DefinitionPopover.tsx @@ -124,7 +124,8 @@ function Example({ value }: { value?: string }): JSX.Element { type === TaxonomicFilterGroupType.EventFeatureFlags || type === TaxonomicFilterGroupType.PersonProperties || type === TaxonomicFilterGroupType.GroupsPrefix || - type === TaxonomicFilterGroupType.Metadata + type === TaxonomicFilterGroupType.Metadata || + type === TaxonomicFilterGroupType.SessionProperties ) { data = getCoreFilterDefinition(value, type) } else if (type === TaxonomicFilterGroupType.Elements) { diff --git a/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx b/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx index d6419882b3327..f578e4d37b07f 100644 --- a/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx +++ b/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx @@ -78,6 +78,7 @@ function DefinitionView({ group }: { group: TaxonomicFilterGroup }): JSX.Element isCohort, isDataWarehouse, isProperty, + hasSentAs, } = useValues(definitionPopoverLogic) const { setLocalDefinition } = useActions(definitionPopoverLogic) @@ -142,13 +143,17 @@ function DefinitionView({ group }: { group: TaxonomicFilterGroup }): JSX.Element /> - - - {_definition.name}} - /> - + {hasSentAs ? ( + <> + + + {_definition.name}} + /> + + + ) : null} ) } @@ -176,17 +181,21 @@ function DefinitionView({ group }: { group: TaxonomicFilterGroup }): JSX.Element - - - - {_definition.name !== '' ? _definition.name : (empty string)} - - } - /> - + {hasSentAs ? ( + <> + + + + {_definition.name !== '' ? 
_definition.name : (empty string)} + + } + /> + + + ) : null} ) } diff --git a/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts b/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts index c3baac7ce76ca..38d9af8eb5311 100644 --- a/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts +++ b/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts @@ -176,11 +176,17 @@ export const definitionPopoverLogic = kea([ [ TaxonomicFilterGroupType.PersonProperties, TaxonomicFilterGroupType.EventProperties, + TaxonomicFilterGroupType.SessionProperties, TaxonomicFilterGroupType.EventFeatureFlags, TaxonomicFilterGroupType.NumericalEventProperties, TaxonomicFilterGroupType.Metadata, ].includes(type) || type.startsWith(TaxonomicFilterGroupType.GroupsPrefix), ], + hasSentAs: [ + (s) => [s.type, s.isProperty, s.isEvent], + (type, isProperty, isEvent) => + isEvent || (isProperty && type !== TaxonomicFilterGroupType.SessionProperties), + ], isCohort: [(s) => [s.type], (type) => type === TaxonomicFilterGroupType.Cohorts], isDataWarehouse: [(s) => [s.type], (type) => type === TaxonomicFilterGroupType.DataWarehouse], viewFullDetailUrl: [ diff --git a/frontend/src/lib/components/DefinitionPopover/utils.ts b/frontend/src/lib/components/DefinitionPopover/utils.ts index f828aea981fc2..a12c76237da42 100644 --- a/frontend/src/lib/components/DefinitionPopover/utils.ts +++ b/frontend/src/lib/components/DefinitionPopover/utils.ts @@ -48,6 +48,7 @@ export function getSingularType(type: TaxonomicFilterGroupType): string { case TaxonomicFilterGroupType.EventProperties: case TaxonomicFilterGroupType.PersonProperties: case TaxonomicFilterGroupType.GroupsPrefix: // Group properties + case TaxonomicFilterGroupType.SessionProperties: return 'property' case TaxonomicFilterGroupType.EventFeatureFlags: return 'feature' diff --git a/frontend/src/lib/components/JSBookmarklet.tsx b/frontend/src/lib/components/JSBookmarklet.tsx index c0cee95aed608..19827c037a73d 100644 --- a/frontend/src/lib/components/JSBookmarklet.tsx +++ b/frontend/src/lib/components/JSBookmarklet.tsx @@ -1,12 +1,15 @@ import { useActions } from 'kea' import { IconBookmarkBorder } from 'lib/lemon-ui/icons' +import { apiHostOrigin } from 'lib/utils/apiHost' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { useEffect, useRef } from 'react' import { TeamBasicType } from '~/types' export function JSBookmarklet({ team }: { team: TeamBasicType }): JSX.Element { - const initCall = `posthog.init('${team?.api_token}',{api_host:'${location.origin}', loaded: () => alert('PostHog is now tracking events!')})` + const initCall = `posthog.init('${ + team?.api_token + }',{api_host:'${apiHostOrigin()}', loaded: () => alert('PostHog is now tracking events!')})` const href = 
`javascript:(function()%7Bif%20(window.posthog)%20%7Balert(%22Error%3A%20PostHog%20already%20is%20installed%20on%20this%20site%22)%7D%20else%20%7B!function(t%2Ce)%7Bvar%20o%2Cn%2Cp%2Cr%3Be.__SV%7C%7C(window.posthog%3De%2Ce._i%3D%5B%5D%2Ce.init%3Dfunction(i%2Cs%2Ca)%7Bfunction%20g(t%2Ce)%7Bvar%20o%3De.split(%22.%22)%3B2%3D%3Do.length%26%26(t%3Dt%5Bo%5B0%5D%5D%2Ce%3Do%5B1%5D)%2Ct%5Be%5D%3Dfunction()%7Bt.push(%5Be%5D.concat(Array.prototype.slice.call(arguments%2C0)))%7D%7D(p%3Dt.createElement(%22script%22)).type%3D%22text%2Fjavascript%22%2Cp.async%3D!0%2Cp.src%3Ds.api_host%2B%22%2Fstatic%2Farray.js%22%2C(r%3Dt.getElementsByTagName(%22script%22)%5B0%5D).parentNode.insertBefore(p%2Cr)%3Bvar%20u%3De%3Bfor(void%200!%3D%3Da%3Fu%3De%5Ba%5D%3D%5B%5D%3Aa%3D%22posthog%22%2Cu.people%3Du.people%7C%7C%5B%5D%2Cu.toString%3Dfunction(t)%7Bvar%20e%3D%22posthog%22%3Breturn%22posthog%22!%3D%3Da%26%26(e%2B%3D%22.%22%2Ba)%2Ct%7C%7C(e%2B%3D%22%20(stub)%22)%2Ce%7D%2Cu.people.toString%3Dfunction()%7Breturn%20u.toString(1)%2B%22.people%20(stub)%22%7D%2Co%3D%22capture%20identify%20alias%20people.set%20people.set_once%20set_config%20register%20register_once%20unregister%20opt_out_capturing%20has_opted_out_capturing%20opt_in_capturing%20reset%20isFeatureEnabled%20onFeatureFlags%22.split(%22%20%22)%2Cn%3D0%3Bn%3Co.length%3Bn%2B%2B)g(u%2Co%5Bn%5D)%3Be._i.push(%5Bi%2Cs%2Ca%5D)%7D%2Ce.__SV%3D1)%7D(document%2Cwindow.posthog%7C%7C%5B%5D)%3B${encodeURIComponent( initCall )}%7D%7D)()` diff --git a/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts b/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts index 08fd06bdeedbd..9e96aa3599339 100644 --- a/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts +++ b/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts @@ -63,9 +63,8 @@ export const propertyFilterLogic = kea([ (filters) => { if (filters.length === 0 || isValidPropertyFilter(filters[filters.length - 1])) { return [...filters, {} as AnyPropertyFilter] - } else { - return filters } + return filters }, ], }), diff --git a/frontend/src/lib/components/PropertyFilters/utils.test.ts b/frontend/src/lib/components/PropertyFilters/utils.test.ts index 33ad74f8e35d6..da3f32d7e1b3c 100644 --- a/frontend/src/lib/components/PropertyFilters/utils.test.ts +++ b/frontend/src/lib/components/PropertyFilters/utils.test.ts @@ -83,7 +83,7 @@ describe('propertyFilterTypeToTaxonomicFilterType()', () => { ...baseFilter, type: PropertyFilterType.Session, } as SessionPropertyFilter) - ).toEqual(TaxonomicFilterGroupType.Sessions) + ).toEqual(TaxonomicFilterGroupType.SessionProperties) expect(propertyFilterTypeToTaxonomicFilterType({ ...baseFilter, type: PropertyFilterType.HogQL })).toEqual( TaxonomicFilterGroupType.HogQLExpression ) @@ -122,7 +122,7 @@ describe('breakdownFilterToTaxonomicFilterType()', () => { TaxonomicFilterGroupType.EventProperties ) expect(breakdownFilterToTaxonomicFilterType({ ...baseFilter, breakdown_type: 'session' })).toEqual( - TaxonomicFilterGroupType.Sessions + TaxonomicFilterGroupType.SessionProperties ) expect(breakdownFilterToTaxonomicFilterType({ ...baseFilter, breakdown_type: 'hogql' })).toEqual( TaxonomicFilterGroupType.HogQLExpression diff --git a/frontend/src/lib/components/PropertyFilters/utils.ts b/frontend/src/lib/components/PropertyFilters/utils.ts index ad135c0525b0e..63833cbd7acb5 100644 --- a/frontend/src/lib/components/PropertyFilters/utils.ts +++ b/frontend/src/lib/components/PropertyFilters/utils.ts @@ -97,7 +97,7 @@ export const 
PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE: Omit< [PropertyFilterType.Feature]: TaxonomicFilterGroupType.EventFeatureFlags, [PropertyFilterType.Cohort]: TaxonomicFilterGroupType.Cohorts, [PropertyFilterType.Element]: TaxonomicFilterGroupType.Elements, - [PropertyFilterType.Session]: TaxonomicFilterGroupType.Sessions, + [PropertyFilterType.Session]: TaxonomicFilterGroupType.SessionProperties, [PropertyFilterType.HogQL]: TaxonomicFilterGroupType.HogQLExpression, [PropertyFilterType.Group]: TaxonomicFilterGroupType.GroupsPrefix, [PropertyFilterType.DataWarehouse]: TaxonomicFilterGroupType.DataWarehouse, @@ -183,10 +183,14 @@ export function isEventPropertyFilter(filter?: AnyFilterLike | null): filter is export function isPersonPropertyFilter(filter?: AnyFilterLike | null): filter is PersonPropertyFilter { return filter?.type === PropertyFilterType.Person } -export function isEventPropertyOrPersonPropertyFilter( +export function isEventPersonOrSessionPropertyFilter( filter?: AnyFilterLike | null -): filter is EventPropertyFilter | PersonPropertyFilter { - return filter?.type === PropertyFilterType.Event || filter?.type === PropertyFilterType.Person +): filter is EventPropertyFilter | PersonPropertyFilter | SessionPropertyFilter { + return ( + filter?.type === PropertyFilterType.Event || + filter?.type === PropertyFilterType.Person || + filter?.type === PropertyFilterType.Session + ) } export function isElementPropertyFilter(filter?: AnyFilterLike | null): filter is ElementPropertyFilter { return filter?.type === PropertyFilterType.Element @@ -264,7 +268,7 @@ const propertyFilterMapping: Partial(null) @@ -129,6 +131,8 @@ export function SupportForm(): JSX.Element | null { disabledReason={ !user ? 'Please login to your account before opening a ticket unrelated to authentication issues.' + : !hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT) + ? 'You can only create billing related issues while viewing the billing page.' : null } fullWidth diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index 994117de70404..9adb17a94a5e2 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -8,6 +8,7 @@ import { uuid } from 'lib/utils' import posthog from 'posthog-js' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { teamLogic } from 'scenes/teamLogic' +import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' @@ -79,7 +80,7 @@ export const TARGET_AREA_TO_NAME = [ { value: 'apps', 'data-attr': `support-form-target-area-apps`, - label: 'Apps', + label: 'Data pipelines', }, { value: 'login', @@ -337,6 +338,11 @@ export const supportLogic = kea([ actions.setSidePanelOptions(panelOptions) } }, + openEmailForm: async () => { + if (window.location.href.includes(urls.organizationBilling())) { + actions.setSendSupportRequestValue('target_area', 'billing') + } + }, openSupportForm: async ({ name, email, kind, target_area, severity_level, message }) => { let area = target_area ?? 
getURLPathToTargetArea(window.location.pathname) if (!userLogic.values.user) { diff --git a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx index ab03c6f9721c0..4c60ef206deb4 100644 --- a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx @@ -106,6 +106,7 @@ const renderItemContents = ({ listGroupType === TaxonomicFilterGroupType.Events || listGroupType === TaxonomicFilterGroupType.CustomEvents || listGroupType === TaxonomicFilterGroupType.Metadata || + listGroupType === TaxonomicFilterGroupType.SessionProperties || listGroupType.startsWith(TaxonomicFilterGroupType.GroupsPrefix) ? ( <>
@@ -160,6 +161,7 @@ const selectedItemHasPopover = ( TaxonomicFilterGroupType.Cohorts, TaxonomicFilterGroupType.CohortsWithAllUsers, TaxonomicFilterGroupType.Metadata, + TaxonomicFilterGroupType.SessionProperties, ].includes(listGroupType) || listGroupType.startsWith(TaxonomicFilterGroupType.GroupsPrefix)) ) diff --git a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts index 2c6f0ff84c2db..0dbbe75ddd38d 100644 --- a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts +++ b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts @@ -45,7 +45,7 @@ describe('taxonomicFilterLogic', () => { TaxonomicFilterGroupType.Events, TaxonomicFilterGroupType.Actions, TaxonomicFilterGroupType.Elements, - TaxonomicFilterGroupType.Sessions, + TaxonomicFilterGroupType.SessionProperties, ], } logic = taxonomicFilterLogic(logicProps) @@ -62,7 +62,7 @@ describe('taxonomicFilterLogic', () => { infiniteListLogic({ ...logic.props, listGroupType: TaxonomicFilterGroupType.Events }), infiniteListLogic({ ...logic.props, listGroupType: TaxonomicFilterGroupType.Actions }), infiniteListLogic({ ...logic.props, listGroupType: TaxonomicFilterGroupType.Elements }), - infiniteListLogic({ ...logic.props, listGroupType: TaxonomicFilterGroupType.Sessions }), + infiniteListLogic({ ...logic.props, listGroupType: TaxonomicFilterGroupType.SessionProperties }), ]) expect( infiniteListLogic({ ...logic.props, listGroupType: TaxonomicFilterGroupType.Cohorts }).isMounted() @@ -76,7 +76,7 @@ describe('taxonomicFilterLogic', () => { [TaxonomicFilterGroupType.Events]: 1, [TaxonomicFilterGroupType.Actions]: 0, [TaxonomicFilterGroupType.Elements]: 4, - [TaxonomicFilterGroupType.Sessions]: 1, + [TaxonomicFilterGroupType.SessionProperties]: 1, }, }) .toDispatchActions(['infiniteListResultsReceived']) @@ -87,7 +87,7 @@ describe('taxonomicFilterLogic', () => { [TaxonomicFilterGroupType.Events]: 157, [TaxonomicFilterGroupType.Actions]: 0, // not mocked [TaxonomicFilterGroupType.Elements]: 4, - [TaxonomicFilterGroupType.Sessions]: 1, + [TaxonomicFilterGroupType.SessionProperties]: 1, }, }) }) @@ -110,7 +110,7 @@ describe('taxonomicFilterLogic', () => { [TaxonomicFilterGroupType.Events]: 4, [TaxonomicFilterGroupType.Actions]: 0, [TaxonomicFilterGroupType.Elements]: 0, - [TaxonomicFilterGroupType.Sessions]: 0, + [TaxonomicFilterGroupType.SessionProperties]: 0, }, }) @@ -127,7 +127,7 @@ describe('taxonomicFilterLogic', () => { [TaxonomicFilterGroupType.Events]: 0, [TaxonomicFilterGroupType.Actions]: 0, [TaxonomicFilterGroupType.Elements]: 1, - [TaxonomicFilterGroupType.Sessions]: 0, + [TaxonomicFilterGroupType.SessionProperties]: 0, }, }) @@ -144,7 +144,7 @@ describe('taxonomicFilterLogic', () => { [TaxonomicFilterGroupType.Events]: 0, [TaxonomicFilterGroupType.Actions]: 0, [TaxonomicFilterGroupType.Elements]: 0, - [TaxonomicFilterGroupType.Sessions]: 0, + [TaxonomicFilterGroupType.SessionProperties]: 0, }, }) @@ -161,13 +161,13 @@ describe('taxonomicFilterLogic', () => { [TaxonomicFilterGroupType.Events]: 157, [TaxonomicFilterGroupType.Actions]: 0, [TaxonomicFilterGroupType.Elements]: 4, - [TaxonomicFilterGroupType.Sessions]: 1, + [TaxonomicFilterGroupType.SessionProperties]: 1, }, }) // move right, skipping Actions await expectLogic(logic, () => logic.actions.tabRight()).toMatchValues({ - activeTab: TaxonomicFilterGroupType.Sessions, + activeTab: TaxonomicFilterGroupType.SessionProperties, }) await 
expectLogic(logic, () => logic.actions.tabRight()).toMatchValues({ activeTab: TaxonomicFilterGroupType.Events, @@ -181,7 +181,7 @@ describe('taxonomicFilterLogic', () => { activeTab: TaxonomicFilterGroupType.Events, }) await expectLogic(logic, () => logic.actions.tabLeft()).toMatchValues({ - activeTab: TaxonomicFilterGroupType.Sessions, + activeTab: TaxonomicFilterGroupType.SessionProperties, }) await expectLogic(logic, () => logic.actions.tabLeft()).toMatchValues({ activeTab: TaxonomicFilterGroupType.Elements, @@ -201,7 +201,7 @@ describe('taxonomicFilterLogic', () => { [TaxonomicFilterGroupType.Events]: 4, [TaxonomicFilterGroupType.Actions]: 0, [TaxonomicFilterGroupType.Elements]: 0, - [TaxonomicFilterGroupType.Sessions]: 0, + [TaxonomicFilterGroupType.SessionProperties]: 0, }, }) }) diff --git a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx index 966b33c5527ed..f35dbda2f7cf5 100644 --- a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx @@ -11,7 +11,9 @@ import { TaxonomicFilterLogicProps, TaxonomicFilterValue, } from 'lib/components/TaxonomicFilter/types' +import { FEATURE_FLAGS } from 'lib/constants' import { IconCohort } from 'lib/lemon-ui/icons' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { CORE_FILTER_DEFINITIONS_BY_GROUP } from 'lib/taxonomy' import { capitalizeFirstLetter, pluralize, toParams } from 'lib/utils' import { getEventDefinitionIcon, getPropertyDefinitionIcon } from 'scenes/data-management/events/DefinitionHeader' @@ -168,6 +170,7 @@ export const taxonomicFilterLogic = kea([ s.metadataSource, s.excludedProperties, s.propertyAllowList, + featureFlagLogic.selectors.featureFlags, ], ( teamId, @@ -177,7 +180,8 @@ export const taxonomicFilterLogic = kea([ schemaColumns, metadataSource, excludedProperties, - propertyAllowList + propertyAllowList, + featureFlags ): TaxonomicFilterGroup[] => { const groups: TaxonomicFilterGroup[] = [ { @@ -486,18 +490,26 @@ export const taxonomicFilterLogic = kea([ getPopoverHeader: () => 'Notebooks', }, { - name: 'Sessions', + name: 'Session Properties', searchPlaceholder: 'sessions', - type: TaxonomicFilterGroupType.Sessions, - options: [ - { - name: 'Session duration', - value: '$session_duration', - }, - ], + type: TaxonomicFilterGroupType.SessionProperties, + options: featureFlags[FEATURE_FLAGS.SESSION_TABLE_PROPERTY_FILTERS] + ? undefined + : [ + { + id: '$session_duration', + name: '$session_duration', + property_type: 'Duration', + is_numerical: true, + }, + ], getName: (option: any) => option.name, - getValue: (option: any) => option.value, + getValue: (option) => option.name, getPopoverHeader: () => 'Session', + endpoint: featureFlags[FEATURE_FLAGS.SESSION_TABLE_PROPERTY_FILTERS] + ? 
`api/projects/${teamId}/sessions/property_definitions` + : undefined, + getIcon: getPropertyDefinitionIcon, }, { name: 'HogQL', diff --git a/frontend/src/lib/components/TaxonomicFilter/types.ts b/frontend/src/lib/components/TaxonomicFilter/types.ts index 8b46784a19b7e..37f1c95b45daa 100644 --- a/frontend/src/lib/components/TaxonomicFilter/types.ts +++ b/frontend/src/lib/components/TaxonomicFilter/types.ts @@ -105,7 +105,7 @@ export enum TaxonomicFilterGroupType { Plugins = 'plugins', Dashboards = 'dashboards', GroupNamesPrefix = 'name_groups', - Sessions = 'sessions', + SessionProperties = 'session_properties', HogQLExpression = 'hogql_expression', Notebooks = 'notebooks', } diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index fff3f335f7449..ca029c4102896 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -154,7 +154,6 @@ export const FEATURE_FLAGS = { POSTHOG_3000_NAV: 'posthog-3000-nav', // owner: @Twixes HEDGEHOG_MODE: 'hedgehog-mode', // owner: @benjackwhite HEDGEHOG_MODE_DEBUG: 'hedgehog-mode-debug', // owner: @benjackwhite - SIGNUP_BENEFITS: 'signup-benefits', // experiment, owner: @zlwaterfield WEB_ANALYTICS: 'web-analytics', // owner @robbie-c #team-web-analytics WEB_ANALYTICS_SAMPLING: 'web-analytics-sampling', // owner @robbie-c #team-web-analytics HIGH_FREQUENCY_BATCH_EXPORTS: 'high-frequency-batch-exports', // owner: @tomasfarias @@ -177,6 +176,7 @@ export const FEATURE_FLAGS = { HOGQL_INSIGHTS_STICKINESS: 'hogql-insights-stickiness', // owner: @Gilbert09 HOGQL_INSIGHTS_FUNNELS: 'hogql-insights-funnels', // owner: @thmsobrmlr HOGQL_INSIGHT_LIVE_COMPARE: 'hogql-insight-live-compare', // owner: @mariusandra + HOGQL_IN_INSIGHT_SERIALIZATION: 'hogql-in-insight-serialization', // owner: @Twixes BI_VIZ: 'bi_viz', // owner: @Gilbert09 WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline PERSONS_HOGQL_QUERY: 'persons-hogql-query', // owner: @mariusandra @@ -209,7 +209,9 @@ export const FEATURE_FLAGS = { SESSION_REPLAY_MOBILE_ONBOARDING: 'session-replay-mobile-onboarding', // owner: #team-replay IP_ALLOWLIST_SETTING: 'ip-allowlist-setting', // owner: @benjackwhite EMAIL_VERIFICATION_TICKET_SUBMISSION: 'email-verification-ticket-submission', // owner: #team-growth + TOOLBAR_HEATMAPS: 'toolbar-heatmaps', // owner: #team-replay THEME: 'theme', // owner: @aprilfools + SESSION_TABLE_PROPERTY_FILTERS: 'session-table-property-filters', // owner: @robbie-c } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/hooks/useOutsideClickHandler.ts b/frontend/src/lib/hooks/useOutsideClickHandler.ts index c69acfd21fb20..c7342f659842d 100644 --- a/frontend/src/lib/hooks/useOutsideClickHandler.ts +++ b/frontend/src/lib/hooks/useOutsideClickHandler.ts @@ -21,10 +21,9 @@ export function useOutsideClickHandler( allRefs.some((maybeRef) => { if (typeof maybeRef === 'string') { return event.composedPath?.()?.find((e) => (e as HTMLElement)?.matches?.(maybeRef)) - } else { - const ref = maybeRef.current - return event.target && ref && `contains` in ref && ref.contains(event.target as Element) } + const ref = maybeRef.current + return event.target && ref && `contains` in ref && ref.contains(event.target as Element) }) ) { return diff --git a/frontend/src/lib/hooks/useThemedHtml.ts b/frontend/src/lib/hooks/useThemedHtml.ts index 18498a25d5ca5..580596f8addb1 100644 --- a/frontend/src/lib/hooks/useThemedHtml.ts +++ b/frontend/src/lib/hooks/useThemedHtml.ts @@ -5,14 +5,16 @@ 
import { sceneLogic } from 'scenes/sceneLogic' import { themeLogic } from '~/layout/navigation-3000/themeLogic' -export function useThemedHtml(): void { +export function useThemedHtml(overflowHidden = true): void { const { isDarkModeOn } = useValues(themeLogic) const { sceneConfig } = useValues(sceneLogic) useEffect(() => { document.body.setAttribute('theme', isDarkModeOn ? 'dark' : 'light') // overflow-hidden since each area handles scrolling individually (e.g. navbar, scene, side panel) - document.body.classList.add('overflow-hidden') + if (overflowHidden) { + document.body.classList.add('overflow-hidden') + } }, [isDarkModeOn]) useEffect(() => { diff --git a/frontend/src/lib/introductions/groupsAccessLogic.ts b/frontend/src/lib/introductions/groupsAccessLogic.ts index 37bcb2e97972c..e00ca51dc0bee 100644 --- a/frontend/src/lib/introductions/groupsAccessLogic.ts +++ b/frontend/src/lib/introductions/groupsAccessLogic.ts @@ -37,9 +37,8 @@ export const groupsAccessLogic = kea([ return GroupsAccessStatus.HasAccess } else if (hasGroups) { return GroupsAccessStatus.HasGroupTypes - } else { - return GroupsAccessStatus.NoAccess } + return GroupsAccessStatus.NoAccess }, ], needsUpgradeForGroups: [ diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss index ed6baaf7e8955..1c4c9dfe3573e 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss @@ -2,6 +2,10 @@ --lemon-calendar-row-gap: 2px; --lemon-calendar-day-width: 40px; --lemon-calendar-today-radius: 2px; + --lemon-calendar-time-column-width: 50px; + + // Tricky: needs to match the equivalent height button from LemonButton.scss + --lemon-calendar-time-button-height: 2.3125rem; .LemonCalendar__month > thead > tr:first-child > th, .LemonCalendar__month > tbody > tr > td { @@ -50,4 +54,25 @@ .LemonCalendar__range--boundary { background-color: var(--glass-border-3000); } + + &--with-time { + padding-right: calc(3 * var(--lemon-calendar-time-column-width)); + } + + .LemonCalendar__time { + & > div { + width: var(--lemon-calendar-time-column-width); + + &.ScrollableShadows { + & .ScrollableShadows__inner { + scrollbar-width: none; + scroll-behavior: smooth; + } + } + } + + &--scroll-spacer { + height: calc(100% - var(--lemon-calendar-time-button-height)); + } + } } diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.stories.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.stories.tsx index ede3a38ee7bdc..0755fcf205e20 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.stories.tsx +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.stories.tsx @@ -77,3 +77,13 @@ export const SundayFirst: Story = BasicTemplate.bind({}) SundayFirst.args = { weekStartDay: 0, } + +export const ShowTime: Story = BasicTemplate.bind({}) +ShowTime.args = { + showTime: true, +} + +export const OnlyAllowUpcoming: Story = BasicTemplate.bind({}) +OnlyAllowUpcoming.args = { + onlyAllowUpcoming: true, +} diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.test.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.test.tsx index ea52b8c681552..fae0ec6d3504a 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.test.tsx +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.test.tsx @@ -1,6 +1,7 @@ import { render, within } from '@testing-library/react' import userEvent from '@testing-library/user-event' import { dayjs } from 'lib/dayjs' 
+import { range } from 'lib/utils' import { getAllByDataAttr, getByDataAttr } from '~/test/byDataAttr' @@ -183,4 +184,40 @@ describe('LemonCalendar', () => { expect(fourteen).toBeDefined() expect(fourteen.className.split(' ')).toContain('yolo') }) + + test('calls getLemonButtonTimeProps for each time', async () => { + const calls: any = [] + render( + { + calls.push([unit, value]) + return {} + }} + showTime + /> + ) + const minutes = range(0, 60).map((num) => ['m', num]) + expect(calls.length).toBe(74) + expect(calls).toEqual([ + ...[ + ['h', 12], + ['h', 1], + ['h', 2], + ['h', 3], + ['h', 4], + ['h', 5], + ['h', 6], + ['h', 7], + ['h', 8], + ['h', 9], + ['h', 10], + ['h', 11], + ], + ...minutes, + ...[ + ['a', 'am'], + ['a', 'pm'], + ], + ]) + }) }) diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx index e4e15e050bf05..abe4d0ed21625 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx @@ -2,11 +2,12 @@ import './LemonCalendar.scss' import clsx from 'clsx' import { useValues } from 'kea' +import { ScrollableShadows } from 'lib/components/ScrollableShadows/ScrollableShadows' import { dayjs } from 'lib/dayjs' import { IconChevronLeft, IconChevronRight } from 'lib/lemon-ui/icons' import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton' import { range } from 'lib/utils' -import { useEffect, useState } from 'react' +import { forwardRef, Ref, useEffect, useState } from 'react' import { teamLogic } from 'scenes/teamLogic' export interface LemonCalendarProps { @@ -18,10 +19,16 @@ export interface LemonCalendarProps { onLeftmostMonthChanged?: (date: dayjs.Dayjs) => void /** Use custom LemonButton properties for each date */ getLemonButtonProps?: (opts: GetLemonButtonPropsOpts) => LemonButtonProps + /** Use custom LemonButton properties for each date */ + getLemonButtonTimeProps?: (opts: GetLemonButtonTimePropsOpts) => LemonButtonProps /** Number of months */ months?: number /** 0 or unset for Sunday, 1 for Monday. */ weekStartDay?: number + /** Show a time picker */ + showTime?: boolean + /** Only allow upcoming dates */ + onlyAllowUpcoming?: boolean } export interface GetLemonButtonPropsOpts { @@ -30,10 +37,17 @@ export interface GetLemonButtonPropsOpts { dayIndex: number weekIndex: number } +export interface GetLemonButtonTimePropsOpts { + unit: 'h' | 'm' | 'a' + value: number | string +} const dayLabels = ['su', 'mo', 'tu', 'we', 'th', 'fr', 'sa'] -export function LemonCalendar(props: LemonCalendarProps): JSX.Element { +export const LemonCalendar = forwardRef(function LemonCalendar( + { showTime = false, ...props }: LemonCalendarProps, + ref: Ref +): JSX.Element { const { weekStartDay: teamWeekStartDay } = useValues(teamLogic) const months = Math.max(props.months ?? 1, 1) @@ -47,7 +61,11 @@ export function LemonCalendar(props: LemonCalendarProps): JSX.Element { }, [props.leftmostMonth]) return ( -
+
{range(0, months).map((month) => { const startOfMonth = leftmostMonth.add(month, 'month').startOf('month') const endOfMonth = startOfMonth.endOf('month') @@ -112,12 +130,18 @@ export function LemonCalendar(props: LemonCalendarProps): JSX.Element { {range(0, 7).map((day) => { const date = firstDay.add(week * 7 + day, 'day') + const pastDate = date.isBefore(today) const defaultProps: LemonButtonProps = { className: clsx('flex-col', { 'opacity-25': date.isBefore(startOfMonth) || date.isAfter(endOfMonth), LemonCalendar__today: date.isSame(today, 'd'), }), + disabledReason: + props.onlyAllowUpcoming && pastDate + ? 'Cannot select dates in the past' + : undefined, } + const buttonProps = props.getLemonButtonProps?.({ dayIndex: day, @@ -145,6 +169,47 @@ export function LemonCalendar(props: LemonCalendarProps): JSX.Element { ) })} + {showTime && ( +
+ + {[12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11].map((hour) => { + const buttonProps = props.getLemonButtonTimeProps?.({ + unit: 'h', + value: hour, + }) + + return ( + + {String(hour).padStart(2, '0')} + + ) + })} +
+ + + {range(0, 60).map((minute) => { + const buttonProps = props.getLemonButtonTimeProps?.({ + unit: 'm', + value: minute, + }) + return ( + + {String(minute).padStart(2, '0')} + + ) + })} +
+ +
+ + AM + + + PM + +
+
+ )}
) -} +}) diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.stories.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.stories.tsx index 1c6bff250dd2b..89c9f04589296 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.stories.tsx +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.stories.tsx @@ -34,6 +34,7 @@ const BasicTemplate: StoryFn = (props: LemonCalendar setValue(value) setVisible(false) }} + showTime onClose={() => setVisible(false)} /> } diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.test.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.test.tsx index dd44efa6634cc..439580f41ae8c 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.test.tsx +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.test.tsx @@ -1,11 +1,13 @@ import { render, within } from '@testing-library/react' import userEvent from '@testing-library/user-event' import { dayjs } from 'lib/dayjs' -import { LemonCalendarSelect } from 'lib/lemon-ui/LemonCalendar/LemonCalendarSelect' +import { getTimeElement, LemonCalendarSelect } from 'lib/lemon-ui/LemonCalendar/LemonCalendarSelect' import { useState } from 'react' import { getByDataAttr } from '~/test/byDataAttr' +import { GetLemonButtonTimePropsOpts } from './LemonCalendar' + describe('LemonCalendarSelect', () => { test('select various dates', async () => { const onClose = jest.fn() @@ -50,4 +52,127 @@ describe('LemonCalendarSelect', () => { userEvent.click(getByDataAttr(container, 'lemon-calendar-select-cancel')) expect(onClose).toHaveBeenCalled() }) + + test('select various times', async () => { + const onClose = jest.fn() + const onChange = jest.fn() + window.HTMLElement.prototype.scrollIntoView = jest.fn() + + jest.useFakeTimers().setSystemTime(new Date('2023-01-10 17:22:08')) + + function TestSelect(): JSX.Element { + const [value, setValue] = useState(null) + return ( + { + setValue(value) + onChange(value) + }} + showTime + /> + ) + } + const { container } = render() + + async function clickOnDate(day: string): Promise { + const element = container.querySelector('.LemonCalendar__month') as HTMLElement + if (element) { + userEvent.click(await within(element).findByText(day)) + userEvent.click(getByDataAttr(container, 'lemon-calendar-select-apply')) + } + } + + async function clickOnTime(props: GetLemonButtonTimePropsOpts): Promise { + const element = getTimeElement(container.querySelector('.LemonCalendar__time'), props) + if (element) { + userEvent.click(element) + userEvent.click(getByDataAttr(container, 'lemon-calendar-select-apply')) + } + } + + // click on hour 8 + await clickOnDate('15') + // sets the date to 15, hour and minutes to current time, and seconds to 0 + expect(onChange).toHaveBeenCalledWith(dayjs('2023-01-15T17:22:00.000Z')) + + // click on minute 42 + await clickOnTime({ unit: 'm', value: 42 }) + // sets the minutes but leaves all other values unchanged + expect(onChange).toHaveBeenCalledWith(dayjs('2023-01-15T17:42:00.000Z')) + + // click on 'am' + await clickOnTime({ unit: 'a', value: 'am' }) + // subtracts 12 hours from the time + expect(onChange).toHaveBeenCalledWith(dayjs('2023-01-15T05:42:00.000Z')) + + // click on hour 8 + await clickOnTime({ unit: 'h', value: 8 }) + // only changes the hour + expect(onChange).toHaveBeenCalledWith(dayjs('2023-01-15T08:42:00.000Z')) + }) + + test('onlyAllowUpcoming', async () => { + const onClose = jest.fn() + const onChange = jest.fn() + 
window.HTMLElement.prototype.scrollIntoView = jest.fn() + + jest.useFakeTimers().setSystemTime(new Date('2023-01-10 17:22:08')) + + function TestSelect(): JSX.Element { + const [value, setValue] = useState(null) + return ( + { + setValue(value) + onChange(value) + }} + showTime + onlyAllowUpcoming + /> + ) + } + const { container } = render() + + async function clickOnDate(day: string): Promise { + const element = container.querySelector('.LemonCalendar__month') as HTMLElement + if (element) { + userEvent.click(await within(element).findByText(day)) + userEvent.click(getByDataAttr(container, 'lemon-calendar-select-apply')) + } + } + + async function clickOnTime(props: GetLemonButtonTimePropsOpts): Promise { + const element = getTimeElement(container.querySelector('.LemonCalendar__time'), props) + if (element) { + userEvent.click(element) + userEvent.click(getByDataAttr(container, 'lemon-calendar-select-apply')) + } + } + + // click on minute + await clickOnTime({ unit: 'm', value: 42 }) + // time is disabled until a date is clicked + expect(onChange).not.toHaveBeenCalled() + + // click on current date + await clickOnDate('9') + // cannot select a date in the past + expect(onChange).not.toHaveBeenCalled() + + // click on current date + await clickOnDate('10') + // chooses the current date and sets the time to the current hour and minute + expect(onChange).toHaveBeenCalledWith(dayjs('2023-01-10T17:22:00.000Z')) + + // click on an earlier hour + await clickOnTime({ unit: 'a', value: 'am' }) + // does not update the date because it is in the past + expect(onChange).lastCalledWith(dayjs('2023-01-10T17:22:00.000Z')) + }) }) diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.tsx index ab25876bac452..44662382d9edc 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.tsx +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendarSelect.tsx @@ -1,20 +1,104 @@ import { IconX } from '@posthog/icons' import { dayjs } from 'lib/dayjs' -import { LemonButton, LemonButtonWithSideActionProps, SideAction } from 'lib/lemon-ui/LemonButton' -import { LemonCalendar } from 'lib/lemon-ui/LemonCalendar/LemonCalendar' -import { useState } from 'react' +import { LemonButton, LemonButtonProps, LemonButtonWithSideActionProps, SideAction } from 'lib/lemon-ui/LemonButton' +import { GetLemonButtonTimePropsOpts, LemonCalendar } from 'lib/lemon-ui/LemonCalendar/LemonCalendar' +import { useEffect, useMemo, useRef, useState } from 'react' import { Popover } from '../Popover' +function timeDataAttr({ unit, value }: GetLemonButtonTimePropsOpts): string { + return `${value}-${unit}` +} + +export function getTimeElement( + parent: HTMLElement | null, + props: GetLemonButtonTimePropsOpts +): HTMLDivElement | undefined | null { + return parent?.querySelector(`[data-attr="${timeDataAttr(props)}"]`) +} +function scrollToTimeElement( + calendarEl: HTMLDivElement | null, + props: GetLemonButtonTimePropsOpts, + skipAnimation: boolean +): void { + getTimeElement(calendarEl, props)?.scrollIntoView({ + block: 'start', + inline: 'nearest', + behavior: skipAnimation ? ('instant' as ScrollBehavior) : 'smooth', + }) +} + +function proposedDate(target: dayjs.Dayjs | null, { value, unit }: GetLemonButtonTimePropsOpts): dayjs.Dayjs { + let date = target || dayjs().startOf('day') + if (value != date.format(unit)) { + if (unit === 'h') { + date = date.hour(date.format('a') === 'am' || value === 12 ? 
+        } else if (unit === 'm') {
+            date = date.minute(Number(value))
+        } else if (unit === 'a') {
+            date = value === 'am' ? date.subtract(12, 'hour') : date.add(12, 'hour')
+        }
+    }
+    return date
+}
+
 export interface LemonCalendarSelectProps {
     value?: dayjs.Dayjs | null
     onChange: (date: dayjs.Dayjs) => void
     months?: number
     onClose?: () => void
+    showTime?: boolean
+    onlyAllowUpcoming?: boolean
 }
 
-export function LemonCalendarSelect({ value, onChange, months, onClose }: LemonCalendarSelectProps): JSX.Element {
-    const [selectValue, setSelectValue] = useState<dayjs.Dayjs | null>(value ? value.startOf('day') : null)
+export function LemonCalendarSelect({
+    value,
+    onChange,
+    months,
+    onClose,
+    showTime,
+    onlyAllowUpcoming,
+}: LemonCalendarSelectProps): JSX.Element {
+    const calendarRef = useRef<HTMLDivElement | null>(null)
+    const [selectValue, setSelectValue] = useState<dayjs.Dayjs | null>(
+        value ? (showTime ? value : value.startOf('day')) : null
+    )
+
+    const now = dayjs()
+    const isAM = useMemo(() => selectValue?.format('a') === 'am', [selectValue])
+
+    const scrollToTime = (date: dayjs.Dayjs, skipAnimation: boolean): void => {
+        const calendarEl = calendarRef.current
+        if (calendarEl && date) {
+            const hour = isAM ? date.hour() : date.hour() - 12
+            scrollToTimeElement(calendarEl, { unit: 'h', value: hour }, skipAnimation)
+            scrollToTimeElement(calendarEl, { unit: 'm', value: date.minute() }, skipAnimation)
+        }
+    }
+
+    const onDateClick = (date: dayjs.Dayjs | null): void => {
+        if (date) {
+            date = showTime ? date.hour(selectValue === null ? now.hour() : selectValue.hour()) : date.startOf('hour')
+            date = showTime
+                ? date.minute(selectValue === null ? now.minute() : selectValue.minute())
+                : date.startOf('minute')
+            scrollToTime(date, true)
+        }
+
+        setSelectValue(date)
+    }
+
+    useEffect(() => {
+        if (selectValue) {
+            scrollToTime(selectValue, true)
+        }
+    }, [])
+
+    const onTimeClick = (props: GetLemonButtonTimePropsOpts): void => {
+        const date = proposedDate(selectValue, props)
+        scrollToTime(date, false)
+        setSelectValue(date)
+    }
 
     return (
@@ -24,19 +108,56 @@ export function LemonCalendarSelect({ value, onChange, months, onClose }: LemonCalendarSelectProps): JSX.Element {
                     <LemonButton icon={<IconX />} size="small" onClick={onClose} aria-label="close" noPadding />
                 )}
-
- { - if (date.isSame(selectValue, 'd')) { - return { ...props, status: 'default', type: 'primary' } - } - return props - }} - /> -
+ { + const modifiedProps: LemonButtonProps = { ...props } + const isDisabled = + onlyAllowUpcoming && + selectValue && + date.isSame(now.tz('utc'), 'date') && + (selectValue.hour() < now.hour() || + (selectValue.hour() === now.hour() && selectValue.minute() <= now.minute())) + + if (isDisabled) { + modifiedProps.disabledReason = 'Pick a time in the future first' + } + + if (date.isSame(selectValue, 'd')) { + return { ...modifiedProps, status: 'default', type: 'primary' } + } + return modifiedProps + }} + getLemonButtonTimeProps={(props) => { + const selected = selectValue ? selectValue.format(props.unit) : null + const newDate = proposedDate(selectValue, props) + + const disabledReason = onlyAllowUpcoming + ? selectValue + ? newDate.isBefore(now) + ? 'Cannot choose a time in the past' + : undefined + : 'Choose a date first' + : undefined + + return { + active: selected === String(props.value), + className: 'rounded-none', + 'data-attr': timeDataAttr(props), + disabledReason: disabledReason, + onClick: () => { + if (selected != props.value) { + onTimeClick(props) + } + }, + } + }} + showTime={showTime} + onlyAllowUpcoming={onlyAllowUpcoming} + />
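For reference, this is how the pieces above fit together: a click on an hour/minute/am-pm button goes through proposedDate, which replaces only the clicked unit ('h' and 'm') or shifts the value by 12 hours ('a'), starting from today's startOf('day') when nothing is selected yet. A small illustrative sketch, not part of the diff, with values chosen to mirror the unit tests:

    const fivePm = dayjs('2023-01-15 17:42')
    proposedDate(fivePm, { unit: 'm', value: 7 })    // Jan 15, 5:07 pm - only the minutes change
    proposedDate(fivePm, { unit: 'a', value: 'am' }) // Jan 15, 5:42 am - shifted 12 hours earlier
    proposedDate(null, { unit: 'h', value: 8 })      // today at 8:00 am - falls back to startOf('day')

With onlyAllowUpcoming, getLemonButtonTimeProps additionally disables any time button whose proposed date would land before now ('Cannot choose a time in the past') and all time buttons until a date is picked ('Choose a date first').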
Cancel @@ -100,7 +221,7 @@ export function LemonCalendarSelectInput( } {...props.buttonProps} > - {props.value?.format('MMMM D, YYYY') ?? placeholder ?? 'Select date'} + {props.value?.format(`MMMM D, YYYY${props.showTime && ' h:mm A'}`) ?? placeholder ?? 'Select date'} ) diff --git a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx index 561efcf9de9f9..f9ae2a2b3a73b 100644 --- a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx +++ b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx @@ -51,7 +51,7 @@ interface LemonInputPropsBase } export interface LemonInputPropsText extends LemonInputPropsBase { - type?: 'text' | 'email' | 'search' | 'url' | 'password' + type?: 'text' | 'email' | 'search' | 'url' | 'password' | 'time' value?: string defaultValue?: string onChange?: (newValue: string) => void diff --git a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx index 8e06a932310ab..3e5a03c309fbd 100644 --- a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx +++ b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx @@ -62,6 +62,7 @@ export interface LemonSelectPropsBase | 'onClick' | 'tabIndex' | 'type' + | 'tooltip' > { options: LemonSelectOptions /** Callback fired when a value is selected, even if it already is set. */ diff --git a/frontend/src/lib/lemon-ui/hooks.ts b/frontend/src/lib/lemon-ui/hooks.ts index da53cbe7538eb..d9d5aa8675f77 100644 --- a/frontend/src/lib/lemon-ui/hooks.ts +++ b/frontend/src/lib/lemon-ui/hooks.ts @@ -29,9 +29,8 @@ export function useSliderPositioning setTransitioning(false), transitionMs) return () => clearTimeout(transitioningTimeout) - } else { - hasRenderedInitiallyRef.current = true } + hasRenderedInitiallyRef.current = true } }, [currentValue, containerWidth]) diff --git a/frontend/src/lib/logic/featureFlagLogic.ts b/frontend/src/lib/logic/featureFlagLogic.ts index 4fa049bacc0db..db646d26d7656 100644 --- a/frontend/src/lib/logic/featureFlagLogic.ts +++ b/frontend/src/lib/logic/featureFlagLogic.ts @@ -54,22 +54,21 @@ function spyOnFeatureFlags(featureFlags: FeatureFlagsSet): FeatureFlagsSet { }, } ) - } else { - // Fallback for IE11. Won't track "false" results. ¯\_(ツ)_/¯ - const flags: FeatureFlagsSet = {} - for (const flag of Object.keys(availableFlags)) { - Object.defineProperty(flags, flag, { - get: function () { - if (flag === 'toJSON') { - return () => availableFlags - } - notifyFlagIfNeeded(flag, true) - return true - }, - }) - } - return flags } + // Fallback for IE11. Won't track "false" results. ¯\_(ツ)_/¯ + const flags: FeatureFlagsSet = {} + for (const flag of Object.keys(availableFlags)) { + Object.defineProperty(flags, flag, { + get: function () { + if (flag === 'toJSON') { + return () => availableFlags + } + notifyFlagIfNeeded(flag, true) + return true + }, + }) + } + return flags } export const featureFlagLogic = kea([ diff --git a/frontend/src/lib/sortable.ts b/frontend/src/lib/sortable.ts index 3b62cb4ea7db9..edad9ff60ecd6 100644 --- a/frontend/src/lib/sortable.ts +++ b/frontend/src/lib/sortable.ts @@ -8,9 +8,8 @@ import { CollisionDetection, DroppableContainer, UniqueIdentifier } from '@dnd-k export const verticalSortableListCollisionDetection: CollisionDetection = (args) => { if (args.collisionRect.top < (args.active.rect.current?.initial?.top ?? 
0)) { return highestDroppableContainerMajorityCovered(args) - } else { - return lowestDroppableContainerMajorityCovered(args) } + return lowestDroppableContainerMajorityCovered(args) } // Look for the first (/ furthest up / highest) droppable container that is at least diff --git a/frontend/src/lib/taxonomy.test.tsx b/frontend/src/lib/taxonomy.test.tsx index 3805f9d7670ad..e8820fbad0c32 100644 --- a/frontend/src/lib/taxonomy.test.tsx +++ b/frontend/src/lib/taxonomy.test.tsx @@ -26,10 +26,10 @@ describe('taxonomy', () => { }) describe('session properties', () => { - const sessionPropertyNames = Object.keys(CORE_FILTER_DEFINITIONS_BY_GROUP.sessions) + const sessionPropertyNames = Object.keys(CORE_FILTER_DEFINITIONS_BY_GROUP.session_properties) it('should have an $initial_referring_domain property', () => { const property: CoreFilterDefinition = - CORE_FILTER_DEFINITIONS_BY_GROUP.sessions['$initial_referring_domain'] + CORE_FILTER_DEFINITIONS_BY_GROUP.session_properties['$initial_referring_domain'] expect(property.label).toEqual('Initial Referring Domain') }) it(`should have every property in SESSION_PROPERTIES_ADAPTED_FROM_PERSON`, () => { diff --git a/frontend/src/lib/taxonomy.tsx b/frontend/src/lib/taxonomy.tsx index 9b5297cf62955..d52ee74c583c0 100644 --- a/frontend/src/lib/taxonomy.tsx +++ b/frontend/src/lib/taxonomy.tsx @@ -993,7 +993,7 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { }, numerical_event_properties: {}, // Same as event properties, see assignment below person_properties: {}, // Currently person properties are the same as event properties, see assignment below - sessions: { + session_properties: { $session_duration: { label: 'Session duration', description: ( @@ -1006,13 +1006,13 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { ), examples: ['01:04:12'], }, - $min_timestamp: { - label: 'First timestamp', + $start_timestamp: { + label: 'Start timestamp', description: The timestamp of the first event from this session., examples: [new Date().toISOString()], }, - $max_timestamp: { - label: 'Last timestamp', + $end_timestamp: { + label: 'End timestamp', description: The timestamp of the last event from this session, examples: [new Date().toISOString()], }, @@ -1022,7 +1022,7 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { examples: ['https://example.com/interesting-article?parameter=true'], }, $exit_url: { - label: 'Entry URL', + label: 'Exit URL', description: The last URL visited in this session, examples: ['https://example.com/interesting-article?parameter=true'], }, @@ -1036,7 +1036,7 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { description: The number of autocapture events in this session, examples: ['123'], }, - $initial_channel_type: { + $channel_type: { label: 'Channel type', description: What type of acquisition channel this traffic came from., examples: ['Paid Search', 'Organic Video', 'Direct'], @@ -1079,20 +1079,21 @@ for (const [key, value] of Object.entries(CORE_FILTER_DEFINITIONS_BY_GROUP.event CORE_FILTER_DEFINITIONS_BY_GROUP.person_properties[key] = value } if (SESSION_INITIAL_PROPERTIES_ADAPTED_FROM_EVENTS.has(key)) { - CORE_FILTER_DEFINITIONS_BY_GROUP.sessions[`$initial_${key.replace(/^\$/, '')}`] = { + CORE_FILTER_DEFINITIONS_BY_GROUP.session_properties[`$initial_${key.replace(/^\$/, '')}`] = { ...value, label: `Initial ${value.label}`, description: 'description' in value ? `${value.description} Data from the first event in this session.` : 'Data from the first event in this session.', + examples: 'examples' in value ? 
value.examples : undefined, } } } // We treat `$session_duration` as an event property in the context of series `math`, but it's fake in a sense CORE_FILTER_DEFINITIONS_BY_GROUP.event_properties.$session_duration = - CORE_FILTER_DEFINITIONS_BY_GROUP.sessions.$session_duration + CORE_FILTER_DEFINITIONS_BY_GROUP.session_properties.$session_duration export const PROPERTY_KEYS = Object.keys(CORE_FILTER_DEFINITIONS_BY_GROUP.event_properties) diff --git a/frontend/src/lib/utils.tsx b/frontend/src/lib/utils.tsx index ab32f34b314f3..20b8114375939 100644 --- a/frontend/src/lib/utils.tsx +++ b/frontend/src/lib/utils.tsx @@ -1597,6 +1597,8 @@ export function promiseResolveReject(): { return { resolve: resolve!, reject: reject!, promise } } +export type AsyncReturnType any> = T extends (...args: any) => Promise ? R : any + export function calculateDays(timeValue: number, timeUnit: TimeUnitType): number { if (timeUnit === TimeUnitType.Year) { return timeValue * 365 diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index 78bd3fe7110e4..0d2db1c6ccb7d 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -43,6 +43,7 @@ import { Resource, SessionPlayerData, SessionRecordingPlayerTab, + SessionRecordingType, SessionRecordingUsageType, Survey, } from '~/types' @@ -100,7 +101,7 @@ interface RecordingViewedProps { page_change_events_length: number recording_width?: number loadedFromBlobStorage: boolean - + snapshot_source: 'web' | 'mobile' | 'unknown' load_time: number // DEPRECATE: How much time it took to load the session (backend) (milliseconds) } @@ -355,8 +356,9 @@ export const eventUsageLogic = kea([ playerData: SessionPlayerData, durations: RecordingReportLoadTimes, type: SessionRecordingUsageType, + metadata: SessionRecordingType | null, delay?: number - ) => ({ playerData, durations, type, delay }), + ) => ({ playerData, durations, type, delay, metadata }), reportHelpButtonViewed: true, reportHelpButtonUsed: (help_type: HelpType) => ({ help_type }), reportRecordingsListFetched: (loadTime: number) => ({ @@ -406,6 +408,7 @@ export const eventUsageLogic = kea([ existingCohort, newCohort, }), + reportExperimentInsightLoadFailed: true, // Definition Popover reportDataManagementDefinitionHovered: (type: TaxonomicFilterGroupType) => ({ type }), reportDataManagementDefinitionClickView: (type: TaxonomicFilterGroupType) => ({ type }), @@ -454,6 +457,7 @@ export const eventUsageLogic = kea([ reportIngestionContinueWithoutVerifying: true, reportAutocaptureToggled: (autocapture_opt_out: boolean) => ({ autocapture_opt_out }), reportAutocaptureExceptionsToggled: (autocapture_opt_in: boolean) => ({ autocapture_opt_in }), + reportHeatmapsToggled: (heatmaps_opt_in: boolean) => ({ heatmaps_opt_in }), reportFailedToCreateFeatureFlagWithCohort: (code: string, detail: string) => ({ code, detail }), reportFeatureFlagCopySuccess: true, reportFeatureFlagCopyFailure: (error) => ({ error }), @@ -488,7 +492,7 @@ export const eventUsageLogic = kea([ reportSurveyViewed: (survey: Survey) => ({ survey, }), - reportSurveyCreated: (survey: Survey) => ({ survey }), + reportSurveyCreated: (survey: Survey, isDuplicate?: boolean) => ({ survey, isDuplicate }), reportSurveyEdited: (survey: Survey) => ({ survey }), reportSurveyLaunched: (survey: Survey) => ({ survey }), reportSurveyStopped: (survey: Survey) => ({ survey }), @@ -847,7 +851,7 @@ export const eventUsageLogic = kea([ reportSavedInsightNewInsightClicked: ({ insightType }) => { 
posthog.capture('saved insights new insight clicked', { insight_type: insightType }) }, - reportRecording: ({ playerData, durations, type }) => { + reportRecording: ({ playerData, durations, type, metadata }) => { // @ts-expect-error const eventIndex = new EventIndex(playerData?.snapshots || []) const payload: Partial = { @@ -862,6 +866,9 @@ export const eventUsageLogic = kea([ page_change_events_length: eventIndex.pageChangeEvents().length, recording_width: eventIndex.getRecordingScreenMetadata(0)[0]?.width, load_time: durations.firstPaint ?? 0, // TODO: DEPRECATED field. Keep around so dashboards don't break + // older recordings did not store this and so "null" is equivalent to web + // but for reporting we want to distinguish between not loaded and no value to load + snapshot_source: metadata?.snapshot_source || 'unknown', } posthog.capture(`recording ${type}`, payload) }, @@ -1015,6 +1022,9 @@ export const eventUsageLogic = kea([ id: newCohort.id, }) }, + reportExperimentInsightLoadFailed: () => { + posthog.capture('experiment load insight failed') + }, reportPropertyGroupFilterAdded: () => { posthog.capture('property group filter added') }, @@ -1094,6 +1104,11 @@ export const eventUsageLogic = kea([ autocapture_opt_in, }) }, + reportHeatmapsToggled: ({ heatmaps_opt_in }) => { + posthog.capture('heatmaps toggled', { + heatmaps_opt_in, + }) + }, reportFailedToCreateFeatureFlagWithCohort: ({ detail, code }) => { posthog.capture('failed to create feature flag with cohort', { detail, code }) }, @@ -1156,13 +1171,14 @@ export const eventUsageLogic = kea([ language, }) }, - reportSurveyCreated: ({ survey }) => { + reportSurveyCreated: ({ survey, isDuplicate }) => { posthog.capture('survey created', { name: survey.name, id: survey.id, survey_type: survey.type, questions_length: survey.questions.length, question_types: survey.questions.map((question) => question.type), + is_duplicate: isDuplicate ?? false, }) }, reportSurveyLaunched: ({ survey }) => { diff --git a/frontend/src/lib/utils/getAppContext.ts b/frontend/src/lib/utils/getAppContext.ts index 22bf16a073e21..f937abcb690a0 100644 --- a/frontend/src/lib/utils/getAppContext.ts +++ b/frontend/src/lib/utils/getAppContext.ts @@ -16,6 +16,11 @@ export function getDefaultEventName(): string { return getAppContext()?.default_event_name || PathType.PageView } +export function getDefaultEventLabel(): string { + const name = getDefaultEventName() + return name === PathType.PageView ? 'Pageview' : name === PathType.Screen ? 'Screen' : name +} + // NOTE: Any changes to the teamId trigger a full page load so we don't use the logic // This helps avoid circular imports export function getCurrentTeamId(): TeamType['id'] { diff --git a/frontend/src/loadPostHogJS.tsx b/frontend/src/loadPostHogJS.tsx index ccc3729114384..ed96f7dfccc32 100644 --- a/frontend/src/loadPostHogJS.tsx +++ b/frontend/src/loadPostHogJS.tsx @@ -43,6 +43,7 @@ export function loadPostHogJS(): void { capture_copied_text: true, }, process_person: 'identified_only', + // Helper to capture events for assertions in Cypress _onCapture: (window as any)._cypress_posthog_captures ? 
(_, event) => (window as any)._cypress_posthog_captures.push(event) diff --git a/frontend/src/mocks/fixtures/_billing_v2.tsx b/frontend/src/mocks/fixtures/_billing_v2.tsx index 51dbc7c7aae77..f90062884a11a 100644 --- a/frontend/src/mocks/fixtures/_billing_v2.tsx +++ b/frontend/src/mocks/fixtures/_billing_v2.tsx @@ -2423,7 +2423,7 @@ export const billingJson: BillingV2Type = { image_url: 'https://posthog.com/images/product/product-icons/integrations.svg', screenshot_url: null, icon_key: 'IconBolt', - docs_url: 'https://posthog.com/docs/apps', + docs_url: 'https://posthog.com/docs/cdp', subscribed: null, plans: [ { @@ -2432,7 +2432,7 @@ export const billingJson: BillingV2Type = { name: 'Free', description: 'Connect PostHog to your favorite tools.', image_url: 'https://posthog.com/images/product/product-icons/integrations.svg', - docs_url: 'https://posthog.com/docs/apps', + docs_url: 'https://posthog.com/docs/cdp', note: null, unit: null, free_allocation: null, @@ -2472,9 +2472,9 @@ export const billingJson: BillingV2Type = { }, { key: 'apps', - name: 'Apps', + name: 'Transformations', description: - 'Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)', + 'Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)', unit: null, limit: null, note: null, @@ -2492,7 +2492,7 @@ export const billingJson: BillingV2Type = { name: 'Paid', description: 'Connect PostHog to your favorite tools.', image_url: 'https://posthog.com/images/product/product-icons/integrations.svg', - docs_url: 'https://posthog.com/docs/apps', + docs_url: 'https://posthog.com/docs/cdp', note: null, unit: null, free_allocation: null, @@ -2532,17 +2532,18 @@ export const billingJson: BillingV2Type = { }, { key: 'apps', - name: 'Apps', + name: 'Transformations', description: - 'Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)', + 'Use transformations to filter or modify your incoming data. (Destinations not included, see the Data pipelines addon for product analytics.)', unit: null, limit: null, note: null, }, { key: 'app_metrics', - name: 'App metrics', - description: 'Get metrics on your apps to see their usage, reliability, and more.', + name: 'Transformation & destination metrics', + description: + 'Get metrics on your transformation and destination metrics to see their usage, reliability, and more.', unit: null, limit: null, note: null, @@ -2575,9 +2576,9 @@ export const billingJson: BillingV2Type = { features: [ { key: 'apps', - name: 'Apps', + name: 'Transformations', description: - 'Use apps to transform, filter, and modify your incoming data. (Export apps not included, see the Data pipelines addon for product analytics.)', + 'Use transformations to filter or modify your incoming data. 
(Destinations not included, see the Data pipelines addon for product analytics.)', images: null, icon_key: null, type: null, @@ -2617,8 +2618,9 @@ export const billingJson: BillingV2Type = { }, { key: 'app_metrics', - name: 'App metrics', - description: 'Get metrics on your apps to see their usage, reliability, and more.', + name: 'Transformation & destination metrics', + description: + 'Get metrics on your transformation and destination metrics to see their usage, reliability, and more.', images: null, icon_key: null, type: null, diff --git a/frontend/src/mocks/utils.ts b/frontend/src/mocks/utils.ts index a1d23f28969fb..e6bcda39be06a 100644 --- a/frontend/src/mocks/utils.ts +++ b/frontend/src/mocks/utils.ts @@ -32,12 +32,10 @@ export const mocksToHandlers = (mocks: Mocks): ReturnType<(typeof rest)['get']>[ return res(...responseArray) } else if (!response) { return res() - } else { - return response } - } else { - return res(ctx.json(handler ?? null)) + return response } + return res(ctx.json(handler ?? null)) }) ) }) diff --git a/frontend/src/models/groupsModel.ts b/frontend/src/models/groupsModel.ts index eb8babc316dc2..83a76fa89c1ab 100644 --- a/frontend/src/models/groupsModel.ts +++ b/frontend/src/models/groupsModel.ts @@ -83,11 +83,10 @@ export const groupsModel = kea([ singular: groupType.name_plural || groupType.group_type, plural: groupType.name_plural || `${groupType.group_type}(s)`, } - } else { - return { - singular: 'unknown group', - plural: 'unknown groups', - } + } + return { + singular: 'unknown group', + plural: 'unknown groups', } } return deferToUserWording diff --git a/frontend/src/models/propertyDefinitionsModel.ts b/frontend/src/models/propertyDefinitionsModel.ts index 338e60a5e956f..497a218b3eaaf 100644 --- a/frontend/src/models/propertyDefinitionsModel.ts +++ b/frontend/src/models/propertyDefinitionsModel.ts @@ -1,5 +1,5 @@ -import { actions, kea, listeners, path, reducers, selectors } from 'kea' -import api, { ApiMethodOptions } from 'lib/api' +import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea' +import api, { ApiMethodOptions, CountedPaginatedResponse } from 'lib/api' import { TaxonomicFilterValue } from 'lib/components/TaxonomicFilter/types' import { dayjs } from 'lib/dayjs' import { captureTimeToSeeData } from 'lib/internalMetrics' @@ -65,9 +65,8 @@ const getPropertyKey = ( ): string => { if (type === PropertyDefinitionType.Group) { return `${type}/${groupTypeIndex}/${propertyName}` - } else { - return `${type}/${propertyName}` } + return `${type}/${propertyName}` } /** Schedules an immediate background task, that fetches property definitions after a 10ms debounce. Returns the property sync if already found. */ @@ -95,8 +94,42 @@ const checkOrLoadPropertyDefinition = ( return null } +const getEndpoint = ( + teamId: number, + type: PropertyDefinitionType, + propertyKey: string, + eventNames: string[] | undefined, + newInput: string | undefined +): string => { + let eventParams = '' + for (const eventName of eventNames || []) { + eventParams += `&event_name=${eventName}` + } + + if (type === PropertyDefinitionType.Session) { + return ( + `api/projects/${teamId}/${type}s/values/?key=` + + encodeURIComponent(propertyKey) + + (newInput ? '&value=' + encodeURIComponent(newInput) : '') + + eventParams + ) + } + + return ( + 'api/' + + type + + '/values/?key=' + + encodeURIComponent(propertyKey) + + (newInput ? 
'&value=' + encodeURIComponent(newInput) : '') + + eventParams + ) +} + export const propertyDefinitionsModel = kea([ path(['models', 'propertyDefinitionsModel']), + connect({ + values: [teamLogic, ['currentTeamId']], + }), actions({ // public loadPropertyDefinitions: ( @@ -125,10 +158,12 @@ export const propertyDefinitionsModel = kea([ propertyDefinitionStorage: [ { ...localProperties } as PropertyDefinitionStorage, { - updatePropertyDefinitions: (state, { propertyDefinitions }) => ({ - ...state, - ...propertyDefinitions, - }), + updatePropertyDefinitions: (state, { propertyDefinitions }) => { + return { + ...state, + ...propertyDefinitions, + } + }, }, ], options: [ @@ -179,7 +214,7 @@ export const propertyDefinitionsModel = kea([ // take the first 50 pending properties to avoid the 4k query param length limit const allPending = values.pendingProperties.slice(0, 50) const pendingByType: Record< - 'event' | 'person' | 'group/0' | 'group/1' | 'group/2' | 'group/3' | 'group/4', + 'event' | 'person' | 'group/0' | 'group/1' | 'group/2' | 'group/3' | 'group/4' | 'session', string[] > = { event: [], @@ -189,6 +224,7 @@ export const propertyDefinitionsModel = kea([ 'group/2': [], 'group/3': [], 'group/4': [], + session: [], } for (const key of allPending) { let [type, ...rest] = key.split('/') @@ -226,10 +262,17 @@ export const propertyDefinitionsModel = kea([ } // and then fetch them - const propertyDefinitions = await api.propertyDefinitions.list({ - properties: pending, - ...queryParams, - }) + let propertyDefinitions: CountedPaginatedResponse + if (type === 'session') { + propertyDefinitions = await api.sessions.propertyDefinitions({ + properties: pending, + }) + } else { + propertyDefinitions = await api.propertyDefinitions.list({ + properties: pending, + ...queryParams, + }) + } for (const propertyDefinition of propertyDefinitions.results) { newProperties[`${type}/${propertyDefinition.name}`] = propertyDefinition @@ -268,10 +311,10 @@ export const propertyDefinitionsModel = kea([ }, loadPropertyValues: async ({ endpoint, type, newInput, propertyKey, eventNames }, breakpoint) => { - if (['cohort', 'session'].includes(type)) { + if (['cohort'].includes(type)) { return } - if (!propertyKey) { + if (!propertyKey || values.currentTeamId === null) { return } @@ -286,19 +329,8 @@ export const propertyDefinitionsModel = kea([ signal: cache.abortController.signal, } - let eventParams = '' - for (const eventName of eventNames || []) { - eventParams += `&event_name=${eventName}` - } - const propValues: PropValue[] = await api.get( - endpoint || - 'api/' + - type + - '/values/?key=' + - encodeURIComponent(propertyKey) + - (newInput ? '&value=' + encodeURIComponent(newInput) : '') + - eventParams, + endpoint || getEndpoint(values.currentTeamId, type, propertyKey, eventNames, newInput), methodOptions ) breakpoint() diff --git a/frontend/src/queries/QueryEditor/QueryEditor.tsx b/frontend/src/queries/QueryEditor/QueryEditor.tsx index dc7b44eb22e12..88026a6c7262c 100644 --- a/frontend/src/queries/QueryEditor/QueryEditor.tsx +++ b/frontend/src/queries/QueryEditor/QueryEditor.tsx @@ -16,6 +16,7 @@ export interface QueryEditorProps { query: string setQuery?: (query: string) => void className?: string + aboveButton?: JSX.Element context?: QueryContext } @@ -78,6 +79,7 @@ export function QueryEditor(props: QueryEditorProps): JSX.Element { Error parsing JSON: {error}
) : null} + {props.aboveButton} {time.toFixed(3)}s
            ))}
-            {timings.length > 0 ? (
+            {elapsedTime !== undefined && timings.length > 0 ? (
+ HTTP overhead
{(elapsedTime / 1000 - timings[timings.length - 1].t).toFixed(3)}s
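The arithmetic behind the new "+ HTTP overhead" row: each timing's t appears to be in seconds (judging by the division by 1000), while elapsedTime is the client-measured wall clock in milliseconds, so the overhead is the difference between the two; the added elapsedTime !== undefined guard keeps the row from rendering NaN before a response has arrived. A minimal sketch, with the timing shape assumed from the code above:

    function httpOverheadSeconds(elapsedTime: number | undefined, timings: { t: number }[]): number | null {
        if (elapsedTime === undefined || timings.length === 0) {
            return null // nothing to show yet - mirrors the new guard
        }
        return elapsedTime / 1000 - timings[timings.length - 1].t
    }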
diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index 744b1aa784aa1..56d11af1665b5 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -76,9 +76,8 @@ const concurrencyController = new ConcurrencyController(Infinity) const queryEqual = (a: DataNode, b: DataNode): boolean => { if (isInsightQueryNode(a) && isInsightQueryNode(b)) { return compareInsightQuery(a, b, true) - } else { - return objectsEqual(a, b) } + return objectsEqual(a, b) } /** Tests wether a query is valid to prevent unnecessary requests. */ @@ -86,9 +85,8 @@ const queryValid = (q: DataNode): boolean => { if (isFunnelsQuery(q)) { // funnels require at least two steps return q.series.length >= 2 - } else { - return true } + return true } export const dataNodeLogic = kea([ @@ -406,9 +404,8 @@ export const dataNodeLogic = kea([ if (firstTimestamp) { const nextQuery: EventsQuery = { ...query, after: firstTimestamp } return nextQuery - } else { - return query } + return query } } } diff --git a/frontend/src/queries/nodes/DataTable/DataTable.tsx b/frontend/src/queries/nodes/DataTable/DataTable.tsx index f178e451f8994..f0174514a1a47 100644 --- a/frontend/src/queries/nodes/DataTable/DataTable.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTable.tsx @@ -510,6 +510,7 @@ export function DataTable({ responseError ? ( sourceFeatures.has(QueryFeature.displayResponseError) ? ( ) : ( - + ) ) : ( ([ lastResult = result } return newResults - } else { - return results.map((result) => ({ result })) } + return results.map((result) => ({ result })) } } diff --git a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts index 3505204aed08a..6b3a38c01e93e 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts @@ -46,10 +46,9 @@ export const cleanGlobalProperties = ( values: [properties], } return cleanPropertyGroupFilter(properties) - } else { - // property group filter - return cleanPropertyGroupFilter(properties) } + // property group filter + return cleanPropertyGroupFilter(properties) } /** Cleans properties of entities i.e. event and action nodes. These are a simple list of property filters. 
*/ @@ -75,9 +74,8 @@ export const cleanEntityProperties = ( ) { // property group filter value return properties.values.map(cleanProperty) - } else { - throw new Error('Unexpected format of entity properties.') } + throw new Error('Unexpected format of entity properties.') } const cleanPropertyGroupFilter = (properties: Record): PropertyGroupFilter => { @@ -98,10 +96,9 @@ const cleanPropertyGroupFilterValue = ( // property group filter value property['values'] = cleanPropertyGroupFilterValues(property['values'] as PropertyGroupFilterValue[]) return property - } else { - // property filter - return cleanProperty(property) } + // property filter + return cleanProperty(property) } const cleanProperty = (property: Record): AnyPropertyFilter => { diff --git a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts index 46565f648bd34..ee5d2af61f5ce 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts @@ -143,13 +143,12 @@ export const legacyEntityToNode = ( id: entity.id, ...shared, }) as any - } else { - return objectCleanWithEmpty({ - kind: NodeKind.EventsNode, - event: entity.id, - ...shared, - }) as any } + return objectCleanWithEmpty({ + kind: NodeKind.EventsNode, + event: entity.id, + ...shared, + }) as any } export const exlusionEntityToNode = ( @@ -219,17 +218,15 @@ const processBool = (value: string | boolean | null | undefined): boolean | unde return value } else if (typeof value == 'string') { return strToBool(value) - } else { - return false } + return false } const strToBool = (value: any): boolean | undefined => { if (value == null) { return undefined - } else { - return ['y', 'yes', 't', 'true', 'on', '1'].includes(String(value).toLowerCase()) } + return ['y', 'yes', 't', 'true', 'on', '1'].includes(String(value).toLowerCase()) } export const filtersToQueryNode = (filters: Partial): InsightQueryNode => { diff --git a/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx b/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx index 8485b9a71ee84..351e82136133a 100644 --- a/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx +++ b/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx @@ -27,7 +27,7 @@ export function GlobalAndOrFilters({ insightProps }: EditorFilterProps): JSX.Ele ...groupsTaxonomicTypes, TaxonomicFilterGroupType.Cohorts, TaxonomicFilterGroupType.Elements, - ...(isTrends ? [TaxonomicFilterGroupType.Sessions] : []), + ...(isTrends ? [TaxonomicFilterGroupType.SessionProperties] : []), TaxonomicFilterGroupType.HogQLExpression, ...(featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE] && featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] ? 
[TaxonomicFilterGroupType.DataWarehousePersonProperties] diff --git a/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx b/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx index dd3cfbb43fb90..52bab0544ea96 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx +++ b/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx @@ -75,6 +75,7 @@ export function InsightVizDisplay({ erroredQueryId, timedOutQueryId, vizSpecificOptions, + query, } = useValues(insightVizDataLogic(insightProps)) const { exportContext } = useValues(insightDataLogic(insightProps)) @@ -92,7 +93,7 @@ export function InsightVizDisplay({ } if (validationError) { - return + return } // Insight specific empty states - note order is important here @@ -107,7 +108,7 @@ export function InsightVizDisplay({ // Insight agnostic empty states if (erroredQueryId) { - return + return } if (timedOutQueryId) { return ( diff --git a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx index 38b96c2162aca..dd75cd96a6f97 100644 --- a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx +++ b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx @@ -34,7 +34,7 @@ export function TrendsSeries(): JSX.Element | null { ...groupsTaxonomicTypes, TaxonomicFilterGroupType.Cohorts, TaxonomicFilterGroupType.Elements, - ...(isTrends ? [TaxonomicFilterGroupType.Sessions] : []), + ...(isTrends ? [TaxonomicFilterGroupType.SessionProperties] : []), TaxonomicFilterGroupType.HogQLExpression, TaxonomicFilterGroupType.DataWarehouseProperties, ...(featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE] && featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] diff --git a/frontend/src/queries/nodes/InsightViz/utils.ts b/frontend/src/queries/nodes/InsightViz/utils.ts index 6bb24f64cdad2..60d7aa143c0d2 100644 --- a/frontend/src/queries/nodes/InsightViz/utils.ts +++ b/frontend/src/queries/nodes/InsightViz/utils.ts @@ -43,9 +43,8 @@ export const getDisplay = (query: InsightQueryNode): ChartDisplayType | undefine return query.stickinessFilter?.display } else if (isTrendsQuery(query)) { return query.trendsFilter?.display - } else { - return undefined } + return undefined } export const getCompare = (query: InsightQueryNode): boolean | undefined => { @@ -53,41 +52,36 @@ export const getCompare = (query: InsightQueryNode): boolean | undefined => { return query.stickinessFilter?.compare } else if (isTrendsQuery(query)) { return query.trendsFilter?.compare - } else { - return undefined } + return undefined } export const getFormula = (query: InsightQueryNode): string | undefined => { if (isTrendsQuery(query)) { return query.trendsFilter?.formula - } else { - return undefined } + return undefined } export const getSeries = (query: InsightQueryNode): (EventsNode | ActionsNode | DataWarehouseNode)[] | undefined => { if (isInsightQueryWithSeries(query)) { return query.series - } else { - return undefined } + return undefined } export const getInterval = (query: InsightQueryNode): IntervalType | undefined => { if (isInsightQueryWithSeries(query)) { return query.interval - } else { - return undefined } + return undefined } export const getBreakdown = (query: InsightQueryNode): BreakdownFilter | undefined => { if (isInsightQueryWithBreakdown(query)) { return query.breakdownFilter - } else { - return undefined } + return undefined } export const getShowLegend = (query: InsightQueryNode): boolean | undefined => { @@ -95,9 +89,8 @@ export const getShowLegend = (query: InsightQueryNode): boolean | 
undefined => { return query.stickinessFilter?.showLegend } else if (isTrendsQuery(query)) { return query.trendsFilter?.showLegend - } else { - return undefined } + return undefined } export const getShowValueOnSeries = (query: InsightQueryNode): boolean | undefined => { @@ -107,25 +100,22 @@ export const getShowValueOnSeries = (query: InsightQueryNode): boolean | undefin return query.stickinessFilter?.showValuesOnSeries } else if (isTrendsQuery(query)) { return query.trendsFilter?.showValuesOnSeries - } else { - return undefined } + return undefined } export const getShowLabelsOnSeries = (query: InsightQueryNode): boolean | undefined => { if (isTrendsQuery(query)) { return query.trendsFilter?.showLabelsOnSeries - } else { - return undefined } + return undefined } export const getShowPercentStackView = (query: InsightQueryNode): boolean | undefined => { if (isTrendsQuery(query)) { return query.trendsFilter?.showPercentStackView - } else { - return undefined } + return undefined } export const getCachedResults = ( diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index b5e74046cffca..e6f9cf712271d 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -89,9 +89,8 @@ export function queryExportContext( session_end: query.source.sessionEnd ?? now().toISOString(), }, } - } else { - return { source: query } } + return { source: query } } const SYNC_ONLY_QUERY_KINDS = [ @@ -416,9 +415,8 @@ export function legacyInsightQueryURL({ filters, currentTeamId, refresh }: Legac return `api/projects/${currentTeamId}/insights/funnel/${refresh ? '?refresh=true' : ''}` } else if (isPathsFilter(filters)) { return `api/projects/${currentTeamId}/insights/path${refresh ? '?refresh=true' : ''}` - } else { - throw new Error(`Unsupported insight type: ${filters.insight}`) } + throw new Error(`Unsupported insight type: ${filters.insight}`) } export function legacyInsightQueryData({ @@ -469,9 +467,8 @@ export function legacyInsightQueryExportContext({ method: 'POST', body: filters, } - } else { - throw new Error(`Unsupported insight type: ${filters.insight}`) } + throw new Error(`Unsupported insight type: ${filters.insight}`) } export async function legacyInsightQuery({ diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 868156528ece8..1ef498660d74a 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -5667,6 +5667,9 @@ }, { "$ref": "#/definitions/PersonPropertyFilter" + }, + { + "$ref": "#/definitions/SessionPropertyFilter" } ] }, diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 01708078b175b..a923dfaec77cf 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -23,6 +23,7 @@ import { PropertyGroupFilter, PropertyMathType, RetentionFilterType, + SessionPropertyFilter, StickinessFilterType, TrendsFilterType, } from '~/types' @@ -985,7 +986,7 @@ export interface SessionsTimelineQuery extends DataNode { before?: string response?: SessionsTimelineQueryResponse } -export type WebAnalyticsPropertyFilter = EventPropertyFilter | PersonPropertyFilter +export type WebAnalyticsPropertyFilter = EventPropertyFilter | PersonPropertyFilter | SessionPropertyFilter export type WebAnalyticsPropertyFilters = WebAnalyticsPropertyFilter[] export interface WebAnalyticsQueryBase { diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index d59f284e09c96..fcc3d0eb8d555 100644 --- a/frontend/src/scenes/appScenes.ts +++ 
b/frontend/src/scenes/appScenes.ts @@ -25,6 +25,7 @@ export const appScenes: Record any> = { [Scene.ReplayFilePlayback]: () => import('./session-recordings/file-playback/SessionRecordingFilePlaybackScene'), [Scene.PersonsManagement]: () => import('./persons-management/PersonsManagementScene'), [Scene.Person]: () => import('./persons/PersonScene'), + [Scene.PipelineNodeNew]: () => import('./pipeline/PipelineNodeNew'), [Scene.Pipeline]: () => import('./pipeline/Pipeline'), [Scene.PipelineNode]: () => import('./pipeline/PipelineNode'), [Scene.Group]: () => import('./groups/Group'), diff --git a/frontend/src/scenes/apps/appMetricsSceneLogic.ts b/frontend/src/scenes/apps/appMetricsSceneLogic.ts index afcfec42c346e..f878f824da7b7 100644 --- a/frontend/src/scenes/apps/appMetricsSceneLogic.ts +++ b/frontend/src/scenes/apps/appMetricsSceneLogic.ts @@ -213,9 +213,8 @@ export const appMetricsSceneLogic = kea([ return '-24h' } else if (daysSinceInstall <= 7) { return '-7d' - } else { - return DEFAULT_DATE_FROM } + return DEFAULT_DATE_FROM }, ], @@ -258,9 +257,8 @@ export const appMetricsSceneLogic = kea([ capabilities.scheduled_tasks?.includes(method) ) ) - } else { - return !!capabilities.methods?.includes(tab) } + return !!capabilities.methods?.includes(tab) }, ], diff --git a/frontend/src/scenes/apps/frontendAppRequire.ts b/frontend/src/scenes/apps/frontendAppRequire.ts index 2714d4e0c20c0..2bbc74a38bb65 100644 --- a/frontend/src/scenes/apps/frontendAppRequire.ts +++ b/frontend/src/scenes/apps/frontendAppRequire.ts @@ -24,7 +24,6 @@ const packages = { export function frontendAppRequire(module: string): any { if (module in packages) { return packages[module] - } else { - throw new Error(`Cannot import from unknown module "${module}"`) } + throw new Error(`Cannot import from unknown module "${module}"`) } diff --git a/frontend/src/scenes/authentication/signup/SignupContainer.tsx b/frontend/src/scenes/authentication/signup/SignupContainer.tsx index 285602454fc10..c0ff60ccbb796 100644 --- a/frontend/src/scenes/authentication/signup/SignupContainer.tsx +++ b/frontend/src/scenes/authentication/signup/SignupContainer.tsx @@ -2,21 +2,14 @@ import { IconCheckCircle } from '@posthog/icons' import { useValues } from 'kea' import { router } from 'kea-router' import { BridgePage } from 'lib/components/BridgePage/BridgePage' -import { CustomerLogo } from 'lib/components/CustomerLogo' -import { CLOUD_HOSTNAMES, FEATURE_FLAGS } from 'lib/constants' +import { CLOUD_HOSTNAMES } from 'lib/constants' import { Link } from 'lib/lemon-ui/Link' -import { featureFlagLogic, FeatureFlagsSet } from 'lib/logic/featureFlagLogic' -import { ReactNode } from 'react' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { SceneExport } from 'scenes/sceneTypes' import { userLogic } from 'scenes/userLogic' import { Region } from '~/types' -import airbus from '../../../lib/customers/airbus.svg' -import hasura from '../../../lib/customers/hasura.svg' -import staples from '../../../lib/customers/staples.svg' -import yCombinator from '../../../lib/customers/y-combinator.svg' import { SignupForm } from './signupForm/SignupForm' export const scene: SceneExport = { @@ -52,83 +45,29 @@ export function SignupContainer(): JSX.Element | null { ) : null } -type ProductBenefit = { - benefit: string - description: string | ReactNode -} - -const getProductBenefits = (featureFlags: FeatureFlagsSet): ProductBenefit[] => { - const signupBenefitsFlag = featureFlags[FEATURE_FLAGS.SIGNUP_BENEFITS] - switch (signupBenefitsFlag) { - 
case 'generic-language': - return [ - { - benefit: 'Free usage every month - even on paid plans', - description: '1M free events, 5K free session recordings, and more. Every month. Forever.', - }, - { - benefit: 'Start collecting data immediately', - description: 'Integrate with developer-friendly APIs or a low-code web snippet.', - }, - { - benefit: 'Join industry leaders that run on PostHog', - description: - 'Airbus, Hasura, Y Combinator, Staples, and thousands more trust PostHog as their Product OS.', - }, - ] - case 'logos': - return [ - { - benefit: '1M events free every month', - description: 'Product analytics, feature flags, experiments, and more.', - }, - { - benefit: 'Start collecting events immediately', - description: 'Integrate with developer-friendly APIs or use our easy autocapture script.', - }, - { - benefit: 'Join industry leaders that run on PostHog', - description: ( -
- {[airbus, hasura, yCombinator, staples].map((company, i) => ( - - - - ))} -
- ), - }, - ] - default: - return [ - { - benefit: 'Free for 1M events every month', - description: 'Product analytics, feature flags, experiments, and more.', - }, - { - benefit: 'Start collecting events immediately', - description: 'Integrate with developer-friendly APIs or use our easy autocapture script.', - }, - { - benefit: 'Join industry leaders that run on PostHog', - description: - 'Airbus, Hasura, Y Combinator, Staples, and thousands more trust PostHog as their Product OS.', - }, - ] - } -} +const productBenefits = [ + { + benefit: 'Free usage every month - even on paid plans', + description: '1M free events, 5K free session recordings, and more. Every month. Forever.', + }, + { + benefit: 'Start collecting data immediately', + description: 'Integrate with developer-friendly APIs or a low-code web snippet.', + }, + { + benefit: 'Join industry leaders that run on PostHog', + description: 'Airbus, Hasura, Y Combinator, Staples, and thousands more trust PostHog as their Product OS.', + }, +] export function SignupLeftContainer(): JSX.Element { const { preflight } = useValues(preflightLogic) - const { featureFlags } = useValues(featureFlagLogic) const getRegionUrl = (region: string): string => { const { pathname, search, hash } = router.values.currentLocation return `https://${CLOUD_HOSTNAMES[region]}${pathname}${search}${hash}` } - const productBenefits = getProductBenefits(featureFlags) - return ( <>
diff --git a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts index 30c123256d81a..c574c17e62873 100644 --- a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts @@ -8,13 +8,13 @@ import { urls } from 'scenes/urls' import { BatchExportConfiguration, - BatchExportDestination, - BatchExportDestinationBigQuery, - BatchExportDestinationHTTP, - BatchExportDestinationPostgres, - BatchExportDestinationRedshift, - BatchExportDestinationS3, - BatchExportDestinationSnowflake, + BatchExportService, + BatchExportServiceBigQuery, + BatchExportServiceHTTP, + BatchExportServicePostgres, + BatchExportServiceRedshift, + BatchExportServiceS3, + BatchExportServiceSnowflake, Breadcrumb, } from '~/types' @@ -29,12 +29,12 @@ export type BatchExportConfigurationForm = Omit< BatchExportConfiguration, 'id' | 'destination' | 'start_at' | 'end_at' > & - Partial & - Partial & - Partial & - Partial & - Partial & - Partial & { + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & { destination: 'S3' | 'Snowflake' | 'Postgres' | 'BigQuery' | 'Redshift' | 'HTTP' start_at: Dayjs | null end_at: Dayjs | null @@ -159,36 +159,36 @@ export const batchExportsEditLogic = kea([ } as BatchExportConfigurationForm, errors: (form) => batchExportFormFields(props.id === 'new', form), submit: async ({ name, destination, interval, start_at, end_at, paused, ...config }) => { - const destinationObject: BatchExportDestination = + const destinationObject: BatchExportService = destination === 'Postgres' ? ({ type: 'Postgres', config: config, - } as unknown as BatchExportDestinationPostgres) + } as unknown as BatchExportServicePostgres) : destination === 'S3' ? ({ type: 'S3', config: config, - } as unknown as BatchExportDestinationS3) + } as unknown as BatchExportServiceS3) : destination === 'Redshift' ? ({ type: 'Redshift', config: config, - } as unknown as BatchExportDestinationRedshift) + } as unknown as BatchExportServiceRedshift) : destination === 'BigQuery' ? ({ type: 'BigQuery', config: config, - } as unknown as BatchExportDestinationBigQuery) + } as unknown as BatchExportServiceBigQuery) : destination === 'HTTP' ? 
({ type: 'HTTP', config: config, - } as unknown as BatchExportDestinationHTTP) + } as unknown as BatchExportServiceHTTP) : ({ type: 'Snowflake', config: config, - } as unknown as BatchExportDestinationSnowflake) + } as unknown as BatchExportServiceSnowflake) const data: Omit = { paused, diff --git a/frontend/src/scenes/batch_exports/utils.ts b/frontend/src/scenes/batch_exports/utils.ts index e9d5667d4ed34..4108bcb359a47 100644 --- a/frontend/src/scenes/batch_exports/utils.ts +++ b/frontend/src/scenes/batch_exports/utils.ts @@ -1,7 +1,7 @@ import { useValues } from 'kea' import { userLogic } from 'scenes/userLogic' -import { AvailableFeature, BatchExportConfiguration, BatchExportDestination, BatchExportRun } from '~/types' +import { AvailableFeature, BatchExportConfiguration, BatchExportRun, BatchExportService } from '~/types' export function intervalToFrequency(interval: BatchExportConfiguration['interval']): string { return { @@ -15,7 +15,7 @@ export function isRunInProgress(run: BatchExportRun): boolean { return ['Running', 'Starting'].includes(run.status) } -export function humanizeDestination(destination: BatchExportDestination): string { +export function humanizeDestination(destination: BatchExportService): string { if (destination.type === 'S3') { return `s3://${destination.config.bucket_name}/${destination.config.prefix}` } diff --git a/frontend/src/scenes/billing/Billing.tsx b/frontend/src/scenes/billing/Billing.tsx index e418e66c52309..08afa0d112860 100644 --- a/frontend/src/scenes/billing/Billing.tsx +++ b/frontend/src/scenes/billing/Billing.tsx @@ -1,23 +1,19 @@ import './Billing.scss' -import { IconCheckCircle, IconPlus } from '@posthog/icons' +import { IconCheckCircle } from '@posthog/icons' import { LemonButton, LemonDivider, LemonInput, Link } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { Field, Form } from 'kea-forms' import { router } from 'kea-router' -import { BillingUpgradeCTA } from 'lib/components/BillingUpgradeCTA' import { SurprisedHog } from 'lib/components/hedgehogs' -import { PageHeader } from 'lib/components/PageHeader' import { supportLogic } from 'lib/components/Support/supportLogic' -import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { useResizeBreakpoints } from 'lib/hooks/useResizeObserver' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonLabel } from 'lib/lemon-ui/LemonLabel/LemonLabel' import { SpinnerOverlay } from 'lib/lemon-ui/Spinner/Spinner' import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { useEffect } from 'react' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { SceneExport } from 'scenes/sceneTypes' @@ -32,26 +28,19 @@ export const scene: SceneExport = { logic: billingLogic, } -export function BillingPageHeader(): JSX.Element { - return -} - export function Billing(): JSX.Element { const { billing, billingLoading, - redirectPath, isOnboarding, showLicenseDirectInput, isActivateLicenseSubmitting, - isUnlicensedDebug, over20kAnnual, isAnnualPlan, } = useValues(billingLogic) const { reportBillingV2Shown } = useActions(billingLogic) const { preflight, isCloudOrDev } = useValues(preflightLogic) const { openSupportForm } = useActions(supportLogic) - const { featureFlags } = useValues(featureFlagLogic) if (preflight && !isCloudOrDev) { router.actions.push(urls.default()) @@ -71,7 +60,6 @@ export function Billing(): JSX.Element { if (!billing && 
billingLoading) { return ( <> - ) @@ -80,7 +68,6 @@ export function Billing(): JSX.Element { if (!billing && !billingLoading) { return (
- {!isOnboarding && } { 'There was an issue retrieving your current billing information. If this message persists, please ' @@ -99,46 +86,8 @@ export function Billing(): JSX.Element { } const products = billing?.products - const getUpgradeAllProductsLink = (): string => { - if (!products) { - return '' - } - let url = '/api/billing-v2/activation?products=' - let productsToUpgrade = '' - for (const product of products) { - if (product.subscribed || product.contact_support || product.inclusion_only) { - continue - } - const currentPlanIndex = product.plans.findIndex((plan) => plan.current_plan) - const upgradePlanKey = isUnlicensedDebug - ? product.plans?.[product.plans?.length - 1].plan_key - : product.plans?.[currentPlanIndex + 1]?.plan_key - if (!upgradePlanKey) { - continue - } - productsToUpgrade += `${product.type}:${upgradePlanKey},` - if (product.addons?.length) { - for (const addon of product.addons) { - productsToUpgrade += `${addon.type}:${addon.plans[0].plan_key},` - } - } - } - // remove the trailing comma that will be at the end of the url - if (!productsToUpgrade) { - return '' - } - url += productsToUpgrade.slice(0, -1) - if (redirectPath) { - url += `&redirect_path=${redirectPath}` - } - return url - } - - const upgradeAllProductsLink = getUpgradeAllProductsLink() - return (
- {!isOnboarding && } {showLicenseDirectInput && ( <>
@@ -313,24 +262,6 @@ export function Billing(): JSX.Element {

Products

- {isOnboarding && upgradeAllProductsLink && ( - } - to={upgradeAllProductsLink} - disableClientSideRouting - > - {featureFlags[FEATURE_FLAGS.BILLING_UPGRADE_LANGUAGE] === 'subscribe' - ? 'Subscribe to all' - : featureFlags[FEATURE_FLAGS.BILLING_UPGRADE_LANGUAGE] === 'credit_card' && - !billing?.has_active_subscription - ? 'Add credit card to all products' - : featureFlags[FEATURE_FLAGS.BILLING_UPGRADE_LANGUAGE] === 'credit_card' && - billing?.has_active_subscription - ? 'Add all products to plan' - : 'Upgrade to all'}{' '} - - )}
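The removed "upgrade all" CTA built its target out of the activation endpoint that the per-product upgrade buttons appear to still use (their redirect_path handling is visible just below): a comma-separated products=<product type>:<plan key> list plus an optional redirect_path. A condensed sketch of that URL construction, as an illustrative helper rather than anything from the codebase; the deleted version above additionally handled add-ons and skipped inclusion_only/contact_support products:

    function activationUrl(products: { type: string; planKey: string }[], redirectPath?: string): string {
        const productsParam = products.map((p) => `${p.type}:${p.planKey}`).join(',')
        return `/api/billing-v2/activation?products=${productsParam}${redirectPath ? `&redirect_path=${redirectPath}` : ''}`
    }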
diff --git a/frontend/src/scenes/billing/BillingProduct.tsx b/frontend/src/scenes/billing/BillingProduct.tsx index b7023ee15df01..8e30240eda1e6 100644 --- a/frontend/src/scenes/billing/BillingProduct.tsx +++ b/frontend/src/scenes/billing/BillingProduct.tsx @@ -42,8 +42,10 @@ export const getTierDescription = ( export const BillingProductAddon = ({ addon }: { addon: BillingProductV2AddonType }): JSX.Element => { const { billing, redirectPath } = useValues(billingLogic) - const { isPricingModalOpen, currentAndUpgradePlans, surveyID } = useValues(billingProductLogic({ product: addon })) - const { toggleIsPricingModalOpen, reportSurveyShown, setSurveyResponse } = useActions( + const { isPricingModalOpen, currentAndUpgradePlans, surveyID, billingProductLoading } = useValues( + billingProductLogic({ product: addon }) + ) + const { toggleIsPricingModalOpen, reportSurveyShown, setSurveyResponse, setBillingProductLoading } = useActions( billingProductLogic({ product: addon }) ) const { featureFlags } = useValues(featureFlagLogic) @@ -168,6 +170,10 @@ export const BillingProductAddon = ({ addon }: { addon: BillingProductV2AddonTyp currentAndUpgradePlans?.upgradePlan?.plan_key }${redirectPath && `&redirect_path=${redirectPath}`}`} disableClientSideRouting + loading={billingProductLoading === addon.type} + onClick={() => { + setBillingProductLoading(addon.type) + }} > Add @@ -201,6 +207,7 @@ export const BillingProduct = ({ product }: { product: BillingProductV2Type }): isPlanComparisonModalOpen, currentAndUpgradePlans, surveyID, + billingProductLoading, } = useValues(billingProductLogic({ product })) const { setIsEditingBillingLimit, @@ -209,6 +216,7 @@ export const BillingProduct = ({ product }: { product: BillingProductV2Type }): toggleIsPlanComparisonModalOpen, reportSurveyShown, setSurveyResponse, + setBillingProductLoading, } = useActions(billingProductLogic({ product, productRef })) const { reportBillingUpgradeClicked } = useActions(eventUsageLogic) @@ -707,8 +715,10 @@ export const BillingProduct = ({ product }: { product: BillingProductV2Type }): type="primary" icon={} disableClientSideRouting + loading={billingProductLoading === product.type} onClick={() => { reportBillingUpgradeClicked(product.type) + setBillingProductLoading(product.type) }} className="grow" center diff --git a/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx b/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx index 096713fc5445c..cb1b860f1af48 100644 --- a/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx +++ b/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx @@ -13,8 +13,9 @@ export const UnsubscribeSurveyModal = ({ product: BillingProductV2Type | BillingProductV2AddonType }): JSX.Element | null => { const { surveyID, surveyResponse } = useValues(billingProductLogic({ product })) - const { setSurveyResponse, reportSurveySent, reportSurveyDismissed } = useActions(billingProductLogic({ product })) + const { setSurveyResponse, reportSurveyDismissed } = useActions(billingProductLogic({ product })) const { deactivateProduct } = useActions(billingLogic) + const { unsubscribeError, billingLoading } = useValues(billingLogic) const { unsubscribeDisabledReason, itemsToDisable } = useValues(exportsUnsubscribeTableLogic) const textAreaNotEmpty = surveyResponse['$survey_response']?.length > 0 @@ -45,11 +46,9 @@ export const UnsubscribeSurveyModal = ({ type={textAreaNotEmpty ? 'primary' : 'secondary'} disabledReason={includesPipelinesAddon && unsubscribeDisabledReason} onClick={() => { - textAreaNotEmpty - ? 
reportSurveySent(surveyID, surveyResponse) - : reportSurveyDismissed(surveyID) deactivateProduct(product.type) }} + loading={billingLoading} > Unsubscribe @@ -57,6 +56,13 @@ export const UnsubscribeSurveyModal = ({ } >
+ {unsubscribeError && ( + +

+ {unsubscribeError.detail} {unsubscribeError.link} +

+
+ )} chat with support - {product.type === 'session_replay' && ( <> {', or '} @@ -103,6 +108,7 @@ export const UnsubscribeSurveyModal = ({ {' for tuning recording volume with sampling and minimum duration.'} )} + .
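A note on the pattern the modal hunks above and the billingLogic hunks below share: rather than letting the deactivateProduct loader reject, the logic traps the API error, stores it in an unsubscribeError reducer, and returns the previous billing value so the scene keeps rendering while the modal shows the error. A minimal, self-contained sketch of that shape; the logic name, the deactivateProductRequest helper, and the trimmed-down types are illustrative stand-ins, not PostHog's real API surface:

import { actions, kea, path, reducers } from 'kea'
import { loaders } from 'kea-loaders'

interface UnsubscribeError {
    detail: string
}

interface BillingState {
    products: { type: string }[]
}

// Stand-in for the real request helper in lib/api; it rethrows the JSON error body
// so the caller can inspect `detail`.
async function deactivateProductRequest(productKey: string): Promise<BillingState> {
    const response = await fetch(`/api/billing-v2/deactivate?products=${productKey}`)
    if (!response.ok) {
        throw Object.assign(new Error('deactivate failed'), await response.json())
    }
    return response.json()
}

export const unsubscribeSketchLogic = kea([
    path(['scenes', 'billing', 'unsubscribeSketchLogic']),
    actions({
        setUnsubscribeError: (error: UnsubscribeError | null) => ({ error }),
    }),
    reducers({
        unsubscribeError: [
            null as UnsubscribeError | null,
            {
                setUnsubscribeError: (_, { error }) => error,
            },
        ],
    }),
    loaders(({ actions, values }) => ({
        billing: [
            null as BillingState | null,
            {
                deactivateProduct: async (productKey: string) => {
                    actions.setUnsubscribeError(null)
                    try {
                        return await deactivateProductRequest(productKey)
                    } catch (error: any) {
                        // Surface the failure to the UI instead of rejecting the loader...
                        actions.setUnsubscribeError({
                            detail: error.detail || 'We encountered a problem. Please try again.',
                        })
                        // ...and keep the previous state so the page does not blank out.
                        return values.billing
                    }
                },
            },
        ],
    })),
])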

diff --git a/frontend/src/scenes/billing/billing-utils.ts b/frontend/src/scenes/billing/billing-utils.ts index bf690d0174a0c..1ad1a6140b0a1 100644 --- a/frontend/src/scenes/billing/billing-utils.ts +++ b/frontend/src/scenes/billing/billing-utils.ts @@ -9,9 +9,8 @@ export const summarizeUsage = (usage: number | null): string => { return `${usage}` } else if (Math.round(usage / 1000) < 1000) { return `${Math.round(usage / 1000)} thousand` - } else { - return `${Math.round(usage / 1000000)} million` } + return `${Math.round(usage / 1000000)} million` } export const projectUsage = ( diff --git a/frontend/src/scenes/billing/billingLogic.ts b/frontend/src/scenes/billing/billingLogic.tsx similarity index 88% rename from frontend/src/scenes/billing/billingLogic.ts rename to frontend/src/scenes/billing/billingLogic.tsx index 67e5551dae55b..28179a26d9ef6 100644 --- a/frontend/src/scenes/billing/billingLogic.ts +++ b/frontend/src/scenes/billing/billingLogic.tsx @@ -1,9 +1,9 @@ -import { lemonToast } from '@posthog/lemon-ui' +import { lemonToast, Link } from '@posthog/lemon-ui' import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea' import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import { router, urlToAction } from 'kea-router' -import api from 'lib/api' +import api, { getJSONOrNull } from 'lib/api' import { dayjs } from 'lib/dayjs' import { LemonBannerAction } from 'lib/lemon-ui/LemonBanner/LemonBanner' import { lemonBannerLogic } from 'lib/lemon-ui/LemonBanner/lemonBannerLogic' @@ -33,6 +33,11 @@ export interface BillingAlertConfig { onClose?: () => void } +export interface UnsubscribeError { + detail: string | JSX.Element + link: JSX.Element +} + const parseBillingResponse = (data: Partial): BillingV2Type => { if (data.billing_period) { data.billing_period = { @@ -70,6 +75,8 @@ export const billingLogic = kea([ setRedirectPath: true, setIsOnboarding: true, determineBillingAlert: true, + setUnsubscribeError: (error: null | UnsubscribeError) => ({ error }), + resetUnsubscribeError: true, setBillingAlert: (billingAlert: BillingAlertConfig | null) => ({ billingAlert }), }), connect(() => ({ @@ -126,8 +133,15 @@ export const billingLogic = kea([ setIsOnboarding: () => window.location.pathname.includes('/onboarding'), }, ], + unsubscribeError: [ + null as null | UnsubscribeError, + { + resetUnsubscribeError: () => null, + setUnsubscribeError: (_, { error }) => error, + }, + ], }), - loaders(({ actions }) => ({ + loaders(({ actions, values }) => ({ billing: [ null as BillingV2Type | null, { @@ -145,10 +159,34 @@ export const billingLogic = kea([ }, deactivateProduct: async (key: string) => { - const response = await api.get('api/billing-v2/deactivate?products=' + key) - lemonToast.success('Product unsubscribed') - actions.reportProductUnsubscribed(key) - return parseBillingResponse(response) + actions.resetUnsubscribeError() + try { + const response = await api.getResponse('api/billing-v2/deactivate?products=' + key) + const jsonRes = await getJSONOrNull(response) + lemonToast.success('Product unsubscribed') + actions.reportProductUnsubscribed(key) + return parseBillingResponse(jsonRes) + } catch (error: any) { + if (error.detail && error.detail.includes('open invoice')) { + actions.setUnsubscribeError({ + detail: error.detail, + link: ( + + View invoices + + ), + } as UnsubscribeError) + } else { + actions.setUnsubscribeError({ + detail: + error.detail || + `We encountered a problem. 
Please try again or submit a support ticket.`, + } as UnsubscribeError) + } + console.error(error) + // This is a bit of a hack to prevent the page from re-rendering. + return values.billing + } }, }, ], diff --git a/frontend/src/scenes/billing/billingProductLogic.ts b/frontend/src/scenes/billing/billingProductLogic.ts index 7655bef5c99a4..ef2630234c75c 100644 --- a/frontend/src/scenes/billing/billingProductLogic.ts +++ b/frontend/src/scenes/billing/billingProductLogic.ts @@ -24,7 +24,7 @@ export const billingProductLogic = kea([ key((props) => props.product.type), path(['scenes', 'billing', 'billingProductLogic']), connect({ - values: [billingLogic, ['billing', 'isUnlicensedDebug', 'scrollToProductKey']], + values: [billingLogic, ['billing', 'isUnlicensedDebug', 'scrollToProductKey', 'unsubscribeError']], actions: [ billingLogic, [ @@ -34,6 +34,7 @@ export const billingProductLogic = kea([ 'deactivateProduct', 'setProductSpecificAlert', 'setScrollToProductKey', + 'deactivateProductSuccess', ], ], }), @@ -52,6 +53,7 @@ export const billingProductLogic = kea([ }), reportSurveyDismissed: (surveyID: string) => ({ surveyID }), setSurveyID: (surveyID: string) => ({ surveyID }), + setBillingProductLoading: (productKey: string) => ({ productKey }), }), reducers({ billingLimitInput: [ @@ -108,6 +110,12 @@ export const billingProductLogic = kea([ setSurveyID: (_, { surveyID }) => surveyID, }, ], + billingProductLoading: [ + null as string | null, + { + setBillingProductLoading: (_, { productKey }) => productKey, + }, + ], comparisonModalHighlightedFeatureKey: [ null as string | null, { @@ -251,6 +259,14 @@ export const billingProductLogic = kea([ }) actions.setSurveyID('') }, + deactivateProductSuccess: () => { + if (!values.unsubscribeError) { + const textAreaNotEmpty = values.surveyResponse['$survey_response']?.length > 0 + textAreaNotEmpty + ? actions.reportSurveySent(values.surveyID, values.surveyResponse) + : actions.reportSurveyDismissed(values.surveyID) + } + }, setScrollToProductKey: ({ scrollToProductKey }) => { if (scrollToProductKey && scrollToProductKey === props.product.type) { const { currentPlan } = values.currentAndUpgradePlans diff --git a/frontend/src/scenes/dashboard/Dashboard.tsx b/frontend/src/scenes/dashboard/Dashboard.tsx index 2208e58611c07..26f869a6cbe6c 100644 --- a/frontend/src/scenes/dashboard/Dashboard.tsx +++ b/frontend/src/scenes/dashboard/Dashboard.tsx @@ -96,7 +96,7 @@ function DashboardScene(): JSX.Element { [setDashboardMode, dashboardMode, placement] ) - if (!dashboard && !itemsLoading && receivedErrorsFromAPI) { + if (!dashboard && !itemsLoading && !receivedErrorsFromAPI) { return } diff --git a/frontend/src/scenes/dashboard/NewDashboardModal.tsx b/frontend/src/scenes/dashboard/NewDashboardModal.tsx index 6c20019690db9..2d58b0cbe653d 100644 --- a/frontend/src/scenes/dashboard/NewDashboardModal.tsx +++ b/frontend/src/scenes/dashboard/NewDashboardModal.tsx @@ -34,6 +34,7 @@ export function NewDashboardModal(): JSX.Element { onClose={hideNewDashboardModal} isOpen={newDashboardModalVisible} title={activeDashboardTemplate ? 'Choose your events' : 'Create a dashboard'} + data-attr="new-dashboard-chooser" description={ activeDashboardTemplate ? (

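Related to the billingProductLogic hunk above: kea-loaders auto-generates `<action>Success` and `<action>Failure` actions for every loader action, so a connected logic can wait for the unsubscribe round-trip to finish and only then decide whether to report the survey. A rough sketch of that wiring, reusing the hypothetical logic from the previous sketch:

import { connect, kea, listeners, path } from 'kea'

// Assumed to be the sketch logic above, exposing unsubscribeError and the
// auto-generated deactivateProductSuccess action.
import { unsubscribeSketchLogic } from './unsubscribeSketchLogic'

export const productSketchLogic = kea([
    path(['scenes', 'billing', 'productSketchLogic']),
    connect({
        values: [unsubscribeSketchLogic, ['unsubscribeError']],
        actions: [unsubscribeSketchLogic, ['deactivateProductSuccess']],
    }),
    listeners(({ values }) => ({
        deactivateProductSuccess: () => {
            // Only report the survey when the unsubscribe actually went through.
            if (!values.unsubscribeError) {
                console.info('report survey sent or dismissed here')
            }
        },
    })),
])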
diff --git a/frontend/src/scenes/dashboard/dashboardLogic.test.ts b/frontend/src/scenes/dashboard/dashboardLogic.test.ts index cc2b95aa10379..8894985af483b 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.test.ts +++ b/frontend/src/scenes/dashboard/dashboardLogic.test.ts @@ -79,9 +79,8 @@ export const boxToString = (param: string | readonly string[]): string => { //path params from msw can be a string or an array if (typeof param === 'string') { return param - } else { - throw new Error("this shouldn't be an array") } + throw new Error("this shouldn't be an array") } const insight800 = (): InsightModel => ({ diff --git a/frontend/src/scenes/dashboard/dashboardLogic.tsx b/frontend/src/scenes/dashboard/dashboardLogic.tsx index 4a9c3a861a3a8..c21233b7b9a52 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.tsx +++ b/frontend/src/scenes/dashboard/dashboardLogic.tsx @@ -216,7 +216,7 @@ export const dashboardLogic = kea([ return dashboard } catch (error: any) { if (error.status === 404) { - throw new Error('Dashboard not found') + return null } throw error } diff --git a/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts b/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts index 60a039058f4dc..583f1ae7c9a25 100644 --- a/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts +++ b/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts @@ -44,9 +44,8 @@ export const dashboardTemplateEditorLogic = kea { if (!markers || markers.length === 0) { return [] - } else { - return markers.map((marker: MonacoMarker) => marker.message) } + return markers.map((marker: MonacoMarker) => marker.message) }, clear: () => [], }, diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.ts b/frontend/src/scenes/dashboard/newDashboardLogic.ts index d63a829f5b0f6..264c4b5af135d 100644 --- a/frontend/src/scenes/dashboard/newDashboardLogic.ts +++ b/frontend/src/scenes/dashboard/newDashboardLogic.ts @@ -89,6 +89,7 @@ export const newDashboardLogic = kea([ hideNewDashboardModal: () => false, submitNewDashboardSuccess: () => false, submitNewDashboardFailure: () => false, + clearActiveDashboardTemplate: () => false, }, ], newDashboardModalVisible: [ diff --git a/frontend/src/scenes/dashboard/tileLayouts.ts b/frontend/src/scenes/dashboard/tileLayouts.ts index 1b1e89a67886b..c0e7e58cdd2b9 100644 --- a/frontend/src/scenes/dashboard/tileLayouts.ts +++ b/frontend/src/scenes/dashboard/tileLayouts.ts @@ -15,9 +15,8 @@ export const sortTilesByLayout = (tiles: Array, col: DashboardLay return -1 } else if (ay > by || (ay == by && ax > bx)) { return 1 - } else { - return 0 } + return 0 }) } export const calculateLayouts = (tiles: DashboardTile[]): Partial> => { diff --git a/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts b/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts index 551ce197e46b7..fae3e0e4d1028 100644 --- a/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts +++ b/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts @@ -176,9 +176,9 @@ export const dataWarehouseSceneLogic = kea([ selectRow: () => { actions.setIsEditingSavedQuery(false) }, - updateDataWarehouseSavedQuerySuccess: async (_, view) => { + updateDataWarehouseSavedQuerySuccess: async ({ payload }) => { actions.setIsEditingSavedQuery(false) - lemonToast.success(`${view.name} successfully updated`) + lemonToast.success(`${payload?.name ?? 
'View'} successfully updated`) }, })), afterMount(({ actions, values }) => { diff --git a/frontend/src/scenes/data-warehouse/external/forms/SyncProgressStep.tsx b/frontend/src/scenes/data-warehouse/external/forms/SyncProgressStep.tsx index 2432f349e3040..1e58314fdb862 100644 --- a/frontend/src/scenes/data-warehouse/external/forms/SyncProgressStep.tsx +++ b/frontend/src/scenes/data-warehouse/external/forms/SyncProgressStep.tsx @@ -3,28 +3,31 @@ import { useValues } from 'kea' import { sourceWizardLogic } from 'scenes/data-warehouse/new/sourceWizardLogic' import { dataWarehouseSettingsLogic } from 'scenes/data-warehouse/settings/dataWarehouseSettingsLogic' +import { ExternalDataSourceSchema } from '~/types' + export const SyncProgressStep = (): JSX.Element => { - const { databaseSchema, sourceId } = useValues(sourceWizardLogic) + const { sourceId } = useValues(sourceWizardLogic) const { dataWarehouseSources, dataWarehouseSourcesLoading } = useValues(dataWarehouseSettingsLogic) const source = dataWarehouseSources?.results.find((n) => n.id === sourceId) + const schemas = source?.schemas ?? [] - const getSyncStatus = (shouldSync: boolean): { status: string; tagType: LemonTagType } => { - if (!shouldSync) { + const getSyncStatus = (schema: ExternalDataSourceSchema): { status: string; tagType: LemonTagType } => { + if (!schema.should_sync) { return { status: 'Not synced', tagType: 'default', } } - if (!source || source.status === 'Running') { + if (schema.status === 'Running') { return { status: 'Syncing...', tagType: 'primary', } } - if (source.status === 'Completed') { + if (schema.status === 'Completed') { return { status: 'Completed', tagType: 'success', @@ -42,7 +45,7 @@ export const SyncProgressStep = (): JSX.Element => {

{ title: 'Table', key: 'table', render: function RenderTable(_, schema) { - return schema.table + return schema.name }, }, { title: 'Status', key: 'status', render: function RenderStatus(_, schema) { - const { status, tagType } = getSyncStatus(schema.should_sync) + const { status, tagType } = getSyncStatus(schema) return {status} }, diff --git a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx index 2f400d9e14872..7ff85f473dd8f 100644 --- a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx +++ b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx @@ -448,6 +448,7 @@ export const sourceWizardLogic = kea([ lemonToast.success('New Data Resource Created') actions.setSourceId(id) actions.resetSourceConnectionDetails() + actions.loadSources(null) actions.onNext() } catch (e: any) { lemonToast.error(e.data?.message ?? e.message) diff --git a/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx b/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx index 682cd9b5a4594..241bfe11e1a28 100644 --- a/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx +++ b/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx @@ -1,5 +1,5 @@ import { TZLabel } from '@posthog/apps-common' -import { LemonButton, LemonDialog, LemonSwitch, LemonTable, LemonTag, Link, Spinner } from '@posthog/lemon-ui' +import { LemonButton, LemonDialog, LemonSwitch, LemonTable, LemonTag, Link, Spinner, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { PageHeader } from 'lib/components/PageHeader' import { More } from 'lib/lemon-ui/LemonButton/More' @@ -181,7 +181,8 @@ interface SchemaTableProps { } const SchemaTable = ({ schemas }: SchemaTableProps): JSX.Element => { - const { updateSchema } = useActions(dataWarehouseSettingsLogic) + const { updateSchema, reloadSchema, resyncSchema } = useActions(dataWarehouseSettingsLogic) + const { schemaReloadingById } = useValues(dataWarehouseSettingsLogic) return ( { title: 'Schema Name', key: 'name', render: function RenderName(_, schema) { - return schema.name + return {schema.name} + }, + }, + { + title: 'Refresh Type', + key: 'incremental', + render: function RenderIncremental(_, schema) { + return schema.incremental ? ( + + Incremental + + ) : ( + + Full Refresh + + ) }, }, { @@ -236,6 +252,17 @@ const SchemaTable = ({ schemas }: SchemaTableProps): JSX.Element => { } }, }, + { + title: 'Status', + key: 'status', + render: function RenderStatus(_, schema) { + if (!schema.status) { + return null + } + + return {schema.status} + }, + }, { title: 'Last Synced At', key: 'last_synced_at', @@ -254,6 +281,55 @@ const SchemaTable = ({ schemas }: SchemaTableProps): JSX.Element => { return schema.table?.row_count ?? '' }, }, + { + key: 'actions', + width: 0, + render: function RenderActions(_, schema) { + if (schemaReloadingById[schema.id]) { + return ( +
+ +
+ ) + } + + return ( +
+
+ + { + reloadSchema(schema) + }} + > + Reload + + {schema.incremental && ( + + { + resyncSchema(schema) + }} + status="danger" + > + Resync + + + )} + + } + /> +
+
+ ) + }, + }, ]} /> ) diff --git a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts index fa8b151f19388..a9cd46d0360da 100644 --- a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts @@ -18,7 +18,10 @@ export const dataWarehouseSettingsLogic = kea([ actions({ deleteSource: (source: ExternalDataStripeSource) => ({ source }), reloadSource: (source: ExternalDataStripeSource) => ({ source }), - loadingFinished: (source: ExternalDataStripeSource) => ({ source }), + reloadSchema: (schema: ExternalDataSourceSchema) => ({ schema }), + resyncSchema: (schema: ExternalDataSourceSchema) => ({ schema }), + sourceLoadingFinished: (source: ExternalDataStripeSource) => ({ source }), + schemaLoadingFinished: (schema: ExternalDataSourceSchema) => ({ schema }), updateSchema: (schema: ExternalDataSourceSchema) => ({ schema }), abortAnyRunningQuery: true, }), @@ -65,12 +68,29 @@ export const dataWarehouseSettingsLogic = kea([ ...state, [source.id]: true, }), - loadingFinished: (state, { source }) => ({ + sourceLoadingFinished: (state, { source }) => ({ ...state, [source.id]: false, }), }, ], + schemaReloadingById: [ + {} as Record, + { + reloadSchema: (state, { schema }) => ({ + ...state, + [schema.id]: true, + }), + resyncSchema: (state, { schema }) => ({ + ...state, + [schema.id]: true, + }), + schemaLoadingFinished: (state, { schema }) => ({ + ...state, + [schema.id]: false, + }), + }, + ], })), selectors({ breadcrumbs: [ @@ -107,9 +127,31 @@ export const dataWarehouseSettingsLogic = kea([ deleteSource: async ({ source }) => { await api.externalDataSources.delete(source.id) actions.loadSources(null) - actions.loadingFinished(source) + actions.sourceLoadingFinished(source) }, reloadSource: async ({ source }) => { + // Optimistic UI updates before sending updates to the backend + const clonedSources = JSON.parse( + JSON.stringify(values.dataWarehouseSources?.results ?? []) + ) as ExternalDataStripeSource[] + const sourceIndex = clonedSources.findIndex((n) => n.id === source.id) + clonedSources[sourceIndex].status = 'Running' + clonedSources[sourceIndex].schemas = clonedSources[sourceIndex].schemas.map((n) => { + if (n.should_sync) { + return { + ...n, + status: 'Running', + } + } + + return n + }) + + actions.loadSourcesSuccess({ + ...values.dataWarehouseSources, + results: clonedSources, + }) + try { await api.externalDataSources.reload(source.id) actions.loadSources(null) @@ -120,7 +162,61 @@ export const dataWarehouseSettingsLogic = kea([ lemonToast.error('Cant refresh source at this time') } } - actions.loadingFinished(source) + actions.sourceLoadingFinished(source) + }, + reloadSchema: async ({ schema }) => { + // Optimistic UI updates before sending updates to the backend + const clonedSources = JSON.parse( + JSON.stringify(values.dataWarehouseSources?.results ?? 
[]) + ) as ExternalDataStripeSource[] + const sourceIndex = clonedSources.findIndex((n) => n.schemas.find((m) => m.id === schema.id)) + const schemaIndex = clonedSources[sourceIndex].schemas.findIndex((n) => n.id === schema.id) + clonedSources[sourceIndex].status = 'Running' + clonedSources[sourceIndex].schemas[schemaIndex].status = 'Running' + + actions.loadSourcesSuccess({ + ...values.dataWarehouseSources, + results: clonedSources, + }) + + try { + await api.externalDataSchemas.reload(schema.id) + actions.schemaLoadingFinished(schema) + actions.loadSources(null) + } catch (e: any) { + if (e.message) { + lemonToast.error(e.message) + } else { + lemonToast.error('Cant reload schema at this time') + } + } + }, + // Complete refresh + resyncSchema: async ({ schema }) => { + const clonedSources = JSON.parse( + JSON.stringify(values.dataWarehouseSources?.results ?? []) + ) as ExternalDataStripeSource[] + const sourceIndex = clonedSources.findIndex((n) => n.schemas.find((m) => m.id === schema.id)) + const schemaIndex = clonedSources[sourceIndex].schemas.findIndex((n) => n.id === schema.id) + clonedSources[sourceIndex].status = 'Running' + clonedSources[sourceIndex].schemas[schemaIndex].status = 'Running' + + actions.loadSourcesSuccess({ + ...values.dataWarehouseSources, + results: clonedSources, + }) + + try { + await api.externalDataSchemas.resync(schema.id) + actions.schemaLoadingFinished(schema) + actions.loadSources(null) + } catch (e: any) { + if (e.message) { + lemonToast.error(e.message) + } else { + lemonToast.error('Cant refresh schema at this time') + } + } }, updateSchema: async ({ schema }) => { // Optimistic UI updates before sending updates to the backend diff --git a/frontend/src/scenes/debug/DebugScene.tsx b/frontend/src/scenes/debug/DebugScene.tsx index fffbe0dc4e521..91d189f97ead4 100644 --- a/frontend/src/scenes/debug/DebugScene.tsx +++ b/frontend/src/scenes/debug/DebugScene.tsx @@ -1,20 +1,17 @@ import { useActions, useValues } from 'kea' -import { CodeEditor } from 'lib/components/CodeEditors' import { PageHeader } from 'lib/components/PageHeader' import { LemonButton } from 'lib/lemon-ui/LemonButton' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' import { LemonLabel } from 'lib/lemon-ui/LemonLabel/LemonLabel' import { LemonSelect } from 'lib/lemon-ui/LemonSelect' import { HogQLDebug } from 'scenes/debug/HogQLDebug' import { Modifiers } from 'scenes/debug/Modifiers' +import { QueryTabs } from 'scenes/debug/QueryTabs' import { SceneExport } from 'scenes/sceneTypes' import { stringifiedExamples } from '~/queries/examples' import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' -import { Query } from '~/queries/Query/Query' import { QueryEditor } from '~/queries/QueryEditor/QueryEditor' import { DataNode, HogQLQuery, Node } from '~/queries/schema' -import { isDataTableNode, isInsightVizNode } from '~/queries/utils' import { debugSceneLogic } from './debugSceneLogic' @@ -48,28 +45,27 @@ function QueryDebug({ query, setQuery, queryKey }: QueryDebugProps): JSX.Element /> ) : (
- - setQuery(JSON.stringify({ ...parsed, source: query }, null, 2)) - : (query) => setQuery(JSON.stringify(query, null, 2)) - } - query={parsed?.source ?? parsed} - response={response} - /> - - setQuery(JSON.stringify(query, null, 2))} + setQuery={setQuery} + aboveButton={ + setQuery(JSON.stringify({ ...parsed, source: query }, null, 2)) + : (query) => setQuery(JSON.stringify(query, null, 2)) + } + query={parsed?.source ?? parsed} + response={response} + /> + } /> - {response && parsed && (isDataTableNode(parsed as Node) || isInsightVizNode(parsed as Node)) ? ( - setQuery(JSON.stringify(query, null, 2))} /> ) : null}
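Looking back at the dataWarehouseSettingsLogic hunks above: reloadSchema and resyncSchema deep-clone the loaded sources with JSON.parse(JSON.stringify(...)), flip the affected source and schema to 'Running', and push the clone through loadSourcesSuccess so the table shows progress before the backend responds. The same optimistic update can be expressed as an immutable map; a small sketch with the source and schema types reduced to the fields involved:

interface SchemaSketch {
    id: string
    should_sync: boolean
    status?: string
}

interface SourceSketch {
    id: string
    status: string
    schemas: SchemaSketch[]
}

// Mark one schema (and its parent source) as 'Running' without mutating the
// original array, mirroring the optimistic update done before the API call.
function markSchemaRunning(sources: SourceSketch[], schemaId: string): SourceSketch[] {
    return sources.map((source) =>
        source.schemas.some((schema) => schema.id === schemaId)
            ? {
                  ...source,
                  status: 'Running',
                  schemas: source.schemas.map((schema) =>
                      schema.id === schemaId ? { ...schema, status: 'Running' } : schema
                  ),
              }
            : source
    )
}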
diff --git a/frontend/src/scenes/debug/HogQLDebug.tsx b/frontend/src/scenes/debug/HogQLDebug.tsx index d42f4af5d5664..dab62f85b68cb 100644 --- a/frontend/src/scenes/debug/HogQLDebug.tsx +++ b/frontend/src/scenes/debug/HogQLDebug.tsx @@ -1,157 +1,50 @@ import { BindLogic, useValues } from 'kea' -import { CodeEditor } from 'lib/components/CodeEditors' -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { LemonTable } from 'lib/lemon-ui/LemonTable' +import { LemonDivider } from 'lib/lemon-ui/LemonDivider' import { Modifiers } from 'scenes/debug/Modifiers' import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' import { DateRange } from '~/queries/nodes/DataNode/DateRange' -import { ElapsedTime, Timings } from '~/queries/nodes/DataNode/ElapsedTime' +import { ElapsedTime } from '~/queries/nodes/DataNode/ElapsedTime' import { Reload } from '~/queries/nodes/DataNode/Reload' import { EventPropertyFilters } from '~/queries/nodes/EventsNode/EventPropertyFilters' import { HogQLQueryEditor } from '~/queries/nodes/HogQLQuery/HogQLQueryEditor' import { DataNode, HogQLQuery, HogQLQueryResponse } from '~/queries/schema' +import { QueryTabs } from './QueryTabs' + interface HogQLDebugProps { queryKey: string query: HogQLQuery setQuery: (query: DataNode) => void } -function toLineColumn(hogql: string, position: number): { line: number; column: number } { - const lines = hogql.split('\n') - let line = 0 - let column = 0 - for (let i = 0; i < lines.length; i++) { - if (position < lines[i].length) { - line = i + 1 - column = position + 1 - break - } - position -= lines[i].length + 1 - } - return { line, column } -} - -function toLine(hogql: string, position: number): number { - return toLineColumn(hogql, position).line -} - -function toColumn(hogql: string, position: number): number { - return toLineColumn(hogql, position).column -} - export function HogQLDebug({ query, setQuery, queryKey }: HogQLDebugProps): JSX.Element { const dataNodeLogicProps: DataNodeLogicProps = { query, key: queryKey, dataNodeCollectionId: queryKey } - const { - dataLoading, - response: _response, - responseErrorObject, - elapsedTime, - } = useValues(dataNodeLogic(dataNodeLogicProps)) + const { dataLoading, response: _response } = useValues(dataNodeLogic(dataNodeLogicProps)) const response = _response as HogQLQueryResponse | null - const clickHouseTime = response?.timings?.find(({ k }) => k === './clickhouse_execute')?.t return (
+ +
- {dataLoading ? ( <>

Running query...

- Time elapsed: + Time elapsed:  +
) : ( <> - {response?.error ? ( - <> -

Error Running Query!

- - {response.error} - - - ) : null} - {response?.hogql ? ( - <> -

Executed HogQL

- - {response.hogql} - - - ) : null} - {response?.clickhouse ? ( - <> -

- Executed ClickHouse SQL - {clickHouseTime !== undefined - ? ` (${Math.floor(clickHouseTime * 1000) / 1000}s)` - : ''} -

- - {response.clickhouse} - - - ) : null} - {response?.metadata ? ( - <> -

Metadata

- ({ - type: 'error', - line: toLine(response.hogql ?? '', error.start ?? 0), - column: toColumn(response.hogql ?? '', error.start ?? 0), - ...error, - })), - ...response.metadata.warnings.map((warn) => ({ - type: 'warning', - line: toLine(response.hogql ?? '', warn.start ?? 0), - column: toColumn(response.hogql ?? '', warn.start ?? 0), - ...warn, - })), - ...response.metadata.notices.map((notice) => ({ - type: 'notice', - line: toLine(response.hogql ?? '', notice.start ?? 0), - column: toColumn(response.hogql ?? '', notice.start ?? 0), - ...notice, - })), - ].sort((a, b) => (a.start ?? 0) - (b.start ?? 0))} - columns={[ - { title: 'Line', dataIndex: 'line', key: 'line', width: '40px' }, - { title: 'Column', dataIndex: 'column', key: 'column', width: '40px' }, - { title: 'Type', dataIndex: 'type', key: 'type', width: '80px' }, - { title: 'Message', dataIndex: 'message', key: 'message' }, - ]} - /> - - ) : null} - {response?.explain ? ( - <> -

Explained ClickHouseSQL

- {response.explain.join('\n')} - - ) : null} - {response?.timings && elapsedTime !== null ? ( - <> -

Time spent

- - - ) : null} -

Raw response

- + )}
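The panels deleted from HogQLDebug above reappear as tabs in the new QueryTabs component further down, which builds its tab list by and-ing each entry with the response field it needs and dropping the falsy ones with filter(Boolean). The pattern in isolation, with the LemonTabs entries reduced to plain strings:

interface DebugTab {
    key: string
    label: string
    content: string
}

interface DebugResponseSketch {
    error?: string
    hogql?: string
    clickhouse?: string
}

// Entries are written as `condition && tab`; anything whose data is missing
// evaluates to a falsy value and is removed before rendering.
function buildDebugTabs(response: DebugResponseSketch | null): DebugTab[] {
    return [
        response?.error && { key: 'error', label: 'Error', content: response.error },
        response?.hogql && { key: 'hogql', label: 'HogQL', content: response.hogql },
        response?.clickhouse && { key: 'clickhouse', label: 'ClickHouse SQL', content: response.clickhouse },
    ].filter(Boolean) as DebugTab[]
}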
diff --git a/frontend/src/scenes/debug/Modifiers.tsx b/frontend/src/scenes/debug/Modifiers.tsx index ea6551ef65f6c..4b1e2792c9171 100644 --- a/frontend/src/scenes/debug/Modifiers.tsx +++ b/frontend/src/scenes/debug/Modifiers.tsx @@ -13,10 +13,11 @@ export function Modifiers({ setQuery, query, response = null }: ModifiersProps): if (query === null) { return null } + const labelClassName = 'flex flex-col gap-1 items-start' return (
- - POE: + +
POE:
- - Persons ArgMax: + +
Persons ArgMax:
- - In Cohort Via: + +
In Cohort Via:
- - Materialization Mode: + +
Materialization Mode:
diff --git a/frontend/src/scenes/debug/QueryTabs.tsx b/frontend/src/scenes/debug/QueryTabs.tsx
new file mode 100644
| null + setQuery: (query: DataNode) => void +} +export function QueryTabs({ query, queryKey, setQuery, response }: QueryTabsProps): JSX.Element { + const [tab, setTab] = useState(null) + const clickHouseTime = (response?.timings as QueryTiming[])?.find(({ k }) => k === './clickhouse_execute')?.t ?? 0 + const explainTime = (response?.timings as QueryTiming[])?.find(({ k }) => k === './explain')?.t ?? 0 + const totalTime = (response?.timings as QueryTiming[])?.find(({ k }) => k === '.')?.t ?? 0 + const hogQLTime = totalTime - explainTime - clickHouseTime + const tabs: LemonTabsProps['tabs'] = query + ? [ + response?.error && { + key: 'error', + label: 'Error', + content: ( + <> +

Error Running Query!

+ + {response.error} + + + ), + }, + isInsightVizNode(query) && { + key: 'viz', + label: 'Visualization', + content: setQuery(query)} />, + }, + isInsightQueryNode(query) && { + key: 'insight', + label: 'Insight', + content: ( + setQuery(query)} + /> + ), + }, + isDataTableNode(query) && { + key: 'table', + label: 'Data Table', + content: setQuery(query)} />, + }, + + (response?.result || response?.results) && { + key: 'result', + label: 'Result JSON', + content: ( + + ), + }, + response?.hogql && { + key: 'hogql', + label: ( + <> + HogQL + {hogQLTime && {Math.floor(hogQLTime * 10) / 10}s} + + ), + content: ( + + ), + }, + response?.clickhouse && { + key: 'clickhouse', + label: ( + <> + Clickhouse + {clickHouseTime && ( + {Math.floor(clickHouseTime * 10) / 10}s + )} + + ), + content: ( + + ), + }, + response?.explain && { + key: 'explain', + label: 'Explain', + content: {response.explain.join('\n')}, + }, + response?.timings && { + key: 'timings', + label: 'Timings', + content: , + }, + response && { + key: 'response', + label: 'Full response', + content: ( + + ), + }, + response?.metadata && { + key: 'metadata', + label: 'Metadata', + content: ( + ({ + type: 'error', + line: toLine(response.hogql ?? '', error.start ?? 0), + column: toColumn(response.hogql ?? '', error.start ?? 0), + ...error, + })), + ...(response.metadata as HogQLMetadataResponse).warnings.map((warn) => ({ + type: 'warning', + line: toLine(response.hogql ?? '', warn.start ?? 0), + column: toColumn(response.hogql ?? '', warn.start ?? 0), + ...warn, + })), + ...(response.metadata as HogQLMetadataResponse).notices.map((notice) => ({ + type: 'notice', + line: toLine(response.hogql ?? '', notice.start ?? 0), + column: toColumn(response.hogql ?? '', notice.start ?? 0), + ...notice, + })), + ].sort((a, b) => (a.start ?? 0) - (b.start ?? 0))} + columns={[ + { title: 'Line', dataIndex: 'line', key: 'line', width: '40px' }, + { title: 'Column', dataIndex: 'column', key: 'column', width: '40px' }, + { title: 'Type', dataIndex: 'type', key: 'type', width: '80px' }, + { title: 'Message', dataIndex: 'message', key: 'message' }, + ]} + /> + ), + }, + ] + .filter(Boolean) + .map((tab) => ({ ...tab, content: {tab.content} })) + : [] + + return ( + + t && t.key === tab) ? tab : (tabs[0] && tabs[0].key) || 'response'} + onChange={(t) => setTab(t)} + tabs={tabs} + /> + + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index df029a26b39d5..7fac1d995a939 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -48,8 +48,8 @@ const StepInfo = (): JSX.Element => {
-

Variants

-
Add up to 9 variants to test against your control.
+

Variants

+
Add up to 9 variants to test against your control.
@@ -148,8 +148,7 @@ const StepInfo = (): JSX.Element => { } const StepGoal = (): JSX.Element => { - const { experiment, exposureAndSampleSize, experimentInsightType, groupTypes, aggregationLabel } = - useValues(experimentLogic) + const { experiment, experimentInsightType, groupTypes, aggregationLabel } = useValues(experimentLogic) const { setExperiment, setNewExperimentInsight, createExperiment } = useActions(experimentLogic) // insightLogic @@ -164,8 +163,8 @@ const StepGoal = (): JSX.Element => {
{groupTypes.size > 0 && (
-

Participant type

-
+

Participant type

+
The type on which to aggregate metrics. You can change this at any time during the experiment.
@@ -240,8 +239,8 @@ const StepGoal = (): JSX.Element => { />
-

Goal criteria

-
+

Goal criteria

+
{experimentInsightType === InsightType.FUNNELS ? 'Create the funnel you want to measure.' : 'Select a single metric to track.'} @@ -264,10 +263,7 @@ const StepGoal = (): JSX.Element => { className="mt-2" type="primary" data-attr="save-experiment" - onClick={() => { - const { exposure, sampleSize } = exposureAndSampleSize - createExperiment(true, exposure, sampleSize) - }} + onClick={() => createExperiment(true)} > Save as draft diff --git a/frontend/src/scenes/experiments/ExperimentNext.tsx b/frontend/src/scenes/experiments/ExperimentNext.tsx index f77625045a335..7897d7d302bbf 100644 --- a/frontend/src/scenes/experiments/ExperimentNext.tsx +++ b/frontend/src/scenes/experiments/ExperimentNext.tsx @@ -59,7 +59,15 @@ export function ExperimentView(): JSX.Element { ) : ( <> - +
+
+ +
+ +
+ +
+
{experiment.start_date && (
diff --git a/frontend/src/scenes/experiments/ExperimentPreview.tsx b/frontend/src/scenes/experiments/ExperimentPreview.tsx index 9db0b56597022..84132c303a3fb 100644 --- a/frontend/src/scenes/experiments/ExperimentPreview.tsx +++ b/frontend/src/scenes/experiments/ExperimentPreview.tsx @@ -114,7 +114,7 @@ export function ExperimentPreview({
%} - value={minimumDetectableChange} + value={experiment.parameters.minimum_detectable_effect || 5} onChange={(value) => { setExperiment({ parameters: { @@ -161,8 +161,8 @@ export function ExperimentPreview({
Minimum Acceptable Count
{humanFriendlyNumber( - trendCount + Math.ceil(trendCount * (minimumDetectableChange / 100)) || - 0 + trendCount + + Math.ceil(trendCount * ((minimumDetectableChange || 5) / 100)) || 0 )}
@@ -186,7 +186,7 @@ export function ExperimentPreview({
Minimum Acceptable Conversion Rate
- {(funnelConversionRate + minimumDetectableChange).toFixed(1)}% + {(funnelConversionRate + (minimumDetectableChange || 5)).toFixed(1)}%
diff --git a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx index 651528abaf908..a711388bb4471 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx @@ -91,7 +91,9 @@ export function ExposureMetric({ experimentId }: { experimentId: Experiment['id' } export function ExperimentGoalModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { - const { experiment, isExperimentGoalModalOpen, experimentLoading } = useValues(experimentLogic({ experimentId })) + const { experiment, isExperimentGoalModalOpen, experimentLoading, goalInsightDataLoading } = useValues( + experimentLogic({ experimentId }) + ) const { closeExperimentGoalModal, updateExperimentGoal, setNewExperimentInsight } = useActions( experimentLogic({ experimentId }) ) @@ -108,6 +110,9 @@ export function ExperimentGoalModal({ experimentId }: { experimentId: Experiment Cancel { updateExperimentGoal(experiment.filters) @@ -205,16 +210,27 @@ export function Goal(): JSX.Element { return (
-

Experiment goal

-
- This {experimentInsightType === InsightType.FUNNELS ? 'funnel' : 'trend'}{' '} - {experimentInsightType === InsightType.FUNNELS - ? 'experiment measures conversion at each stage.' - : 'experiment tracks the count of a single metric.'} +
+
+

Experiment goal

+ + {' '} + This {experimentInsightType === InsightType.FUNNELS ? 'funnel' : 'trend'}{' '} + {experimentInsightType === InsightType.FUNNELS + ? 'experiment measures conversion at each stage.' + : 'experiment tracks the count of a single metric.'} + + } + > + + +
-
+
{experimentInsightType === InsightType.FUNNELS ? 'Conversion goal steps' : 'Trend goal'}
diff --git a/frontend/src/scenes/experiments/ExperimentView/ProgressBar.tsx b/frontend/src/scenes/experiments/ExperimentView/ProgressBar.tsx index 568c37b254758..0aea97a5a37a2 100644 --- a/frontend/src/scenes/experiments/ExperimentView/ProgressBar.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/ProgressBar.tsx @@ -1,5 +1,7 @@ import '../Experiment.scss' +import { IconInfo } from '@posthog/icons' +import { Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' import { LemonProgress } from 'lib/lemon-ui/LemonProgress' import { humanFriendlyNumber } from 'lib/utils' @@ -10,28 +12,35 @@ import { experimentLogic } from '../experimentLogic' import { formatUnitByQuantity } from '../utils' export function ProgressBar(): JSX.Element { - const { - experiment, - experimentInsightType, - funnelResultsPersonsTotal, - recommendedSampleSize, - actualRunningTime, - recommendedRunningTime, - } = useValues(experimentLogic) + const { experiment, experimentInsightType, funnelResultsPersonsTotal, actualRunningTime } = + useValues(experimentLogic) + + const recommendedRunningTime = experiment?.parameters?.recommended_running_time || 1 + const recommendedSampleSize = experiment?.parameters?.recommended_sample_size || 100 const experimentProgressPercent = experimentInsightType === InsightType.FUNNELS ? (funnelResultsPersonsTotal / recommendedSampleSize) * 100 : (actualRunningTime / recommendedRunningTime) * 100 + const goalTooltipText = + experiment?.parameters?.minimum_detectable_effect && + `Based on the recommended Minimum Acceptable Improvement of ${experiment.parameters.minimum_detectable_effect}%` + + const hasHighRunningTime = recommendedRunningTime > 62 + return (
-

Data collection

-
- Estimated target for the number of participants. Actual data may reveal significance earlier or later - than predicted. +
+

Data collection

+ + +
-
{`${ +
{`${ experimentProgressPercent > 100 ? 100 : experimentProgressPercent.toFixed(2) }% complete`}
- {experimentInsightType === InsightType.TRENDS && experiment.start_date && ( -
+ {experimentInsightType === InsightType.TRENDS && ( +
{experiment.end_date ? (
Ran for {actualRunningTime} {formatUnitByQuantity(actualRunningTime, 'day')} @@ -51,13 +60,31 @@ export function ProgressBar(): JSX.Element { {actualRunningTime} {formatUnitByQuantity(actualRunningTime, 'day')} running
)} -
- Goal: {recommendedRunningTime} {formatUnitByQuantity(recommendedRunningTime, 'day')} -
+ + +
+ {hasHighRunningTime ? ( + <> + Goal: > 2 months + + ) : ( + <> + Goal: {recommendedRunningTime}{' '} + {formatUnitByQuantity(recommendedRunningTime, 'day')} + + )} +
+
+ {hasHighRunningTime && ( + + + + )} +
)} {experimentInsightType === InsightType.FUNNELS && ( -
+
{experiment.end_date ? (
Saw {humanFriendlyNumber(funnelResultsPersonsTotal)}{' '} @@ -69,10 +96,12 @@ export function ProgressBar(): JSX.Element { {formatUnitByQuantity(funnelResultsPersonsTotal, 'participant')} seen
)} -
- Goal: {humanFriendlyNumber(recommendedSampleSize)}{' '} - {formatUnitByQuantity(recommendedSampleSize, 'participant')} -
+ +
+ Goal: {humanFriendlyNumber(recommendedSampleSize)}{' '} + {formatUnitByQuantity(recommendedSampleSize, 'participant')} +
+
)}
diff --git a/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx index 6337d23c88319..666238a3ea198 100644 --- a/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx @@ -1,7 +1,7 @@ import '../Experiment.scss' -import { IconPencil, IconPlus } from '@posthog/icons' -import { LemonButton, LemonInput, LemonModal, LemonTable, LemonTableColumns } from '@posthog/lemon-ui' +import { IconInfo, IconPencil, IconPlus } from '@posthog/icons' +import { LemonButton, LemonInput, LemonModal, LemonTable, LemonTableColumns, Tooltip } from '@posthog/lemon-ui' import { Empty } from 'antd' import { useActions, useValues } from 'kea' import { Form } from 'kea-forms' @@ -286,11 +286,15 @@ export function SecondaryMetricsTable({ <>
-
-

Secondary metrics

- {metrics.length > 0 && ( -
Monitor side effects of your experiment.
- )} +
+
+

Secondary metrics

+ {metrics.length > 0 && ( + + + + )} +
diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index 78d50619bfead..8d470766ffb0c 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -273,7 +273,7 @@ export function ExperimentLoadingAnimation(): JSX.Element { } export function PageHeaderCustom(): JSX.Element { - const { experiment, isExperimentRunning } = useValues(experimentLogic) + const { experiment, isExperimentRunning, isExperimentStopped } = useValues(experimentLogic) const { launchExperiment, resetRunningExperiment, @@ -307,38 +307,44 @@ export function PageHeaderCustom(): JSX.Element { )} {experiment && isExperimentRunning && (
- <> - - (exposureCohortId ? undefined : createExposureCohort())} - fullWidth - data-attr={`${exposureCohortId ? 'view' : 'create'}-exposure-cohort`} - to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined} - targetBlank={!!exposureCohortId} - > - {exposureCohortId ? 'View' : 'Create'} exposure cohort - - loadExperimentResults(true)} - fullWidth - data-attr="refresh-experiment" - > - Refresh experiment results - - loadSecondaryMetricResults(true)} - fullWidth - data-attr="refresh-secondary-metrics" - > - Refresh secondary metrics - - - } - /> - - + {!isExperimentStopped && !experiment.archived && ( + <> + + + exposureCohortId ? undefined : createExposureCohort() + } + fullWidth + data-attr={`${ + exposureCohortId ? 'view' : 'create' + }-exposure-cohort`} + to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined} + targetBlank={!!exposureCohortId} + > + {exposureCohortId ? 'View' : 'Create'} exposure cohort + + loadExperimentResults(true)} + fullWidth + data-attr="refresh-experiment" + > + Refresh experiment results + + loadSecondaryMetricResults(true)} + fullWidth + data-attr="refresh-secondary-metrics" + > + Refresh secondary metrics + + + } + /> + + + )} {!experiment.end_date && ( )} - {experiment?.end_date && - dayjs().isSameOrAfter(dayjs(experiment.end_date), 'day') && - !experiment.archived && ( - archiveExperiment()}> - Archive - - )} + {isExperimentStopped && ( + archiveExperiment()}> + Archive + + )}
)} @@ -376,14 +380,15 @@ export function ActionBanner(): JSX.Element { areResultsSignificant, isExperimentStopped, funnelResultsPersonsTotal, - recommendedSampleSize, actualRunningTime, - recommendedRunningTime, getHighestProbabilityVariant, } = useValues(experimentLogic) const { archiveExperiment } = useActions(experimentLogic) + const recommendedRunningTime = experiment?.parameters?.recommended_running_time || 1 + const recommendedSampleSize = experiment?.parameters?.recommended_sample_size || 100 + if (!experiment || experimentLoading || experimentResultsLoading) { return <> } diff --git a/frontend/src/scenes/experiments/experimentLogic.test.ts b/frontend/src/scenes/experiments/experimentLogic.test.ts index 8720a3c81ca4b..2b1e0f0c83a19 100644 --- a/frontend/src/scenes/experiments/experimentLogic.test.ts +++ b/frontend/src/scenes/experiments/experimentLogic.test.ts @@ -87,15 +87,13 @@ describe('experimentLogic', () => { describe('selector values', () => { it('given an mde, calculates correct sample size', async () => { - logic.actions.setExperiment({ parameters: { feature_flag_variants: [], minimum_detectable_effect: 10 } }) - await expectLogic(logic).toMatchValues({ - minimumDetectableChange: 10, + minimumDetectableChange: 1, }) - expect(logic.values.minimumSampleSizePerVariant(20)).toEqual(256) + expect(logic.values.minimumSampleSizePerVariant(20)).toEqual(25600) - expect(logic.values.minimumSampleSizePerVariant(40)).toEqual(384) + expect(logic.values.minimumSampleSizePerVariant(40)).toEqual(38400) expect(logic.values.minimumSampleSizePerVariant(0)).toEqual(0) }) @@ -113,11 +111,11 @@ describe('experimentLogic', () => { it('given control count data, calculates correct running time', async () => { // 1000 count over 14 days - expect(logic.values.recommendedExposureForCountData(1000)).toEqual(91.8) + expect(logic.values.recommendedExposureForCountData(1000)).toEqual(2251.2) // 10,000 entrants over 14 days // 10x entrants, so 1/10th running time - expect(logic.values.recommendedExposureForCountData(10000)).toEqual(9.2) + expect(logic.values.recommendedExposureForCountData(10000)).toEqual(225.1) // 0 entrants over 14 days, so infinite running time expect(logic.values.recommendedExposureForCountData(0)).toEqual(Infinity) diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 527d7dd142076..f0bff3999795f 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -12,12 +12,14 @@ import { hasFormErrors, toParams } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { ReactElement } from 'react' import { validateFeatureFlagKey } from 'scenes/feature-flags/featureFlagLogic' +import { funnelDataLogic } from 'scenes/funnels/funnelDataLogic' import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import { cleanFilters, getDefaultEvent } from 'scenes/insights/utils/cleanFilters' import { sceneLogic } from 'scenes/sceneLogic' import { Scene } from 'scenes/sceneTypes' import { teamLogic } from 'scenes/teamLogic' +import { trendsDataLogic } from 'scenes/trends/trendsDataLogic' import { urls } from 'scenes/urls' import { cohortsModel } from '~/models/cohortsModel' @@ -50,6 +52,7 @@ import { import { EXPERIMENT_EXPOSURE_INSIGHT_ID, EXPERIMENT_INSIGHT_ID } from './constants' import type { experimentLogicType } from './experimentLogicType' import { 
experimentsLogic } from './experimentsLogic' +import { getMinimumDetectableEffect } from './utils' const NEW_EXPERIMENT: Experiment = { id: 'new', @@ -99,6 +102,12 @@ export const experimentLogic = kea([ ['aggregationLabel', 'groupTypes', 'showGroupsOptions'], sceneLogic, ['activeScene'], + funnelDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), + ['conversionMetrics'], + trendsDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), + ['results as trendResults'], + insightDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), + ['insightDataLoading as goalInsightDataLoading'], ], actions: [ experimentsLogic, @@ -126,11 +135,7 @@ export const experimentLogic = kea([ actions({ setExperimentMissing: true, setExperiment: (experiment: Partial) => ({ experiment }), - createExperiment: (draft?: boolean, runningTime?: number, sampleSize?: number) => ({ - draft, - runningTime, - sampleSize, - }), + createExperiment: (draft?: boolean) => ({ draft }), setNewExperimentInsight: (filters?: Partial) => ({ filters }), setExperimentExposureInsight: (filters?: Partial) => ({ filters }), removeExperimentGroup: (idx: number) => ({ idx }), @@ -262,6 +267,7 @@ export const experimentLogic = kea([ setFlagImplementationWarning: (_, { warning }) => warning, }, ], + // TODO: delete with the old UI exposureAndSampleSize: [ { exposure: 0, sampleSize: 0 } as { exposure: number; sampleSize: number }, { @@ -298,7 +304,18 @@ export const experimentLogic = kea([ ], }), listeners(({ values, actions }) => ({ - createExperiment: async ({ draft, runningTime, sampleSize }) => { + createExperiment: async ({ draft }) => { + const { recommendedRunningTime, recommendedSampleSize, minimumDetectableChange } = values + + // Minimum Detectable Effect is calculated based on a loaded insight + // Terminate if the insight did not manage to load in time + if (!minimumDetectableChange) { + eventUsageLogic.actions.reportExperimentInsightLoadFailed() + return lemonToast.error( + 'Failed to load insight. Experiment cannot be saved without this value. Try changing the experiment goal.' + ) + } + let response: Experiment | null = null const isUpdate = !!values.experimentId && values.experimentId !== 'new' try { @@ -309,8 +326,9 @@ export const experimentLogic = kea([ ...values.experiment, parameters: { ...values.experiment?.parameters, - recommended_running_time: runningTime, - recommended_sample_size: sampleSize, + recommended_running_time: recommendedRunningTime, + recommended_sample_size: recommendedSampleSize, + minimum_detectable_effect: minimumDetectableChange, }, ...(!draft && { start_date: dayjs() }), // backwards compatibility: Remove any global properties set on the experiment. @@ -335,8 +353,9 @@ export const experimentLogic = kea([ ...values.experiment, parameters: { ...values.experiment?.parameters, - recommended_running_time: runningTime, - recommended_sample_size: sampleSize, + recommended_running_time: recommendedRunningTime, + recommended_sample_size: recommendedSampleSize, + minimum_detectable_effect: minimumDetectableChange, }, ...(!draft && { start_date: dayjs() }), }) @@ -396,7 +415,7 @@ export const experimentLogic = kea([ // the new query with any existing query and that causes validation problems when there are // unsupported properties in the now merged query. 
const newQuery = filtersToQueryNode(newInsightFilters) - if (filters?.insight === InsightType.FUNNELS) { + if (newInsightFilters?.insight === InsightType.FUNNELS) { ;(newQuery as TrendsQuery).trendsFilter = undefined } else { ;(newQuery as FunnelsQuery).funnelsFilter = undefined @@ -464,10 +483,26 @@ export const experimentLogic = kea([ values.experiment && actions.reportExperimentArchived(values.experiment) }, updateExperimentGoal: async ({ filters }) => { - // We never want to update global properties in the experiment + const { recommendedRunningTime, recommendedSampleSize, minimumDetectableChange } = values + if (!minimumDetectableChange) { + eventUsageLogic.actions.reportExperimentInsightLoadFailed() + return lemonToast.error( + 'Failed to load insight. Experiment cannot be saved without this value. Try changing the experiment goal.' + ) + } + const filtersToUpdate = { ...filters } delete filtersToUpdate.properties - actions.updateExperiment({ filters: filtersToUpdate }) + + actions.updateExperiment({ + filters: filtersToUpdate, + parameters: { + ...values.experiment?.parameters, + recommended_running_time: recommendedRunningTime, + recommended_sample_size: recommendedSampleSize, + minimum_detectable_effect: minimumDetectableChange, + }, + }) actions.closeExperimentGoalModal() }, updateExperimentExposure: async ({ filters }) => { @@ -784,10 +819,12 @@ export const experimentLogic = kea([ return (userMathValue ?? propertyMathValue) as PropertyMathType | CountPerActorMathType | undefined }, ], + // TODO: unify naming (Minimum detectable change/Minimum detectable effect/Minimum acceptable improvement) minimumDetectableChange: [ - (s) => [s.experiment], - (newexperiment): number => { - return newexperiment?.parameters?.minimum_detectable_effect || 5 + (s) => [s.experimentInsightType, s.conversionMetrics, s.trendResults], + (experimentInsightType, conversionMetrics, trendResults): number | null => { + // :KLUDGE: extracted the method due to difficulties with logic tests + return getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults) }, ], minimumSampleSizePerVariant: [ @@ -797,6 +834,10 @@ export const experimentLogic = kea([ // refer https://en.wikipedia.org/wiki/Sample_size_determination with default beta and alpha // The results are same as: https://www.evanmiller.org/ab-testing/sample-size.html // and also: https://marketing.dynamicyield.com/ab-test-duration-calculator/ + if (!mde) { + return 0 + } + return Math.ceil((1600 * conversionRate * (1 - conversionRate / 100)) / (mde * mde)) }, ], @@ -879,11 +920,32 @@ export const experimentLogic = kea([ return '' }, ], + recommendedSampleSize: [ + (s) => [s.conversionMetrics, s.minimumSampleSizePerVariant, s.variants], + (conversionMetrics, minimumSampleSizePerVariant, variants): number => { + const conversionRate = conversionMetrics.totalRate * 100 + const sampleSizePerVariant = minimumSampleSizePerVariant(conversionRate) + const sampleSize = sampleSizePerVariant * variants.length + return sampleSize + }, + ], + recommendedRunningTime: [ + (s) => [s.trendResults, s.recommendedExposureForCountData], + (trendResults, recommendedExposureForCountData): number => { + const trendCount = trendResults[0]?.count + const runningTime = recommendedExposureForCountData(trendCount) + return runningTime + }, + ], recommendedExposureForCountData: [ (s) => [s.minimumDetectableChange], (mde) => (baseCountData: number): number => { // http://www.columbia.edu/~cjd11/charles_dimaggio/DIRE/styled-4/code-12/ + if (!mde) { + return 0 + 
} + const minCountData = (baseCountData * mde) / 100 const lambda1 = baseCountData const lambda2 = minCountData + baseCountData @@ -1167,10 +1229,6 @@ export const experimentLogic = kea([ }) }, ], - recommendedSampleSize: [ - (s) => [s.experiment], - (experiment: Experiment): number => experiment?.parameters?.recommended_sample_size || 100, - ], funnelResultsPersonsTotal: [ (s) => [s.experimentResults, s.experimentInsightType], (experimentResults: ExperimentResults['result'], experimentInsightType: InsightType): number => { @@ -1201,12 +1259,8 @@ export const experimentLogic = kea([ return dayjs().diff(experiment.start_date, 'day') }, ], - recommendedRunningTime: [ - (s) => [s.experiment], - (experiment: Experiment): number => experiment?.parameters?.recommended_running_time || 1, - ], }), - forms(({ actions, values }) => ({ + forms(({ actions }) => ({ experiment: { options: { showErrorsOnTouch: true }, defaults: { ...NEW_EXPERIMENT } as Experiment, @@ -1221,10 +1275,7 @@ export const experimentLogic = kea([ })), }, }), - submit: () => { - const { exposure, sampleSize } = values.exposureAndSampleSize - actions.createExperiment(true, exposure, sampleSize) - }, + submit: () => actions.createExperiment(true), }, })), urlToAction(({ actions, values }) => ({ diff --git a/frontend/src/scenes/experiments/utils.test.ts b/frontend/src/scenes/experiments/utils.test.ts new file mode 100644 index 0000000000000..05b5d271203e2 --- /dev/null +++ b/frontend/src/scenes/experiments/utils.test.ts @@ -0,0 +1,95 @@ +import { EntityType, InsightType } from '~/types' + +import { getMinimumDetectableEffect } from './utils' + +describe('utils', () => { + it('Funnel experiment returns correct MDE', async () => { + const experimentInsightType = InsightType.FUNNELS + const trendResults = [ + { + action: { + id: '$pageview', + type: 'events' as EntityType, + order: 0, + name: '$pageview', + custom_name: null, + math: 'total', + math_group_type_index: null, + }, + aggregated_value: 0, + label: '$pageview', + count: 0, + data: [], + labels: [], + days: [], + }, + ] + + let conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 1 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.01 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.99 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(1) + + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.1 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.9 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.3 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(3) + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.7 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(3) + + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.2 } + 
expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(4) + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.8 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(4) + + conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0.5 } + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + }) + + it('Trend experiment returns correct MDE', async () => { + const experimentInsightType = InsightType.TRENDS + const conversionMetrics = { averageTime: 0, stepRate: 0, totalRate: 0 } + const trendResults = [ + { + action: { + id: '$pageview', + type: 'events' as EntityType, + order: 0, + name: '$pageview', + custom_name: null, + math: 'total', + math_group_type_index: null, + }, + aggregated_value: 0, + label: '$pageview', + count: 0, + data: [], + labels: [], + days: [], + }, + ] + + trendResults[0].count = 0 + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(100) + + trendResults[0].count = 200 + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(100) + + trendResults[0].count = 201 + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(20) + + trendResults[0].count = 1001 + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + + trendResults[0].count = 20000 + expect(getMinimumDetectableEffect(experimentInsightType, conversionMetrics, trendResults)).toEqual(5) + }) +}) diff --git a/frontend/src/scenes/experiments/utils.ts b/frontend/src/scenes/experiments/utils.ts index 6f71d6c1829b2..9d6a1fdc88dcc 100644 --- a/frontend/src/scenes/experiments/utils.ts +++ b/frontend/src/scenes/experiments/utils.ts @@ -1,7 +1,14 @@ import { getSeriesColor } from 'lib/colors' import { FunnelLayout } from 'lib/constants' -import { ChartDisplayType, FilterType, FunnelVizType, InsightType } from '~/types' +import { + ChartDisplayType, + FilterType, + FunnelTimeConversionMetrics, + FunnelVizType, + InsightType, + TrendResult, +} from '~/types' export function getExperimentInsightColour(variantIndex: number | null): string { return variantIndex !== null ? getSeriesColor(variantIndex) : 'var(--muted-3000)' @@ -21,3 +28,61 @@ export const transformResultFilters = (filters: Partial): Partial perform a flip above 50% + if (currentConversionRate > 50) { + currentConversionRate = 100 - currentConversionRate + } + + // Multiplication would result in 0; return MDE = 1 + if (currentConversionRate === 0) { + return 1 + } + + // CR = 50% requires a high running time + // CR = 1% or 99% requires a low running time + const midpointDistance = Math.abs(50 - currentConversionRate) + + let targetConversionRateIncrease + if (midpointDistance <= 20) { + targetConversionRateIncrease = 0.1 + } else if (midpointDistance <= 35) { + targetConversionRateIncrease = 0.2 + } else { + targetConversionRateIncrease = 0.5 + } + + const targetConversionRate = Math.round(currentConversionRate * (1 + targetConversionRateIncrease)) + const mde = Math.ceil(targetConversionRate - currentConversionRate) + + return mde || 5 + } + + // TRENDS + // Given current count of the Trend metric, what percentage increase are we targeting? 
+ if (trendResults[0]?.count === undefined) { + return null + } + + const baselineCount = trendResults[0].count + + if (baselineCount <= 200) { + return 100 + } else if (baselineCount <= 1000) { + return 20 + } + return 5 +} diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index 61ebd51c745b1..c6affd75911ee 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -101,7 +101,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { deleteFeatureFlag, editFeatureFlag, loadFeatureFlag, - triggerFeatureFlagUpdate, + saveFeatureFlag, createStaticCohort, setFeatureFlagFilters, setActiveTab, @@ -384,7 +384,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { person is identified. This ensures the experience for the anonymous person is carried forward to the authenticated person.{' '} Learn more @@ -485,28 +485,45 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { href: urls.featureFlag(id), }} caption={ - <> - {featureFlag.name || Description (optional)} - {featureFlag?.tags && ( - <> - {featureFlag.can_edit ? ( - { - // TODO: Use an existing function instead of this new one for updates - triggerFeatureFlagUpdate({ tags }) - }} - tagsAvailable={tags.filter( - (tag) => !featureFlag.tags?.includes(tag) - )} - className="mt-2" - /> - ) : featureFlag.tags.length ? ( - - ) : null} - - )} - +
+
+
+
+ Key:{' '} + + {featureFlag.key} + +
+
+
+ {featureFlag?.tags && ( + <> + {featureFlag.tags.length > 0 ? ( + Tags: + ) : null}{' '} + {featureFlag.can_edit ? ( + { + saveFeatureFlag({ tags }) + }} + tagsAvailable={tags.filter( + (tag) => !featureFlag.tags?.includes(tag) + )} + /> + ) : featureFlag.tags.length > 0 ? ( + + ) : null} + + )} +
+
+
{featureFlag.name || Description (optional)}
+
} buttons={ <> diff --git a/frontend/src/scenes/feature-flags/FeatureFlagSchedule.tsx b/frontend/src/scenes/feature-flags/FeatureFlagSchedule.tsx index a49512e672d88..b9d5d4260ff83 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagSchedule.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagSchedule.tsx @@ -1,5 +1,6 @@ import { LemonButton, + LemonCalendarSelectInput, LemonCheckbox, LemonDivider, LemonSelect, @@ -10,7 +11,6 @@ import { LemonTagType, } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' -import { DatePicker } from 'lib/components/DatePicker' import { dayjs } from 'lib/dayjs' import { More } from 'lib/lemon-ui/LemonButton/More' import { atColumn, createdAtColumn, createdByColumn } from 'lib/lemon-ui/LemonTable/columnUtils' @@ -158,21 +158,14 @@ export default function FeatureFlagSchedule(): JSX.Element { ]} />
-
+
Date and time
- { - const now = new Date() - return dateMarker.toDate().getTime() < now.getTime() - }} + setScheduleDateMarker(value)} - className="h-10 w-60" - allowClear={false} + placeholder="Select date" + onlyAllowUpcoming showTime - showSecond={false} - format={DAYJS_FORMAT} - showNow={false} />
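For context on the getMinimumDetectableEffect helper added in frontend/src/scenes/experiments/utils.ts above, here is a minimal usage sketch. It imports the helper the same way the new utils.test.ts does, and it assumes (as the test expectations imply) that conversionMetrics.totalRate is the 0–1 conversion rate which the helper scales to a percentage internally; the sample rates, counts, and logged output are illustrative only, not part of the PR.

import { InsightType } from '~/types'

import { getMinimumDetectableEffect } from './utils'

// Funnel branch: baseline rates near 50% target a 10% relative lift, mid-range rates 20%,
// and rates close to 0%/100% target 50%.
// e.g. totalRate 0.01 -> 1, 0.2 -> 4, 0.3 -> 3, 0.5 -> 5 (matching utils.test.ts above)
const funnelRates = [0.01, 0.2, 0.3, 0.5]
for (const totalRate of funnelRates) {
    const mde = getMinimumDetectableEffect(
        InsightType.FUNNELS,
        { averageTime: 0, stepRate: 0, totalRate },
        [] // trend results are only consulted for TRENDS insights
    )
    console.log(`conversion rate ${totalRate * 100}% -> MDE ${mde}%`)
}

// Trends branch: the baseline count of the first series picks the MDE tier.
// A count of 500 falls in the 201–1,000 tier, so this logs 20.
const abbreviatedTrendResult = { count: 500 } as any // abbreviated TrendResult for illustration
console.log(
    getMinimumDetectableEffect(InsightType.TRENDS, { averageTime: 0, stepRate: 0, totalRate: 0 }, [
        abbreviatedTrendResult,
    ])
)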
diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index c4e6842aff7e5..291656e9f4bc9 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -3,7 +3,6 @@ import { DeepPartialMap, forms, ValidationErrorType } from 'kea-forms' import { loaders } from 'kea-loaders' import { router, urlToAction } from 'kea-router' import api from 'lib/api' -import { convertPropertyGroupToProperties } from 'lib/components/PropertyFilters/utils' import { dayjs } from 'lib/dayjs' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' import { featureFlagLogic as enabledFeaturesLogic } from 'lib/logic/featureFlagLogic' @@ -23,7 +22,6 @@ import { userLogic } from 'scenes/userLogic' import { groupsModel } from '~/models/groupsModel' import { - AnyPropertyFilter, AvailableFeature, Breadcrumb, CohortType, @@ -196,6 +194,8 @@ export const featureFlagLogic = kea([ actions: [ newDashboardLogic({ featureFlagId: typeof props.id === 'number' ? props.id : undefined }), ['submitNewDashboardSuccessWithResult'], + featureFlagsLogic, + ['updateFlag', 'deleteFlag'], ], })), actions({ @@ -216,7 +216,6 @@ export const featureFlagLogic = kea([ loadInsightAtIndex: (index: number, filters: Partial) => ({ index, filters }), setInsightResultAtIndex: (index: number, average: number) => ({ index, average }), loadAllInsightsForFlag: true, - triggerFeatureFlagUpdate: (payload: Partial) => ({ payload }), generateUsageDashboard: true, enrichUsageDashboard: true, setCopyDestinationProject: (id: number | null) => ({ id }), @@ -259,21 +258,6 @@ export const featureFlagLogic = kea([ { ...NEW_FLAG } as FeatureFlagType, { setFeatureFlag: (_, { featureFlag }) => { - if (featureFlag.filters.groups) { - // TODO: This propertygroup conversion is non-sensical, don't need it here. 
- const groups = featureFlag.filters.groups.map((group) => { - if (group.properties) { - return { - ...group, - properties: convertPropertyGroupToProperties( - group.properties - ) as AnyPropertyFilter[], - } - } - return group - }) - return { ...featureFlag, filters: { ...featureFlag?.filters, groups } } - } return featureFlag }, setFeatureFlagFilters: (state, { filters }) => { @@ -673,7 +657,7 @@ export const featureFlagLogic = kea([ }, saveFeatureFlagSuccess: ({ featureFlag }) => { lemonToast.success('Feature flag saved') - featureFlagsLogic.findMounted()?.actions.updateFlag(featureFlag) + actions.updateFlag(featureFlag) featureFlag.id && router.actions.replace(urls.featureFlag(featureFlag.id)) actions.editFeatureFlag(false) }, @@ -682,8 +666,7 @@ export const featureFlagLogic = kea([ endpoint: `projects/${values.currentTeamId}/feature_flags`, object: { name: featureFlag.key, id: featureFlag.id }, callback: () => { - featureFlag.id && featureFlagsLogic.findMounted()?.actions.deleteFlag(featureFlag.id) - featureFlagsLogic.findMounted()?.actions.loadFeatureFlags() + featureFlag.id && actions.deleteFlag(featureFlag.id) router.actions.push(urls.featureFlags()) }, }) @@ -725,16 +708,6 @@ export const featureFlagLogic = kea([ values.featureFlag.rollback_conditions[index].threshold_metric as FilterType ) }, - triggerFeatureFlagUpdate: async ({ payload }) => { - if (values.featureFlag) { - const updatedFlag = await api.update( - `api/projects/${values.currentTeamId}/feature_flags/${values.featureFlag.id}`, - payload - ) - actions.setFeatureFlag(updatedFlag) - featureFlagsLogic.findMounted()?.actions.updateFlag(updatedFlag) - } - }, copyFlagSuccess: ({ featureFlagCopy }) => { if (featureFlagCopy?.success.length) { const operation = values.projectsWithCurrentFlag.find( @@ -897,9 +870,8 @@ export const featureFlagLogic = kea([ }, ], } - } else { - return defaultEntityFilterOnFlag } + return defaultEntityFilterOnFlag }, ], hasEarlyAccessFeatures: [ @@ -953,13 +925,12 @@ export const featureFlagLogic = kea([ afterMount(({ props, actions }) => { const foundFlag = featureFlagsLogic.findMounted()?.values.featureFlags.find((flag) => flag.id === props.id) if (foundFlag) { - const formatPayloads = variantKeyToIndexFeatureFlagPayloads(foundFlag) - actions.setFeatureFlag(formatPayloads) + const formatPayloadsWithFlag = variantKeyToIndexFeatureFlagPayloads(foundFlag) + actions.setFeatureFlag(formatPayloadsWithFlag) actions.loadRelatedInsights() actions.loadAllInsightsForFlag() } else if (props.id !== 'new') { actions.loadFeatureFlag() } - actions.loadSentryStats() }), ]) diff --git a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts index 6773878fd071b..709c08f803f41 100644 --- a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts @@ -67,9 +67,8 @@ export const featureFlagsLogic = kea([ updateFlag: (state, { flag }) => { if (state.find(({ id }) => id === flag.id)) { return state.map((stateFlag) => (stateFlag.id === flag.id ? 
flag : stateFlag)) - } else { - return [flag, ...state] } + return [flag, ...state] }, deleteFlag: (state, { id }) => state.filter((flag) => flag.id !== id), }, diff --git a/frontend/src/scenes/funnels/funnelCorrelationLogic.ts b/frontend/src/scenes/funnels/funnelCorrelationLogic.ts index d76e569d1f2d6..0f8441a5733ea 100644 --- a/frontend/src/scenes/funnels/funnelCorrelationLogic.ts +++ b/frontend/src/scenes/funnels/funnelCorrelationLogic.ts @@ -64,21 +64,20 @@ export const funnelCorrelationLogic = kea([ result_type: FunnelCorrelationResultsType.Events, })) as FunnelCorrelation[], } - } else { - const results: Omit[] = ( - await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { - ...values.apiParams, - funnel_correlation_type: 'events', - funnel_correlation_exclude_event_names: values.excludedEventNames, - }) - ).result?.events + } + const results: Omit[] = ( + await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { + ...values.apiParams, + funnel_correlation_type: 'events', + funnel_correlation_exclude_event_names: values.excludedEventNames, + }) + ).result?.events - return { - events: results.map((result) => ({ - ...result, - result_type: FunnelCorrelationResultsType.Events, - })), - } + return { + events: results.map((result) => ({ + ...result, + result_type: FunnelCorrelationResultsType.Events, + })), } } catch (error) { lemonToast.error('Failed to load correlation results', { toastId: 'funnel-correlation-error' }) @@ -110,22 +109,21 @@ export const funnelCorrelationLogic = kea([ result_type: FunnelCorrelationResultsType.EventWithProperties, })) as FunnelCorrelation[], } - } else { - const results: Omit[] = ( - await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { - ...values.apiParams, - funnel_correlation_type: 'event_with_properties', - funnel_correlation_event_names: [eventName], - funnel_correlation_event_exclude_property_names: values.excludedEventPropertyNames, - }) - ).result?.events + } + const results: Omit[] = ( + await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { + ...values.apiParams, + funnel_correlation_type: 'event_with_properties', + funnel_correlation_event_names: [eventName], + funnel_correlation_event_exclude_property_names: values.excludedEventPropertyNames, + }) + ).result?.events - return { - [eventName]: results.map((result) => ({ - ...result, - result_type: FunnelCorrelationResultsType.EventWithProperties, - })), - } + return { + [eventName]: results.map((result) => ({ + ...result, + result_type: FunnelCorrelationResultsType.EventWithProperties, + })), } }, }, diff --git a/frontend/src/scenes/funnels/funnelDataLogic.ts b/frontend/src/scenes/funnels/funnelDataLogic.ts index 3080f0db0560f..555cfd116775f 100644 --- a/frontend/src/scenes/funnels/funnelDataLogic.ts +++ b/frontend/src/scenes/funnels/funnelDataLogic.ts @@ -158,9 +158,8 @@ export const funnelDataLogic = kea([ ) } return insightData.result - } else { - return [] } + return [] }, ], steps: [ @@ -176,9 +175,8 @@ export const funnelDataLogic = kea([ return aggregateBreakdownResult(results, breakdownProperty).sort((a, b) => a.order - b.order) } return results.sort((a, b) => a.order - b.order) - } else { - return [] } + return [] }, ], stepsWithConversionMetrics: [ @@ -270,9 +268,8 @@ export const funnelDataLogic = kea([ return (histogramGraphData?.length ?? 0) > 0 } else if (funnelsFilter.funnelVizType === FunnelVizType.Trends) { return (steps?.length ?? 
0) > 0 && !!steps?.[0]?.labels - } else { - return false } + return false }, ], numericBinCount: [ @@ -352,9 +349,8 @@ export const funnelDataLogic = kea([ if (startIndex !== undefined && startIndex !== -1) { return startIndex - steps[0].days.length - } else { - return 0 } + return 0 }, ], diff --git a/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts b/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts index a12aa0ca2ec08..7a9be00e15782 100644 --- a/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts +++ b/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts @@ -94,22 +94,21 @@ export const funnelPropertyCorrelationLogic = kea[] = ( - await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { - ...values.apiParams, - funnel_correlation_type: 'properties', - funnel_correlation_names: targetProperties, - funnel_correlation_exclude_names: values.excludedPropertyNames, - }) - ).result?.events + } + const results: Omit[] = ( + await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { + ...values.apiParams, + funnel_correlation_type: 'properties', + funnel_correlation_names: targetProperties, + funnel_correlation_exclude_names: values.excludedPropertyNames, + }) + ).result?.events - return { - events: results.map((result) => ({ - ...result, - result_type: FunnelCorrelationResultsType.Properties, - })), - } + return { + events: results.map((result) => ({ + ...result, + result_type: FunnelCorrelationResultsType.Properties, + })), } } catch (error) { lemonToast.error('Failed to load correlation results', { toastId: 'funnel-correlation-error' }) diff --git a/frontend/src/scenes/funnels/funnelUtils.ts b/frontend/src/scenes/funnels/funnelUtils.ts index fc3c5cad3fc8a..1bcd3569983a0 100644 --- a/frontend/src/scenes/funnels/funnelUtils.ts +++ b/frontend/src/scenes/funnels/funnelUtils.ts @@ -468,11 +468,10 @@ export const parseBreakdownValue = ( const components = item.split('::') if (components.length === 1) { return { breakdown: components[0], breakdown_value: '' } - } else { - return { - breakdown: components[0], - breakdown_value: components[1], - } + } + return { + breakdown: components[0], + breakdown_value: components[1], } } @@ -502,18 +501,17 @@ export const parseEventAndProperty = ( value: [propertyValue as string], })), } - } else { - return { - name: components[0], - properties: [ - { - key: components[1], - operator: PropertyOperator.Exact, - value: components[2], - type: PropertyFilterType.Event, - }, - ], - } + } + return { + name: components[0], + properties: [ + { + key: components[1], + operator: PropertyOperator.Exact, + value: components[2], + type: PropertyFilterType.Event, + }, + ], } } @@ -538,11 +536,10 @@ export const parseDisplayNameForCorrelation = ( event: '$autocapture', }) return { first_value, second_value } - } else { - // FunnelCorrelationResultsType.EventWithProperties - // Events here come in the form of event::property::value - return { first_value: values[1], second_value: values[2] } } + // FunnelCorrelationResultsType.EventWithProperties + // Events here come in the form of event::property::value + return { first_value: values[1], second_value: values[2] } } export const appendToCorrelationConfig = ( diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx index 533a0a87fa1f8..2c24bf32f016e 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx +++ 
b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx @@ -21,7 +21,7 @@ import { urls } from 'scenes/urls' import { actionsAndEventsToSeries } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { seriesToActionsAndEvents } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { FunnelsQuery } from '~/queries/schema' +import { FunnelsQuery, Node } from '~/queries/schema' import { FilterType, InsightLogicProps, SavedInsightsTabs } from '~/types' import { samplingFilterLogic } from '../EditorFilters/samplingFilterLogic' @@ -139,10 +139,11 @@ export function InsightTimeoutState({ export interface InsightErrorStateProps { excludeDetail?: boolean title?: string + query?: Record | Node | null queryId?: string | null } -export function InsightErrorState({ excludeDetail, title, queryId }: InsightErrorStateProps): JSX.Element { +export function InsightErrorState({ excludeDetail, title, query, queryId }: InsightErrorStateProps): JSX.Element { const { preflight } = useValues(preflightLogic) const { openSupportForm } = useActions(supportLogic) @@ -181,6 +182,18 @@ export function InsightErrorState({ excludeDetail, title, queryId }: InsightErro
)} {queryId ?
Query ID: {queryId}
: null} + {query && ( + + Open in query debugger + + )}
) @@ -243,7 +256,13 @@ export function FunnelSingleStepState({ actionable = true }: FunnelSingleStepSta ) } -export function InsightValidationError({ detail }: { detail: string }): JSX.Element { +export function InsightValidationError({ + detail, + query, +}: { + detail: string + query?: Record | null +}): JSX.Element { return (
@@ -256,6 +275,19 @@ export function InsightValidationError({ detail }: { detail: string }): JSX.Elem {/* but rather that it's something with the definition of the query itself */}

{detail}

+ {query ? ( +

+ + Open in query debugger + +

+ ) : null} {detail.includes('Exclusion') && (
{ diff --git a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx index 0cb3eaeb086b3..f544e601e506d 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx +++ b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx @@ -390,7 +390,7 @@ export function ActionFilterRow({ groupTypes={[ TaxonomicFilterGroupType.DataWarehouseProperties, TaxonomicFilterGroupType.NumericalEventProperties, - TaxonomicFilterGroupType.Sessions, + TaxonomicFilterGroupType.SessionProperties, ]} schemaColumns={ filter.type == TaxonomicFilterGroupType.DataWarehouse && filter.name diff --git a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts index 9614c49c7542a..65864c06b8054 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts +++ b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts @@ -2,6 +2,7 @@ import { actions, connect, events, kea, key, listeners, path, props, reducers, s import { convertPropertyGroupToProperties } from 'lib/components/PropertyFilters/utils' import { uuid } from 'lib/utils' import { eventUsageLogic, GraphSeriesAddedSource } from 'lib/utils/eventUsageLogic' +import { getDefaultEventLabel, getDefaultEventName } from 'lib/utils/getAppContext' import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { @@ -220,18 +221,17 @@ export const entityFilterLogic = kea([ : distinct_id_field, table_name: typeof table_name === 'undefined' ? filter.table_name : table_name, } - } else { - delete filter.id_field - delete filter.timestamp_field - delete filter.distinct_id_field - delete filter.table_name - return { - ...filter, - id: typeof id === 'undefined' ? filter.id : id, - name: typeof name === 'undefined' ? filter.name : name, - type: typeof type === 'undefined' ? filter.type : type, - custom_name: typeof custom_name === 'undefined' ? filter.custom_name : custom_name, - } + } + delete filter.id_field + delete filter.timestamp_field + delete filter.distinct_id_field + delete filter.table_name + return { + ...filter, + id: typeof id === 'undefined' ? filter.id : id, + name: typeof name === 'undefined' ? filter.name : name, + type: typeof type === 'undefined' ? filter.type : type, + custom_name: typeof custom_name === 'undefined' ? filter.custom_name : custom_name, } } @@ -261,8 +261,9 @@ export const entityFilterLogic = kea([ const newLength = previousLength + 1 const precedingEntity = values.localFilters[previousLength - 1] as LocalFilter | undefined const order = precedingEntity ? 
precedingEntity.order + 1 : 0 - const newFilter = { - id: null, + const newFilter: LocalFilter = { + id: getDefaultEventName(), + name: getDefaultEventLabel(), uuid: uuid(), type: EntityTypes.EVENTS, order: order, diff --git a/frontend/src/scenes/insights/filters/BreakdownFilter/TaxonomicBreakdownPopover.tsx b/frontend/src/scenes/insights/filters/BreakdownFilter/TaxonomicBreakdownPopover.tsx index abbcb7020c116..9a8df4a8c392f 100644 --- a/frontend/src/scenes/insights/filters/BreakdownFilter/TaxonomicBreakdownPopover.tsx +++ b/frontend/src/scenes/insights/filters/BreakdownFilter/TaxonomicBreakdownPopover.tsx @@ -28,7 +28,7 @@ export const TaxonomicBreakdownPopover = ({ open, setOpen, children }: Taxonomic TaxonomicFilterGroupType.EventFeatureFlags, ...groupsTaxonomicTypes, TaxonomicFilterGroupType.CohortsWithAllUsers, - ...(includeSessions ? [TaxonomicFilterGroupType.Sessions] : []), + ...(includeSessions ? [TaxonomicFilterGroupType.SessionProperties] : []), TaxonomicFilterGroupType.HogQLExpression, TaxonomicFilterGroupType.DataWarehouseProperties, ] diff --git a/frontend/src/scenes/insights/insightLogic.ts b/frontend/src/scenes/insights/insightLogic.ts index 9b3b87114a0ef..cd6b4b6ac88e5 100644 --- a/frontend/src/scenes/insights/insightLogic.ts +++ b/frontend/src/scenes/insights/insightLogic.ts @@ -290,9 +290,8 @@ export const insightLogic = kea([ targetDashboards.includes(props.dashboardId) if (updateIsForThisDashboard) { return { ...state, ...item } - } else { - return state } + return state }, [insightsModel.actionTypes.renameInsightSuccess]: (state, { item }) => { if (item.id === state.id) { @@ -303,9 +302,8 @@ export const insightLogic = kea([ [insightsModel.actionTypes.insightsAddedToDashboard]: (state, { dashboardId, insightIds }) => { if (insightIds.includes(state.id)) { return { ...state, dashboards: [...(state.dashboards || []), dashboardId] } - } else { - return state } + return state }, [dashboardsModel.actionTypes.tileRemovedFromDashboard]: (state, { tile, dashboardId }) => { if (tile.insight?.id === state.id) { @@ -480,37 +478,35 @@ export const insightLogic = kea([ if (insight.query) { return { ...queryExportContext(insight.query, undefined, undefined, false), filename } - } else { - if (isTrendsFilter(filters) || isStickinessFilter(filters) || isLifecycleFilter(filters)) { - return { - path: `api/projects/${currentTeamId}/insights/trend/?${toParams( - filterTrendsClientSideParams(params) - )}`, - filename, - } - } else if (isRetentionFilter(filters)) { - return { - filename, - path: `api/projects/${currentTeamId}/insights/retention/?${toParams(params)}`, - } - } else if (isFunnelsFilter(filters)) { - return { - filename, - method: 'POST', - path: `api/projects/${currentTeamId}/insights/funnel`, - body: params, - } - } else if (isPathsFilter(filters)) { - return { - filename, - method: 'POST', - path: `api/projects/${currentTeamId}/insights/path`, - body: params, - } - } else { - return null + } + if (isTrendsFilter(filters) || isStickinessFilter(filters) || isLifecycleFilter(filters)) { + return { + path: `api/projects/${currentTeamId}/insights/trend/?${toParams( + filterTrendsClientSideParams(params) + )}`, + filename, + } + } else if (isRetentionFilter(filters)) { + return { + filename, + path: `api/projects/${currentTeamId}/insights/retention/?${toParams(params)}`, + } + } else if (isFunnelsFilter(filters)) { + return { + filename, + method: 'POST', + path: `api/projects/${currentTeamId}/insights/funnel`, + body: params, + } + } else if (isPathsFilter(filters)) { + return 
{ + filename, + method: 'POST', + path: `api/projects/${currentTeamId}/insights/path`, + body: params, } } + return null }, ], isUsingSessionAnalysis: [ diff --git a/frontend/src/scenes/insights/insightVizDataLogic.ts b/frontend/src/scenes/insights/insightVizDataLogic.ts index 250ac40172add..4a4d94938e305 100644 --- a/frontend/src/scenes/insights/insightVizDataLogic.ts +++ b/frontend/src/scenes/insights/insightVizDataLogic.ts @@ -151,9 +151,8 @@ export const insightVizDataLogic = kea([ return !NON_VALUES_ON_SERIES_DISPLAY_TYPES.includes(display || ChartDisplayType.ActionsLineGraph) } else if (isLifecycle) { return true - } else { - return false } + return false }, ], diff --git a/frontend/src/scenes/insights/summarizeInsight.ts b/frontend/src/scenes/insights/summarizeInsight.ts index 31a498815b2a0..f14ec09dfd36a 100644 --- a/frontend/src/scenes/insights/summarizeInsight.ts +++ b/frontend/src/scenes/insights/summarizeInsight.ts @@ -60,22 +60,21 @@ function summarizeBreakdown(filters: Partial | BreakdownFilter, cont : `ID ${cohortId}`) ) .join(', ')}` - } else { - const noun = - breakdown_type !== 'group' - ? breakdown_type - : context.aggregationLabel(breakdown_group_type_index, true).singular - const propertyLabel = - typeof breakdown === 'string' && - breakdown_type && - breakdown_type in PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE - ? getCoreFilterDefinition( - breakdown, - PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE[breakdown_type] - )?.label || breakdown - : breakdown - return `${noun}'s ${propertyLabel}` } + const noun = + breakdown_type !== 'group' + ? breakdown_type + : context.aggregationLabel(breakdown_group_type_index, true).singular + const propertyLabel = + typeof breakdown === 'string' && + breakdown_type && + breakdown_type in PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE + ? 
getCoreFilterDefinition( + breakdown, + PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE[breakdown_type] + )?.label || breakdown + : breakdown + return `${noun}'s ${propertyLabel}` } return null } @@ -295,9 +294,8 @@ export function summarizeInsightQuery(query: InsightQueryNode, context: SummaryC ) } else if (isLifecycleQuery(query)) { return `User lifecycle based on ${getDisplayNameFromEntityNode(query.series[0])}` - } else { - return '' } + return '' } function summarizeQuery(query: Node): string { diff --git a/frontend/src/scenes/insights/utils/cleanFilters.ts b/frontend/src/scenes/insights/utils/cleanFilters.ts index 879b7851c5a1a..3712017321ea5 100644 --- a/frontend/src/scenes/insights/utils/cleanFilters.ts +++ b/frontend/src/scenes/insights/utils/cleanFilters.ts @@ -259,9 +259,8 @@ export function autocorrectInterval(filters: Partial): IntervalTy return 'hour' } else if (hour_disabled) { return 'day' - } else { - return filters.interval } + return filters.interval } export function cleanFilters( diff --git a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx index 98d1be89d540e..bc74a14c89d09 100644 --- a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx +++ b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx @@ -584,6 +584,8 @@ export function LineGraph_({ }, } + const truncateRows = !inSurveyView && !!insightProps.dashboardId + if (type === GraphType.Bar) { if (hideXAxis || hideYAxis) { options.layout = { padding: 20 } @@ -674,21 +676,22 @@ export function LineGraph_({ y: { display: true, beforeFit: (scale) => { - if (inSurveyView) { - scale.ticks = scale.ticks.map((tick) => { - if (typeof tick.label === 'string') { - return { ...tick, label: truncateString(tick.label, 50) } - } - return tick - }) - - const ROW_HEIGHT = 60 - const dynamicHeight = scale.ticks.length * ROW_HEIGHT - const height = dynamicHeight - const parentNode: any = scale.chart?.canvas?.parentNode - parentNode.style.height = `${height}px` - } else { - // display only as many bars, as we can fit labels + scale.ticks = scale.ticks.map((tick) => { + if (typeof tick.label === 'string') { + return { ...tick, label: truncateString(tick.label, 50) } + } + return tick + }) + + const ROW_HEIGHT = 20 + const height = scale.ticks.length * ROW_HEIGHT + const parentNode: any = scale.chart?.canvas?.parentNode + parentNode.style.height = `${height}px` + + if (truncateRows) { + // Display only as many bars, as we can fit labels + // Important: Make sure the query result does not deliver more data than we can display + // See apply_dashboard_filters function in query runners scale.max = scale.ticks.length } }, @@ -696,7 +699,8 @@ export function LineGraph_({ ticks: { ...tickOptions, precision, - autoSkip: true, + stepSize: !truncateRows ? 1 : undefined, + autoSkip: !truncateRows ? false : undefined, callback: function _renderYLabel(_, i) { const d = datasets?.[0] if (!d) { @@ -738,10 +742,7 @@ export function LineGraph_({ }, [datasets, hiddenLegendKeys, isDarkModeOn, trendsFilter, formula, showValueOnSeries, showPercentStackView]) return ( -
+
{showAnnotations && myLineChart && chartWidth && chartHeight ? ( ([ registerNodeLogic: (state, { nodeId, nodeLogic }) => { if (nodeId === null) { return state - } else { - return { - ...state, - [nodeId]: nodeLogic, - } + } + return { + ...state, + [nodeId]: nodeLogic, } }, unregisterNodeLogic: (state, { nodeId }) => { @@ -290,9 +289,8 @@ export const notebookLogic = kea([ if (error.code === 'conflict') { actions.showConflictWarning() return null - } else { - throw error } + throw error } }, renameNotebook: async ({ title }) => { diff --git a/frontend/src/scenes/onboarding/Onboarding.tsx b/frontend/src/scenes/onboarding/Onboarding.tsx index adcd6d3476b4d..aac4f4f81efd3 100644 --- a/frontend/src/scenes/onboarding/Onboarding.tsx +++ b/frontend/src/scenes/onboarding/Onboarding.tsx @@ -1,5 +1,6 @@ import { useActions, useValues } from 'kea' import { FEATURE_FLAGS, SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { useEffect, useState } from 'react' import { AndroidInstructions } from 'scenes/onboarding/sdks/session-replay' @@ -81,6 +82,8 @@ const OnboardingWrapper = ({ children }: { children: React.ReactNode }): JSX.Ele const ProductAnalyticsOnboarding = (): JSX.Element => { const { currentTeam } = useValues(teamLogic) + const heatmapsEnabled = useFeatureFlag('TOOLBAR_HEATMAPS') + return ( { type: 'toggle', inverseToggle: true, }, + + heatmapsEnabled + ? { + title: 'Enable heatmaps', + description: `If you use our JavaScript libraries, we can capture general clicks, mouse movements, + and scrolling to create heatmaps. + No additional events are created, and you can disable this at any time.`, + teamProperty: 'heatmaps_opt_in', + value: currentTeam?.heatmaps_opt_in ?? 
true, + type: 'toggle', + } + : undefined, ]} /> diff --git a/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx b/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx index 8698b6bf64dbd..9cee16d3a1c60 100644 --- a/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx +++ b/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx @@ -44,7 +44,7 @@ export const OnboardingProductConfiguration = ({ options, }: { stepKey?: OnboardingStepKey - options: ProductConfigOption[] + options: (ProductConfigOption | undefined)[] }): JSX.Element | null => { const { configOptions } = useValues(onboardingProductConfigurationLogic) const { defaultEnabledPlugins } = useValues(pluginsLogic) @@ -58,7 +58,7 @@ export const OnboardingProductConfiguration = ({ }, [configOptions]) useEffect(() => { - setConfigOptions(options) + setConfigOptions(options.filter((option): option is ProductConfigOption => !!option)) }, []) const combinedList: ConfigOption[] = [ diff --git a/frontend/src/scenes/onboarding/onboardingProductConfigurationLogic.ts b/frontend/src/scenes/onboarding/onboardingProductConfigurationLogic.ts index 09ac660553083..a3b676864bd76 100644 --- a/frontend/src/scenes/onboarding/onboardingProductConfigurationLogic.ts +++ b/frontend/src/scenes/onboarding/onboardingProductConfigurationLogic.ts @@ -2,12 +2,14 @@ import { LemonSelectOptions } from '@posthog/lemon-ui' import { actions, connect, kea, listeners, path, reducers } from 'kea' import { teamLogic } from 'scenes/teamLogic' +import { TeamType } from '~/types' + import type { onboardingProductConfigurationLogicType } from './onboardingProductConfigurationLogicType' export interface ProductConfigOptionBase { title: string description: string - teamProperty: string + teamProperty: keyof TeamType } export interface ProductConfigurationToggle extends ProductConfigOptionBase { diff --git a/frontend/src/scenes/paths/Paths.tsx b/frontend/src/scenes/paths/Paths.tsx index 933f2825567f9..bb40f2c1701de 100644 --- a/frontend/src/scenes/paths/Paths.tsx +++ b/frontend/src/scenes/paths/Paths.tsx @@ -25,7 +25,7 @@ export function Paths(): JSX.Element { const [nodeCards, setNodeCards] = useState([]) const { insight, insightProps } = useValues(insightLogic) - const { paths, pathsFilter, funnelPathsFilter, insightDataLoading, insightDataError } = useValues( + const { insightQuery, paths, pathsFilter, funnelPathsFilter, insightDataLoading, insightDataError } = useValues( pathsDataLogic(insightProps) ) @@ -51,7 +51,7 @@ export function Paths(): JSX.Element { }, [paths, !insightDataLoading, canvasWidth, canvasHeight]) if (insightDataError) { - return + return } return ( diff --git a/frontend/src/scenes/persons/PersonPreview.tsx b/frontend/src/scenes/persons/PersonPreview.tsx index 2c874726304c7..f0fe0d838179e 100644 --- a/frontend/src/scenes/persons/PersonPreview.tsx +++ b/frontend/src/scenes/persons/PersonPreview.tsx @@ -28,9 +28,18 @@ export function PersonPreview(props: PersonPreviewProps): JSX.Element | null { return } - // NOTE: This should pretty much never happen, but it's here just in case + // NOTE: This can happen if the Person was deleted or the events associated with the distinct_id had person processing disabled if (!person) { - return <>Not found + return ( +
+

Person profile not found

+

+ The Person may have been deleted. +
+ Alternatively, the events for this user may have had Person Profiles disabled. +

+
+ ) } const display = asDisplay(person) diff --git a/frontend/src/scenes/persons/RelatedFeatureFlags.tsx b/frontend/src/scenes/persons/RelatedFeatureFlags.tsx index 150ed59840cc7..90b9233ce6603 100644 --- a/frontend/src/scenes/persons/RelatedFeatureFlags.tsx +++ b/frontend/src/scenes/persons/RelatedFeatureFlags.tsx @@ -154,7 +154,7 @@ export function RelatedFeatureFlags({ distinctId, groups }: Props): JSX.Element } } }} - value="all" + value={filters.type || 'all'} dropdownMaxContentWidth /> @@ -180,7 +180,7 @@ export function RelatedFeatureFlags({ distinctId, groups }: Props): JSX.Element } } }} - value="all" + value={filters.reason || 'all'} dropdownMaxContentWidth /> @@ -206,7 +206,7 @@ export function RelatedFeatureFlags({ distinctId, groups }: Props): JSX.Element { label: 'Disabled', value: 'false' }, ] as { label: string; value: string }[] } - value="all" + value={filters.active || 'all'} dropdownMaxContentWidth />
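As an aside on the OnboardingProductConfiguration change above: the options prop now accepts (ProductConfigOption | undefined)[] and relies on a user-defined type guard inside Array.prototype.filter to narrow the array back to ProductConfigOption[] before it reaches setConfigOptions. A minimal standalone sketch of that narrowing pattern follows; the option shape is abbreviated here and the sample values are taken from the heatmaps toggle shown above purely for illustration.

interface ProductConfigOption {
    title: string
    teamProperty: string
}

// Conditionally included options (for example, ones gated behind a feature flag) may be undefined.
const options: (ProductConfigOption | undefined)[] = [
    { title: 'Enable heatmaps', teamProperty: 'heatmaps_opt_in' },
    undefined,
]

// The `option is ProductConfigOption` predicate lets TypeScript infer ProductConfigOption[]
// for the filtered result instead of (ProductConfigOption | undefined)[].
const definedOptions: ProductConfigOption[] = options.filter(
    (option): option is ProductConfigOption => !!option
)

console.log(definedOptions.length) // 1 — only the defined option survives the filter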
diff --git a/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts b/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts index 7f60dfda38c67..08e27edffb5c5 100644 --- a/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts +++ b/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts @@ -88,7 +88,9 @@ export const relatedFeatureFlagsLogic = kea([ (selectors) => [selectors.relatedFeatureFlags, selectors.featureFlags], (relatedFlags, featureFlags): RelatedFeatureFlag[] => { if (relatedFlags && featureFlags) { - return featureFlags.map((flag) => ({ ...relatedFlags[flag.key], ...flag })) + return featureFlags + .map((flag) => ({ ...relatedFlags[flag.key], ...flag })) + .filter((flag) => flag.evaluation !== undefined) } return [] }, diff --git a/frontend/src/scenes/pipeline/Destinations.tsx b/frontend/src/scenes/pipeline/Destinations.tsx index a96716246a70a..525a5418da8a2 100644 --- a/frontend/src/scenes/pipeline/Destinations.tsx +++ b/frontend/src/scenes/pipeline/Destinations.tsx @@ -1,4 +1,4 @@ -import { LemonTable, LemonTableColumn, LemonTag, lemonToast, Tooltip } from '@posthog/lemon-ui' +import { LemonTable, LemonTableColumn, LemonTag, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' @@ -8,7 +8,6 @@ import { LemonMenuOverlay } from 'lib/lemon-ui/LemonMenu/LemonMenu' import { updatedAtColumn } from 'lib/lemon-ui/LemonTable/columnUtils' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { urls } from 'scenes/urls' import { AvailableFeature, PipelineNodeTab, PipelineStage, ProductKey } from '~/types' @@ -17,7 +16,7 @@ import { AppMetricSparkLine } from './AppMetricSparkLine' import { pipelineDestinationsLogic } from './destinationsLogic' import { NewButton } from './NewButton' import { pipelineLogic } from './pipelineLogic' -import { Destination, PipelineBackend } from './types' +import { Destination } from './types' import { pipelineNodeMenuCommonItems, RenderApp, RenderBatchExportIcon } from './utils' export function Destinations(): JSX.Element { @@ -146,16 +145,16 @@ export const DestinationMoreOverlay = ({ inOverview?: boolean }): JSX.Element => { const { canConfigurePlugins, canEnableNewDestinations } = useValues(pipelineLogic) - const { toggleEnabled, loadPluginConfigs } = useActions(pipelineDestinationsLogic) + const { toggleNode, deleteNode } = useActions(pipelineDestinationsLogic) return ( toggleEnabled(destination, !destination.enabled), + onClick: () => toggleNode(destination, !destination.enabled), disabledReason: !canConfigurePlugins - ? 'You do not have permission to enable/disable destinations.' + ? 'You do not have permission to toggle destinations.' : !canEnableNewDestinations && !destination.enabled ? 'Data pipelines add-on is required for enabling new destinations' : undefined, @@ -165,22 +164,8 @@ export const DestinationMoreOverlay = ({ ? [ { label: 'Delete destination', - onClick: () => { - if (destination.backend === PipelineBackend.Plugin) { - void deleteWithUndo({ - endpoint: `plugin_config`, // TODO: Batch exports too - object: { - id: destination.id, - name: destination.name, - }, - callback: loadPluginConfigs, - }) - } else { - lemonToast.warning( - 'Deleting batch export destinations is not yet supported here.' 
- ) - } - }, + status: 'danger' as const, // for typechecker happiness + onClick: () => deleteNode(destination), disabledReason: canConfigurePlugins ? undefined : 'You do not have permission to delete destinations.', diff --git a/frontend/src/scenes/pipeline/NewButton.tsx b/frontend/src/scenes/pipeline/NewButton.tsx index d0b58b3f9aa64..42cae5a5e385b 100644 --- a/frontend/src/scenes/pipeline/NewButton.tsx +++ b/frontend/src/scenes/pipeline/NewButton.tsx @@ -1,7 +1,7 @@ import { LemonButton } from 'lib/lemon-ui/LemonButton' import { urls } from 'scenes/urls' -import { PipelineNodeTab, PipelineStage } from '~/types' +import { PipelineStage } from '~/types' type NewButtonProps = { stage: PipelineStage @@ -9,11 +9,7 @@ type NewButtonProps = { export function NewButton({ stage }: NewButtonProps): JSX.Element { return ( - + New {stage} ) diff --git a/frontend/src/scenes/pipeline/Pipeline.stories.tsx b/frontend/src/scenes/pipeline/Pipeline.stories.tsx index eeab0de660a90..c539caf709531 100644 --- a/frontend/src/scenes/pipeline/Pipeline.stories.tsx +++ b/frontend/src/scenes/pipeline/Pipeline.stories.tsx @@ -66,7 +66,6 @@ const geoIpConfigId = pluginConfigs.results.find( )!.id export function PipelineLandingPage(): JSX.Element { - // also Destinations page useEffect(() => { router.actions.push(urls.pipeline()) pipelineLogic.mount() @@ -106,6 +105,20 @@ export function PipelineDestinationsPage(): JSX.Element { return } +export function PipelineNodeNewTransformation(): JSX.Element { + useEffect(() => { + router.actions.push(urls.pipelineNodeNew(PipelineStage.Transformation)) + }, []) + return +} + +export function PipelineNodeNewDestination(): JSX.Element { + useEffect(() => { + router.actions.push(urls.pipelineNodeNew(PipelineStage.Destination)) + }, []) + return +} + export function PipelineNodeConfiguration(): JSX.Element { useEffect(() => { router.actions.push( diff --git a/frontend/src/scenes/pipeline/Pipeline.tsx b/frontend/src/scenes/pipeline/Pipeline.tsx index bb9c32c4924b6..02e68e8ac7dc3 100644 --- a/frontend/src/scenes/pipeline/Pipeline.tsx +++ b/frontend/src/scenes/pipeline/Pipeline.tsx @@ -35,7 +35,10 @@ export function Pipeline(): JSX.Element { } const maybeKind = PIPELINE_TAB_TO_NODE_STAGE[currentTab] - const showNewButton = maybeKind && (currentTab !== PipelineTab.Destinations || canEnableNewDestinations) + const showNewButton = + maybeKind && + currentTab !== PipelineTab.ImportApps && + (currentTab !== PipelineTab.Destinations || canEnableNewDestinations) return (
diff --git a/frontend/src/scenes/pipeline/PipelineNode.tsx b/frontend/src/scenes/pipeline/PipelineNode.tsx index 91316bc469453..afaac7481f8ee 100644 --- a/frontend/src/scenes/pipeline/PipelineNode.tsx +++ b/frontend/src/scenes/pipeline/PipelineNode.tsx @@ -64,11 +64,6 @@ export function PipelineNode(params: { stage?: string; id?: string } = {}): JSX. return } - if (id === 'new') { - // If it's new we don't want to show any tabs - return - } - if (!node) { return } diff --git a/frontend/src/scenes/pipeline/PipelineNodeNew.tsx b/frontend/src/scenes/pipeline/PipelineNodeNew.tsx new file mode 100644 index 0000000000000..e040793c3415d --- /dev/null +++ b/frontend/src/scenes/pipeline/PipelineNodeNew.tsx @@ -0,0 +1,154 @@ +import { useValues } from 'kea' +import { NotFound } from 'lib/components/NotFound' +import { LemonButton } from 'lib/lemon-ui/LemonButton' +import { LemonTable } from 'lib/lemon-ui/LemonTable' +import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' +import { SceneExport } from 'scenes/sceneTypes' +import { urls } from 'scenes/urls' + +import { BatchExportService, PipelineStage, PluginType } from '~/types' + +import { pipelineDestinationsLogic } from './destinationsLogic' +import { frontendAppsLogic } from './frontendAppsLogic' +import { PIPELINE_TAB_TO_NODE_STAGE } from './PipelineNode' +import { pipelineNodeNewLogic, PipelineNodeNewLogicProps } from './pipelineNodeNewLogic' +import { pipelineTransformationsLogic } from './transformationsLogic' +import { RenderApp, RenderBatchExportIcon } from './utils' + +const paramsToProps = ({ + params: { stage, pluginIdOrBatchExportDestination }, +}: { + params: { stage?: string; pluginIdOrBatchExportDestination?: string } +}): PipelineNodeNewLogicProps => { + const numericId = + pluginIdOrBatchExportDestination && /^\d+$/.test(pluginIdOrBatchExportDestination) + ? parseInt(pluginIdOrBatchExportDestination) + : undefined + const pluginId = numericId && !isNaN(numericId) ? numericId : null + const batchExportDestination = pluginId ? null : pluginIdOrBatchExportDestination ?? 
null + + return { + stage: PIPELINE_TAB_TO_NODE_STAGE[stage + 's'] || null, // pipeline tab has stage plural here we have singular + pluginId: pluginId, + batchExportDestination: batchExportDestination, + } +} + +export const scene: SceneExport = { + component: PipelineNodeNew, + logic: pipelineNodeNewLogic, + paramsToProps, +} + +interface PluginEntry { + id: number + name: string + description: string | undefined + plugin: PluginType + service: null +} +interface BatchExportEntry { + id: string + name: string + description: string | undefined + plugin: null + service: BatchExportService +} + +type TableEntry = PluginEntry | BatchExportEntry + +function convertPluginToTableEntry(plugin: PluginType): TableEntry { + return { + id: plugin.id, + name: plugin.name, + description: plugin.description, + plugin: plugin, + service: null, + } +} + +export function PipelineNodeNew( + params: { stage?: string; pluginIdOrBatchExportDestination?: string } = {} +): JSX.Element { + const { stage, pluginId, batchExportDestination } = paramsToProps({ params }) + + if (!stage) { + return + } + + if (pluginId) { + return <>Plugin ID {pluginId} + } + if (batchExportDestination) { + return <>Batch Export Destination {batchExportDestination} + } + + if (stage === PipelineStage.Transformation) { + // Show a list of transformations + const { plugins, loading } = useValues(pipelineTransformationsLogic) + const targets = Object.values(plugins).map(convertPluginToTableEntry) + return nodeOptionsTable(stage, targets, loading) + } else if (stage === PipelineStage.Destination) { + const { plugins, loading } = useValues(pipelineDestinationsLogic) + // Show a list of destinations - TODO: add batch export destinations too + const targets = Object.values(plugins).map(convertPluginToTableEntry) + return nodeOptionsTable(stage, targets, loading) + } else if (stage === PipelineStage.SiteApp) { + const { plugins, loading } = useValues(frontendAppsLogic) + const targets = Object.values(plugins).map(convertPluginToTableEntry) + return nodeOptionsTable(stage, targets, loading) + } + return <>Creation is unavailable for {stage} +} + +function nodeOptionsTable(stage: PipelineStage, targets: TableEntry[], loading: boolean): JSX.Element { + return ( + <> + + ) + }, + }, + { + title: 'App', + render: function RenderAppInfo(_, target) { + if (target.plugin) { + return + } + return + }, + }, + { + title: 'Actions', + width: 100, + align: 'right', + render: function RenderActions(_, target) { + return ( + + Create + + ) + }, + }, + ]} + /> + + ) +} diff --git a/frontend/src/scenes/pipeline/configUtils.ts b/frontend/src/scenes/pipeline/configUtils.ts index 9835e0cc3a63f..255c1e8dffd5a 100644 --- a/frontend/src/scenes/pipeline/configUtils.ts +++ b/frontend/src/scenes/pipeline/configUtils.ts @@ -10,11 +10,10 @@ export function getConfigSchemaArray( ): PluginConfigSchema[] { if (Array.isArray(configSchema)) { return configSchema - } else { - return Object.entries(configSchema) - .map(([key, value]) => ({ key, ...value })) - .sort((a, b) => (a.order || 999999) - (b.order || 999999)) } + return Object.entries(configSchema) + .map(([key, value]) => ({ key, ...value })) + .sort((a, b) => (a.order || 999999) - (b.order || 999999)) } export function getConfigSchemaObject( @@ -28,9 +27,8 @@ export function getConfigSchemaObject( } }) return newSchema - } else { - return configSchema } + return configSchema } export function defaultConfigForPlugin(plugin: PluginType): Record { diff --git a/frontend/src/scenes/pipeline/destinationsLogic.tsx 
b/frontend/src/scenes/pipeline/destinationsLogic.tsx index 1108f6b572573..a25d6399c67b7 100644 --- a/frontend/src/scenes/pipeline/destinationsLogic.tsx +++ b/frontend/src/scenes/pipeline/destinationsLogic.tsx @@ -2,6 +2,7 @@ import { lemonToast } from '@posthog/lemon-ui' import { actions, afterMount, connect, kea, listeners, path, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' @@ -16,7 +17,7 @@ import { import type { pipelineDestinationsLogicType } from './destinationsLogicType' import { pipelineLogic } from './pipelineLogic' -import { convertToPipelineNode, Destination } from './types' +import { BatchExportDestination, convertToPipelineNode, Destination, PipelineBackend } from './types' import { captureBatchExportEvent, capturePluginEvent } from './utils' export const pipelineDestinationsLogic = kea([ @@ -32,7 +33,9 @@ export const pipelineDestinationsLogic = kea([ ], }), actions({ - toggleEnabled: (destination: Destination, enabled: boolean) => ({ destination, enabled }), + toggleNode: (destination: Destination, enabled: boolean) => ({ destination, enabled }), + deleteNode: (destination: Destination) => ({ destination }), + deleteNodeBatchExport: (destination: BatchExportDestination) => ({ destination }), }), loaders(({ values }) => ({ plugins: [ @@ -70,7 +73,7 @@ export const pipelineDestinationsLogic = kea([ } return pluginConfigs }, - toggleEnabledWebhook: async ({ destination, enabled }) => { + toggleNodeWebhook: async ({ destination, enabled }) => { const { pluginConfigs, plugins } = values const pluginConfig = pluginConfigs[destination.id] const plugin = plugins[pluginConfig.plugin] @@ -91,7 +94,7 @@ export const pipelineDestinationsLogic = kea([ ) return Object.fromEntries(results.map((batchExport) => [batchExport.id, batchExport])) }, - toggleEnabledBatchExport: async ({ destination, enabled }) => { + toggleNodeBatchExport: async ({ destination, enabled }) => { const batchExport = values.batchExportConfigs[destination.id] if (enabled) { await api.batchExports.unpause(destination.id) @@ -101,6 +104,12 @@ export const pipelineDestinationsLogic = kea([ captureBatchExportEvent(`batch export ${enabled ? 
'enabled' : 'disabled'}`, batchExport) return { ...values.batchExportConfigs, [destination.id]: { ...batchExport, paused: !enabled } } }, + deleteNodeBatchExport: async ({ destination }) => { + await api.batchExports.delete(destination.id) + return Object.fromEntries( + Object.entries(values.batchExportConfigs).filter(([id]) => id !== destination.id) + ) + }, }, ], })), @@ -139,8 +148,8 @@ export const pipelineDestinationsLogic = kea([ }, ], }), - listeners(({ actions, values }) => ({ - toggleEnabled: async ({ destination, enabled }) => { + listeners(({ actions, asyncActions, values }) => ({ + toggleNode: ({ destination, enabled }) => { if (!values.canConfigurePlugins) { lemonToast.error("You don't have permission to enable or disable destinations") return @@ -150,9 +159,23 @@ export const pipelineDestinationsLogic = kea([ return } if (destination.backend === 'plugin') { - actions.toggleEnabledWebhook({ destination: destination, enabled: enabled }) + actions.toggleNodeWebhook({ destination: destination, enabled: enabled }) + } else { + actions.toggleNodeBatchExport({ destination: destination, enabled: enabled }) + } + }, + deleteNode: async ({ destination }) => { + if (destination.backend === PipelineBackend.BatchExport) { + await asyncActions.deleteNodeBatchExport(destination) } else { - actions.toggleEnabledBatchExport({ destination: destination, enabled: enabled }) + await deleteWithUndo({ + endpoint: `projects/${teamLogic.values.currentTeamId}/plugin_configs`, + object: { + id: destination.id, + name: destination.name, + }, + callback: actions.loadPluginConfigs, + }) } }, })), diff --git a/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx b/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx index e802dc5ebfe5e..c0a94055f769b 100644 --- a/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx @@ -23,13 +23,7 @@ import { frontendAppsLogic } from './frontendAppsLogic' import { importAppsLogic } from './importAppsLogic' import type { pipelineNodeLogicType } from './pipelineNodeLogicType' import { pipelineTransformationsLogic } from './transformationsLogic' -import { - BatchExportBasedStep, - convertToPipelineNode, - PipelineBackend, - PipelineNode, - PluginBasedStepBase, -} from './types' +import { BatchExportBasedNode, convertToPipelineNode, PipelineBackend, PipelineNode, PluginBasedNode } from './types' export interface PipelineNodeLogicProps { id: number | string @@ -37,9 +31,9 @@ export interface PipelineNodeLogicProps { stage: PipelineStage | null } -export type PluginUpdatePayload = Pick +export type PluginUpdatePayload = Pick export type BatchExportUpdatePayload = Pick< - BatchExportBasedStep, + BatchExportBasedNode, 'name' | 'description' | 'enabled' | 'service' | 'interval' > diff --git a/frontend/src/scenes/pipeline/pipelineNodeNewLogic.tsx b/frontend/src/scenes/pipeline/pipelineNodeNewLogic.tsx new file mode 100644 index 0000000000000..a83afdbceb5e8 --- /dev/null +++ b/frontend/src/scenes/pipeline/pipelineNodeNewLogic.tsx @@ -0,0 +1,52 @@ +import { kea, path, props, selectors } from 'kea' +import { capitalizeFirstLetter } from 'lib/utils' +import { Scene } from 'scenes/sceneTypes' +import { urls } from 'scenes/urls' + +import { Breadcrumb, PipelineStage, PipelineTab } from '~/types' + +import type { pipelineNodeNewLogicType } from './pipelineNodeNewLogicType' + +export const NODE_STAGE_TO_PIPELINE_TAB: Partial> = { + [PipelineStage.Transformation]: PipelineTab.Transformations, + [PipelineStage.Destination]: 
PipelineTab.Destinations, + [PipelineStage.SiteApp]: PipelineTab.SiteApps, +} +export interface PipelineNodeNewLogicProps { + /** Might be null if a non-existent stage is set in the URL. */ + stage: PipelineStage | null + pluginId: number | null + batchExportDestination: string | null +} + +export const pipelineNodeNewLogic = kea([ + props({} as PipelineNodeNewLogicProps), + path((pluginIdOrBatchExportDestination) => [ + 'scenes', + 'pipeline', + 'pipelineNodeNewLogic', + pluginIdOrBatchExportDestination, + ]), + selectors(() => ({ + breadcrumbs: [ + (_, p) => [p.stage, p.pluginId, p.batchExportDestination], + (stage, pluginId, batchDestination): Breadcrumb[] => [ + { + key: Scene.Pipeline, + name: 'Data pipeline', + path: urls.pipeline(), + }, + { + key: stage || 'unknown', + name: stage ? capitalizeFirstLetter(NODE_STAGE_TO_PIPELINE_TAB[stage] || '') : 'Unknown', + path: urls.pipeline(stage ? NODE_STAGE_TO_PIPELINE_TAB[stage] : undefined), + }, + { + // TODO: use the plugin name + key: pluginId || batchDestination || 'Unknown', + name: pluginId ? pluginId.toString() : batchDestination ?? 'Options', + }, + ], + ], + })), +]) diff --git a/frontend/src/scenes/pipeline/types.ts b/frontend/src/scenes/pipeline/types.ts index 5307ff232d3cb..2c2c0568f96a7 100644 --- a/frontend/src/scenes/pipeline/types.ts +++ b/frontend/src/scenes/pipeline/types.ts @@ -1,6 +1,6 @@ import { BatchExportConfiguration, - BatchExportDestination as BatchExportService, + BatchExportService, PipelineStage, PluginConfigWithPluginInfoNew, PluginType, @@ -23,14 +23,15 @@ interface PipelineNodeBase { // Split by backend -export interface PluginBasedStepBase extends PipelineNodeBase { +export interface PluginBasedNode extends PipelineNodeBase { backend: PipelineBackend.Plugin id: number plugin: PluginType config: Record } + /** NOTE: Batch exports are only used in Destinations, but we're making this a bit more abstract for clearer types. 
*/ -export interface BatchExportBasedStep extends PipelineNodeBase { +export interface BatchExportBasedNode extends PipelineNodeBase { backend: PipelineBackend.BatchExport /** UUID */ id: string @@ -38,43 +39,37 @@ export interface BatchExportBasedStep extends PipelineNodeBase { interval: BatchExportConfiguration['interval'] } -// Stage: Filters - -export interface Filter extends PluginBasedStepBase { - stage: PipelineStage.Filter -} - // Stage: Transformations -export interface Transformation extends PluginBasedStepBase { +export interface Transformation extends PluginBasedNode { stage: PipelineStage.Transformation order: number } // Stage: Destinations -export interface WebhookDestination extends PluginBasedStepBase { +export interface WebhookDestination extends PluginBasedNode { stage: PipelineStage.Destination interval: 'realtime' } -export interface BatchExportDestination extends BatchExportBasedStep { +export interface BatchExportDestination extends BatchExportBasedNode { stage: PipelineStage.Destination } export type Destination = BatchExportDestination | WebhookDestination // Legacy: Site apps -export interface SiteApp extends PluginBasedStepBase { +export interface SiteApp extends PluginBasedNode { stage: PipelineStage.SiteApp } // Legacy: Import apps -export interface ImportApp extends PluginBasedStepBase { +export interface ImportApp extends PluginBasedNode { stage: PipelineStage.ImportApp } // Final -export type PipelineNode = Filter | Transformation | Destination | SiteApp | ImportApp +export type PipelineNode = Transformation | Destination | SiteApp | ImportApp // Utils @@ -87,9 +82,7 @@ function isPluginConfig( export function convertToPipelineNode( candidate: PluginConfigWithPluginInfoNew | BatchExportConfiguration, stage: S -): S extends PipelineStage.Filter - ? Filter - : S extends PipelineStage.Transformation +): S extends PipelineStage.Transformation ? Transformation : S extends PipelineStage.Destination ? 
Destination @@ -100,10 +93,7 @@ export function convertToPipelineNode( : never { let node: PipelineNode if (isPluginConfig(candidate)) { - const almostNode: Omit< - Filter | Transformation | WebhookDestination | SiteApp | ImportApp, - 'frequency' | 'order' - > = { + const almostNode: Omit = { stage: stage, backend: PipelineBackend.Plugin, id: candidate.id, @@ -127,13 +117,11 @@ export function convertToPipelineNode( stage, interval: 'realtime', } - } else if (stage === PipelineStage.SiteApp || stage === PipelineStage.ImportApp) { + } else { node = { ...almostNode, stage, } - } else { - node = almostNode as Filter } } else { node = { diff --git a/frontend/src/scenes/pipeline/utils.tsx b/frontend/src/scenes/pipeline/utils.tsx index c5fc67b80e234..8b98a9382a534 100644 --- a/frontend/src/scenes/pipeline/utils.tsx +++ b/frontend/src/scenes/pipeline/utils.tsx @@ -17,7 +17,7 @@ import { urls } from 'scenes/urls' import { BatchExportConfiguration, - BatchExportDestination, + BatchExportService, PipelineNodeTab, PipelineStage, PluginConfigTypeNew, @@ -31,7 +31,7 @@ import { Destination, ImportApp, PipelineBackend, - PluginBasedStepBase, + PluginBasedNode, SiteApp, Transformation, WebhookDestination, @@ -150,7 +150,7 @@ export function RenderApp({ plugin, imageSize }: RenderAppProps): JSX.Element { ) } -export function RenderBatchExportIcon({ type }: { type: BatchExportDestination['type'] }): JSX.Element { +export function RenderBatchExportIcon({ type }: { type: BatchExportService['type'] }): JSX.Element { const icon = { BigQuery: BigQueryIcon, Postgres: PostgresIcon, @@ -275,7 +275,7 @@ export function appColumn(): Lem } } -function pluginMenuItems(node: PluginBasedStepBase): LemonMenuItem[] { +function pluginMenuItems(node: PluginBasedNode): LemonMenuItem[] { if (node.plugin?.url) { return [ { @@ -298,6 +298,7 @@ export function pipelineNodeMenuCommonItems(node: Transformation | SiteApp | Imp }, { label: 'View metrics', + status: 'danger', to: urls.pipelineNode(node.stage, node.id, PipelineNodeTab.Metrics), }, { @@ -327,13 +328,14 @@ export function pipelinePluginBackedNodeMenuCommonItems( enabled: !node.enabled, id: node.id, }), - disabledReason: canConfigurePlugins ? undefined : 'You do not have permission to enable/disable apps.', + disabledReason: canConfigurePlugins ? undefined : 'You do not have permission to toggle.', }, ...pipelineNodeMenuCommonItems(node), ...(!inOverview ? [ { label: 'Delete app', + status: 'danger' as const, // for typechecker happiness onClick: () => { void deleteWithUndo({ endpoint: `plugin_config`, @@ -344,7 +346,7 @@ export function pipelinePluginBackedNodeMenuCommonItems( callback: loadPluginConfigs, }) }, - disabledReason: canConfigurePlugins ? undefined : 'You do not have permission to delete apps.', + disabledReason: canConfigurePlugins ? 
undefined : 'You do not have permission to delete.', }, ] : []), diff --git a/frontend/src/scenes/retention/retentionLineGraphLogic.ts b/frontend/src/scenes/retention/retentionLineGraphLogic.ts index ec90368adf15a..2bd1ff9663e2c 100644 --- a/frontend/src/scenes/retention/retentionLineGraphLogic.ts +++ b/frontend/src/scenes/retention/retentionLineGraphLogic.ts @@ -109,9 +109,8 @@ export const retentionLineGraphLogic = kea([ if (startIndex !== undefined && startIndex !== -1) { return startIndex - trendSeries[0].days.length - } else { - return 0 } + return 0 }, ], diff --git a/frontend/src/scenes/retention/retentionTableLogic.ts b/frontend/src/scenes/retention/retentionTableLogic.ts index 177782fbafea4..3fd153edebcc6 100644 --- a/frontend/src/scenes/retention/retentionTableLogic.ts +++ b/frontend/src/scenes/retention/retentionTableLogic.ts @@ -24,9 +24,8 @@ const periodIsLatest = (date_to: string | null, period: string | null): boolean (period == 'Month' && curr.isSame(dayjs(), 'month')) ) { return true - } else { - return false } + return false } export const retentionTableLogic = kea([ diff --git a/frontend/src/scenes/sceneLogic.ts b/frontend/src/scenes/sceneLogic.ts index 7efd7e127e261..6a4e49d6651c8 100644 --- a/frontend/src/scenes/sceneLogic.ts +++ b/frontend/src/scenes/sceneLogic.ts @@ -298,9 +298,8 @@ export const sceneLogic = kea([ actions.reloadBrowserDueToImportError() } return - } else { - throw error } + throw error } finally { window.clearTimeout(timeout) } diff --git a/frontend/src/scenes/sceneTypes.ts b/frontend/src/scenes/sceneTypes.ts index cf6c7a80fa2ec..6ded33f8edce9 100644 --- a/frontend/src/scenes/sceneTypes.ts +++ b/frontend/src/scenes/sceneTypes.ts @@ -26,6 +26,7 @@ export enum Scene { ReplayFilePlayback = 'ReplayFilePlayback', PersonsManagement = 'PersonsManagement', Person = 'Person', + PipelineNodeNew = 'PipelineNodeNew', Pipeline = 'Pipeline', PipelineNode = 'PipelineNode', Group = 'Group', diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index 44e1d8f7efd59..1ae648acdf9cf 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -164,6 +164,12 @@ export const sceneConfigurations: Record = { name: 'People & groups', defaultDocsPath: '/docs/product-analytics/group-analytics', }, + [Scene.PipelineNodeNew]: { + projectBased: true, + name: 'Pipeline new step', + activityScope: ActivityScope.PLUGIN, + defaultDocsPath: '/docs/cdp', + }, [Scene.Pipeline]: { projectBased: true, name: 'Pipeline', @@ -507,6 +513,8 @@ export const routes: Record = { [urls.personByDistinctId('*', false)]: Scene.Person, [urls.personByUUID('*', false)]: Scene.Person, [urls.persons()]: Scene.PersonsManagement, + [urls.pipelineNodeNew(':stage')]: Scene.PipelineNodeNew, + [urls.pipelineNodeNew(':stage', ':pluginIdOrBatchExportDestination')]: Scene.PipelineNodeNew, [urls.pipeline(':tab')]: Scene.Pipeline, [urls.pipelineNode(':stage', ':id', ':nodeTab')]: Scene.PipelineNode, [urls.groups(':groupTypeIndex')]: Scene.PersonsManagement, diff --git a/frontend/src/scenes/session-recordings/SessionRecordings.tsx b/frontend/src/scenes/session-recordings/SessionRecordings.tsx index 7084e9649b48e..ec323b8b21e46 100644 --- a/frontend/src/scenes/session-recordings/SessionRecordings.tsx +++ b/frontend/src/scenes/session-recordings/SessionRecordings.tsx @@ -150,7 +150,7 @@ export function SessionsRecordings(): JSX.Element { , onClick: () => openSettingsPanel({ sectionId: 'project-replay' }), children: 'Configure', diff --git 
a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts index 30109a3936a2a..118614cfa6cb2 100644 --- a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts +++ b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts @@ -45,9 +45,8 @@ export const parseExportedSessionRecording = (fileData: string): ExportedSession .sort((a, b) => a.timestamp - b.timestamp), }, } - } else { - throw new Error('File version is not supported') } + throw new Error('File version is not supported') } /** @@ -159,6 +158,7 @@ export const sessionRecordingFilePlaybackSceneLogic = kea { - const { currentPlayerState } = useValues(sessionRecordingPlayerLogic) + const { currentPlayerState, endReached } = useValues(sessionRecordingPlayerLogic) let content = null const pausedState = currentPlayerState === SessionPlayerState.PAUSE || currentPlayerState === SessionPlayerState.READY @@ -59,7 +59,11 @@ const PlayerFrameOverlayContent = (): JSX.Element | null => { ) } if (pausedState) { - content = + content = endReached ? ( + + ) : ( + + ) } if (currentPlayerState === SessionPlayerState.SKIP) { content =
Skipping inactivity
diff --git a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx index 70be8a2abb3c7..56c69f0df5d03 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx @@ -1,7 +1,6 @@ import './PlayerMeta.scss' -import { IconEllipsis, IconTrash } from '@posthog/icons' -import { IconDownload, IconMagic, IconSearch } from '@posthog/icons' +import { IconDownload, IconEllipsis, IconMagic, IconSearch, IconTrash } from '@posthog/icons' import { LemonButton, LemonDialog, LemonMenu, LemonMenuItems, Link } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' @@ -68,7 +67,7 @@ function URLOrScreen({ lastUrl }: { lastUrl: string | undefined }): JSX.Element ) } -export function PlayerMeta(): JSX.Element { +export function PlayerMeta({ linkIconsOnly = false }: { linkIconsOnly?: boolean }): JSX.Element { const { sessionRecordingId, logicProps, isFullScreen } = useValues(sessionRecordingPlayerLogic) const { @@ -181,10 +180,10 @@ export function PlayerMeta(): JSX.Element {
{sessionRecordingId && ( - <> - +
+ {mode === SessionRecordingPlayerMode.Standard && } - +
)}
JSX.Element + buttonProps?: Partial +}): JSX.Element { const { logicProps } = useValues(sessionRecordingPlayerLogic) const { maybePersistRecording } = useActions(sessionRecordingPlayerLogic) const nodeLogic = useNotebookNode() @@ -34,6 +41,7 @@ function PinToPlaylistButton(): JSX.Element { return logicProps.setPinned ? ( { if (nodeLogic && !logicProps.pinned) { // If we are in a node, then pinning should persist the recording @@ -42,19 +50,16 @@ function PinToPlaylistButton(): JSX.Element { logicProps.setPinned?.(!logicProps.pinned) }} - size="small" tooltip={tooltip} data-attr={logicProps.pinned ? 'unpin-from-this-list' : 'pin-to-this-list'} icon={logicProps.pinned ? : } /> ) : ( - - {description} - + {buttonContent(description)} ) } -export function PlayerMetaLinks(): JSX.Element { +export function PlayerMetaLinks({ iconsOnly }: { iconsOnly: boolean }): JSX.Element { const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic) const { setPause, setIsFullScreen } = useActions(sessionRecordingPlayerLogic) const nodeLogic = useNotebookNode() @@ -79,14 +84,18 @@ export function PlayerMetaLinks(): JSX.Element { size: 'small', } + const buttonContent = (label: string): JSX.Element => { + return !iconsOnly ? {label} : + } + const mode = logicProps.mode ?? SessionRecordingPlayerMode.Standard return ( -
+ <> {![SessionRecordingPlayerMode.Sharing].includes(mode) ? ( <> } resource={{ type: NotebookNodeType.Recording, @@ -111,17 +120,17 @@ export function PlayerMetaLinks(): JSX.Element { personsModalLogic.findMounted()?.actions.closeModal() }} > - Comment + {buttonContent('Comment')} - } onClick={onShare} {...commonProps}> - Share + } onClick={onShare} {...commonProps}> + {buttonContent('Share')} {nodeLogic?.props.nodeType === NotebookNodeType.RecordingPlaylist ? ( } - size="small" onClick={() => { nodeLogic.actions.insertAfter({ type: NotebookNodeType.Recording, @@ -131,9 +140,9 @@ export function PlayerMetaLinks(): JSX.Element { /> ) : null} - + ) : null} -
+ ) } diff --git a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss index 2f3bcac6708f3..5284d90c65570 100644 --- a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss +++ b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss @@ -1,8 +1,6 @@ @import '../../../styles/mixins'; .SessionRecordingPlayer { - --inspector-min-width: 24rem; - position: relative; display: flex; flex-direction: row; @@ -29,7 +27,6 @@ .SessionRecordingPlayer__main { flex: 1; - padding-right: 2.5rem; } &--fullscreen { @@ -62,46 +59,24 @@ } .SessionRecordingPlayer__inspector { - position: absolute; - top: 0; - right: 0; - bottom: 0; - z-index: 10; + position: relative; flex-shrink: 0; - min-width: var(--inspector-min-width); - max-width: 95%; - - &--collapsed { - --inspector-min-width: 2.5rem; - } - - .PlayerInspectorPreview { - position: absolute; - inset: 0; - z-index: 1; - cursor: pointer; - transition: opacity 0.2s ease-in-out; - } + min-width: 20rem; + max-width: 50%; } - &--widescreen { - .SessionRecordingPlayer__main { - padding-right: 0; - } - + &--wide { .SessionRecordingPlayer__inspector { - position: relative; - max-width: 75%; + min-width: 26rem; } } - &--inspector-hidden { - .SessionRecordingPlayer__main { - padding-right: 0; - } + &--stacked-vertically { + flex-direction: column; .SessionRecordingPlayer__inspector { - display: none; + min-width: 100%; + max-width: 100%; } } } diff --git a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx index bde9dc7accee6..429fcc832216d 100644 --- a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx +++ b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx @@ -7,7 +7,7 @@ import { HotkeysInterface, useKeyboardHotkeys } from 'lib/hooks/useKeyboardHotke import { usePageVisibility } from 'lib/hooks/usePageVisibility' import { useResizeBreakpoints } from 'lib/hooks/useResizeObserver' import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { useEffect, useMemo, useRef, useState } from 'react' +import { useMemo, useRef, useState } from 'react' import { useNotebookDrag } from 'scenes/notebooks/AddToNotebook/DraggableToNotebook' import { PlayerController } from 'scenes/session-recordings/player/controller/PlayerController' import { PlayerInspector } from 'scenes/session-recordings/player/inspector/PlayerInspector' @@ -35,6 +35,11 @@ export interface SessionRecordingPlayerProps extends SessionRecordingPlayerLogic matchingEventsMatchType?: MatchingEventsMatchType } +enum InspectorStacking { + Vertical = 'vertical', + Horizontal = 'horizontal', +} + export const createPlaybackSpeedKey = (action: (val: number) => void): HotkeysInterface => { return PLAYBACK_SPEEDS.map((x, i) => ({ key: `${i}`, value: x })).reduce( (acc, x) => ({ ...acc, [x.key]: { action: () => action(x.value) } }), @@ -59,6 +64,7 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. } = props const playerRef = useRef(null) + const playerMainRef = useRef(null) const logicProps: SessionRecordingPlayerLogicProps = { sessionRecordingId, @@ -130,26 +136,34 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. 
const { size } = useResizeBreakpoints( { - 0: 'tiny', - 400: 'small', - 1000: 'medium', + 0: 'small', + 1050: 'medium', + 1500: 'wide', }, { ref: playerRef, } ) + const { size: playerMainSize } = useResizeBreakpoints( + { + 0: 'small', + 650: 'medium', + }, + { + ref: playerMainRef, + } + ) - const isWidescreen = !isFullScreen && size === 'medium' + const isWidescreen = !isFullScreen && size === 'wide' const [inspectorExpanded, setInspectorExpanded] = useState(isWidescreen) + const [preferredInspectorStacking, setPreferredInspectorStacking] = useState(InspectorStacking.Horizontal) - const { draggable, elementProps } = useNotebookDrag({ href: urls.replaySingle(sessionRecordingId) }) + const compactLayout = size === 'small' + const layoutStacking = compactLayout ? InspectorStacking.Vertical : preferredInspectorStacking + const isVerticallyStacked = layoutStacking === InspectorStacking.Vertical - useEffect(() => { - if (isWidescreen) { - setInspectorExpanded(true) - } - }, [isWidescreen]) + const { draggable, elementProps } = useNotebookDrag({ href: urls.replaySingle(sessionRecordingId) }) if (isNotFound) { return ( @@ -163,14 +177,16 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX.
@@ -178,27 +194,35 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. closeExplorer()} /> ) : ( <> -
{ - if (!isWidescreen) { - setInspectorExpanded(false) - } - }} - > - {(!noMeta || isFullScreen) && size !== 'tiny' ? : null} +
+ {!noMeta || isFullScreen ? ( + + ) : null}
- + setInspectorExpanded(!inspectorExpanded)} + />
- {!noInspector && ( + {!noInspector && inspectorExpanded && ( + setPreferredInspectorStacking( + preferredInspectorStacking === InspectorStacking.Vertical + ? InspectorStacking.Horizontal + : InspectorStacking.Vertical + ) + } /> )} diff --git a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx index 77a0374e70904..a186863703b66 100644 --- a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx @@ -1,8 +1,8 @@ -import { IconFastForward, IconPause, IconPlay } from '@posthog/icons' +import { IconFastForward, IconPause, IconPlay, IconSearch } from '@posthog/icons' import { LemonMenu, LemonSwitch } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' -import { IconFullScreen } from 'lib/lemon-ui/icons' +import { IconFullScreen, IconSync } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { @@ -17,8 +17,14 @@ import { playerSettingsLogic } from '../playerSettingsLogic' import { SeekSkip, Timestamp } from './PlayerControllerTime' import { Seekbar } from './Seekbar' -export function PlayerController(): JSX.Element { - const { playingState, isFullScreen } = useValues(sessionRecordingPlayerLogic) +export function PlayerController({ + inspectorExpanded, + toggleInspectorExpanded, +}: { + inspectorExpanded: boolean + toggleInspectorExpanded: () => void +}): JSX.Element { + const { playingState, isFullScreen, endReached } = useValues(sessionRecordingPlayerLogic) const { togglePlayPause, setIsFullScreen } = useActions(sessionRecordingPlayerLogic) const { speed, skipInactivitySetting } = useValues(playerSettingsLogic) @@ -37,13 +43,19 @@ export function PlayerController(): JSX.Element { size="small" onClick={togglePlayPause} tooltip={ - <> - {showPause ? 'Pause' : 'Play'} +
+ {showPause ? 'Pause' : endReached ? 'Restart' : 'Play'} - +
} > - {showPause ? : } + {showPause ? ( + + ) : endReached ? ( + + ) : ( + + )} @@ -58,8 +70,6 @@ export function PlayerController(): JSX.Element { {speed}x -
-
+
+ + setIsFullScreen(!isFullScreen)}> + + + +
-
- - { - setIsFullScreen(!isFullScreen) - }} - > - - - -
+ {!inspectorExpanded && ( + } + > + Inspector + + )}
) diff --git a/frontend/src/scenes/session-recordings/player/controller/PlayerControllerTime.tsx b/frontend/src/scenes/session-recordings/player/controller/PlayerControllerTime.tsx index 6ca22131ef1dc..61971ab27e095 100644 --- a/frontend/src/scenes/session-recordings/player/controller/PlayerControllerTime.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/PlayerControllerTime.tsx @@ -4,7 +4,8 @@ import { useActions, useValues } from 'kea' import { dayjs } from 'lib/dayjs' import { useKeyHeld } from 'lib/hooks/useKeyHeld' import { IconSkipBackward } from 'lib/lemon-ui/icons' -import { capitalizeFirstLetter, colonDelimitedDuration } from 'lib/utils' +import { capitalizeFirstLetter, colonDelimitedDuration, shortTimeZone } from 'lib/utils' +import { useCallback } from 'react' import { ONE_FRAME_MS, sessionRecordingPlayerLogic } from 'scenes/session-recordings/player/sessionRecordingPlayerLogic' import { playerSettingsLogic, TimestampFormat } from '../playerSettingsLogic' @@ -22,27 +23,35 @@ export function Timestamp(): JSX.Element { const fixedUnits = endTimeSeconds > 3600 ? 3 : 2 + const rotateTimestampFormat = useCallback(() => { + setTimestampFormat( + timestampFormat === 'relative' + ? TimestampFormat.UTC + : timestampFormat === TimestampFormat.UTC + ? TimestampFormat.Device + : TimestampFormat.Relative + ) + }, [timestampFormat]) + return ( - - setTimestampFormat(timestampFormat === 'relative' ? TimestampFormat.Absolute : TimestampFormat.Relative) - } - active - > - {timestampFormat === TimestampFormat.Relative ? ( - <> - {colonDelimitedDuration(startTimeSeconds, fixedUnits)} /{' '} - {colonDelimitedDuration(endTimeSeconds, fixedUnits)} - - ) : ( - <> - {currentTimestamp - ? dayjs(currentTimestamp).tz('UTC').format('DD/MM/YYYY, HH:mm:ss') - : '--/--/----, 00:00:00'}{' '} - UTC - - )} + + + {timestampFormat === TimestampFormat.Relative ? ( + <> + {colonDelimitedDuration(startTimeSeconds, fixedUnits)} /{' '} + {colonDelimitedDuration(endTimeSeconds, fixedUnits)} + + ) : currentTimestamp ? ( + <> + {dayjs(currentTimestamp).tz('UTC').format('DD/MM/YYYY, HH:mm:ss')}{' '} + {timestampFormat === TimestampFormat.UTC + ? 'UTC' + : shortTimeZone(undefined, dayjs(currentTimestamp).toDate())} + + ) : ( + '--/--/----, 00:00:00' + )} + ) } diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.tsx index 7b7f1454d453d..b644a2fb987b0 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspector.tsx @@ -6,49 +6,50 @@ import { useRef } from 'react' import { PlayerInspectorControls } from './PlayerInspectorControls' import { PlayerInspectorList } from './PlayerInspectorList' -import { PlayerInspectorPreview } from './PlayerInspectorPreview' export function PlayerInspector({ - inspectorExpanded, - setInspectorExpanded, + isVerticallyStacked, + onClose, + toggleLayoutStacking, }: { - inspectorExpanded: boolean - setInspectorExpanded: (focus: boolean) => void + isVerticallyStacked: boolean + onClose: (focus: boolean) => void + toggleLayoutStacking?: () => void }): JSX.Element { const ref = useRef(null) + const logicKey = `player-inspector-${isVerticallyStacked ? 
'vertical' : 'horizontal'}` + const resizerLogicProps: ResizerLogicProps = { + logicKey, containerRef: ref, - logicKey: 'player-inspector', persistent: true, closeThreshold: 100, - placement: 'left', - onToggleClosed: (shouldBeClosed) => setInspectorExpanded(!shouldBeClosed), + placement: isVerticallyStacked ? 'top' : 'left', + onToggleClosed: (shouldBeClosed) => onClose(!shouldBeClosed), } const { desiredSize } = useValues(resizerLogic(resizerLogicProps)) return (
- - {inspectorExpanded ? ( - <> - setInspectorExpanded(false)} /> - - - ) : ( - setInspectorExpanded(true)} /> - )} + + onClose(false)} + isVerticallyStacked={isVerticallyStacked} + toggleLayoutStacking={toggleLayoutStacking} + /> +
) } diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx index ab2dddb3b5ad7..3b261aa18fb7a 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx @@ -1,8 +1,8 @@ -import { IconBug, IconClock, IconDashboard, IconInfo, IconPause, IconTerminal, IconX } from '@posthog/icons' +import { IconBottomPanel, IconBug, IconDashboard, IconInfo, IconSidePanel, IconTerminal, IconX } from '@posthog/icons' import { LemonButton, LemonCheckbox, LemonInput, LemonSelect, LemonTabs, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { FEATURE_FLAGS } from 'lib/constants' -import { IconPlayCircle, IconUnverifiedEvent } from 'lib/lemon-ui/icons' +import { IconUnverifiedEvent } from 'lib/lemon-ui/icons' import { Spinner } from 'lib/lemon-ui/Spinner/Spinner' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { capitalizeFirstLetter } from 'lib/utils' @@ -45,7 +45,6 @@ function TabButtons({ onChange={(tabId) => setTab(tabId)} tabs={tabs.map((tabId) => { const TabIcon = TabToIcon[tabId] - return { key: tabId, label: ( @@ -66,14 +65,21 @@ function TabButtons({ ) } -export function PlayerInspectorControls({ onClose }: { onClose: () => void }): JSX.Element { +export function PlayerInspectorControls({ + onClose, + isVerticallyStacked, + toggleLayoutStacking, +}: { + onClose: () => void + isVerticallyStacked: boolean + toggleLayoutStacking?: () => void +}): JSX.Element { const { logicProps } = useValues(sessionRecordingPlayerLogic) const inspectorLogic = playerInspectorLogic(logicProps) - const { tab, windowIdFilter, syncScrollingPaused, windowIds, showMatchingEventsFilter } = useValues(inspectorLogic) - const { setWindowIdFilter, setSyncScrollPaused, setTab } = useActions(inspectorLogic) - const { showOnlyMatching, timestampMode, miniFilters, syncScroll, searchQuery } = useValues(playerSettingsLogic) - const { setShowOnlyMatching, setTimestampMode, setMiniFilter, setSyncScroll, setSearchQuery } = - useActions(playerSettingsLogic) + const { tab, windowIdFilter, windowIds, showMatchingEventsFilter } = useValues(inspectorLogic) + const { setWindowIdFilter, setTab } = useActions(inspectorLogic) + const { showOnlyMatching, miniFilters, searchQuery } = useValues(playerSettingsLogic) + const { setShowOnlyMatching, setMiniFilter, setSearchQuery } = useActions(playerSettingsLogic) const mode = logicProps.mode ?? SessionRecordingPlayerMode.Standard @@ -102,16 +108,40 @@ export function PlayerInspectorControls({ onClose }: { onClose: () => void }): J } return ( -
+
-
+
+ {toggleLayoutStacking && ( + : } + onClick={toggleLayoutStacking} + /> + )} } onClick={onClose} />
-
+
+
+ setSearchQuery(e)} + placeholder="Search..." + type="search" + value={searchQuery} + fullWidth + className="min-w-60" + suffix={ + }> + + + } + /> +
+
void }): J ))}
-
-
-
- setSearchQuery(e)} - placeholder="Search..." - type="search" - value={searchQuery} - fullWidth - suffix={ - }> - - - } - /> -
- - {windowIds.length > 1 ? ( -
- setWindowIdFilter(val || null)} - options={[ - { - value: null, - label: 'All windows', - icon: , - }, - ...windowIds.map((windowId, index) => ({ - value: windowId, - label: `Window ${index + 1}`, - icon: , - })), - ]} - /> - - - -
- ) : null} + {windowIds.length > 1 ? ( +
+ setWindowIdFilter(val || null)} + options={[ + { + value: null, + label: 'All windows', + icon: , + }, + ...windowIds.map((windowId, index) => ({ + value: windowId, + label: `Window ${index + 1}`, + icon: , + })), + ]} + tooltip="Each recording window translates to a distinct browser tab or window." + />
+ ) : null} -
- setTimestampMode(timestampMode === 'absolute' ? 'relative' : 'absolute')} - tooltipPlacement="left" - tooltip={ - timestampMode === 'absolute' - ? 'Showing absolute timestamps' - : 'Showing timestamps relative to the start of the recording' - } - > - - {capitalizeFirstLetter(timestampMode)}{' '} - - - - - { - // If the user has syncScrolling on, but it is paused due to interacting with the Inspector, we want to resume it - if (syncScroll && syncScrollingPaused) { - setSyncScrollPaused(false) - } else { - // Otherwise we are just toggling the setting - setSyncScroll(!syncScroll) - } - }} - tooltipPlacement="left" - tooltip={ - syncScroll && syncScrollingPaused - ? 'Synced scrolling is paused - click to resume' - : 'Scroll the list in sync with the recording playback' - } - > - {syncScroll && syncScrollingPaused ? ( - - ) : ( - - )} - -
-
{showMatchingEventsFilter ? ( -
- +
+ + Only events matching filters void }): J - -
) : null}
diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.tsx index fbd58fa6c67bd..7ba6d4ab16af3 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.tsx @@ -15,7 +15,6 @@ import { userLogic } from 'scenes/userLogic' import { sidePanelSettingsLogic } from '~/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic' import { AvailableFeature, SessionRecordingPlayerTab } from '~/types' -import { playerSettingsLogic } from '../playerSettingsLogic' import { sessionRecordingPlayerLogic } from '../sessionRecordingPlayerLogic' import { PlayerInspectorListItem } from './components/PlayerInspectorListItem' import { playerInspectorLogic } from './playerInspectorLogic' @@ -114,10 +113,9 @@ export function PlayerInspectorList(): JSX.Element { const { logicProps, snapshotsLoaded, sessionPlayerMetaData } = useValues(sessionRecordingPlayerLogic) const inspectorLogic = playerInspectorLogic(logicProps) - const { items, tabsState, playbackIndicatorIndex, playbackIndicatorIndexStop, syncScrollingPaused, tab } = + const { items, tabsState, playbackIndicatorIndex, playbackIndicatorIndexStop, syncScrollPaused, tab } = useValues(inspectorLogic) const { setSyncScrollPaused } = useActions(inspectorLogic) - const { syncScroll } = useValues(playerSettingsLogic) const { currentTeam } = useValues(teamLogic) const { hasAvailableFeature } = useValues(userLogic) const performanceAvailable: boolean = hasAvailableFeature(AvailableFeature.RECORDINGS_PERFORMANCE) @@ -161,12 +159,12 @@ export function PlayerInspectorList(): JSX.Element { .getElementById('PlayerInspectorListMarker') ?.setAttribute('style', `transform: translateY(${offset}px)`) - if (!syncScrollingPaused && syncScroll) { + if (!syncScrollPaused) { scrolledByJsFlag.current = true listRef.current.scrollToRow(playbackIndicatorIndex) } } - }, [playbackIndicatorIndex, syncScroll]) + }, [playbackIndicatorIndex]) const renderRow: ListRowRenderer = ({ index, key, parent, style }) => { return ( @@ -226,6 +224,22 @@ export function PlayerInspectorList(): JSX.Element { /> )} + {syncScrollPaused && ( +
+ { + if (listRef.current) { + listRef.current.scrollToRow(playbackIndicatorIndex) + } + // Tricky: need to delay to make sure the row scroll has finished + setTimeout(() => setSyncScrollPaused(false), 100) + }} + > + Sync scrolling + +
+ )}
) : tabsState[tab] === 'loading' ? (
diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorPreview.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorPreview.tsx deleted file mode 100644 index 0af3441533745..0000000000000 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorPreview.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import { IconDashboard, IconSearch, IconTerminal } from '@posthog/icons' -import clsx from 'clsx' -import { useValues } from 'kea' -import { IconUnverifiedEvent } from 'lib/lemon-ui/icons' - -import { SessionRecordingPlayerTab } from '~/types' - -import { sessionRecordingPlayerLogic } from '../sessionRecordingPlayerLogic' -import { playerInspectorLogic } from './playerInspectorLogic' - -const TabToIcon = { - [SessionRecordingPlayerTab.ALL]: IconSearch, - [SessionRecordingPlayerTab.EVENTS]: IconUnverifiedEvent, - [SessionRecordingPlayerTab.CONSOLE]: IconTerminal, - [SessionRecordingPlayerTab.NETWORK]: IconDashboard, -} - -export function PlayerInspectorPreview(props: { onClick: () => void }): JSX.Element { - const { logicProps } = useValues(sessionRecordingPlayerLogic) - const inspectorLogic = playerInspectorLogic(logicProps) - - const { tab } = useValues(inspectorLogic) - - const tabs = [ - SessionRecordingPlayerTab.ALL, - SessionRecordingPlayerTab.EVENTS, - SessionRecordingPlayerTab.CONSOLE, - SessionRecordingPlayerTab.NETWORK, - ] - - return ( -
- {tabs.map((tabId) => { - const TabIcon = TabToIcon[tabId] - return ( -
- -
- ) - })} -
- ) -} diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/PlayerInspectorListItem.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/PlayerInspectorListItem.tsx index 48066a4cdca9d..41b2e2929657e 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/PlayerInspectorListItem.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/PlayerInspectorListItem.tsx @@ -13,7 +13,7 @@ import useResizeObserver from 'use-resize-observer' import { SessionRecordingPlayerTab } from '~/types' import { IconWindow } from '../../icons' -import { playerSettingsLogic } from '../../playerSettingsLogic' +import { playerSettingsLogic, TimestampFormat } from '../../playerSettingsLogic' import { sessionRecordingPlayerLogic } from '../../sessionRecordingPlayerLogic' import { InspectorListItem, playerInspectorLogic } from '../playerInspectorLogic' import { ItemConsoleLog } from './ItemConsoleLog' @@ -68,7 +68,7 @@ export function PlayerInspectorListItem({ }): JSX.Element { const { logicProps } = useValues(sessionRecordingPlayerLogic) const { tab, durationMs, end, expandedItems, windowIds } = useValues(playerInspectorLogic(logicProps)) - const { timestampMode } = useValues(playerSettingsLogic) + const { timestampFormat } = useValues(playerSettingsLogic) const { seekToTime } = useActions(sessionRecordingPlayerLogic) const { setItemExpanded } = useActions(playerInspectorLogic(logicProps)) @@ -201,8 +201,13 @@ export function PlayerInspectorListItem({ {!isExpanded ? ( seekToEvent()}> - {timestampMode === 'absolute' ? ( - + {timestampFormat != TimestampFormat.Relative ? ( + ) : ( <> {item.timeInRecording < 0 ? ( diff --git a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts index 91bdd827c65b6..5428d7d78d976 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts +++ b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts @@ -159,7 +159,7 @@ export const playerInspectorLogic = kea([ connect((props: PlayerInspectorLogicProps) => ({ actions: [ playerSettingsLogic, - ['setTab', 'setMiniFilter', 'setSyncScroll', 'setSearchQuery'], + ['setTab', 'setMiniFilter', 'setSearchQuery'], eventUsageLogic, ['reportRecordingInspectorItemExpanded'], sessionRecordingDataLogic(props), @@ -210,13 +210,12 @@ export const playerInspectorLogic = kea([ }, ], - syncScrollingPaused: [ + syncScrollPaused: [ false, { setTab: () => false, setSyncScrollPaused: (_, { paused }) => paused, setItemExpanded: () => true, - setSyncScroll: () => false, }, ], })), diff --git a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts index b88bec2df3f5e..65829d1257afd 100644 --- a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts +++ b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts @@ -16,8 +16,9 @@ export type SharedListMiniFilter = { } export enum TimestampFormat { - Absolute = 'absolute', Relative = 'relative', + UTC = 'utc', + Device = 'device', } const MiniFilters: SharedListMiniFilter[] = [ @@ -182,15 +183,12 @@ export const playerSettingsLogic = kea([ setHideViewedRecordings: (hideViewedRecordings: boolean) => ({ hideViewedRecordings }), setAutoplayDirection: (autoplayDirection: AutoplayDirection) => ({ autoplayDirection }), setTab: (tab: SessionRecordingPlayerTab) 
=> ({ tab }), - setTimestampMode: (mode: 'absolute' | 'relative') => ({ mode }), setMiniFilter: (key: string, enabled: boolean) => ({ key, enabled }), setSearchQuery: (search: string) => ({ search }), - setSyncScroll: (enabled: boolean) => ({ enabled }), setDurationTypeToShow: (type: DurationType) => ({ type }), setShowFilters: (showFilters: boolean) => ({ showFilters }), setPrefersAdvancedFilters: (prefersAdvancedFilters: boolean) => ({ prefersAdvancedFilters }), setQuickFilterProperties: (properties: string[]) => ({ properties }), - setShowRecordingListProperties: (enabled: boolean) => ({ enabled }), setTimestampFormat: (format: TimestampFormat) => ({ format }), }), reducers(() => ({ @@ -235,13 +233,6 @@ export const playerSettingsLogic = kea([ setSpeed: (_, { speed }) => speed, }, ], - showRecordingListProperties: [ - false, - { persist: true }, - { - setShowRecordingListProperties: (_, { enabled }) => enabled, - }, - ], timestampFormat: [ TimestampFormat.Relative as TimestampFormat, { persist: true }, @@ -287,14 +278,6 @@ export const playerSettingsLogic = kea([ }, ], - timestampMode: [ - 'relative' as 'absolute' | 'relative', - { persist: true }, - { - setTimestampMode: (_, { mode }) => mode, - }, - ], - selectedMiniFilters: [ ['all-automatic', 'console-all', 'events-all', 'performance-all'] as string[], { persist: true }, @@ -316,9 +299,8 @@ export const playerSettingsLogic = kea([ if (enabled) { if (selectedFilter.alone) { return false - } else { - return filterInTab.alone ? false : true } + return filterInTab.alone ? false : true } if (existingSelected !== key) { @@ -347,14 +329,6 @@ export const playerSettingsLogic = kea([ setSearchQuery: (_, { search }) => search || '', }, ], - - syncScroll: [ - true, - { persist: true }, - { - setSyncScroll: (_, { enabled }) => enabled, - }, - ], })), selectors({ diff --git a/frontend/src/scenes/session-recordings/player/playlist-popover/PlaylistPopover.tsx b/frontend/src/scenes/session-recordings/player/playlist-popover/PlaylistPopover.tsx index fe58d622bbf0d..bc504ee0dde4c 100644 --- a/frontend/src/scenes/session-recordings/player/playlist-popover/PlaylistPopover.tsx +++ b/frontend/src/scenes/session-recordings/player/playlist-popover/PlaylistPopover.tsx @@ -1,4 +1,4 @@ -import { IconPlus } from '@posthog/icons' +import { IconPin, IconPlus } from '@posthog/icons' import { LemonCheckbox, LemonDivider } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { Form } from 'kea-forms' @@ -119,7 +119,7 @@ export function PlaylistPopoverButton(props: LemonButtonProps): JSX.Element { } > } + icon={} active={showPlaylistPopover} onClick={() => setShowPlaylistPopover(!showPlaylistPopover)} sideIcon={null} diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index fee0907f3c9e4..b628c57847528 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -162,10 +162,9 @@ export const deduplicateSnapshots = (snapshots: RecordingSnapshot[] | null): Rec if (seenHashes.has(key)) { return false - } else { - seenHashes.add(key) - return true } + seenHashes.add(key) + return true }) .sort((a, b) => a.timestamp - b.timestamp) } @@ -544,7 +543,10 @@ export const sessionRecordingDataLogic = kea([ const newSnapshotsCount = snapshots.length if ((cache.lastSnapshotsCount ?? 
newSnapshotsCount) === newSnapshotsCount) { - cache.lastSnapshotsUnchangedCount = (cache.lastSnapshotsUnchangedCount ?? 0) + 1 + // if we're getting no results from realtime polling we can increment faster + // so that we stop polling sooner + const increment = newSnapshotsCount === 0 ? 2 : 1 + cache.lastSnapshotsUnchangedCount = (cache.lastSnapshotsUnchangedCount ?? 0) + increment } else { cache.lastSnapshotsUnchangedCount = 0 } @@ -613,6 +615,7 @@ export const sessionRecordingDataLogic = kea([ values.sessionPlayerData, generateRecordingReportDurations(cache), SessionRecordingUsageType.LOADED, + values.sessionPlayerMetaData, 0 ) // Reset cache now that final usage report has been sent @@ -627,6 +630,7 @@ export const sessionRecordingDataLogic = kea([ values.sessionPlayerData, durations, SessionRecordingUsageType.VIEWED, + values.sessionPlayerMetaData, 0 ) await breakpoint(IS_TEST_MODE ? 1 : 10000) @@ -634,6 +638,7 @@ export const sessionRecordingDataLogic = kea([ values.sessionPlayerData, durations, SessionRecordingUsageType.ANALYZED, + values.sessionPlayerMetaData, 10 ) }, @@ -648,7 +653,7 @@ export const sessionRecordingDataLogic = kea([ } }, })), - selectors({ + selectors(({ cache }) => ({ sessionPlayerData: [ (s, p) => [ s.sessionPlayerMetaData, @@ -687,7 +692,9 @@ export const sessionRecordingDataLogic = kea([ snapshotsLoading: [ (s) => [s.snapshotSourcesLoading, s.snapshotsForSourceLoading], (snapshotSourcesLoading, snapshotsForSourceLoading): boolean => { - return snapshotSourcesLoading || snapshotsForSourceLoading + // if there's a realTimePollingTimeoutID, don't signal that we're loading + // we don't want the UI to flip to "loading" every time we poll + return !cache.realTimePollingTimeoutID && (snapshotSourcesLoading || snapshotsForSourceLoading) }, ], snapshotsLoaded: [(s) => [s.snapshotSources], (snapshotSources): boolean => !!snapshotSources], @@ -864,7 +871,7 @@ export const sessionRecordingDataLogic = kea([ }) }, ], - }), + })), afterMount(({ cache }) => { resetTimingsCache(cache) }), diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index d1bbf94be6fa6..33da549809246 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -444,9 +444,8 @@ export const sessionRecordingPlayerLogic = kea( if (isSkippingInactivity) { const secondsToSkip = ((currentSegment?.endTimestamp ?? 0) - (currentTimestamp ?? 
0)) / 1000 return Math.max(50, secondsToSkip) - } else { - return speed } + return speed }, ], segmentForTimestamp: [ diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx index df7a1dd777a15..88e470cda6f64 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx @@ -1,28 +1,15 @@ -import { - IconBug, - IconCalendar, - IconCursorClick, - IconKeyboard, - IconMagicWand, - IconPinFilled, - IconTerminal, -} from '@posthog/icons' -import { LemonDivider, LemonDropdown, Link } from '@posthog/lemon-ui' +import { IconBug, IconCursorClick, IconKeyboard, IconPinFilled } from '@posthog/icons' import clsx from 'clsx' import { useValues } from 'kea' -import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { PropertyIcon } from 'lib/components/PropertyIcon' import { TZLabel } from 'lib/components/TZLabel' import { FEATURE_FLAGS } from 'lib/constants' -import { IconLink } from 'lib/lemon-ui/icons' -import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { colonDelimitedDuration } from 'lib/utils' import { countryCodeToName } from 'scenes/insights/views/WorldMap' import { DraggableToNotebook } from 'scenes/notebooks/AddToNotebook/DraggableToNotebook' -import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' import { asDisplay } from 'scenes/persons/person-utils' import { playerSettingsLogic } from 'scenes/session-recordings/player/playerSettingsLogic' import { urls } from 'scenes/urls' @@ -122,20 +109,17 @@ export interface PropertyIconsProps { export function PropertyIcons({ recordingProperties, loading, iconClassNames }: PropertyIconsProps): JSX.Element { return ( -
+
{loading ? ( -
- - -
+ ) : ( recordingProperties.map(({ property, value, label }) => ( -
+ - - {!value ? 'Not captured' : label || value} - -
+ )) )}
@@ -145,13 +129,11 @@ export function PropertyIcons({ recordingProperties, loading, iconClassNames }: function FirstURL(props: { startUrl: string | undefined }): JSX.Element { const firstPath = props.startUrl?.replace(/https?:\/\//g, '').split(/[?|#]/)[0] return ( -
- - - {firstPath} - + + + {firstPath} -
+ ) } @@ -187,196 +169,86 @@ export function SessionRecordingPreview({ isActive, onClick, pinned, - summariseFn, - sessionSummaryLoading, }: SessionRecordingPreviewProps): JSX.Element { const { orderBy } = useValues(sessionRecordingsPlaylistLogic) - const { durationTypeToShow, showRecordingListProperties } = useValues(playerSettingsLogic) - - const nodeLogic = useNotebookNode() - const inNotebook = !!nodeLogic - - const countryCode = recording.person?.properties['$geoip_country_code'] - const iconClassnames = 'text-base text-muted-alt' - - const innerContent = ( -
onClick?.()} - > -
-
-
-
- {countryCode && ( - - - - )} - {asDisplay(recording.person)} -
-
- -
- - -
- -
- + const { durationTypeToShow } = useValues(playerSettingsLogic) - {orderBy === 'console_error_count' ? ( - - ) : ( - - )} -
-
- -
- {!recording.viewed ? : null} - {pinned ? : null} -
-
- ) - - return ( - - {showRecordingListProperties && !inNotebook ? ( - - } - closeOnClickInside={false} - > - {innerContent} - - ) : ( - innerContent - )} - - ) -} - -function SessionRecordingPreviewPopover({ - recording, - summariseFn, - sessionSummaryLoading, -}: { - recording: SessionRecordingType - summariseFn?: (recording: SessionRecordingType) => void - sessionSummaryLoading?: boolean -}): JSX.Element { const { recordingPropertiesById, recordingPropertiesLoading } = useValues(sessionRecordingsListPropertiesLogic) const recordingProperties = recordingPropertiesById[recording.id] const loading = !recordingProperties && recordingPropertiesLoading const iconProperties = gatherIconProperties(recordingProperties, recording) - const iconClassNames = 'text-muted-alt mr-2 shrink-0' + const iconClassNames = 'text-muted-alt shrink-0' return ( -
-
-

Session data

- -
- - -
- - - {recording.start_url} - -
+ +
onClick?.()} + > +
+
+
+ {asDisplay(recording.person)} +
-
-
-
-
- - -
-

Activity

+
+
+ + +
+ + + + {recording.click_count} + + + + + + {recording.keypress_count} + + +
+
-
-
- - {recording.click_count} clicks -
-
- - {recording.keypress_count} key presses -
-
- - {recording.console_error_count} console errors + {orderBy === 'console_error_count' ? ( + + ) : ( + + )}
+ +
-
- - {summariseFn && ( - <> - -
- {recording.summary ? ( - {recording.summary} - ) : ( -
- } - onClick={(e) => { - e.preventDefault() - e.stopPropagation() - if (!recording.summary) { - summariseFn(recording) - } - }} - loading={sessionSummaryLoading} - > - Generate AI summary - -
- )} -
- - )} -
-
+
+ {!recording.viewed ? : null} + {pinned ? : null} +
+
+
) } diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss index ea0f5d0d4fc07..ed2f90dc11186 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss @@ -12,15 +12,19 @@ border-radius: var(--radius); .SessionRecordingsPlaylist__list { + position: relative; display: flex; flex-direction: column; flex-shrink: 0; - width: 25%; - min-width: 300px; - max-width: 350px; height: 100%; overflow: hidden; + &:not(.SessionRecordingsPlaylist__list--collapsed) { + width: 25%; + min-width: 305px; + max-width: 350px; + } + .text-link { color: var(--default); } diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx index 21eae025a4b42..025a50b38c45d 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx @@ -1,31 +1,29 @@ import './SessionRecordingsPlaylist.scss' -import { IconFilter, IconGear } from '@posthog/icons' +import { IconCollapse, IconFilter, IconGear } from '@posthog/icons' import { LemonButton, Link } from '@posthog/lemon-ui' import clsx from 'clsx' import { range } from 'd3' import { BindLogic, useActions, useValues } from 'kea' import { EmptyMessage } from 'lib/components/EmptyMessage/EmptyMessage' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' +import { Resizer } from 'lib/components/Resizer/Resizer' import { FEATURE_FLAGS } from 'lib/constants' -import { useKeyboardHotkeys } from 'lib/hooks/useKeyboardHotkeys' import { useResizeBreakpoints } from 'lib/hooks/useResizeObserver' -import { IconWithCount } from 'lib/lemon-ui/icons' +import { IconChevronRight, IconWithCount } from 'lib/lemon-ui/icons' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonTableLoader } from 'lib/lemon-ui/LemonTable/LemonTableLoader' import { Spinner } from 'lib/lemon-ui/Spinner' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import React, { useEffect, useRef, useState } from 'react' +import React, { useEffect, useRef } from 'react' import { DraggableToNotebook } from 'scenes/notebooks/AddToNotebook/DraggableToNotebook' import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' import { urls } from 'scenes/urls' -import { KeyboardShortcut } from '~/layout/navigation-3000/components/KeyboardShortcut' import { ReplayTabs, SessionRecordingType } from '~/types' import { SessionRecordingsFilters } from '../filters/SessionRecordingsFilters' -import { playerSettingsLogic } from '../player/playerSettingsLogic' import { SessionRecordingPlayer } from '../player/SessionRecordingPlayer' import { SessionRecordingPreview, SessionRecordingPreviewSkeleton } from './SessionRecordingPreview' import { @@ -118,8 +116,7 @@ function RecordingsLists(): JSX.Element { logicProps, showOtherRecordings, recordingsCount, - sessionSummaryLoading, - sessionBeingSummarized, + isRecordingsListCollapsed, } = useValues(sessionRecordingsPlaylistLogic) const { setSelectedRecordingId, @@ -130,32 +127,15 @@ function RecordingsLists(): JSX.Element { setShowSettings, resetFilters, toggleShowOtherRecordings, - summarizeSession, + toggleRecordingsListCollapsed, } = 
useActions(sessionRecordingsPlaylistLogic) - const { showRecordingListProperties } = useValues(playerSettingsLogic) - const { setShowRecordingListProperties } = useActions(playerSettingsLogic) const onRecordingClick = (recording: SessionRecordingType): void => { setSelectedRecordingId(recording.id) } - const onSummarizeClick = (recording: SessionRecordingType): void => { - summarizeSession(recording.id) - } - const lastScrollPositionRef = useRef(0) const contentRef = useRef(null) - const [isHovering, setIsHovering] = useState(null) - - useKeyboardHotkeys( - { - p: { - action: () => setShowRecordingListProperties(!showRecordingListProperties), - disabled: !isHovering, - }, - }, - [isHovering] - ) const handleScroll = (e: React.UIEvent): void => { // If we are scrolling down then check if we are at the bottom of the list @@ -184,11 +164,20 @@ function RecordingsLists(): JSX.Element { const notebookNode = useNotebookNode() - return ( -
+ return isRecordingsListCollapsed ? ( +
+ } onClick={() => toggleRecordingsListCollapsed()} /> +
+ ) : ( +
- + } + onClick={() => toggleRecordingsListCollapsed()} + /> + {!notebookNode ? ( Recordings @@ -258,11 +247,7 @@ function RecordingsLists(): JSX.Element { ) : null} {pinnedRecordings.length || otherRecordings.length ? ( -
    setIsHovering(true)} onMouseLeave={() => setIsHovering(false)}> -
    - Hint: Hover list and press to preview -
    - +
      {pinnedRecordings.length ? ( @@ -283,10 +268,6 @@ function RecordingsLists(): JSX.Element { onClick={() => onRecordingClick(rec)} isActive={activeSessionRecordingId === rec.id} pinned={false} - summariseFn={onSummarizeClick} - sessionSummaryLoading={ - sessionSummaryLoading && sessionBeingSummarized === rec.id - } />
)) @@ -355,9 +336,16 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr ...props, autoPlay: props.autoPlay ?? true, } + const playlistRecordingsListRef = useRef(null) const logic = sessionRecordingsPlaylistLogic(logicProps) - const { activeSessionRecording, activeSessionRecordingId, matchingEventsMatchType, pinnedRecordings } = - useValues(logic) + const { + activeSessionRecording, + activeSessionRecordingId, + matchingEventsMatchType, + pinnedRecordings, + isRecordingsListCollapsed, + } = useValues(logic) + const { toggleRecordingsListCollapsed } = useActions(logic) const { ref: playlistRef, size } = useResizeBreakpoints({ 0: 'small', @@ -377,8 +365,22 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr 'SessionRecordingsPlaylist--embedded': notebookNode, })} > -
+
+ toggleRecordingsListCollapsed(shouldBeClosed)} + onDoubleClick={() => toggleRecordingsListCollapsed()} + />
{activeSessionRecordingId ? ( diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts index 8a37208f27945..0b032a2f5f1e0 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts @@ -12,6 +12,7 @@ const mockSessons: SessionRecordingType[] = [ end_time: '2021-01-01T01:00:00Z', viewed: false, recording_duration: 0, + snapshot_source: 'web', }, { id: 's2', @@ -19,6 +20,7 @@ const mockSessons: SessionRecordingType[] = [ end_time: '2021-01-01T03:00:00Z', viewed: false, recording_duration: 0, + snapshot_source: 'mobile', }, { @@ -27,6 +29,7 @@ const mockSessons: SessionRecordingType[] = [ end_time: '2021-01-01T04:00:00Z', viewed: false, recording_duration: 0, + snapshot_source: 'unknown', }, ] diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts index 5a12f9018eba7..41792b8d7678d 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts @@ -161,6 +161,7 @@ export const sessionRecordingsPlaylistLogic = kea ({ show }), + toggleRecordingsListCollapsed: (override?: boolean) => ({ override }), }), propsChanged(({ actions, props }, oldProps) => { if (!objectsEqual(props.advancedFilters, oldProps.advancedFilters)) { @@ -388,9 +389,8 @@ export const sessionRecordingsPlaylistLogic = kea { @@ -401,9 +401,8 @@ export const sessionRecordingsPlaylistLogic = kea false, }, ], + isRecordingsListCollapsed: [ + false, + { persist: true }, + { + toggleRecordingsListCollapsed: (state, { override }) => override ?? 
!state, + }, + ], })), listeners(({ props, actions, values }) => ({ loadAllRecordings: () => { @@ -506,23 +512,21 @@ export const sessionRecordingsPlaylistLogic = kea, }, + { + id: 'heatmaps', + title: 'Heatmaps', + component: , + flag: 'TOOLBAR_HEATMAPS', + }, { id: 'exception-autocapture', title: 'Exception autocapture', diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/__snapshots__/verifiedDomainsLogic.test.ts.snap b/frontend/src/scenes/settings/organization/VerifiedDomains/__snapshots__/verifiedDomainsLogic.test.ts.snap index daaa4909e71ff..af6b859c4c284 100644 --- a/frontend/src/scenes/settings/organization/VerifiedDomains/__snapshots__/verifiedDomainsLogic.test.ts.snap +++ b/frontend/src/scenes/settings/organization/VerifiedDomains/__snapshots__/verifiedDomainsLogic.test.ts.snap @@ -58,6 +58,7 @@ exports[`verifiedDomainsLogic values has proper defaults 1`] = ` "effective_membership_level": 8, "groups_on_events_querying_enabled": true, "has_group_types": true, + "heatmaps_opt_in": true, "id": 997, "ingested_event": true, "is_demo": false, diff --git a/frontend/src/scenes/settings/project/HeatmapsSettings.tsx b/frontend/src/scenes/settings/project/HeatmapsSettings.tsx new file mode 100644 index 0000000000000..c01af088749cc --- /dev/null +++ b/frontend/src/scenes/settings/project/HeatmapsSettings.tsx @@ -0,0 +1,40 @@ +import { LemonSwitch } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { eventUsageLogic } from 'lib/utils/eventUsageLogic' +import { teamLogic } from 'scenes/teamLogic' +import { userLogic } from 'scenes/userLogic' + +export function HeatmapsSettings(): JSX.Element { + const { userLoading } = useValues(userLogic) + const { currentTeam } = useValues(teamLogic) + const { updateCurrentTeam } = useActions(teamLogic) + const { reportHeatmapsToggled } = useActions(eventUsageLogic) + + return ( + <> +

+ If you use our JavaScript libraries, we can capture general clicks, mouse movements, and scrolling to + create heatmaps. No additional events are created, and you can disable this at any time. +
+ Whereas Autocapture creates events whenever it can uniquely identify an interacted element, heatmaps are + generated based on overall mouse or touch positions and are useful for understanding more general user + behavior across your site. +

+
+ { + updateCurrentTeam({ + heatmaps_opt_in: checked, + }) + reportHeatmapsToggled(checked) + }} + checked={!!currentTeam?.heatmaps_opt_in} + disabled={userLoading} + label="Enable heatmaps for web" + bordered + /> +
+ + ) +} diff --git a/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts b/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts index 0616394c5ef24..e6ea096cdf263 100644 --- a/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts +++ b/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts @@ -31,9 +31,8 @@ export const filterTestAccountsDefaultsLogic = kea([ const response = await api.create('api/user/test_slack_webhook', { webhook }) if (response.success) { return webhook - } else { - actions.testWebhookFailure(response.error) } + actions.testWebhookFailure(response.error) } catch (error: any) { actions.testWebhookFailure(error.message) } diff --git a/frontend/src/scenes/settings/types.ts b/frontend/src/scenes/settings/types.ts index b90b93a220be7..e55a475096aef 100644 --- a/frontend/src/scenes/settings/types.ts +++ b/frontend/src/scenes/settings/types.ts @@ -75,6 +75,7 @@ export type SettingId = | 'optout' | 'theme' | 'replay-ai-config' + | 'heatmaps' export type Setting = { id: SettingId diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx index 95c2ca6df47cf..cb04b053d5a6d 100644 --- a/frontend/src/scenes/surveys/SurveyView.tsx +++ b/frontend/src/scenes/surveys/SurveyView.tsx @@ -34,8 +34,16 @@ import { export function SurveyView({ id }: { id: string }): JSX.Element { const { survey, surveyLoading, selectedQuestion, targetingFlagFilters } = useValues(surveyLogic) - const { editingSurvey, updateSurvey, launchSurvey, stopSurvey, archiveSurvey, resumeSurvey, setSelectedQuestion } = - useActions(surveyLogic) + const { + editingSurvey, + updateSurvey, + launchSurvey, + stopSurvey, + archiveSurvey, + resumeSurvey, + setSelectedQuestion, + duplicateSurvey, + } = useActions(surveyLogic) const { deleteSurvey } = useActions(surveysLogic) const [tabKey, setTabKey] = useState(survey.start_date ? 'results' : 'overview') @@ -43,6 +51,8 @@ export function SurveyView({ id }: { id: string }): JSX.Element { useEffect(() => { if (survey.start_date) { setTabKey('results') + } else { + setTabKey('overview') } }, [survey.start_date]) @@ -66,10 +76,21 @@ export function SurveyView({ id }: { id: string }): JSX.Element { > Edit + + Duplicate + {survey.end_date && !survey.archived && ( - archiveSurvey()} fullWidth> + archiveSurvey()} + fullWidth + > Archive )} @@ -101,7 +122,12 @@ export function SurveyView({ id }: { id: string }): JSX.Element { ) : ( !survey.archived && ( - stopSurvey()}> + stopSurvey()} + > Stop ) diff --git a/frontend/src/scenes/surveys/Surveys.tsx b/frontend/src/scenes/surveys/Surveys.tsx index cc90337a44947..362f0e3907e36 100644 --- a/frontend/src/scenes/surveys/Surveys.tsx +++ b/frontend/src/scenes/surveys/Surveys.tsx @@ -62,7 +62,7 @@ export function Surveys(): JSX.Element { const { user } = useValues(userLogic) - const [tab, setSurveyTab] = useState(SurveysTabs.Active) + const [tab, setSurveyTab] = useState(filters.archived ? 
SurveysTabs.Archived : SurveysTabs.Active) const shouldShowEmptyState = !surveysLoading && surveys.length === 0 return ( @@ -368,7 +368,7 @@ export function StatusTag({ survey }: { survey: Survey }): JSX.Element { } as Record const status = getSurveyStatus(survey) return ( - + {status.toUpperCase()} ) diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index 557f19289d976..cbe549fff5a6c 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -87,6 +87,19 @@ export interface QuestionResultsReady { const getResponseField = (i: number): string => (i === 0 ? '$survey_response' : `$survey_response_${i}`) +function duplicateExistingSurvey(survey: Survey | NewSurvey): Partial { + return { + ...survey, + id: NEW_SURVEY.id, + name: `${survey.name} (copy)`, + archived: false, + start_date: null, + end_date: null, + targeting_flag_filters: survey.targeting_flag?.filters ?? NEW_SURVEY.targeting_flag_filters, + linked_flag_id: survey.linked_flag?.id ?? NEW_SURVEY.linked_flag_id, + } +} + export const surveyLogic = kea([ props({} as SurveyLogicProps), key(({ id }) => id), @@ -171,6 +184,26 @@ export const surveyLogic = kea([ return await api.surveys.update(props.id, { end_date: null }) }, }, + duplicatedSurvey: { + duplicateSurvey: async () => { + const { survey } = values + const payload = duplicateExistingSurvey(survey) + const createdSurvey = await api.surveys.create(sanitizeQuestions(payload)) + + lemonToast.success('Survey duplicated.', { + toastId: `survey-duplicated-${createdSurvey.id}`, + button: { + label: 'View Survey', + action: () => { + router.actions.push(urls.survey(createdSurvey.id)) + }, + }, + }) + + actions.reportSurveyCreated(createdSurvey, true) + return survey + }, + }, surveyUserStats: { loadSurveyUserStats: async (): Promise => { const { survey } = values @@ -413,6 +446,9 @@ export const surveyLogic = kea([ actions.reportSurveyEdited(survey) actions.loadSurveys() }, + duplicateSurveySuccess: () => { + actions.loadSurveys() + }, launchSurveySuccess: ({ survey }) => { lemonToast.success(<>Survey {survey.name} launched) actions.loadSurveys() diff --git a/frontend/src/scenes/teamActivityDescriber.tsx b/frontend/src/scenes/teamActivityDescriber.tsx index a4b2da80d5191..25ef21457a4e4 100644 --- a/frontend/src/scenes/teamActivityDescriber.tsx +++ b/frontend/src/scenes/teamActivityDescriber.tsx @@ -150,7 +150,10 @@ const teamActionsMapping: Record< autocapture_exceptions_errors_to_ignore: () => null, autocapture_exceptions_opt_in: () => null, autocapture_opt_out(change: ActivityChange | undefined): ChangeMapping | null { - return { description: [<>{change?.after ? 'enabled' : 'disabled'} autocapture] } + return { description: [<>{change?.after ? 'opted in to' : 'opted out of'} autocapture] } + }, + heatmaps_opt_in(change: ActivityChange | undefined): ChangeMapping | null { + return { description: [<>{change?.after ? 'enabled' : 'disabled'} heatmaps] } }, // and.... many more name(change: ActivityChange | undefined): ChangeMapping | null { diff --git a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx index 0fd8a8b8cc931..9ee01c3e8c988 100644 --- a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx +++ b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx @@ -201,9 +201,9 @@ export function PersonsModal({
{errorObject ? ( validationError ? ( - + ) : ( - + ) ) : actors && actors.length > 0 ? ( <> diff --git a/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts b/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts index dbe81266fb7db..26bac72e45cab 100644 --- a/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts +++ b/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts @@ -136,25 +136,24 @@ export const personsModalLogic = kea([ group[field] = result[additionalFieldIndices[index]] }) return group - } else { - const person: PersonActorType = { - type: 'person', - id: result[0].id, - uuid: result[0].id, - distinct_ids: result[0].distinct_ids, - is_identified: result[0].is_identified, - properties: result[0].properties, - created_at: result[0].created_at, - matched_recordings: [], - value_at_data_point: null, - } + } + const person: PersonActorType = { + type: 'person', + id: result[0].id, + uuid: result[0].id, + distinct_ids: result[0].distinct_ids, + is_identified: result[0].is_identified, + properties: result[0].properties, + created_at: result[0].created_at, + matched_recordings: [], + value_at_data_point: null, + } - Object.keys(props.additionalSelect || {}).forEach((field, index) => { - person[field] = result[additionalFieldIndices[index]] - }) + Object.keys(props.additionalSelect || {}).forEach((field, index) => { + person[field] = result[additionalFieldIndices[index]] + }) - return person - } + return person }), }, ], diff --git a/frontend/src/scenes/trends/trendsDataLogic.ts b/frontend/src/scenes/trends/trendsDataLogic.ts index 49e4a05f022cb..cd85bbcb204a1 100644 --- a/frontend/src/scenes/trends/trendsDataLogic.ts +++ b/frontend/src/scenes/trends/trendsDataLogic.ts @@ -91,9 +91,8 @@ export const trendsDataLogic = kea([ (insightData: TrendAPIResponse | null): TrendResult[] => { if (insightData?.result && Array.isArray(insightData.result)) { return insightData.result - } else { - return [] } + return [] }, ], @@ -175,9 +174,8 @@ export const trendsDataLogic = kea([ if (startIndex !== undefined && startIndex !== -1) { return startIndex - results[0].days.length - } else { - return 0 } + return 0 }, ], diff --git a/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx b/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx index 281a3726984ad..a8dc5b7a80928 100644 --- a/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx +++ b/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx @@ -6,17 +6,11 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { useEffect, useState } from 'react' import { insightLogic } from 'scenes/insights/insightLogic' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -import { - BREAKDOWN_NULL_DISPLAY, - BREAKDOWN_OTHER_DISPLAY, - formatBreakdownLabel, - isNullBreakdown, - isOtherBreakdown, -} from 'scenes/insights/utils' +import { formatBreakdownLabel } from 'scenes/insights/utils' +import { datasetToActorsQuery } from 'scenes/trends/viz/datasetToActorsQuery' import { cohortsModel } from '~/models/cohortsModel' import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel' -import { NodeKind } from '~/queries/schema' import { isInsightVizNode, isTrendsQuery } from '~/queries/utils' import { ChartParams, GraphType } from '~/types' @@ -47,17 +41,12 @@ export function ActionsHorizontalBar({ showPersonsModal = true }: ChartParams): setData([ { - labels: _data.map((item) => - isOtherBreakdown(item.label) - ? BREAKDOWN_OTHER_DISPLAY - : isNullBreakdown(item.label) - ? 
BREAKDOWN_NULL_DISPLAY - : item.label - ), + labels: _data.map((item) => item.label), data: _data.map((item) => item.aggregated_value), actions: _data.map((item) => item.action), personsValues: _data.map((item) => item.persons), - breakdownValues: _data.map((item) => { + breakdownValues: _data.map((item) => item.breakdown_value), + breakdownLabels: _data.map((item) => { return formatBreakdownLabel( cohorts, formatPropertyValueForDisplay, @@ -121,10 +110,7 @@ export function ActionsHorizontalBar({ showPersonsModal = true }: ChartParams): if (isTrendsQueryWithFeatureFlagOn) { openPersonsModal({ title: label || '', - query: { - kind: NodeKind.InsightActorsQuery, - source: query.source, - }, + query: datasetToActorsQuery({ dataset, query: query.source, index }), additionalSelect: { value_at_data_point: 'event_count', matched_recordings: 'matched_recordings', diff --git a/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx b/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx index c6c348d5bedbf..bcee7d21580a8 100644 --- a/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx +++ b/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx @@ -6,10 +6,10 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { capitalizeFirstLetter, isMultiSeriesFormula } from 'lib/utils' import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { insightLogic } from 'scenes/insights/insightLogic' +import { datasetToActorsQuery } from 'scenes/trends/viz/datasetToActorsQuery' import { cohortsModel } from '~/models/cohortsModel' import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel' -import { NodeKind } from '~/queries/schema' import { isInsightVizNode, isLifecycleQuery, isStickinessQuery, isTrendsQuery } from '~/queries/utils' import { ChartDisplayType, ChartParams, GraphType } from '~/types' @@ -143,15 +143,7 @@ export function ActionsLineGraph({ ) { openPersonsModal({ title, - query: { - kind: NodeKind.InsightActorsQuery, - source: query.source, - day, - status: dataset.status, - series: dataset.action?.order ?? 0, - breakdown: dataset.breakdown_value, - compare: dataset.compare_label, - }, + query: datasetToActorsQuery({ dataset, query: query.source, day }), additionalSelect: isLifecycle ? 
{} : { diff --git a/frontend/src/scenes/trends/viz/ActionsPie.tsx b/frontend/src/scenes/trends/viz/ActionsPie.tsx index 36ff7166bc574..480939739a228 100644 --- a/frontend/src/scenes/trends/viz/ActionsPie.tsx +++ b/frontend/src/scenes/trends/viz/ActionsPie.tsx @@ -12,10 +12,10 @@ import { insightLogic } from 'scenes/insights/insightLogic' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import { formatBreakdownLabel } from 'scenes/insights/utils' import { PieChart } from 'scenes/insights/views/LineGraph/PieChart' +import { datasetToActorsQuery } from 'scenes/trends/viz/datasetToActorsQuery' import { cohortsModel } from '~/models/cohortsModel' import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel' -import { NodeKind } from '~/queries/schema' import { isInsightVizNode, isTrendsQuery } from '~/queries/utils' import { ChartDisplayType, ChartParams, GraphDataset, GraphType } from '~/types' @@ -74,7 +74,8 @@ export function ActionsPie({ labels: _data.map((item) => item.label), data: _data.map((item) => item.aggregated_value), actions: _data.map((item) => item.action), - breakdownValues: _data.map((item) => { + breakdownValues: _data.map((item) => item.breakdown_value), + breakdownLabels: _data.map((item) => { return formatBreakdownLabel( cohorts, formatPropertyValueForDisplay, @@ -84,6 +85,7 @@ export function ActionsPie({ false ) }), + compareLabels: _data.map((item) => item.compare_label), personsValues: _data.map((item) => item.persons), days, backgroundColor: colorList, @@ -114,10 +116,7 @@ export function ActionsPie({ if (isTrendsQueryWithFeatureFlagOn) { openPersonsModal({ title: label || '', - query: { - kind: NodeKind.InsightActorsQuery, - source: query.source, - }, + query: datasetToActorsQuery({ dataset, query: query.source, index }), additionalSelect: { value_at_data_point: 'event_count', matched_recordings: 'matched_recordings', diff --git a/frontend/src/scenes/trends/viz/datasetToActorsQuery.ts b/frontend/src/scenes/trends/viz/datasetToActorsQuery.ts new file mode 100644 index 0000000000000..eea722bca5222 --- /dev/null +++ b/frontend/src/scenes/trends/viz/datasetToActorsQuery.ts @@ -0,0 +1,27 @@ +import { InsightActorsQuery, NodeKind } from '~/queries/schema' +import { GraphDataset } from '~/types' + +interface DatasetToActorsQueryProps { + dataset: GraphDataset + query: InsightActorsQuery['source'] + day?: string | number + index?: number +} + +export function datasetToActorsQuery({ query, dataset, day, index }: DatasetToActorsQueryProps): InsightActorsQuery { + const breakdown = + dataset.breakdown_value ?? + (index !== undefined && Array.isArray(dataset.breakdownValues) ? dataset.breakdownValues[index] : undefined) + const compare = + dataset.compare_label ?? + (index !== undefined && Array.isArray(dataset.compareLabels) ? dataset.compareLabels[index] : undefined) + return { + kind: NodeKind.InsightActorsQuery, + source: query, + day, + status: dataset.status, + series: dataset.action?.order ?? 0, + breakdown, + compare, + } +} diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 38cf07aa323d6..31d9426582545 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -116,7 +116,8 @@ export const urls = { personByUUID: (uuid: string, encode: boolean = true): string => encode ? 
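Editor's note: the new datasetToActorsQuery helper above is what ActionsLineGraph, ActionsHorizontalBar and ActionsPie now share to turn the clicked chart dataset into an InsightActorsQuery, falling back to breakdownValues/compareLabels by index when the dataset-level breakdown_value/compare_label fields are absent. A minimal sketch of a call site; the source query and dataset literals below are invented for illustration and are not part of this PR:

    import { datasetToActorsQuery } from 'scenes/trends/viz/datasetToActorsQuery'
    import { NodeKind, TrendsQuery } from '~/queries/schema'
    import { GraphDataset } from '~/types'

    // Invented source query and clicked dataset, for illustration only
    const source: TrendsQuery = { kind: NodeKind.TrendsQuery, series: [] }
    const dataset = {
        breakdownValues: ['Chrome', 'Firefox'],
        compareLabels: ['current', 'previous'],
    } as unknown as GraphDataset

    // index identifies the clicked bar/slice; breakdown and compare are looked up from it
    const actorsQuery = datasetToActorsQuery({ dataset, query: source, index: 1 })
    // actorsQuery.kind === NodeKind.InsightActorsQuery
    // actorsQuery.breakdown === 'Firefox', actorsQuery.compare === 'previous', actorsQuery.series === 0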
`/persons/${encodeURIComponent(uuid)}` : `/persons/${uuid}`, persons: (): string => '/persons', - // TODO: Default to the landing page, once it's ready + pipelineNodeNew: (stage: PipelineStage | ':stage', pluginIdOrBatchExportDestination?: string | number): string => + `/pipeline/new/${stage}${pluginIdOrBatchExportDestination ? `/${pluginIdOrBatchExportDestination}` : ''}`, pipeline: (tab?: PipelineTab | ':tab'): string => `/pipeline/${tab ? tab : PipelineTab.Overview}`, /** @param id 'new' for new, uuid for batch exports and numbers for plugins */ pipelineNode: ( diff --git a/frontend/src/scenes/web-analytics/WebPropertyFilters.tsx b/frontend/src/scenes/web-analytics/WebPropertyFilters.tsx index 0f65abec6f0c9..5237a78e3cb8b 100644 --- a/frontend/src/scenes/web-analytics/WebPropertyFilters.tsx +++ b/frontend/src/scenes/web-analytics/WebPropertyFilters.tsx @@ -1,6 +1,9 @@ +import { useValues } from 'kea' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' -import { isEventPropertyOrPersonPropertyFilter } from 'lib/components/PropertyFilters/utils' +import { isEventPersonOrSessionPropertyFilter } from 'lib/components/PropertyFilters/utils' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { WebAnalyticsPropertyFilters } from '~/queries/schema' @@ -11,42 +14,23 @@ export const WebPropertyFilters = ({ webAnalyticsFilters: WebAnalyticsPropertyFilters setWebAnalyticsFilters: (filters: WebAnalyticsPropertyFilters) => void }): JSX.Element => { + const { featureFlags } = useValues(featureFlagLogic) + return ( setWebAnalyticsFilters(filters.filter(isEventPropertyOrPersonPropertyFilter))} + taxonomicGroupTypes={ + featureFlags[FEATURE_FLAGS.SESSION_TABLE_PROPERTY_FILTERS] + ? 
[ + TaxonomicFilterGroupType.SessionProperties, + TaxonomicFilterGroupType.EventProperties, + TaxonomicFilterGroupType.PersonProperties, + ] + : [TaxonomicFilterGroupType.EventProperties, TaxonomicFilterGroupType.PersonProperties] + } + onChange={(filters) => setWebAnalyticsFilters(filters.filter(isEventPersonOrSessionPropertyFilter))} propertyFilters={webAnalyticsFilters} pageKey="web-analytics" eventNames={['$pageview', '$pageleave', '$autocapture']} - propertyAllowList={{ - [TaxonomicFilterGroupType.EventProperties]: [ - '$pathname', - '$host', - '$browser', - '$os', - '$device_type', - '$geoip_country_code', - '$geoip_subdivision_1_code', - '$geoip_city_name', - // re-enable after https://github.com/PostHog/posthog-js/pull/875 is merged - // '$client_session_initial_pathname', - // '$client_session_initial_referring_host', - // '$client_session_initial_utm_source', - // '$client_session_initial_utm_campaign', - // '$client_session_initial_utm_medium', - // '$client_session_initial_utm_content', - // '$client_session_initial_utm_term', - ], - [TaxonomicFilterGroupType.PersonProperties]: [ - '$initial_pathname', - '$initial_referring_domain', - '$initial_utm_source', - '$initial_utm_campaign', - '$initial_utm_medium', - '$initial_utm_content', - '$initial_utm_term', - ], - }} /> ) } diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts index f8056c88ea228..1b386ac2e2d0c 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts @@ -271,17 +271,16 @@ export const webAnalyticsLogic = kea([ } as const }) .filter(isNotNil) - } else { - // no matching property, so add one - const newFilter: WebAnalyticsPropertyFilter = { - type, - key, - value, - operator: PropertyOperator.Exact, - } - - return [...oldPropertyFilters, newFilter] } + // no matching property, so add one + const newFilter: WebAnalyticsPropertyFilter = { + type, + key, + value, + operator: PropertyOperator.Exact, + } + + return [...oldPropertyFilters, newFilter] }, setStateFromUrl: (_, { state }) => state.filters, }, @@ -1012,9 +1011,8 @@ export const webAnalyticsLogic = kea([ limit: 50, }, } - } else { - return query } + return query } if (tabId) { @@ -1040,22 +1038,21 @@ export const webAnalyticsLogic = kea([ query: extendQuery(tab.query), canOpenInsight: tab.canOpenInsight, } - } else { - if ('tabs' in tile) { - throw new Error('Developer Error, tabId not provided for tab tile') - } - return { - tileId, - title: tile.title, - showIntervalSelect: tile.showIntervalSelect, - showPathCleaningControls: tile.showPathCleaningControls, - insightProps: { - dashboardItemId: getDashboardItemId(tileId, undefined, true), - loadPriority: 0, - dataNodeCollectionId: WEB_ANALYTICS_DATA_COLLECTION_NODE_ID, - }, - query: extendQuery(tile.query), - } + } + if ('tabs' in tile) { + throw new Error('Developer Error, tabId not provided for tab tile') + } + return { + tileId, + title: tile.title, + showIntervalSelect: tile.showIntervalSelect, + showPathCleaningControls: tile.showPathCleaningControls, + insightProps: { + dashboardItemId: getDashboardItemId(tileId, undefined, true), + loadPriority: 0, + dataNodeCollectionId: WEB_ANALYTICS_DATA_COLLECTION_NODE_ID, + }, + query: extendQuery(tile.query), } }, ], @@ -1115,16 +1112,15 @@ export const webAnalyticsLogic = kea([ null, formatQueryForNewInsight(tab.query) ) - } else { - if ('tabs' in tile) { - throw new Error('Developer Error, tabId not provided for tab 
tile') - } - return urls.insightNew( - { properties: webAnalyticsFilters, date_from: dateFrom, date_to: dateTo }, - null, - formatQueryForNewInsight(tile.query) - ) } + if ('tabs' in tile) { + throw new Error('Developer Error, tabId not provided for tab tile') + } + return urls.insightNew( + { properties: webAnalyticsFilters, date_from: dateFrom, date_to: dateTo }, + null, + formatQueryForNewInsight(tile.query) + ) } }, ], diff --git a/frontend/src/test/mocks.ts b/frontend/src/test/mocks.ts index dcd926d4f1e7a..78cba1619bfb8 100644 --- a/frontend/src/test/mocks.ts +++ b/frontend/src/test/mocks.ts @@ -51,7 +51,7 @@ export const mockEventDefinitions: EventDefinition[] = [ 'test event', '$click', '$autocapture', - 'search', + 'search term', 'other event', ...Array(150), ].map((name, index) => ({ @@ -89,6 +89,15 @@ export const mockEventPropertyDefinitions: PropertyDefinition[] = [ is_seen_on_filtered_events: (name || '').includes('$'), })) +export const mockSessionPropertyDefinitions: PropertyDefinition[] = ['$session_duration', '$initial_utm_source'].map( + (name) => ({ + ...mockEventPropertyDefinition, + id: name, + name: name, + description: `${name} is the best!`, + }) +) + export const mockPersonProperty = { name: '$browser_version', count: 1, diff --git a/frontend/src/toolbar/actions/ActionsEditingToolbarMenu.tsx b/frontend/src/toolbar/actions/ActionsEditingToolbarMenu.tsx index 46120554d4600..b60b0a381d785 100644 --- a/frontend/src/toolbar/actions/ActionsEditingToolbarMenu.tsx +++ b/frontend/src/toolbar/actions/ActionsEditingToolbarMenu.tsx @@ -1,4 +1,4 @@ -import { IconPencil, IconPlus, IconSearch, IconTrash, IconX } from '@posthog/icons' +import { IconPencil, IconPlus, IconSearch, IconTrash } from '@posthog/icons' import { LemonDivider, LemonTag } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { Field, Form, Group } from 'kea-forms' @@ -9,7 +9,7 @@ import { actionsTabLogic } from '~/toolbar/actions/actionsTabLogic' import { SelectorEditingModal } from '~/toolbar/actions/SelectorEditingModal' import { StepField } from '~/toolbar/actions/StepField' import { ToolbarMenu } from '~/toolbar/bar/ToolbarMenu' -import { posthog } from '~/toolbar/posthog' +import { toolbarPosthogJS } from '~/toolbar/toolbarPosthogJS' export const ActionsEditingToolbarMenu = (): JSX.Element => { const { @@ -38,7 +38,7 @@ export const ActionsEditingToolbarMenu = (): JSX.Element => { startingSelector={editingSelectorValue} onChange={(selector) => { if (selector && editingSelector !== null) { - posthog.capture('toolbar_manual_selector_applied', { + toolbarPosthogJS.capture('toolbar_manual_selector_applied', { chosenSelector: selector, }) setElementSelector(selector, editingSelector) @@ -52,7 +52,7 @@ export const ActionsEditingToolbarMenu = (): JSX.Element => { enableFormOnSubmit className="flex flex-col overflow-hidden flex-1" > - +

{selectedActionId === 'new' ? 'New ' : 'Edit '} action @@ -124,9 +124,12 @@ export const ActionsEditingToolbarMenu = (): JSX.Element => { icon={} onClick={(e) => { e.stopPropagation() - posthog.capture('toolbar_manual_selector_modal_opened', { - selector: step?.selector, - }) + toolbarPosthogJS.capture( + 'toolbar_manual_selector_modal_opened', + { + selector: step?.selector, + } + ) editSelectorWithIndex(index) }} > @@ -198,22 +201,25 @@ export const ActionsEditingToolbarMenu = (): JSX.Element => { - selectAction(null)} - sideIcon={} - > - Cancel - + {selectedActionId !== 'new' ? ( + } + size="small" + > + Delete + + ) : null} - + selectAction(null)}> + Cancel + + {selectedActionId === 'new' ? 'Create ' : 'Save '} action - {selectedActionId !== 'new' ? ( - } /> - ) : null} diff --git a/frontend/src/toolbar/actions/ActionsListView.tsx b/frontend/src/toolbar/actions/ActionsListView.tsx index fb999ed600949..db567ac44a03e 100644 --- a/frontend/src/toolbar/actions/ActionsListView.tsx +++ b/frontend/src/toolbar/actions/ActionsListView.tsx @@ -23,7 +23,7 @@ export function ActionsListView({ actions }: ActionsListViewProps): JSX.Element subtle key={action.id} onClick={() => selectAction(action.id || null)} - className="font-medium my-1" + className="font-medium my-1 w-full" > {index + 1}. diff --git a/frontend/src/toolbar/actions/actionsTabLogic.tsx b/frontend/src/toolbar/actions/actionsTabLogic.tsx index dc49fa26042e0..7988a5384b134 100644 --- a/frontend/src/toolbar/actions/actionsTabLogic.tsx +++ b/frontend/src/toolbar/actions/actionsTabLogic.tsx @@ -7,8 +7,8 @@ import { urls } from 'scenes/urls' import { actionsLogic } from '~/toolbar/actions/actionsLogic' import { toolbarLogic } from '~/toolbar/bar/toolbarLogic' -import { posthog } from '~/toolbar/posthog' import { toolbarConfigLogic } from '~/toolbar/toolbarConfigLogic' +import { toolbarPosthogJS } from '~/toolbar/toolbarPosthogJS' import { ActionDraftType, ActionForm } from '~/toolbar/types' import { actionStepToActionStepFormItem, elementToActionStep, stepToDatabaseFormat } from '~/toolbar/utils' import { ActionType, ElementType } from '~/types' @@ -292,11 +292,11 @@ export const actionsTabLogic = kea([ } }, showButtonActions: () => { - posthog.capture('toolbar mode triggered', { mode: 'actions', enabled: true }) + toolbarPosthogJS.capture('toolbar mode triggered', { mode: 'actions', enabled: true }) }, hideButtonActions: () => { actions.setShowActionsTooltip(false) - posthog.capture('toolbar mode triggered', { mode: 'actions', enabled: false }) + toolbarPosthogJS.capture('toolbar mode triggered', { mode: 'actions', enabled: false }) }, [actionsLogic.actionTypes.getActionsSuccess]: () => { const { userIntent, actionId } = values diff --git a/frontend/src/toolbar/bar/ToolbarMenu.tsx b/frontend/src/toolbar/bar/ToolbarMenu.tsx index 31f21fefdc6a2..bbb4bbbabbef9 100644 --- a/frontend/src/toolbar/bar/ToolbarMenu.tsx +++ b/frontend/src/toolbar/bar/ToolbarMenu.tsx @@ -1,15 +1,24 @@ -export function ToolbarMenu({ children }: { children: React.ReactNode }): JSX.Element { - return
{children}
+import clsx from 'clsx' + +export type ToolbarMenuProps = { + children: React.ReactNode + className?: string +} + +export function ToolbarMenu({ children, className }: ToolbarMenuProps): JSX.Element { + return
{children}
} -ToolbarMenu.Header = function ToolbarMenuHeader({ children }: { children: React.ReactNode }): JSX.Element { - return
{children}
+ToolbarMenu.Header = function ToolbarMenuHeader({ children, className }: ToolbarMenuProps): JSX.Element { + return
{children}
} -ToolbarMenu.Body = function ToolbarMenuBody({ children }: { children: React.ReactNode }): JSX.Element { - return
{children}
+ToolbarMenu.Body = function ToolbarMenuBody({ children, className }: ToolbarMenuProps): JSX.Element { + return ( +
{children}
+ ) } -ToolbarMenu.Footer = function ToolbarMenufooter({ children }: { children: React.ReactNode }): JSX.Element { - return
{children}
+ToolbarMenu.Footer = function ToolbarMenufooter({ children, className }: ToolbarMenuProps): JSX.Element { + return
{children}
} diff --git a/frontend/src/toolbar/debug/eventDebugMenuLogic.ts b/frontend/src/toolbar/debug/eventDebugMenuLogic.ts index 1b490687c3ad2..93a1f7587ee10 100644 --- a/frontend/src/toolbar/debug/eventDebugMenuLogic.ts +++ b/frontend/src/toolbar/debug/eventDebugMenuLogic.ts @@ -59,9 +59,8 @@ export const eventDebugMenuLogic = kea([ return events.filter((e) => { if (showRecordingSnapshots) { return true - } else { - return e.event !== '$snapshot' } + return e.event !== '$snapshot' }) }, ], diff --git a/frontend/src/toolbar/elements/HeatmapElement.tsx b/frontend/src/toolbar/elements/AutocaptureElement.tsx similarity index 88% rename from frontend/src/toolbar/elements/HeatmapElement.tsx rename to frontend/src/toolbar/elements/AutocaptureElement.tsx index 61a0d5db7d6f9..e6b8588fbedbd 100644 --- a/frontend/src/toolbar/elements/HeatmapElement.tsx +++ b/frontend/src/toolbar/elements/AutocaptureElement.tsx @@ -1,6 +1,6 @@ import { ElementRect } from '~/toolbar/types' -interface HeatmapElementProps { +interface AutocaptureElementProps { rect?: ElementRect style: Record onClick: (event: React.MouseEvent) => void @@ -8,13 +8,13 @@ interface HeatmapElementProps { onMouseOut: (event: React.MouseEvent) => void } -export function HeatmapElement({ +export function AutocaptureElement({ rect, style = {}, onClick, onMouseOver, onMouseOut, -}: HeatmapElementProps): JSX.Element | null { +}: AutocaptureElementProps): JSX.Element | null { if (!rect) { return null } diff --git a/frontend/src/toolbar/elements/HeatmapLabel.tsx b/frontend/src/toolbar/elements/AutocaptureElementLabel.tsx similarity index 87% rename from frontend/src/toolbar/elements/HeatmapLabel.tsx rename to frontend/src/toolbar/elements/AutocaptureElementLabel.tsx index 5b1a77d301afb..24b2aeb3f4d3e 100644 --- a/frontend/src/toolbar/elements/HeatmapLabel.tsx +++ b/frontend/src/toolbar/elements/AutocaptureElementLabel.tsx @@ -12,18 +12,18 @@ const heatmapLabelStyle = { fontFamily: 'monospace', } -interface HeatmapLabelProps extends React.PropsWithoutRef { +interface AutocaptureElementLabelProps extends React.PropsWithoutRef { rect?: ElementRect align?: 'left' | 'right' } -export function HeatmapLabel({ +export function AutocaptureElementLabel({ rect, style = {}, align = 'right', children, ...props -}: HeatmapLabelProps): JSX.Element | null { +}: AutocaptureElementLabelProps): JSX.Element | null { if (!rect) { return null } diff --git a/frontend/src/toolbar/elements/ElementInfoWindow.tsx b/frontend/src/toolbar/elements/ElementInfoWindow.tsx index 4d15961b7bc8c..1c0d010f7541a 100644 --- a/frontend/src/toolbar/elements/ElementInfoWindow.tsx +++ b/frontend/src/toolbar/elements/ElementInfoWindow.tsx @@ -81,7 +81,6 @@ export function ElementInfoWindow(): JSX.Element | null { transition: 'opacity 0.2s, box-shadow 0.2s', backgroundBlendMode: 'multiply', background: 'white', - boxShadow: `hsla(4, 30%, 27%, 0.6) 0px 3px 10px 2px`, }} > {onClose ? ( @@ -111,8 +110,16 @@ export function ElementInfoWindow(): JSX.Element | null {

) : null} - {/* eslint-disable-next-line react/forbid-dom-props */} -
+
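Editor's note: the hunks that follow (Elements.tsx plus the new Heatmap.tsx, ScrollDepth.tsx and the reworked heatmapLogic.ts) add a heatmap.js canvas and a scroll-depth overlay alongside the renamed Autocapture click-count badges, fed from the /api/heatmap endpoint. heatmapLogic narrows the request with viewport_width_min/max derived from the window width and the configured viewportAccuracy (for example, a 1000px window at 0.9 accuracy requests widths 900-1100) and exposes the result as the HeatmapJsData shape ({ min, max, data: [{ x, y, value }] }) that heatmap.js consumes. A self-contained sketch of that heatmap.js usage; the sample numbers and the monocolor gradient are invented, in the spirit of the toolbar's palettes:

    import heatmapsJs from 'heatmap.js'

    // Assumes a container element already positioned over the inspected page
    const container = document.createElement('div')
    document.body.appendChild(container)

    const heatmap = heatmapsJs.create({
        container,
        // Transparent at low intensity, solid red at full intensity
        gradient: { '0.0': 'rgba(255, 0, 0, 0)', '1.0': 'rgba(255, 0, 0, 1)' },
    })

    // Mirrors HeatmapJsData from heatmapLogic: value is the interaction count at (x, y)
    heatmap.setData({
        min: 0,
        max: 120,
        data: [
            { x: 200, y: 340, value: 120 },
            { x: 640, y: 90, value: 45 },
        ],
    })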
diff --git a/frontend/src/toolbar/elements/Elements.tsx b/frontend/src/toolbar/elements/Elements.tsx index 0e8c0a278d67f..3e57984535b67 100644 --- a/frontend/src/toolbar/elements/Elements.tsx +++ b/frontend/src/toolbar/elements/Elements.tsx @@ -4,14 +4,17 @@ import { useActions, useValues } from 'kea' import { compactNumber } from 'lib/utils' import { Fragment } from 'react' +import { AutocaptureElement } from '~/toolbar/elements/AutocaptureElement' +import { AutocaptureElementLabel } from '~/toolbar/elements/AutocaptureElementLabel' import { ElementInfoWindow } from '~/toolbar/elements/ElementInfoWindow' import { elementsLogic } from '~/toolbar/elements/elementsLogic' import { FocusRect } from '~/toolbar/elements/FocusRect' -import { HeatmapElement } from '~/toolbar/elements/HeatmapElement' -import { HeatmapLabel } from '~/toolbar/elements/HeatmapLabel' import { heatmapLogic } from '~/toolbar/elements/heatmapLogic' import { getBoxColors, getHeatMapHue } from '~/toolbar/utils' +import { Heatmap } from './Heatmap' +import { ScrollDepth } from './ScrollDepth' + export function Elements(): JSX.Element { const { heatmapElements, @@ -48,16 +51,18 @@ export function Elements(): JSX.Element { zIndex: 2147483010, }} > + + {highlightElementMeta?.rect ? : null} {elementsToDisplay.map(({ rect, element }, index) => ( - selectElement(element)} @@ -76,14 +82,15 @@ export function Elements(): JSX.Element { {heatmapElements.map(({ rect, count, clickCount, rageclickCount, element }, index) => { return ( - selectedElement === null && setHoverElement(null)} /> {!!clickCount && ( - selectedElement === null && setHoverElement(null)} > {compactNumber(clickCount || 0)} - + )} {!!rageclickCount && ( - selectedElement === null && setHoverElement(null)} > {compactNumber(rageclickCount)}😡 - + )} ) @@ -162,7 +169,7 @@ export function Elements(): JSX.Element { {labelsToDisplay.map(({ element, rect, index }, loopIndex) => { if (rect) { return ( - selectedElement === null && setHoverElement(null)} > {(index || loopIndex) + 1} - + ) } })} diff --git a/frontend/src/toolbar/elements/Heatmap.tsx b/frontend/src/toolbar/elements/Heatmap.tsx new file mode 100644 index 0000000000000..75785d3f3a768 --- /dev/null +++ b/frontend/src/toolbar/elements/Heatmap.tsx @@ -0,0 +1,120 @@ +import heatmapsJs, { Heatmap as HeatmapJS } from 'heatmap.js' +import { useValues } from 'kea' +import { MutableRefObject, useCallback, useEffect, useMemo, useRef } from 'react' + +import { heatmapLogic } from '~/toolbar/elements/heatmapLogic' + +import { useMousePosition } from './useMousePosition' + +function HeatmapMouseInfo({ + heatmapJsRef, +}: { + heatmapJsRef: MutableRefObject | undefined> +}): JSX.Element | null { + const { shiftPressed, heatmapFilters } = useValues(heatmapLogic) + + const mousePosition = useMousePosition() + const value = heatmapJsRef.current?.getValueAt(mousePosition) + + if (!mousePosition || (!value && !shiftPressed)) { + return null + } + + const leftPosition = window.innerWidth - mousePosition.x < 100 + + return ( +
+
+ + {value} {heatmapFilters.type + 's'} + +
+
+ ) +} + +export function Heatmap(): JSX.Element | null { + const { heatmapJsData, heatmapEnabled, heatmapFilters, windowWidth, windowHeight, heatmapColorPalette } = + useValues(heatmapLogic) + const heatmapsJsRef = useRef>() + const heatmapsJsContainerRef = useRef() + + const heatmapJSColorGradient = useMemo((): Record => { + switch (heatmapColorPalette) { + case 'blue': + return { '.0': 'rgba(0, 0, 255, 0)', '.100': 'rgba(0, 0, 255, 1)' } + case 'green': + return { '.0': 'rgba(0, 255, 0, 0)', '.100': 'rgba(0, 255, 0, 1)' } + case 'red': + return { '.0': 'rgba(255, 0, 0, 0)', '.100': 'rgba(255, 0, 0, 1)' } + + default: + // Defaults taken from heatmap.js + return { '.25': 'rgb(0,0,255)', '0.55': 'rgb(0,255,0)', '0.85': 'yellow', '1.0': 'rgb(255,0,0)' } + } + }, [heatmapColorPalette]) + + const updateHeatmapData = useCallback((): void => { + try { + heatmapsJsRef.current?.setData(heatmapJsData) + } catch (e) { + console.error('error setting data', e) + } + }, [heatmapJsData]) + + const setHeatmapContainer = useCallback((container: HTMLDivElement | null): void => { + heatmapsJsContainerRef.current = container + if (!container) { + return + } + + heatmapsJsRef.current = heatmapsJs.create({ + container, + gradient: heatmapJSColorGradient, + }) + + updateHeatmapData() + }, []) + + useEffect(() => { + updateHeatmapData() + }, [heatmapJsData]) + + useEffect(() => { + if (!heatmapsJsContainerRef.current) { + return + } + + heatmapsJsRef.current?.configure({ + container: heatmapsJsContainerRef.current, + gradient: heatmapJSColorGradient, + }) + }, [heatmapJSColorGradient]) + + if (!heatmapEnabled || !heatmapFilters.enabled || heatmapFilters.type === 'scrolldepth') { + return null + } + + return ( +
+ {/* NOTE: We key on the window dimensions which triggers a recreation of the canvas */} +
+ +
+ ) +} diff --git a/frontend/src/toolbar/elements/ScrollDepth.tsx b/frontend/src/toolbar/elements/ScrollDepth.tsx new file mode 100644 index 0000000000000..9e929de52c745 --- /dev/null +++ b/frontend/src/toolbar/elements/ScrollDepth.tsx @@ -0,0 +1,137 @@ +import clsx from 'clsx' +import { useValues } from 'kea' + +import { heatmapLogic } from '~/toolbar/elements/heatmapLogic' + +import { toolbarConfigLogic } from '../toolbarConfigLogic' +import { useMousePosition } from './useMousePosition' + +function ScrollDepthMouseInfo(): JSX.Element | null { + const { posthog } = useValues(toolbarConfigLogic) + const { heatmapElements, rawHeatmapLoading, shiftPressed } = useValues(heatmapLogic) + + const { y: mouseY } = useMousePosition() + + if (!mouseY) { + return null + } + + const scrollOffset = (posthog as any).scrollManager.scrollY() + const scrolledMouseY = mouseY + scrollOffset + + const elementInMouseY = heatmapElements.find((x, i) => { + const lastY = heatmapElements[i - 1]?.y ?? 0 + return scrolledMouseY >= lastY && scrolledMouseY < x.y + }) + + const maxCount = heatmapElements[0]?.count ?? 0 + const percentage = ((elementInMouseY?.count ?? 0) / maxCount) * 100 + + return ( +
+
+
+ {rawHeatmapLoading ? ( + <>Loading... + ) : heatmapElements.length ? ( + <>{percentage.toPrecision(4)}% scrolled this far + ) : ( + <>No scroll data for the current dimension range + )} +
+ +
+
+ ) +} + +export function ScrollDepth(): JSX.Element | null { + const { posthog } = useValues(toolbarConfigLogic) + + const { heatmapEnabled, heatmapFilters, heatmapElements, scrollDepthPosthogJsError, heatmapColorPalette } = + useValues(heatmapLogic) + + if (!heatmapEnabled || !heatmapFilters.enabled || heatmapFilters.type !== 'scrolldepth') { + return null + } + + if (scrollDepthPosthogJsError) { + return null + } + + const scrollOffset = (posthog as any).scrollManager.scrollY() + + // We want to have a fading color from red to orange to green to blue to grey, fading from the highest count to the lowest + const maxCount = heatmapElements[0]?.count ?? 0 + + function color(count: number): string { + const value = 1 - count / maxCount + + if (heatmapColorPalette === 'default') { + const safeValue = Math.max(0, Math.min(1, value)) + const hue = Math.round(260 * safeValue) + + // Return hsl color. You can adjust saturation and lightness to your liking + return `hsl(${hue}, 100%, 50%)` + } + + const rgba = [0, 0, 0, count / maxCount] + + switch (heatmapColorPalette) { + case 'red': + rgba[0] = 255 + break + case 'green': + rgba[1] = 255 + break + case 'blue': + rgba[2] = 255 + break + default: + break + } + + return `rgba(${rgba.join(', ')})` + } + + return ( +
+
+ {heatmapElements.map(({ y, count }, i) => ( +
+ ))} +
+ +
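Editor's note: ScrollDepthMouseInfo above reads the cumulative scroll-depth buckets: it adds the page scroll offset to the pointer's Y, finds the bucket whose range contains that position, and divides its count by the first bucket's count (the largest, since counts are cumulative). With the default palette, the ScrollDepth bands map 1 - count/max onto hues 0-260, so heavily-reached depths render red/orange and rarely-reached ones blue. A small worked example of the percentage, using made-up buckets:

    // Invented cumulative buckets in the shape used by heatmapElements (first bucket has the largest count)
    const heatmapElements = [
        { y: 400, count: 1000 }, // every tracked view reached the first 400px
        { y: 800, count: 600 },
        { y: 1200, count: 150 },
    ]

    const mouseY = 300 // pointer position in the viewport
    const scrollOffset = 350 // current window scroll
    const scrolledMouseY = mouseY + scrollOffset // 650

    // The 800px bucket is the first one whose range [previous y, y) contains 650
    const elementInMouseY = heatmapElements.find((x, i) => {
        const lastY = heatmapElements[i - 1]?.y ?? 0
        return scrolledMouseY >= lastY && scrolledMouseY < x.y
    })

    const maxCount = heatmapElements[0]?.count ?? 0
    const percentage = ((elementInMouseY?.count ?? 0) / maxCount) * 100 // 60, shown as "60% scrolled this far"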
+ ) +} diff --git a/frontend/src/toolbar/elements/elementsLogic.ts b/frontend/src/toolbar/elements/elementsLogic.ts index df7ad6639166c..c08f0645fb712 100644 --- a/frontend/src/toolbar/elements/elementsLogic.ts +++ b/frontend/src/toolbar/elements/elementsLogic.ts @@ -3,9 +3,9 @@ import { collectAllElementsDeep } from 'query-selector-shadow-dom' import { actionsLogic } from '~/toolbar/actions/actionsLogic' import { actionsTabLogic } from '~/toolbar/actions/actionsTabLogic' -import { posthog } from '~/toolbar/posthog' import { currentPageLogic } from '~/toolbar/stats/currentPageLogic' import { toolbarConfigLogic } from '~/toolbar/toolbarConfigLogic' +import { toolbarPosthogJS } from '~/toolbar/toolbarPosthogJS' import { ActionElementWithMetadata, ElementWithMetadata } from '~/toolbar/types' import { elementToActionStep, getAllClickTargets, getElementForStep, getRectForElement } from '../utils' @@ -371,11 +371,11 @@ export const elementsLogic = kea([ }), listeners(({ actions }) => ({ enableInspect: () => { - posthog.capture('toolbar mode triggered', { mode: 'inspect', enabled: true }) + toolbarPosthogJS.capture('toolbar mode triggered', { mode: 'inspect', enabled: true }) actionsLogic.actions.getActions() }, disableInspect: () => { - posthog.capture('toolbar mode triggered', { mode: 'inspect', enabled: false }) + toolbarPosthogJS.capture('toolbar mode triggered', { mode: 'inspect', enabled: false }) }, selectElement: ({ element }) => { const inspectForAction = @@ -401,7 +401,7 @@ export const elementsLogic = kea([ } } - posthog.capture('toolbar selected HTML element', { + toolbarPosthogJS.capture('toolbar selected HTML element', { element_tag: element?.tagName.toLowerCase(), element_type: (element as HTMLInputElement)?.type, has_href: !!(element as HTMLAnchorElement)?.href, diff --git a/frontend/src/toolbar/elements/heatmapLogic.ts b/frontend/src/toolbar/elements/heatmapLogic.ts index 45ec141630420..f6256a91844be 100644 --- a/frontend/src/toolbar/elements/heatmapLogic.ts +++ b/frontend/src/toolbar/elements/heatmapLogic.ts @@ -1,30 +1,74 @@ +import { LemonSelectOption } from '@posthog/lemon-ui' import { actions, afterMount, beforeUnmount, connect, kea, listeners, path, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import { encodeParams } from 'kea-router' +import { subscriptions } from 'kea-subscriptions' +import { windowValues } from 'kea-window-values' import { elementToSelector, escapeRegex } from 'lib/actionUtils' import { PaginatedResponse } from 'lib/api' import { dateFilterToText } from 'lib/utils' +import { PostHog } from 'posthog-js' import { collectAllElementsDeep, querySelectorAllDeep } from 'query-selector-shadow-dom' -import { posthog } from '~/toolbar/posthog' import { currentPageLogic } from '~/toolbar/stats/currentPageLogic' import { toolbarConfigLogic, toolbarFetch } from '~/toolbar/toolbarConfigLogic' -import { CountedHTMLElement, ElementsEventType } from '~/toolbar/types' +import { toolbarPosthogJS } from '~/toolbar/toolbarPosthogJS' +import { + CountedHTMLElement, + ElementsEventType, + HeatmapElement, + HeatmapRequestType, + HeatmapResponseType, +} from '~/toolbar/types' import { elementToActionStep, trimElement } from '~/toolbar/utils' import { FilterType, PropertyFilterType, PropertyOperator } from '~/types' import type { heatmapLogicType } from './heatmapLogicType' +export const SCROLL_DEPTH_JS_VERSION = [1, 99] + const emptyElementsStatsPages: PaginatedResponse = { next: undefined, previous: undefined, results: [], } +export type CommonFilters 
= { + date_from?: string + date_to?: string +} + +export type HeatmapFilters = { + enabled: boolean + type?: string + viewportAccuracy?: number + aggregation?: HeatmapRequestType['aggregation'] +} + +export type HeatmapJsDataPoint = { + x: number + y: number + value: number +} + +export type HeatmapJsData = { + data: HeatmapJsDataPoint[] + max: number + min: number +} +export type HeatmapFixedPositionMode = 'fixed' | 'relative' | 'hidden' + +export const HEATMAP_COLOR_PALETTE_OPTIONS: LemonSelectOption[] = [ + { value: 'default', label: 'Default (multicolor)' }, + { value: 'red', label: 'Red (monocolor)' }, + { value: 'green', label: 'Green (monocolor)' }, + { value: 'blue', label: 'Blue (monocolor)' }, +] + export const heatmapLogic = kea([ path(['toolbar', 'elements', 'heatmapLogic']), connect({ - values: [currentPageLogic, ['href', 'wildcardHref']], + values: [currentPageLogic, ['href', 'wildcardHref'], toolbarConfigLogic, ['posthog']], actions: [currentPageLogic, ['setHref', 'setWildcardHref']], }), actions({ @@ -33,12 +77,29 @@ export const heatmapLogic = kea([ }), enableHeatmap: true, disableHeatmap: true, - setShowHeatmapTooltip: (showHeatmapTooltip: boolean) => ({ showHeatmapTooltip }), setShiftPressed: (shiftPressed: boolean) => ({ shiftPressed }), - setHeatmapFilter: (filter: Partial) => ({ filter }), + setCommonFilters: (filters: CommonFilters) => ({ filters }), + setHeatmapFilters: (filters: HeatmapFilters) => ({ filters }), + patchHeatmapFilters: (filters: Partial) => ({ filters }), + toggleClickmapsEnabled: (enabled?: boolean) => ({ enabled }), + loadMoreElementStats: true, setMatchLinksByHref: (matchLinksByHref: boolean) => ({ matchLinksByHref }), + loadHeatmap: (type: string) => ({ + type, + }), + loadAllEnabled: (delayMs: number = 0) => ({ delayMs }), + maybeLoadClickmap: (delayMs: number = 0) => ({ delayMs }), + maybeLoadHeatmap: (delayMs: number = 0) => ({ delayMs }), + fetchHeatmapApi: (params: HeatmapRequestType) => ({ params }), + setHeatmapScrollY: (scrollY: number) => ({ scrollY }), + setHeatmapFixedPositionMode: (mode: HeatmapFixedPositionMode) => ({ mode }), + setHeatmapColorPalette: (Palette: string | null) => ({ Palette }), }), + windowValues(() => ({ + windowWidth: (window: Window) => window.innerWidth, + windowHeight: (window: Window) => window.innerHeight, + })), reducers({ matchLinksByHref: [false, { setMatchLinksByHref: (_, { matchLinksByHref }) => matchLinksByHref }], canLoadMoreElementStats: [ @@ -56,31 +117,57 @@ export const heatmapLogic = kea([ getElementStatsFailure: () => false, }, ], - heatmapLoading: [ + shiftPressed: [ false, { - getElementStats: () => true, - getElementStatsSuccess: () => false, - getElementStatsFailure: () => false, - resetElementStats: () => false, + setShiftPressed: (_, { shiftPressed }) => shiftPressed, }, ], - showHeatmapTooltip: [ - false, + commonFilters: [ + {} as CommonFilters, { - setShowHeatmapTooltip: (_, { showHeatmapTooltip }) => showHeatmapTooltip, + setCommonFilters: (_, { filters }) => filters, }, ], - shiftPressed: [ - false, + heatmapFilters: [ { - setShiftPressed: (_, { shiftPressed }) => shiftPressed, + enabled: true, + type: 'click', + viewportAccuracy: 0.9, + aggregation: 'total_count', + } as HeatmapFilters, + { persist: true }, + { + setHeatmapFilters: (_, { filters }) => filters, + patchHeatmapFilters: (state, { filters }) => ({ ...state, ...filters }), + }, + ], + clickmapsEnabled: [ + true, + { persist: true }, + { + toggleClickmapsEnabled: (state, { enabled }) => (enabled === undefined ? 
!state : enabled), + }, + ], + heatmapScrollY: [ + 0, + { + setHeatmapScrollY: (_, { scrollY }) => scrollY, + }, + ], + + heatmapFixedPositionMode: [ + 'fixed' as HeatmapFixedPositionMode, + { + setHeatmapFixedPositionMode: (_, { mode }) => mode, }, ], - heatmapFilter: [ - {} as Partial, + + heatmapColorPalette: [ + 'default' as string | null, + { persist: true }, { - setHeatmapFilter: (_, { filter }) => filter, + setHeatmapColorPalette: (_, { Palette }) => Palette, }, ], }), @@ -110,7 +197,8 @@ export const heatmapLogic = kea([ type: PropertyFilterType.Event, }, ], - ...values.heatmapFilter, + date_from: values.commonFilters.date_from, + date_to: values.commonFilters.date_to, } defaultUrl = `/api/element/stats/${encodeParams({ ...params, paginate_response: true }, '?')}` @@ -144,22 +232,58 @@ export const heatmapLogic = kea([ }, }, ], + + rawHeatmap: [ + null as HeatmapResponseType | null, + { + loadHeatmap: async () => { + const { href, wildcardHref } = values + const { date_from, date_to } = values.commonFilters + const { type, aggregation } = values.heatmapFilters + const urlExact = wildcardHref === href ? href : undefined + const urlRegex = wildcardHref !== href ? wildcardHref : undefined + + // toolbar fetch collapses queryparams but this URL has multiple with the same name + const response = await toolbarFetch( + `/api/heatmap/${encodeParams( + { + type, + date_from, + date_to, + url_exact: urlExact, + url_pattern: urlRegex, + viewport_width_min: values.viewportRange.min, + viewport_width_max: values.viewportRange.max, + aggregation, + }, + '?' + )}`, + 'GET' + ) + + if (response.status === 403) { + toolbarConfigLogic.actions.authenticate() + } + + if (response.status !== 200) { + throw new Error('API error') + } + + return await response.json() + }, + }, + ], })), selectors(({ cache }) => ({ dateRange: [ - (s) => [s.heatmapFilter], - (heatmapFilter: Partial) => { - return dateFilterToText(heatmapFilter.date_from, heatmapFilter.date_to, 'Last 7 days') + (s) => [s.commonFilters], + (commonFilters: Partial) => { + return dateFilterToText(commonFilters.date_from, commonFilters.date_to, 'Last 7 days') }, ], elements: [ - (selectors) => [ - selectors.elementStats, - toolbarConfigLogic.selectors.dataAttributes, - selectors.href, - selectors.matchLinksByHref, - ], + (s) => [s.elementStats, toolbarConfigLogic.selectors.dataAttributes, s.href, s.matchLinksByHref], (elementStats, dataAttributes, href, matchLinksByHref) => { cache.pageElements = cache.lastHref == href ? cache.pageElements : collectAllElementsDeep('*', document) cache.selectorToElements = cache.lastHref == href ? 
cache.selectorToElements : {} @@ -240,8 +364,11 @@ export const heatmapLogic = kea([ }, ], countedElements: [ - (selectors) => [selectors.elements, toolbarConfigLogic.selectors.dataAttributes], - (elements, dataAttributes) => { + (s) => [s.elements, toolbarConfigLogic.selectors.dataAttributes, s.clickmapsEnabled], + (elements, dataAttributes, clickmapsEnabled) => { + if (!clickmapsEnabled) { + return [] + } const normalisedElements = new Map() ;(elements || []).forEach((countedElement) => { const trimmedElement = trimElement(countedElement.element) @@ -273,22 +400,225 @@ export const heatmapLogic = kea([ return countedElements.map((e, i) => ({ ...e, position: i + 1 })) }, ], - elementCount: [(selectors) => [selectors.countedElements], (countedElements) => countedElements.length], + elementCount: [(s) => [s.countedElements], (countedElements) => countedElements.length], clickCount: [ - (selectors) => [selectors.countedElements], + (s) => [s.countedElements], (countedElements) => (countedElements ? countedElements.map((e) => e.count).reduce((a, b) => a + b, 0) : 0), ], highestClickCount: [ - (selectors) => [selectors.countedElements], + (s) => [s.countedElements], (countedElements) => countedElements ? countedElements.map((e) => e.count).reduce((a, b) => (b > a ? b : a), 0) : 0, ], + + heatmapElements: [ + (s) => [s.rawHeatmap], + (rawHeatmap): HeatmapElement[] => { + if (!rawHeatmap) { + return [] + } + + const elements: HeatmapElement[] = [] + + rawHeatmap?.results.forEach((element) => { + if ('scroll_depth_bucket' in element) { + elements.push({ + count: element.cumulative_count, + xPercentage: 0, + targetFixed: false, + y: element.scroll_depth_bucket, + }) + } else { + elements.push({ + count: element.count, + xPercentage: element.pointer_relative_x, + targetFixed: element.pointer_target_fixed, + y: element.pointer_y, + }) + } + }) + + return elements + }, + ], + + viewportRange: [ + (s) => [s.heatmapFilters, s.windowWidth], + (heatmapFilters, windowWidth): { max: number; min: number } => { + const viewportAccuracy = heatmapFilters.viewportAccuracy ?? 0.2 + const extraPixels = windowWidth - windowWidth * viewportAccuracy + + const minWidth = Math.max(0, windowWidth - extraPixels) + const maxWidth = windowWidth + extraPixels + + return { + min: Math.round(minWidth), + max: Math.round(maxWidth), + } + }, + ], + + scrollDepthPosthogJsError: [ + (s) => [s.posthog], + (posthog: PostHog): 'version' | 'disabled' | null => { + const posthogVersion = posthog?._calculate_event_properties('test', {})?.['$lib_version'] ?? '0.0.0' + const majorMinorVersion = posthogVersion.split('.') + const majorVersion = parseInt(majorMinorVersion[0], 10) + const minorVersion = parseInt(majorMinorVersion[1], 10) + + if (!(posthog as any)?.scrollManager?.scrollY) { + return 'version' + } + + const isSupported = + majorVersion > SCROLL_DEPTH_JS_VERSION[0] || + (majorVersion === SCROLL_DEPTH_JS_VERSION[0] && minorVersion >= SCROLL_DEPTH_JS_VERSION[1]) + const isDisabled = posthog?.config.disable_scroll_properties + + return !isSupported ? 'version' : isDisabled ? 
'disabled' : null + }, + ], + + heatmapJsData: [ + (s) => [s.heatmapElements, s.heatmapScrollY, s.windowWidth, s.heatmapFixedPositionMode], + (heatmapElements, heatmapScrollY, windowWidth, heatmapFixedPositionMode): HeatmapJsData => { + // We want to account for all the fixed position elements, the scroll of the context and the browser width + const data = heatmapElements.reduce((acc, element) => { + if (heatmapFixedPositionMode === 'hidden' && element.targetFixed) { + return acc + } + + const y = Math.round( + element.targetFixed && heatmapFixedPositionMode === 'fixed' + ? element.y + : element.y - heatmapScrollY + ) + const x = Math.round(element.xPercentage * windowWidth) + + return [...acc, { x, y, value: element.count }] + }, [] as HeatmapJsDataPoint[]) + + // Max is the highest value in the data set we have + const max = data.reduce((max, { value }) => Math.max(max, value), 0) + + // TODO: Group based on some sensible resolutions (we can then use this for a hover state to show more detail) + + return { + min: 0, + max, + data, + } + }, + ], + })), + + subscriptions(({ actions }) => ({ + viewportRange: () => { + actions.maybeLoadHeatmap(500) + }, + })), + + listeners(({ actions, values }) => ({ + fetchHeatmapApi: async () => { + const { href, wildcardHref } = values + const { date_from, date_to } = values.commonFilters + const { type, aggregation } = values.heatmapFilters + const urlExact = wildcardHref === href ? href : undefined + const urlRegex = wildcardHref !== href ? wildcardHref : undefined + + // toolbar fetch collapses queryparams but this URL has multiple with the same name + const response = await toolbarFetch( + `/api/heatmap/${encodeParams( + { + type, + date_from, + date_to, + url_exact: urlExact, + url_pattern: urlRegex, + viewport_width_min: values.viewportRange.min, + viewport_width_max: values.viewportRange.max, + aggregation, + }, + '?' 
+ )}`, + 'GET' + ) + + if (response.status === 403) { + toolbarConfigLogic.actions.authenticate() + } + + if (response.status !== 200) { + throw new Error('API error') + } + + return await response.json() + }, + enableHeatmap: () => { + actions.loadAllEnabled() + toolbarPosthogJS.capture('toolbar mode triggered', { mode: 'heatmap', enabled: true }) + }, + disableHeatmap: () => { + actions.resetElementStats() + toolbarPosthogJS.capture('toolbar mode triggered', { mode: 'heatmap', enabled: false }) + }, + + loadAllEnabled: async ({ delayMs }, breakpoint) => { + await breakpoint(delayMs) + + actions.maybeLoadHeatmap() + actions.maybeLoadClickmap() + }, + maybeLoadClickmap: async ({ delayMs }, breakpoint) => { + await breakpoint(delayMs) + if (values.heatmapEnabled && values.clickmapsEnabled) { + actions.getElementStats() + } + }, + + maybeLoadHeatmap: async ({ delayMs }, breakpoint) => { + await breakpoint(delayMs) + if (values.heatmapEnabled) { + if (values.heatmapFilters.enabled && values.heatmapFilters.type) { + actions.loadHeatmap(values.heatmapFilters.type) + } + } + }, + + setHref: () => { + actions.loadAllEnabled() + }, + setWildcardHref: () => { + actions.loadAllEnabled(1000) + }, + setCommonFilters: () => { + actions.loadAllEnabled(200) + }, + + // Only trigger element stats loading if clickmaps are enabled + toggleClickmapsEnabled: () => { + if (values.clickmapsEnabled) { + actions.getElementStats() + } + }, + + loadMoreElementStats: () => { + if (values.elementStats?.next) { + actions.getElementStats(values.elementStats.next) + } + }, + + patchHeatmapFilters: ({ filters }) => { + if (filters.type) { + // Clear the heatmap if the type changes + actions.loadHeatmapSuccess({ results: [] }) + } + actions.maybeLoadHeatmap(200) + }, })), afterMount(({ actions, values, cache }) => { - if (values.heatmapEnabled) { - actions.getElementStats() - } + actions.loadAllEnabled() cache.keyDownListener = (event: KeyboardEvent) => { if (event.shiftKey && !values.shiftPressed) { actions.setShiftPressed(true) @@ -301,53 +631,18 @@ export const heatmapLogic = kea([ } window.addEventListener('keydown', cache.keyDownListener) window.addEventListener('keyup', cache.keyUpListener) + + cache.scrollCheckTimer = setInterval(() => { + const scrollY = (values.posthog as any)?.scrollManager?.scrollY() ?? 
0 + if (values.heatmapScrollY !== scrollY) { + actions.setHeatmapScrollY(scrollY) + } + }, 100) }), beforeUnmount(({ cache }) => { window.removeEventListener('keydown', cache.keyDownListener) window.removeEventListener('keyup', cache.keyUpListener) + clearInterval(cache.scrollCheckTimer) }), - - listeners(({ actions, values }) => ({ - loadMoreElementStats: () => { - if (values.elementStats?.next) { - actions.getElementStats(values.elementStats.next) - } - }, - setHref: () => { - if (values.heatmapEnabled) { - actions.resetElementStats() - actions.getElementStats() - } - }, - setWildcardHref: async (_, breakpoint) => { - await breakpoint(100) - if (values.heatmapEnabled) { - actions.resetElementStats() - actions.getElementStats() - } - }, - enableHeatmap: () => { - actions.getElementStats() - posthog.capture('toolbar mode triggered', { mode: 'heatmap', enabled: true }) - }, - disableHeatmap: () => { - actions.resetElementStats() - actions.setShowHeatmapTooltip(false) - posthog.capture('toolbar mode triggered', { mode: 'heatmap', enabled: false }) - }, - getElementStatsSuccess: () => { - actions.setShowHeatmapTooltip(true) - }, - setShowHeatmapTooltip: async ({ showHeatmapTooltip }, breakpoint) => { - if (showHeatmapTooltip) { - await breakpoint(1000) - actions.setShowHeatmapTooltip(false) - } - }, - setHeatmapFilter: () => { - actions.resetElementStats() - actions.getElementStats() - }, - })), ]) diff --git a/frontend/src/toolbar/elements/useMousePosition.ts b/frontend/src/toolbar/elements/useMousePosition.ts new file mode 100644 index 0000000000000..a5bd1021afee1 --- /dev/null +++ b/frontend/src/toolbar/elements/useMousePosition.ts @@ -0,0 +1,17 @@ +import { useEffect, useState } from 'react' + +export const useMousePosition = (): { x: number; y: number } => { + const [mousePosition, setMousePosition] = useState({ x: 0, y: 0 }) + + useEffect(() => { + const onMove = (e: MouseEvent): void => { + setMousePosition({ x: e.clientX, y: e.clientY }) + } + + window.addEventListener('mousemove', onMove) + return () => { + window.removeEventListener('mousemove', onMove) + } + }, []) + return mousePosition +} diff --git a/frontend/src/toolbar/flags/flagsToolbarLogic.ts b/frontend/src/toolbar/flags/flagsToolbarLogic.ts index e1f41cabca73c..07e7082646023 100644 --- a/frontend/src/toolbar/flags/flagsToolbarLogic.ts +++ b/frontend/src/toolbar/flags/flagsToolbarLogic.ts @@ -5,8 +5,8 @@ import { encodeParams } from 'kea-router' import { permanentlyMount } from 'lib/utils/kea-logic-builders' import type { PostHog } from 'posthog-js' -import { posthog as posthogJS } from '~/toolbar/posthog' import { toolbarConfigLogic, toolbarFetch } from '~/toolbar/toolbarConfigLogic' +import { toolbarPosthogJS } from '~/toolbar/toolbarPosthogJS' import { CombinedFeatureFlagAndValueType } from '~/types' import type { flagsToolbarLogicType } from './flagsToolbarLogicType' @@ -119,7 +119,7 @@ export const flagsToolbarLogic = kea([ const clientPostHog = values.posthog if (clientPostHog) { clientPostHog.featureFlags.override({ ...values.localOverrides, [flagKey]: overrideValue }) - posthogJS.capture('toolbar feature flag overridden') + toolbarPosthogJS.capture('toolbar feature flag overridden') actions.checkLocalOverrides() clientPostHog.featureFlags.reloadFeatureFlags() } @@ -134,7 +134,7 @@ export const flagsToolbarLogic = kea([ } else { clientPostHog.featureFlags.override(false) } - posthogJS.capture('toolbar feature flag override removed') + toolbarPosthogJS.capture('toolbar feature flag override removed') 
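Editor's note: the new useMousePosition hook above is the shared pointer tracker behind both HeatmapMouseInfo and ScrollDepthMouseInfo. A minimal, hypothetical consumer; the component name and import path are illustrative, not part of this PR:

    import { useMousePosition } from '~/toolbar/elements/useMousePosition'

    // Illustrative only: a badge that follows the cursor in viewport coordinates
    export function CursorBadge(): JSX.Element {
        const { x, y } = useMousePosition()
        // eslint-disable-next-line react/forbid-dom-props
        return <div style={{ position: 'fixed', left: x, top: y }}>{`${x}, ${y}`}</div>
    }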
actions.checkLocalOverrides() clientPostHog.featureFlags.reloadFeatureFlags() } diff --git a/frontend/src/toolbar/posthog.ts b/frontend/src/toolbar/posthog.ts deleted file mode 100644 index 6be5aefd4a906..0000000000000 --- a/frontend/src/toolbar/posthog.ts +++ /dev/null @@ -1,18 +0,0 @@ -import PostHog from 'posthog-js-lite' - -const DEFAULT_API_KEY = 'sTMFPsFhdP1Ssg' - -const runningOnPosthog = !!window.POSTHOG_APP_CONTEXT -const apiKey = runningOnPosthog ? window.JS_POSTHOG_API_KEY : DEFAULT_API_KEY -const apiHost = runningOnPosthog ? window.JS_POSTHOG_HOST : 'https://internal-e.posthog.com' - -export const posthog = new PostHog(apiKey || DEFAULT_API_KEY, { - host: apiHost, - enable: false, // must call .optIn() before any events are sent - persistence: 'memory', // We don't want to persist anything, all events are in-memory - persistence_name: apiKey + '_toolbar', // We don't need this but it ensures we don't accidentally mess with the standard persistence -}) - -if (runningOnPosthog && window.JS_POSTHOG_SELF_CAPTURE) { - posthog.debug() -} diff --git a/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx b/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx index e0af4c02d1864..f07471eec2a58 100644 --- a/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx +++ b/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx @@ -1,111 +1,467 @@ +import { IconInfo, IconMagicWand } from '@posthog/icons' +import { LemonLabel, LemonSegmentedButton, LemonSelect, LemonTag } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { CUSTOM_OPTION_KEY } from 'lib/components/DateFilter/types' import { IconSync } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonInput } from 'lib/lemon-ui/LemonInput' import { LemonMenu } from 'lib/lemon-ui/LemonMenu' +import { LemonSlider } from 'lib/lemon-ui/LemonSlider' import { LemonSwitch } from 'lib/lemon-ui/LemonSwitch' import { Spinner } from 'lib/lemon-ui/Spinner' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { dateFilterToText, dateMapping } from 'lib/utils' +import React, { useState } from 'react' import { ToolbarMenu } from '~/toolbar/bar/ToolbarMenu' import { elementsLogic } from '~/toolbar/elements/elementsLogic' -import { heatmapLogic } from '~/toolbar/elements/heatmapLogic' +import { HEATMAP_COLOR_PALETTE_OPTIONS, heatmapLogic } from '~/toolbar/elements/heatmapLogic' import { currentPageLogic } from '~/toolbar/stats/currentPageLogic' +import { toolbarConfigLogic } from '../toolbarConfigLogic' +import { useToolbarFeatureFlag } from '../toolbarPosthogJS' + +const ScrollDepthJSWarning = (): JSX.Element | null => { + const { scrollDepthPosthogJsError } = useValues(heatmapLogic) + + if (!scrollDepthPosthogJsError) { + return null + } + + return ( +

+ {scrollDepthPosthogJsError === 'version' ? ( + <>This feature requires a newer version of posthog-js + ) : scrollDepthPosthogJsError === 'disabled' ? ( + <> + Your posthog-js config has disable_scroll_properties set - these properties are required for + scroll depth calculations to work. + + ) : null} +

+ ) +} + +const HeatmapsJSWarning = (): JSX.Element | null => { + const { posthog } = useValues(toolbarConfigLogic) + + if (!posthog || posthog?.heatmaps?.isEnabled) { + return null + } + + return ( +

+ {!posthog.heatmaps ? ( + <>The version of posthog-js you are using does not support collecting heatmap data. + ) : !posthog.heatmaps.isEnabled ? ( + <> + Heatmap collection is disabled in your posthog-js configuration. If you do not see heatmap data then + this is likely why. + + ) : null} +

+ ) +} + +const SectionButton = ({ + children, + checked, + onChange, + loading, +}: { + children: React.ReactNode + checked: boolean + onChange: (checked: boolean) => void + loading?: boolean +}): JSX.Element => { + return ( +
+ onChange(!checked)} + sideIcon={} + > + + {children} + + {loading ? : null} + + +
+ ) +} + +const SectionSetting = ({ + children, + title, + info, +}: { + children: React.ReactNode + title: React.ReactNode + info?: React.ReactNode +}): JSX.Element => { + const [showInfo, setShowInfo] = useState(false) + return ( +
+
+ + {title} + + {info && ( + } + size="xsmall" + active={showInfo} + onClick={() => setShowInfo(!showInfo)} + noPadding + /> + )} + +
+ + {showInfo ?
{info}
: null} + + {children} +
+ ) +} + export const HeatmapToolbarMenu = (): JSX.Element => { const { wildcardHref } = useValues(currentPageLogic) - const { setWildcardHref } = useActions(currentPageLogic) + const { setWildcardHref, autoWildcardHref } = useActions(currentPageLogic) - const { matchLinksByHref, countedElements, clickCount, heatmapLoading, heatmapFilter, canLoadMoreElementStats } = - useValues(heatmapLogic) - const { setHeatmapFilter, loadMoreElementStats, setMatchLinksByHref } = useActions(heatmapLogic) + const { + matchLinksByHref, + countedElements, + clickCount, + commonFilters, + heatmapFilters, + canLoadMoreElementStats, + viewportRange, + rawHeatmapLoading, + elementStatsLoading, + clickmapsEnabled, + heatmapFixedPositionMode, + heatmapColorPalette, + } = useValues(heatmapLogic) + const { + setCommonFilters, + patchHeatmapFilters, + loadMoreElementStats, + setMatchLinksByHref, + toggleClickmapsEnabled, + setHeatmapFixedPositionMode, + setHeatmapColorPalette, + } = useActions(heatmapLogic) const { setHighlightElement, setSelectedElement } = useActions(elementsLogic) const dateItems = dateMapping .filter((dm) => dm.key !== CUSTOM_OPTION_KEY) .map((dateOption) => ({ label: dateOption.key, - onClick: () => setHeatmapFilter({ date_from: dateOption.values[0], date_to: dateOption.values[1] }), + onClick: () => setCommonFilters({ date_from: dateOption.values[0], date_to: dateOption.values[1] }), })) + const showNewHeatmaps = useToolbarFeatureFlag('toolbar-heatmaps') + return ( - -
-
Use * as a wildcard
-
- - - {dateFilterToText(heatmapFilter.date_from, heatmapFilter.date_to, 'Last 7 days')} - - +
+ + } + size="small" + onClick={() => autoWildcardHref()} + tooltip={ + <> + You can use the wildcard character * to match any character in the URL. For + example, https://example.com/* will match{' '} + https://example.com/page and https://example.com/page/1. +
+ Click this button to automatically add wildcards where we believe it would make sense + + } + /> +
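The auto-wildcarding behaviour described in this tooltip is implemented further down in this diff (currentPageLogic.ts gains a replaceWithWildcard helper and an autoWildcardHref listener). A condensed sketch of that idea, using the same heuristics as the diff (UUIDs, plain numbers, and very long path or query segments become `*`):

```ts
// Condensed sketch of the auto-wildcarding added to currentPageLogic.ts later in this diff:
// segments that look like IDs become '*' so the heatmap URL pattern matches the whole family
// of pages rather than one specific URL.
const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i

const wildcardSegment = (segment: string): string =>
    UUID_RE.test(segment) || /^[0-9]+$/.test(segment) || segment.length > 24 ? '*' : segment

export function autoWildcardUrl(url: string): string {
    const [path, query] = url.split('?')
    const wildcardedPath = path.split('/').map(wildcardSegment).join('/')
    if (!query) {
        return wildcardedPath
    }
    // Query parameter values get the same treatment; keys are kept as-is
    const wildcardedQuery = query
        .split('&')
        .map((pair) => {
            const [key, value] = pair.split('=')
            return `${key}=${wildcardSegment(value ?? '')}`
        })
        .join('&')
    return `${wildcardedPath}?${wildcardedQuery}`
}

// autoWildcardUrl('https://example.com/project/123?user=42')
// // -> 'https://example.com/project/*?user=*'
```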
- } - type="secondary" - size="small" - onClick={loadMoreElementStats} - disabledReason={ - canLoadMoreElementStats ? undefined : 'Loaded all elements in this data range.' - } - > - Load more +
+ + + {dateFilterToText(commonFilters.date_from, commonFilters.date_to, 'Last 7 days')} - - {heatmapLoading ? : null} -
-
- Found: {countedElements.length} elements / {clickCount} clicks! -
- - - setMatchLinksByHref(checked)} - fullWidth={true} - bordered={true} - /> - +
-
-
- {heatmapLoading ? ( - - - - ) : countedElements.length ? ( - countedElements.map(({ element, count, actionStep }, index) => { - return ( -
setSelectedElement(element)} - onMouseEnter={() => setHighlightElement(element)} - onMouseLeave={() => setHighlightElement(null)} - > -
- {index + 1}.  - {actionStep?.text || - (actionStep?.tag_name ? ( - <{actionStep.tag_name}> - ) : ( - Element - ))} -
-
{count} clicks
+ {showNewHeatmaps ? ( +
+ + patchHeatmapFilters({ + enabled: e, + }) + } + loading={rawHeatmapLoading} + checked={!!heatmapFilters.enabled} + > + Heatmaps NEW{' '} + + + {heatmapFilters.enabled && ( + <> + +

+ Heatmaps are calculated using additional data sent along with standard events. They + are based on general pointer interactions and might not be 100% accurate to the + page you are viewing.

+ + + Select the kind of heatmap you want to view. Clicks, rageclicks, and mouse + moves options will show different "heat" based on the number of interactions + at that area of the page. Scroll depth will show how far down the page users + have reached. +
+ Scroll depth uses additional information from Pageview and Pageleave events + to indicate how far down the page users have scrolled. + + } + > +
+ patchHeatmapFilters({ type: e })} + value={heatmapFilters.type ?? undefined} + options={[ + { + value: 'click', + label: 'Clicks', + }, + { + value: 'rageclick', + label: 'Rageclicks', + }, + { + value: 'mousemove', + label: 'Mouse moves', + }, + { + value: 'scrolldepth', + label: 'Scroll depth', + }, + ]} + size="small" + /> + + {heatmapFilters.type === 'scrolldepth' && } +
+
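Selecting scroll depth switches the toolbar to a different result shape than the pointer-based heatmap kinds. A sketch of narrowing the HeatmapResponseType union that this diff adds to frontend/src/toolbar/types.ts; the split shown here is illustrative, not the exact heatmapLogic code:

```ts
import type { HeatmapResponseType } from '~/toolbar/types'

// Pointer-based results carry coordinates and a fixed-position flag; scroll depth results are
// bucketed. Checking which fields are present is enough to tell the two shapes apart.
export function splitHeatmapResults(response: HeatmapResponseType): {
    pointer: { x: number; y: number; count: number; targetFixed: boolean }[]
    scrollDepth: { bucket: number; count: number; cumulativeCount: number }[]
} {
    const pointer: { x: number; y: number; count: number; targetFixed: boolean }[] = []
    const scrollDepth: { bucket: number; count: number; cumulativeCount: number }[] = []
    for (const result of response.results) {
        if ('scroll_depth_bucket' in result) {
            scrollDepth.push({
                bucket: result.scroll_depth_bucket,
                count: result.bucket_count,
                cumulativeCount: result.cumulative_count,
            })
        } else {
            pointer.push({
                x: result.pointer_relative_x,
                y: result.pointer_y,
                count: result.count,
                targetFixed: result.pointer_target_fixed,
            })
        }
    }
    return { pointer, scrollDepth }
}
```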
+ + + Heatmaps can be aggregated by total count or unique visitors. Total count + will show the total number of interactions on the page, while unique + visitors will only count each visitor once. + + } + > +
+ patchHeatmapFilters({ aggregation: e })} + value={heatmapFilters.aggregation ?? 'total_count'} + options={[ + { + value: 'total_count', + label: 'Total count', + }, + { + value: 'unique_visitors', + label: 'Unique visitors', + }, + ]} + size="small" + />
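Both the selected heatmap kind and the aggregation choice end up in the API request. A hypothetical helper assembling the HeatmapRequestType that this diff adds to toolbar/types.ts; the actual composition inside heatmapLogic may differ, but the field names come from that type:

```ts
import type { HeatmapRequestType } from '~/toolbar/types'

// Hypothetical helper (not the actual heatmapLogic implementation) mapping the common date
// filters, the selected kind/aggregation, and the viewport range onto HeatmapRequestType.
export function buildHeatmapRequest(options: {
    type: HeatmapRequestType['type']
    aggregation: HeatmapRequestType['aggregation']
    dateFrom?: string
    dateTo?: string
    urlPattern: string
    viewportMin: number
    viewportMax: number
}): HeatmapRequestType {
    return {
        type: options.type,
        aggregation: options.aggregation,
        date_from: options.dateFrom,
        date_to: options.dateTo,
        // The wildcard pattern, e.g. the auto-wildcarded URL from the input above
        url_pattern: options.urlPattern,
        viewport_width_min: options.viewportMin,
        viewport_width_max: options.viewportMax,
    }
}
```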
- ) - }) - ) : ( -
No elements found.
+
+ + + The viewport accuracy setting determines how closely the loaded data + must match your current viewport.
+ For example if you set this to 100%, only visitors whose viewport width is + identical to yours will be included in the heatmap. +
+ At 90% you will see data from viewports that are 10% smaller or larger than + yours. + + } + > +
+ patchHeatmapFilters({ viewportAccuracy: value })} + /> + + {`${Math.round((heatmapFilters.viewportAccuracy ?? 1) * 100)}% (${ + viewportRange.min + }px - ${viewportRange.max}px)`} + +
+
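The slider value is shown next to a pixel range derived from the current window width. A minimal sketch of that relationship, matching the description above (at 0.9 accuracy, viewports up to 10% narrower or wider are included); the real viewportRange selector may round or clamp differently:

```ts
// Minimal sketch: translate a viewport accuracy fraction into the min/max viewport widths
// shown next to the slider. At accuracy 1 only identical widths match; at 0.9 widths within
// roughly ±10% of the current window are included.
export function viewportRangeFor(
    accuracy: number,
    currentWidth: number = window.innerWidth
): { min: number; max: number } {
    const tolerance = 1 - accuracy
    return {
        min: Math.floor(currentWidth * (1 - tolerance)),
        max: Math.ceil(currentWidth * (1 + tolerance)),
    }
}

// viewportRangeFor(0.9, 1440) // -> { min: 1296, max: 1584 }
```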
+ + + + + + {heatmapFilters.type !== 'scrolldepth' && ( + + PostHog JS will attempt to detect fixed elements such as headers or + modals and will therefore show those heatmap areas, ignoring the scroll + value. +
+ You can choose to show these areas as fixed, include them with scrolled + data or hide them altogether. + + } + > + +
+ )} + )}
+ ) : null} + +
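The HeatmapElement type added later in this diff carries a targetFixed flag, which is what the fixed-positioning setting acts on. A hedged sketch of the three behaviours described above; the mode names ('fixed', 'relative', 'hidden') and the projection logic are assumptions for illustration only:

```ts
import type { HeatmapElement } from '~/toolbar/types'

// Hedged sketch of the fixed-element handling described above. The mode names are assumptions;
// the diff only states that fixed areas can be shown as fixed, merged with scrolled data,
// or hidden altogether.
type HeatmapFixedPositionMode = 'fixed' | 'relative' | 'hidden'

export function projectHeatmapElement(
    element: HeatmapElement,
    mode: HeatmapFixedPositionMode,
    scrollY: number
): { y: number; visible: boolean } {
    if (!element.targetFixed) {
        // Regular content scrolls with the document
        return { y: element.y - scrollY, visible: true }
    }
    if (mode === 'hidden') {
        return { y: element.y, visible: false }
    }
    // 'fixed' keeps the point pinned to the viewport (ignoring scroll);
    // 'relative' folds it into the scrolled data like any other element
    return { y: mode === 'fixed' ? element.y : element.y - scrollY, visible: true }
}
```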
+ {showNewHeatmaps ? ( + toggleClickmapsEnabled(e)} + loading={elementStatsLoading} + checked={!!clickmapsEnabled} + > + Clickmaps (autocapture) + + ) : null} + + {(clickmapsEnabled || !showNewHeatmaps) && ( + <> + {showNewHeatmaps ? ( +

+ Clickmaps are built using Autocapture events. They are more accurate than heatmaps + when the event can be mapped to a specific element on the page you are viewing, + but they usually capture less data.

+ ) : null} +
+ } + type="secondary" + size="small" + onClick={loadMoreElementStats} + disabledReason={ + canLoadMoreElementStats ? undefined : 'Loaded all elements in this data range.' + } + > + Load more + + + Matching links by their target URL can exclude clicks from the heatmap if + the URL is too unique. + + } + > + setMatchLinksByHref(checked)} + fullWidth={true} + bordered={true} + /> + +
+ +
+ Found: {countedElements.length} elements / {clickCount} clicks! +
+
+ {countedElements.length ? ( + countedElements.map(({ element, count, actionStep }, index) => { + return ( + setSelectedElement(element)} + > +
setHighlightElement(element)} + onMouseLeave={() => setHighlightElement(null)} + > +
+ {index + 1}.  + {actionStep?.text || + (actionStep?.tag_name ? ( + <{actionStep.tag_name}> + ) : ( + Element + ))} +
+
{count} clicks
+
+
+ ) + }) + ) : ( +
No elements found.
+ )} +
+ + )}
diff --git a/frontend/src/toolbar/stats/currentPageLogic.ts b/frontend/src/toolbar/stats/currentPageLogic.ts index e867653432c52..021f9cea56d5a 100644 --- a/frontend/src/toolbar/stats/currentPageLogic.ts +++ b/frontend/src/toolbar/stats/currentPageLogic.ts @@ -1,12 +1,32 @@ -import { actions, afterMount, beforeUnmount, kea, path, reducers } from 'kea' +import { actions, afterMount, beforeUnmount, kea, listeners, path, reducers } from 'kea' import type { currentPageLogicType } from './currentPageLogicType' +const replaceWithWildcard = (part: string): string => { + // replace uuids + if (part.match(/^([a-f]|[0-9]){8}-([a-f]|[0-9]){4}-([a-f]|[0-9]){4}-([a-f]|[0-9]){4}-([a-f]|[0-9]){12}$/)) { + return '*' + } + + // replace digits + if (part.match(/^[0-9]+$/)) { + return '*' + } + + // Replace long values + if (part.length > 24) { + return '*' + } + + return part +} + export const currentPageLogic = kea([ path(['toolbar', 'stats', 'currentPageLogic']), actions(() => ({ setHref: (href: string) => ({ href }), setWildcardHref: (href: string) => ({ href }), + autoWildcardHref: true, })), reducers(() => ({ href: [window.location.href, { setHref: (_, { href }) => href }], @@ -16,6 +36,31 @@ export const currentPageLogic = kea([ ], })), + listeners(({ actions, values }) => ({ + autoWildcardHref: () => { + let url = values.wildcardHref + + const urlParts = url.split('?') + + url = urlParts[0] + .split('/') + .map((part) => replaceWithWildcard(part)) + .join('/') + + // Iterate over query params and do the same for their values + if (urlParts.length > 1) { + const queryParams = urlParts[1].split('&') + for (let i = 0; i < queryParams.length; i++) { + const [key, value] = queryParams[i].split('=') + queryParams[i] = `${key}=${replaceWithWildcard(value)}` + } + url = `${url}?${queryParams.join('&')}` + } + + actions.setWildcardHref(url) + }, + })), + afterMount(({ actions, values, cache }) => { cache.interval = window.setInterval(() => { if (window.location.href !== values.href) { diff --git a/frontend/src/toolbar/toolbarConfigLogic.ts b/frontend/src/toolbar/toolbarConfigLogic.ts index c0650d7552d55..55ea5b53684c1 100644 --- a/frontend/src/toolbar/toolbarConfigLogic.ts +++ b/frontend/src/toolbar/toolbarConfigLogic.ts @@ -2,7 +2,7 @@ import { actions, afterMount, kea, listeners, path, props, reducers, selectors } import { combineUrl, encodeParams } from 'kea-router' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' -import { posthog } from '~/toolbar/posthog' +import { toolbarPosthogJS } from '~/toolbar/toolbarPosthogJS' import { ToolbarProps } from '~/types' import type { toolbarConfigLogicType } from './toolbarConfigLogicType' @@ -51,17 +51,17 @@ export const toolbarConfigLogic = kea([ listeners(({ values, actions }) => ({ authenticate: () => { - posthog.capture('toolbar authenticate', { is_authenticated: values.isAuthenticated }) + toolbarPosthogJS.capture('toolbar authenticate', { is_authenticated: values.isAuthenticated }) const encodedUrl = encodeURIComponent(window.location.href) actions.persistConfig() window.location.href = `${values.apiURL}/authorize_and_redirect/?redirect=${encodedUrl}` }, logout: () => { - posthog.capture('toolbar logout') + toolbarPosthogJS.capture('toolbar logout') localStorage.removeItem(LOCALSTORAGE_KEY) }, tokenExpired: () => { - posthog.capture('toolbar token expired') + toolbarPosthogJS.capture('toolbar token expired') console.warn('PostHog Toolbar API token expired. 
Clearing session.') if (values.props.source !== 'localstorage') { lemonToast.error('PostHog Toolbar API token expired.') @@ -87,12 +87,14 @@ export const toolbarConfigLogic = kea([ afterMount(({ props, values }) => { if (props.instrument) { const distinctId = props.distinctId + + void toolbarPosthogJS.optIn() + if (distinctId) { - posthog.identify(distinctId, props.userEmail ? { email: props.userEmail } : {}) + toolbarPosthogJS.identify(distinctId, props.userEmail ? { email: props.userEmail } : {}) } - posthog.optIn() } - posthog.capture('toolbar loaded', { is_authenticated: values.isAuthenticated }) + toolbarPosthogJS.capture('toolbar loaded', { is_authenticated: values.isAuthenticated }) }), ]) diff --git a/frontend/src/toolbar/toolbarPosthogJS.ts b/frontend/src/toolbar/toolbarPosthogJS.ts new file mode 100644 index 0000000000000..cb8cf83d2bdbc --- /dev/null +++ b/frontend/src/toolbar/toolbarPosthogJS.ts @@ -0,0 +1,35 @@ +import { FeatureFlagKey } from 'lib/constants' +import PostHog from 'posthog-js-lite' +import { useEffect, useState } from 'react' + +const DEFAULT_API_KEY = 'sTMFPsFhdP1Ssg' + +const runningOnPosthog = !!window.POSTHOG_APP_CONTEXT +const apiKey = runningOnPosthog ? window.JS_POSTHOG_API_KEY : DEFAULT_API_KEY +const apiHost = runningOnPosthog ? window.JS_POSTHOG_HOST : 'https://internal-e.posthog.com' + +export const toolbarPosthogJS = new PostHog(apiKey || DEFAULT_API_KEY, { + host: apiHost, + defaultOptIn: false, // must call .optIn() before any events are sent + persistence: 'memory', // We don't want to persist anything, all events are in-memory + persistence_name: apiKey + '_toolbar', // We don't need this but it ensures we don't accidentally mess with the standard persistence + preloadFeatureFlags: false, +}) + +if (runningOnPosthog && window.JS_POSTHOG_SELF_CAPTURE) { + toolbarPosthogJS.debug() +} + +export const useToolbarFeatureFlag = (flag: FeatureFlagKey, match?: string): boolean => { + const [flagValue, setFlagValue] = useState(toolbarPosthogJS.getFeatureFlag(flag)) + + useEffect(() => { + return toolbarPosthogJS.onFeatureFlag(flag, (value) => setFlagValue(value)) + }, [flag, match]) + + if (match) { + return flagValue === match + } + + return !!flagValue +} diff --git a/frontend/src/toolbar/types.ts b/frontend/src/toolbar/types.ts index acc0a13a1005e..261a9618290a6 100644 --- a/frontend/src/toolbar/types.ts +++ b/frontend/src/toolbar/types.ts @@ -7,6 +7,42 @@ export type ElementsEventType = { type: '$autocapture' | '$rageclick' } +export type HeatmapKind = 'click' | 'rageclick' | 'mousemove' | 'scrolldepth' + +export type HeatmapRequestType = { + type: HeatmapKind + date_from?: string + date_to?: string + url_exact?: string + url_pattern?: string + viewport_width_min?: number + viewport_width_max?: number + aggregation: 'total_count' | 'unique_visitors' +} + +export type HeatmapResponseType = { + results: ( + | { + count: number + pointer_relative_x: number + pointer_target_fixed: boolean + pointer_y: number + } + | { + scroll_depth_bucket: number + bucket_count: number + cumulative_count: number + } + )[] +} + +export type HeatmapElement = { + count: number + xPercentage: number + targetFixed: boolean + y: number +} + export interface CountedHTMLElement { count: number // total of types of clicks clickCount: number // autocapture clicks @@ -44,12 +80,6 @@ export interface ActionElementWithMetadata extends ElementWithMetadata { step?: ActionStepType } -export type BoxColor = { - backgroundBlendMode: string - background: string - boxShadow: string -} - 
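The toolbarPosthogJS.ts module added above also exports the useToolbarFeatureFlag hook that gates the new heatmaps UI earlier in this diff. A small usage sketch; the 'test' variant name is illustrative and not defined anywhere in this diff:

```ts
import { useToolbarFeatureFlag } from '~/toolbar/toolbarPosthogJS'

// Usage sketch for the hook defined in toolbarPosthogJS.ts above. It subscribes via
// toolbarPosthogJS.onFeatureFlag, so callers re-render whenever the flag value changes.
export function useHeatmapMenuFlags(): { showNewHeatmaps: boolean; isTestVariant: boolean } {
    // Boolean form: any truthy flag value enables the new heatmaps UI
    const showNewHeatmaps = useToolbarFeatureFlag('toolbar-heatmaps')
    // Variant form: only true when the flag resolves to the given variant key
    // ('test' is an illustrative variant name, not something this diff defines)
    const isTestVariant = useToolbarFeatureFlag('toolbar-heatmaps', 'test')
    return { showNewHeatmaps, isTestVariant }
}
```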
export type ActionDraftType = Omit export interface ActionStepForm extends ActionStepType { diff --git a/frontend/src/toolbar/utils.ts b/frontend/src/toolbar/utils.ts index 9e2e67b56231f..53f281f7e1c74 100644 --- a/frontend/src/toolbar/utils.ts +++ b/frontend/src/toolbar/utils.ts @@ -2,8 +2,9 @@ import { finder } from '@medv/finder' import { CLICK_TARGET_SELECTOR, CLICK_TARGETS, escapeRegex, TAGS_TO_IGNORE } from 'lib/actionUtils' import { cssEscape } from 'lib/utils/cssEscape' import { querySelectorAllDeep } from 'query-selector-shadow-dom' +import { CSSProperties } from 'react' -import { ActionStepForm, BoxColor, ElementRect } from '~/toolbar/types' +import { ActionStepForm, ElementRect } from '~/toolbar/types' import { ActionStepType, StringMatching } from '~/types' export const TOOLBAR_ID = '__POSTHOG_TOOLBAR__' @@ -246,26 +247,21 @@ export function getElementForStep(step: ActionStepForm, allElements?: HTMLElemen return null } -export function getBoxColors(color: 'blue' | 'red' | 'green', hover = false, opacity = 0.2): BoxColor | undefined { +export function getBoxColors(color: 'blue' | 'red' | 'green', hover = false, opacity = 0.2): CSSProperties | undefined { if (color === 'blue') { return { backgroundBlendMode: 'multiply', background: `hsla(240, 90%, 58%, ${opacity})`, - boxShadow: `hsla(240, 90%, 27%, 0.5) 0px 3px 10px ${hover ? 4 : 2}px`, + boxShadow: `hsla(240, 90%, 27%, 0.2) 0px 3px 10px ${hover ? 4 : 0}px`, + outline: `hsla(240, 90%, 58%, 0.5) solid 1px`, } } if (color === 'red') { return { backgroundBlendMode: 'multiply', background: `hsla(4, 90%, 58%, ${opacity})`, - boxShadow: `hsla(4, 90%, 27%, 0.8) 0px 3px 10px ${hover ? 4 : 2}px`, - } - } - if (color === 'green') { - return { - backgroundBlendMode: 'multiply', - background: `hsla(97, 90%, 58%, ${opacity})`, - boxShadow: `hsla(97, 90%, 27%, 0.8) 0px 3px 10px ${hover ? 4 : 2}px`, + boxShadow: `hsla(4, 90%, 27%, 0.2) 0px 3px 10px ${hover ? 
5 : 0}px`, + outline: `hsla(4, 90%, 58%, 0.5) solid 1px`, } } } @@ -297,14 +293,13 @@ export function actionStepToActionStepFormItem(step: ActionStepType, isNew = fal href_selected: false, url_selected: false, } - } else { - return { - ...step, - selector_selected: hasSelector, - text_selected: false, - url_selected: false, - href_selected: false, - } + } + return { + ...step, + selector_selected: hasSelector, + text_selected: false, + url_selected: false, + href_selected: false, } } diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 1f1221d8c9c1a..3a49374e1f4fe 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -148,6 +148,7 @@ export enum AvailableFeature { TWOFA = '2fa', PRIORITY_SUPPORT = 'priority_support', SUPPORT_RESPONSE_TIME = 'support_response_time', + DATA_PIPELINES_TRANSFORMATIONS = 'data_pipelines_transformations', } type AvailableFeatureUnion = `${AvailableFeature}` @@ -442,6 +443,7 @@ export interface TeamType extends TeamBasicType { session_replay_config: { record_canvas?: boolean; ai_config?: SessionRecordingAIConfig } | undefined | null autocapture_exceptions_opt_in: boolean surveys_opt_in?: boolean + heatmaps_opt_in?: boolean autocapture_exceptions_errors_to_ignore: string[] test_account_filters: AnyPropertyFilter[] test_account_filters_default_checked: boolean @@ -615,7 +617,6 @@ export enum PipelineTab { } export enum PipelineStage { - Filter = 'filter', Transformation = 'transformation', Destination = 'destination', SiteApp = 'site-app', @@ -1249,6 +1250,7 @@ export interface SessionRecordingType { /** Where this recording information was loaded from */ storage?: 'object_storage_lts' | 'object_storage' summary?: string + snapshot_source: 'web' | 'mobile' | 'unknown' } export interface SessionRecordingPropertiesType { @@ -2812,6 +2814,7 @@ export enum PropertyDefinitionType { Event = 'event', Person = 'person', Group = 'group', + Session = 'session', } export interface PropertyDefinition { @@ -3113,11 +3116,13 @@ export type GraphDataset = ChartDataset & id: number /** Toggled on to draw incompleteness lines in LineGraph.tsx */ dotted?: boolean - /** Array of breakdown values used only in ActionsHorizontalBar.tsx data */ + /** Array of breakdown values used only in ActionsHorizontalBar/ActionsPie.tsx data */ breakdownValues?: (string | number | undefined)[] - /** Array of compare labels used only in ActionsHorizontalBar.tsx data */ + /** Array of breakdown labels used only in ActionsHorizontalBar/ActionsPie.tsx data */ + breakdownLabels?: (string | number | undefined)[] + /** Array of compare labels used only in ActionsHorizontalBar/ActionsPie.tsx data */ compareLabels?: (CompareLabelType | undefined)[] - /** Array of persons ussed only in (ActionsHorizontalBar|ActionsPie).tsx */ + /** Array of persons used only in (ActionsHorizontalBar|ActionsPie).tsx */ personsValues?: (Person | undefined)[] index?: number /** Value (count) for specific data point; only valid in the context of an xy intercept */ @@ -3598,6 +3603,8 @@ export interface ExternalDataSourceSyncSchema { export interface ExternalDataSourceSchema extends SimpleExternalDataSourceSchema { table?: SimpleDataWarehouseTable + incremental?: boolean + status?: string } export interface SimpleDataWarehouseTable { @@ -3607,7 +3614,7 @@ export interface SimpleDataWarehouseTable { row_count: number } -export type BatchExportDestinationS3 = { +export type BatchExportServiceS3 = { type: 'S3' config: { bucket_name: string @@ -3625,7 +3632,7 @@ export type BatchExportDestinationS3 = { } } 
-export type BatchExportDestinationPostgres = { +export type BatchExportServicePostgres = { type: 'Postgres' config: { user: string @@ -3641,7 +3648,7 @@ export type BatchExportDestinationPostgres = { } } -export type BatchExportDestinationSnowflake = { +export type BatchExportServiceSnowflake = { type: 'Snowflake' config: { account: string @@ -3657,7 +3664,7 @@ export type BatchExportDestinationSnowflake = { } } -export type BatchExportDestinationBigQuery = { +export type BatchExportServiceBigQuery = { type: 'BigQuery' config: { project_id: string @@ -3673,7 +3680,7 @@ export type BatchExportDestinationBigQuery = { } } -export type BatchExportDestinationHTTP = { +export type BatchExportServiceHTTP = { type: 'HTTP' config: { url: string @@ -3683,7 +3690,7 @@ export type BatchExportDestinationHTTP = { } } -export type BatchExportDestinationRedshift = { +export type BatchExportServiceRedshift = { type: 'Redshift' config: { user: string @@ -3702,13 +3709,13 @@ export type BatchExportDestinationRedshift = { // When adding a new option here also add a icon for it to // src/scenes/pipeline/icons/ // and update RenderBatchExportIcon -export type BatchExportDestination = - | BatchExportDestinationS3 - | BatchExportDestinationSnowflake - | BatchExportDestinationPostgres - | BatchExportDestinationBigQuery - | BatchExportDestinationRedshift - | BatchExportDestinationHTTP +export type BatchExportService = + | BatchExportServiceS3 + | BatchExportServiceSnowflake + | BatchExportServicePostgres + | BatchExportServiceBigQuery + | BatchExportServiceRedshift + | BatchExportServiceHTTP export type BatchExportConfiguration = { // User provided data for the export. This is the data that the user @@ -3716,7 +3723,7 @@ export type BatchExportConfiguration = { id: string team_id: number name: string - destination: BatchExportDestination + destination: BatchExportService interval: 'hour' | 'day' | 'every 5 minutes' created_at: string start_at: string | null diff --git a/gunicorn.config.py b/gunicorn.config.py index 1e56182026068..acd7ba3f5f592 100644 --- a/gunicorn.config.py +++ b/gunicorn.config.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- import logging import os diff --git a/hogvm/python/execute.py b/hogvm/python/execute.py index 4e4a61a1af5a0..a1130c0d54c89 100644 --- a/hogvm/python/execute.py +++ b/hogvm/python/execute.py @@ -1,5 +1,5 @@ import re -from typing import List, Any, Dict +from typing import Any from hogvm.python.operation import Operation, HOGQL_BYTECODE_IDENTIFIER @@ -33,7 +33,7 @@ def to_concat_arg(arg) -> str: return str(arg) -def execute_bytecode(bytecode: List[Any], fields: Dict[str, Any]) -> Any: +def execute_bytecode(bytecode: list[Any], fields: dict[str, Any]) -> Any: try: stack = [] iterator = iter(bytecode) diff --git a/latest_migrations.manifest b/latest_migrations.manifest index dac9ed4ce4539..9aab6a612f77b 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0403_plugin_has_private_access +posthog: 0405_team_heatmaps_opt_in sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 3c6bbf22089c0..0d27df2286796 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -66,11 +66,8 @@ posthog/hogql/database/schema/person_distinct_ids.py:0: error: Argument 1 to "se 
posthog/hogql/database/schema/person_distinct_id_overrides.py:0: error: Argument 1 to "select_from_person_distinct_id_overrides_table" has incompatible type "dict[str, list[str]]"; expected "dict[str, list[str | int]]" [arg-type] posthog/plugins/utils.py:0: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures [unreachable] posthog/plugins/utils.py:0: error: Statement is unreachable [unreachable] +posthog/clickhouse/kafka_engine.py:0: error: Argument 1 to "join" of "str" has incompatible type "list"; expected "Iterable[str]" [arg-type] posthog/models/filters/base_filter.py:0: error: "HogQLContext" has no attribute "person_on_events_mode" [attr-defined] -posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] -posthog/hogql/database/database.py:0: error: Incompatible types (expression has type "Literal['view', 'lazy_table']", TypedDict item "type" has type "Literal['integer', 'float', 'string', 'datetime', 'date', 'boolean', 'array', 'json', 'lazy_table', 'virtual_table', 'field_traverser', 'expression']") [typeddict-item] -posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type] -posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "None", base class "AbstractUser" defined the type as "CharField[str | int | Combinable, str]") [assignment] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "posthog.models.user.UserManager", base class "AbstractUser" defined the type as "django.contrib.auth.models.UserManager[AbstractUser]") [assignment] posthog/models/user.py:0: error: Cannot override writeable attribute with read-only property [override] @@ -82,6 +79,10 @@ posthog/models/user.py:0: note: bool posthog/models/user.py:0: error: "User" has no attribute "social_auth" [attr-defined] posthog/models/user.py:0: error: "User" has no attribute "social_auth" [attr-defined] posthog/models/person/person.py:0: error: Incompatible types in assignment (expression has type "list[Never]", variable has type "ValuesQuerySet[PersonDistinctId, str]") [assignment] +posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] +posthog/hogql/database/database.py:0: error: Incompatible types (expression has type "Literal['view', 'lazy_table']", TypedDict item "type" has type "Literal['integer', 'float', 'string', 'datetime', 'date', 'boolean', 'array', 'json', 'lazy_table', 'virtual_table', 'field_traverser', 'expression']") [typeddict-item] +posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type] +posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment] posthog/models/feature_flag/flag_matching.py:0: error: Statement is unreachable [unreachable] posthog/hogql_queries/utils/query_date_range.py:0: error: Incompatible return value type (got "str", expected "Literal['hour', 'day', 'week', 'month']") [return-value] posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "dict[str, 
int] | None" has no attribute "get" [union-attr] @@ -501,6 +502,8 @@ posthog/api/organization_member.py:0: error: Metaclass conflict: the metaclass o posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] ee/api/role.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] ee/clickhouse/views/insights.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] +posthog/warehouse/data_load/validate_schema.py:0: error: Item "None" of "DataWarehouseTable | None" has no attribute "get_columns" [union-attr] +posthog/warehouse/data_load/validate_schema.py:0: error: Item "None" of "DataWarehouseTable | None" has no attribute "columns" [union-attr] posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 6 has incompatible type "ExternalDataSchema"; expected "str" [arg-type] posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "DateTime | Date | datetime | date | str | float | int | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] @@ -545,6 +548,7 @@ posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "HttpResponse"; expected type "str | bytes" [index] posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined] posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "HttpResponse"; expected type "str | bytes" [index] +posthog/migrations/0404_remove_propertydefinition_property_type_is_valid_and_more.py:0: error: Module "django.contrib.postgres.operations" has no attribute "AddConstraintNotValid" [attr-defined] posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment] posthog/hogql/test/test_query.py:0: error: Argument 1 to "len" has incompatible type "list[Any] | None"; expected "Sized" [arg-type] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] diff --git a/package.json b/package.json index 2b4377e6c506d..3b478bf21f1e4 100644 --- a/package.json +++ b/package.json @@ -79,8 +79,8 @@ "@posthog/icons": "0.7.0", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", - "@rrweb/types": "2.0.0-alpha.12", - "@sentry/react": "7.22.0", + "@rrweb/types": "2.0.0-alpha.13", + "@sentry/react": "7.112.1", "@tailwindcss/container-queries": "^0.1.1", "@testing-library/dom": ">=7.21.4", "@tiptap/core": "^2.1.16", @@ -126,6 +126,7 @@ "fflate": "^0.7.4", "fs-extra": "^10.0.0", "fuse.js": "^6.6.2", + "heatmap.js": "^2.0.5", "husky": "^7.0.4", "image-blob-reduce": "^4.1.0", "kea": "^3.1.5", @@ -145,8 +146,8 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.128.2", - "posthog-js-lite": "2.5.0", + "posthog-js": "1.130.0", + "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", "protomaps-themes-base": "2.0.0-alpha.1", @@ -169,7 +170,7 @@ "react-transition-group": "^4.4.5", "react-virtualized": "^9.22.5", 
"resize-observer-polyfill": "^1.5.1", - "rrweb": "2.0.0-alpha.12", + "rrweb": "2.0.0-alpha.13", "sass": "^1.26.2", "tailwind-merge": "^2.2.2", "tailwindcss": "^3.4.0", @@ -189,7 +190,7 @@ "@babel/preset-typescript": "^7.22.5", "@cypress/webpack-preprocessor": "^5.17.1", "@playwright/test": "1.41.2", - "@sentry/types": "7.22.0", + "@sentry/types": "7.112.1", "@storybook/addon-a11y": "^7.6.4", "@storybook/addon-actions": "^7.6.4", "@storybook/addon-essentials": "^7.6.4", @@ -217,6 +218,7 @@ "@types/d3": "^7.4.0", "@types/d3-sankey": "^0.12.1", "@types/dompurify": "^3.0.3", + "@types/heatmap.js": "^2.0.41", "@types/image-blob-reduce": "^4.1.1", "@types/jest": "^29.2.3", "@types/jest-image-snapshot": "^6.1.0", @@ -258,6 +260,7 @@ "eslint-plugin-react": "^7.33.2", "eslint-plugin-simple-import-sort": "^10.0.0", "eslint-plugin-storybook": "^0.6.15", + "eslint-plugin-unused-imports": "^3.1.0", "file-loader": "^6.1.0", "givens": "^1.3.6", "history": "^5.0.1", @@ -307,7 +310,8 @@ "playwright": "1.41.2" }, "patchedDependencies": { - "rrweb@2.0.0-alpha.12": "patches/rrweb@2.0.0-alpha.12.patch" + "rrweb@2.0.0-alpha.13": "patches/rrweb@2.0.0-alpha.13.patch", + "heatmap.js@2.0.5": "patches/heatmap.js@2.0.5.patch" } }, "lint-staged": { diff --git a/patches/heatmap.js@2.0.5.patch b/patches/heatmap.js@2.0.5.patch new file mode 100644 index 0000000000000..2ad80898654c9 --- /dev/null +++ b/patches/heatmap.js@2.0.5.patch @@ -0,0 +1,13 @@ +diff --git a/build/heatmap.js b/build/heatmap.js +index 3eee39ea8c127b065fb4df763ab76af152e7d368..a37c950b937d04805b62832c661890931d0f3ff1 100644 +--- a/build/heatmap.js ++++ b/build/heatmap.js +@@ -524,7 +524,7 @@ var Canvas2dRenderer = (function Canvas2dRendererClosure() { + + } + +- img.data = imgData; ++ //img.data = imgData; + this.ctx.putImageData(img, x, y); + + this._renderBoundaries = [1000, 1000, 0, 0]; \ No newline at end of file diff --git a/patches/rrweb@2.0.0-alpha.12.patch b/patches/rrweb@2.0.0-alpha.13.patch similarity index 86% rename from patches/rrweb@2.0.0-alpha.12.patch rename to patches/rrweb@2.0.0-alpha.13.patch index e3fe50fb9ac49..c415c192d26c7 100644 --- a/patches/rrweb@2.0.0-alpha.12.patch +++ b/patches/rrweb@2.0.0-alpha.13.patch @@ -1,8 +1,8 @@ diff --git a/es/rrweb/packages/rrweb/src/replay/index.js b/es/rrweb/packages/rrweb/src/replay/index.js -index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce94604ce1aa1 100644 +index 0d49411b1f6d31103bed898c0e81d1d74ab51234..0b2160ef08aa3ae5310f63c295abc0a560332b22 100644 --- a/es/rrweb/packages/rrweb/src/replay/index.js +++ b/es/rrweb/packages/rrweb/src/replay/index.js -@@ -201,6 +201,10 @@ class Replayer { +@@ -203,6 +203,10 @@ class Replayer { mouseTail: defaultMouseTailConfig, useVirtualDom: true, logger: console, @@ -13,7 +13,7 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 }; this.config = Object.assign({}, defaultConfig, config); this.handleResize = this.handleResize.bind(this); -@@ -738,272 +742,276 @@ class Replayer { +@@ -755,255 +759,259 @@ class Replayer { applyIncremental(e, isSync) { var _a, _b, _c; const { data: d } = e; @@ -116,16 +116,16 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 - if ('blur' in target) { - target.blur(); - } -+ case IncrementalSource.MouseInteraction: { -+ if (d.id === -1) { - break; +- break; - case MouseInteractions.Focus: - if (triggerFocus && target.focus) { - target.focus({ - preventScroll: true, - }); - } -- break; ++ case IncrementalSource.MouseInteraction: { ++ if 
(d.id === -1) { + break; - case MouseInteractions.Click: - case MouseInteractions.TouchStart: - case MouseInteractions.TouchEnd: @@ -300,11 +300,17 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 + width: d.width, + height: d.height, + }); -+ break; + break; +- } +- if (this.usingVirtualDom) { +- const target = this.virtualDom.mirror.getNode(d.id); +- if (!target) { +- return this.debugNodeNotFound(d, d.id); + case IncrementalSource.Input: { + if (d.id === -1) { + break; -+ } + } +- target.inputData = d; + if (this.usingVirtualDom) { + const target = this.virtualDom.mirror.getNode(d.id); + if (!target) { @@ -316,18 +322,6 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 + this.applyInput(d); break; } -- if (this.usingVirtualDom) { -- const target = this.virtualDom.mirror.getNode(d.id); -+ case IncrementalSource.MediaInteraction: { -+ const target = this.usingVirtualDom -+ ? this.virtualDom.mirror.getNode(d.id) -+ : this.mirror.getNode(d.id); - if (!target) { - return this.debugNodeNotFound(d, d.id); - } -- target.inputData = d; -- break; -- } - this.applyInput(d); - break; - } @@ -339,39 +333,54 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 - return this.debugNodeNotFound(d, d.id); - } - const mediaEl = target; -- try { -- if (d.currentTime !== undefined) { -- mediaEl.currentTime = d.currentTime; -+ const mediaEl = target; -+ try { -+ if (d.currentTime !== undefined) { -+ mediaEl.currentTime = d.currentTime; -+ } -+ if (d.volume !== undefined) { -+ mediaEl.volume = d.volume; -+ } -+ if (d.muted !== undefined) { -+ mediaEl.muted = d.muted; -+ } -+ if (d.type === 1) { -+ mediaEl.pause(); -+ } -+ if (d.type === 0) { -+ void mediaEl.play(); -+ } -+ if (d.type === 4) { -+ mediaEl.playbackRate = d.playbackRate; -+ } - } -- if (d.volume !== undefined) { -- mediaEl.volume = d.volume; -+ catch (error) { -+ this.warn(`Failed to replay media interactions: ${error.message || error}`); +- const { events } = this.service.state.context; +- this.mediaManager.mediaMutation({ +- target: mediaEl, +- timeOffset: e.timestamp - events[0].timestamp, +- mutation: d, +- }); +- break; +- } +- case IncrementalSource.StyleSheetRule: +- case IncrementalSource.StyleDeclaration: { +- if (this.usingVirtualDom) { +- if (d.styleId) +- this.constructedStyleMutations.push(d); +- else if (d.id) +- (_b = this.virtualDom.mirror.getNode(d.id)) === null || _b === void 0 ? void 0 : _b.rules.push(d); +- } +- else +- this.applyStyleSheetMutation(d); +- break; +- } +- case IncrementalSource.CanvasMutation: { +- if (!this.config.UNSAFE_replayCanvas) { +- return; +- } +- if (this.usingVirtualDom) { +- const target = this.virtualDom.mirror.getNode(d.id); ++ case IncrementalSource.MediaInteraction: { ++ const target = this.usingVirtualDom ++ ? 
this.virtualDom.mirror.getNode(d.id) ++ : this.mirror.getNode(d.id); + if (!target) { + return this.debugNodeNotFound(d, d.id); } -- if (d.muted !== undefined) { -- mediaEl.muted = d.muted; +- target.canvasMutations.push({ +- event: e, ++ const mediaEl = target; ++ const { events } = this.service.state.context; ++ this.mediaManager.mediaMutation({ ++ target: mediaEl, ++ timeOffset: e.timestamp - events[0].timestamp, + mutation: d, + }); + break; -+ } + } +- else { +- const target = this.mirror.getNode(d.id); +- if (!target) { +- return this.debugNodeNotFound(d, d.id); + case IncrementalSource.StyleSheetRule: + case IncrementalSource.StyleDeclaration: { + if (this.usingVirtualDom) { @@ -380,18 +389,30 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 + else if (d.id) + (_b = this.virtualDom.mirror.getNode(d.id)) === null || _b === void 0 ? void 0 : _b.rules.push(d); } -- if (d.type === 1) { -- mediaEl.pause(); +- void canvasMutation({ +- event: e, +- mutation: d, +- target: target, +- imageMap: this.imageMap, +- canvasEventMap: this.canvasEventMap, +- errorHandler: this.warnCanvasMutationFailed.bind(this), +- }); + else + this.applyStyleSheetMutation(d); + break; -+ } + } +- break; +- } +- case IncrementalSource.Font: { +- try { +- const fontFace = new FontFace(d.family, d.buffer +- ? new Uint8Array(JSON.parse(d.fontSource)) +- : d.fontSource, d.descriptors); +- (_c = this.iframe.contentDocument) === null || _c === void 0 ? void 0 : _c.fonts.add(fontFace); + case IncrementalSource.CanvasMutation: { + if (!this.config.UNSAFE_replayCanvas) { + return; - } -- if (d.type === 0) { -- void mediaEl.play(); ++ } + if (this.usingVirtualDom) { + const target = this.virtualDom.mirror.getNode(d.id); + if (!target) { @@ -401,9 +422,7 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 + event: e, + mutation: d, + }); - } -- if (d.type === 4) { -- mediaEl.playbackRate = d.playbackRate; ++ } + else { + const target = this.mirror.getNode(d.id); + if (!target) { @@ -417,74 +436,28 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 + canvasEventMap: this.canvasEventMap, + errorHandler: this.warnCanvasMutationFailed.bind(this), + }); - } ++ } + break; } - catch (error) { -- this.warn(`Failed to replay media interactions: ${error.message || error}`); -- } -- break; -- } -- case IncrementalSource.StyleSheetRule: -- case IncrementalSource.StyleDeclaration: { -- if (this.usingVirtualDom) { -- if (d.styleId) -- this.constructedStyleMutations.push(d); -- else if (d.id) -- (_b = this.virtualDom.mirror.getNode(d.id)) === null || _b === void 0 ? void 0 : _b.rules.push(d); -- } -- else -- this.applyStyleSheetMutation(d); -- break; -- } -- case IncrementalSource.CanvasMutation: { -- if (!this.config.UNSAFE_replayCanvas) { -- return; -- } -- if (this.usingVirtualDom) { -- const target = this.virtualDom.mirror.getNode(d.id); -- if (!target) { -- return this.debugNodeNotFound(d, d.id); +- this.warn(error); + case IncrementalSource.Font: { + try { + const fontFace = new FontFace(d.family, d.buffer + ? new Uint8Array(JSON.parse(d.fontSource)) + : d.fontSource, d.descriptors); + (_c = this.iframe.contentDocument) === null || _c === void 0 ? 
void 0 : _c.fonts.add(fontFace); - } -- target.canvasMutations.push({ -- event: e, -- mutation: d, -- }); -- } -- else { -- const target = this.mirror.getNode(d.id); -- if (!target) { -- return this.debugNodeNotFound(d, d.id); ++ } + catch (error) { + this.warn(error); - } -- void canvasMutation({ -- event: e, -- mutation: d, -- target: target, -- imageMap: this.imageMap, -- canvasEventMap: this.canvasEventMap, -- errorHandler: this.warnCanvasMutationFailed.bind(this), -- }); -- } -- break; -- } -- case IncrementalSource.Font: { -- try { -- const fontFace = new FontFace(d.family, d.buffer -- ? new Uint8Array(JSON.parse(d.fontSource)) -- : d.fontSource, d.descriptors); -- (_c = this.iframe.contentDocument) === null || _c === void 0 ? void 0 : _c.fonts.add(fontFace); ++ } + break; } -- catch (error) { -- this.warn(error); +- break; +- } +- case IncrementalSource.Selection: { +- if (isSync) { +- this.lastSelectionData = d; + case IncrementalSource.Selection: { + if (isSync) { + this.lastSelectionData = d; @@ -492,12 +465,7 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 + } + this.applySelection(d); + break; - } -- break; -- } -- case IncrementalSource.Selection: { -- if (isSync) { -- this.lastSelectionData = d; ++ } + case IncrementalSource.AdoptedStyleSheet: { + if (this.usingVirtualDom) + this.adoptedStyleSheets.push(d); @@ -521,50 +489,69 @@ index e9a8ab2ba94093198f3dc42c9f6c4915f99cbc1d..182662fff8cca2eb4c63d956f0fce946 } applyMutation(d, isSync) { diff --git a/es/rrweb/packages/rrweb-snapshot/es/rrweb-snapshot.js b/es/rrweb/packages/rrweb-snapshot/es/rrweb-snapshot.js -index 342e1df171368d312dc0372dace0c6b5a1eb9c61..e98368347aab6f22902e691e1909aa0333232140 100644 +index 38a23aaae8d683fa584329eced277dd8de55d1ff..278e06bc6c8c964581d461405a0f0a4544344fa1 100644 --- a/es/rrweb/packages/rrweb-snapshot/es/rrweb-snapshot.js +++ b/es/rrweb/packages/rrweb-snapshot/es/rrweb-snapshot.js -@@ -1254,16 +1254,40 @@ function parse(css, options = {}) { +@@ -1255,54 +1255,19 @@ function parse(css, options = {}) { + }); + } + function selector() { +- whitespace(); +- while (css[0] == '}') { +- error('extra closing bracket'); +- css = css.slice(1); +- whitespace(); +- } +- const m = match(/^(("(?:\\"|[^"])*"|'(?:\\'|[^'])*'|[^{])+)/); ++ const m = match(/^([^{]+)/); if (!m) { return; } -- return trim(m[0]) -+ return splitRootSelectors(trim(m[0]) +- const cleanedInput = m[0] +- .trim() ++ return trim(m[0]) .replace(/\/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*\/+/g, '') .replace(/"(?:\\"|[^"])*"|'(?:\\'|[^'])*'/g, (m) => { - return m.replace(/,/g, '\u200C'); -- }) -- .split(/\s*(?![^(]*\)),\s*/) -+ })) - .map((s) => { - return s.replace(/\u200C/g, ','); - }); +- return m.replace(/,/g, '\u200C'); +- }); +- return customSplit(cleanedInput).map((s) => s.replace(/\u200C/g, ',').trim()); +- } +- function customSplit(input) { +- const result = []; +- let currentSegment = ''; +- let depthParentheses = 0; +- let depthBrackets = 0; +- for (const char of input) { +- if (char === '(') { +- depthParentheses++; +- } +- else if (char === ')') { +- depthParentheses--; +- } +- else if (char === '[') { +- depthBrackets++; +- } +- else if (char === ']') { +- depthBrackets--; +- } +- if (char === ',' && depthParentheses === 0 && depthBrackets === 0) { +- result.push(currentSegment); +- currentSegment = ''; +- } +- else { +- currentSegment += char; +- } +- } +- if (currentSegment) { +- result.push(currentSegment); +- } +- return result; ++ return m.replace(/,/g, '\u200C'); ++ }) ++ 
.split(/\s*(?![^(]*\)),\s*/) ++ .map((s) => { ++ return s.replace(/\u200C/g, ','); ++ }); } -+ function splitRootSelectors(input) { -+ let parts = []; -+ let nestedLevel = 0; -+ let currentPart = ''; -+ -+ for (let i = 0; i < input.length; i++) { -+ const char = input[i]; -+ currentPart += char; -+ -+ if (char === '(') { -+ nestedLevel++; -+ } else if (char === ')') { -+ nestedLevel--; -+ } else if (char === ',' && nestedLevel === 0) { -+ parts.push(currentPart.slice(0, -1).trim()); -+ currentPart = ''; -+ } -+ } -+ -+ if (currentPart.trim() !== '') { -+ parts.push(currentPart.trim()); -+ } -+ -+ return parts; -+ } function declaration() { const pos = position(); - const propMatch = match(/^(\*?[-#\/\*\\\w]+(\[[0-9a-z_-]+\])?)\s*/); diff --git a/plugin-server/bin/generate_session_recordings_messages.py b/plugin-server/bin/generate_session_recordings_messages.py index 4b5462bebd3a7..cfd3d034d194b 100755 --- a/plugin-server/bin/generate_session_recordings_messages.py +++ b/plugin-server/bin/generate_session_recordings_messages.py @@ -53,7 +53,6 @@ import json import uuid from sys import stderr, stdout -from typing import List import numpy from faker import Faker @@ -144,7 +143,7 @@ def get_parser(): def chunked( data: str, chunk_size: int, -) -> List[str]: +) -> list[str]: return [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)] diff --git a/plugin-server/src/backfill.ts b/plugin-server/src/backfill.ts deleted file mode 100644 index dd2bd1b118a3e..0000000000000 --- a/plugin-server/src/backfill.ts +++ /dev/null @@ -1,163 +0,0 @@ -import { DateTime, Duration, Interval } from 'luxon' -import assert from 'node:assert/strict' - -import { defaultConfig } from './config/config' -import { initApp } from './init' -import { Hub, RawClickHouseEvent, TimestampFormat } from './types' -import { DB } from './utils/db/db' -import { createHub } from './utils/db/hub' -import { formPluginEvent } from './utils/event' -import { Status } from './utils/status' -import { castTimestampToClickhouseFormat } from './utils/utils' -import { PersonState } from './worker/ingestion/person-state' - -const status = new Status('backfill') - -export async function startBackfill() { - // This mode can be used as an nodejs counterpart to the django management commands, for incident remediation. - // Add your logic to the runBackfill function and run it: - // - locally with: cd plugin-server && pnpm start:dev -- --backfill - // - in a toolbox pod with: node ./plugin-server/dist/index.js -- --backfill - - defaultConfig.PLUGIN_SERVER_MODE = null // Disable all consuming capabilities - const noCapability = {} - initApp(defaultConfig) - const [hub, closeHub] = await createHub(defaultConfig, noCapability) - status.info('🏁', 'Bootstraping done, starting to backfill') - - await runBackfill(hub) - - // Gracefully tear down the clients. - status.info('🏁', 'Backfill done, starting shutdown') - await closeHub() -} - -async function runBackfill(hub: Hub) { - const lower_bound = DateTime.fromISO(process.env.BACKFILL_START!) - assert.ok(lower_bound.isValid, 'BACKFILL_START is an invalid time: ' + lower_bound.invalidReason) - const upper_bound = DateTime.fromISO(process.env.BACKFILL_END!) - assert.ok(upper_bound.isValid, 'BACKFILL_END is an invalid time: ' + upper_bound.invalidReason) - const lower_bound_ts = DateTime.fromISO(process.env.BACKFILL_START_TS!) 
- assert.ok(lower_bound_ts.isValid, 'BACKFILL_START_TS is an invalid time: ' + lower_bound_ts.invalidReason) - const upper_bound_ts = DateTime.fromISO(process.env.BACKFILL_END_TS!) - assert.ok(upper_bound_ts.isValid, 'BACKFILL_END_TS is an invalid time: ' + upper_bound_ts.invalidReason) - const step = Duration.fromISO(process.env.BACKFILL_STEP_INTERVAL!) - assert.ok(step.isValid, 'BACKFILL_STEP_INTERVAL is an invalid duration: ' + step.invalidReason) - - status.info('🕰', 'Running backfill with the following bounds', { - lower_bound, - upper_bound, - lower_bound_ts, - upper_bound_ts, - step, - }) - - let interrupted = false - process.on('SIGINT', function () { - interrupted = true - }) - - const windows = Interval.fromDateTimes(lower_bound, upper_bound).splitBy(step) - for (const window of windows) { - status.info('🕰', 'Processing events in window', { - window, - }) - - const events = await retrieveEvents(hub.db, window, lower_bound_ts, upper_bound_ts) - await handleBatch(hub.db, events) - - status.info('✅', 'Successfully processed events in window', { - window, - }) - if (interrupted) { - status.info('🛑', 'Stopping processing due to SIGINT') - break - } - } -} - -async function retrieveEvents( - db: DB, - window: Interval, - start_ts: DateTime, - end_ts: DateTime -): Promise { - const chTimestampLower = castTimestampToClickhouseFormat(window.start, TimestampFormat.ClickHouseSecondPrecision) - const chTimestampHigher = castTimestampToClickhouseFormat(window.end, TimestampFormat.ClickHouseSecondPrecision) - const chTimestampLowerTS = castTimestampToClickhouseFormat(start_ts, TimestampFormat.ClickHouseSecondPrecision) - const chTimestampHigherTS = castTimestampToClickhouseFormat(end_ts, TimestampFormat.ClickHouseSecondPrecision) - - // :TODO: Adding tag messes up the return value? 
- const fetchEventsQuery = ` - SELECT event, - uuid, - team_id, - distinct_id, - properties, - timestamp, - created_at, - elements_chain - FROM events - WHERE _timestamp >= '${chTimestampLower}' - AND _timestamp < '${chTimestampHigher}' - AND timestamp >= '${chTimestampLowerTS}' - AND timestamp < '${chTimestampHigherTS}' - AND event IN ('$merge_dangerously', '$create_alias', '$identify') - AND ((event = '$identify' and JSONExtractString(properties, '$anon_distinct_id') != '') OR - (event != '$identify' and JSONExtractString(properties, 'alias') != '')) - AND team_id NOT IN (26188, 31040) - ORDER BY _timestamp` - - let clickhouseFetchEventsResult: { data: RawClickHouseEvent[] } - // eslint-disable-next-line prefer-const - clickhouseFetchEventsResult = await db.clickhouseQuery(fetchEventsQuery) - return clickhouseFetchEventsResult.data -} - -// run merges parallel across teams, non-parallel within teams -async function handleBatch(db: DB, events: RawClickHouseEvent[]): Promise { - const batches = new Map() - for (const event of events) { - const siblings = batches.get(event.team_id) - if (siblings) { - siblings.push(event) - } else { - batches.set(event.team_id, [event]) - } - } - const batchQueue = Array.from(batches.values()) - status.info('⚙️', 'Processing events', { - eventCount: events.length, - batchCount: batchQueue.length, - }) - - async function processMicroBatches(batches: RawClickHouseEvent[][]): Promise { - let currentBatch - while ((currentBatch = batches.pop()) !== undefined) { - // Process every message sequentially, stash promises to await on later - for (const event of currentBatch) { - await handleEvent(db, event) - } - } - return Promise.resolve() - } - - const tasks = [...Array(defaultConfig.INGESTION_CONCURRENCY)].map(() => processMicroBatches(batchQueue)) - await Promise.all(tasks) -} - -async function handleEvent(db: DB, event: RawClickHouseEvent): Promise { - // single CH event handlin - const pluginEvent = formPluginEvent(event) - const ts: DateTime = DateTime.fromISO(pluginEvent.timestamp as string) - const processPerson = true - const personState = new PersonState( - pluginEvent, - pluginEvent.team_id, - pluginEvent.distinct_id, - ts, - processPerson, - db - ) - await personState.handleIdentifyOrAlias() -} diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index ea51dc1da394d..134736bcec319 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -2,6 +2,7 @@ import { LogLevel, PluginLogLevel, PluginsServerConfig, stringToPluginServerMode import { isDevEnv, isTestEnv, stringToBoolean } from '../utils/env-utils' import { KAFKAJS_LOG_LEVEL_MAPPING } from './constants' import { + KAFKA_CLICKHOUSE_HEATMAP_EVENTS, KAFKA_EVENTS_JSON, KAFKA_EVENTS_PLUGIN_INGESTION, KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW, @@ -104,6 +105,7 @@ export function getDefaultConfig(): PluginsServerConfig { KAFKA_PARTITIONS_CONSUMED_CONCURRENTLY: 1, CLICKHOUSE_DISABLE_EXTERNAL_SCHEMAS_TEAMS: '', CLICKHOUSE_JSON_EVENTS_KAFKA_TOPIC: KAFKA_EVENTS_JSON, + CLICKHOUSE_HEATMAPS_KAFKA_TOPIC: KAFKA_CLICKHOUSE_HEATMAP_EVENTS, CONVERSION_BUFFER_ENABLED: false, CONVERSION_BUFFER_ENABLED_TEAMS: '', CONVERSION_BUFFER_TOPIC_ENABLED_TEAMS: '', @@ -135,6 +137,7 @@ export function getDefaultConfig(): PluginsServerConfig { RUSTY_HOOK_ROLLOUT_PERCENTAGE: 0, RUSTY_HOOK_URL: '', CAPTURE_CONFIG_REDIS_HOST: null, + LAZY_PERSON_CREATION_TEAMS: '', STARTUP_PROFILE_DURATION_SECONDS: 300, // 5 minutes STARTUP_PROFILE_CPU: false, diff --git 
a/plugin-server/src/config/kafka-topics.ts b/plugin-server/src/config/kafka-topics.ts index 71f9bd8ee79da..d93fa8f897afe 100644 --- a/plugin-server/src/config/kafka-topics.ts +++ b/plugin-server/src/config/kafka-topics.ts @@ -36,6 +36,8 @@ export const KAFKA_CLICKHOUSE_SESSION_RECORDING_EVENTS = `${prefix}clickhouse_se export const KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS = `${prefix}clickhouse_session_replay_events${suffix}` // write performance events to ClickHouse export const KAFKA_PERFORMANCE_EVENTS = `${prefix}clickhouse_performance_events${suffix}` +// write heatmap events to ClickHouse +export const KAFKA_CLICKHOUSE_HEATMAP_EVENTS = `${prefix}clickhouse_heatmap_events${suffix}` // log entries for ingestion into clickhouse export const KAFKA_LOG_ENTRIES = `${prefix}log_entries${suffix}` diff --git a/plugin-server/src/index.ts b/plugin-server/src/index.ts index 7c62632ac556b..7ee106b182b90 100644 --- a/plugin-server/src/index.ts +++ b/plugin-server/src/index.ts @@ -1,4 +1,3 @@ -import { startBackfill } from './backfill' import { getPluginServerCapabilities } from './capabilities' import { defaultConfig } from './config/config' import { initApp } from './init' @@ -51,9 +50,6 @@ switch (alternativeMode) { } })() break - case AlternativeMode.Backfill: - void startBackfill() - break default: // void the returned promise initApp(defaultConfig) diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index 5e599a42c0897..030032063a2fc 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -118,6 +118,7 @@ export interface PluginsServerConfig { CLICKHOUSE_DISABLE_EXTERNAL_SCHEMAS: boolean // whether to disallow external schemas like protobuf for clickhouse kafka engine CLICKHOUSE_DISABLE_EXTERNAL_SCHEMAS_TEAMS: string // (advanced) a comma separated list of teams to disable clickhouse external schemas for CLICKHOUSE_JSON_EVENTS_KAFKA_TOPIC: string // (advanced) topic to send events to for clickhouse ingestion + CLICKHOUSE_HEATMAPS_KAFKA_TOPIC: string // (advanced) topic to send heatmap data to for clickhouse ingestion REDIS_URL: string POSTHOG_REDIS_PASSWORD: string POSTHOG_REDIS_HOST: string @@ -211,6 +212,7 @@ export interface PluginsServerConfig { SKIP_UPDATE_EVENT_AND_PROPERTIES_STEP: boolean PIPELINE_STEP_STALLED_LOG_TIMEOUT: number CAPTURE_CONFIG_REDIS_HOST: string | null // Redis cluster to use to coordinate with capture (overflow, routing) + LAZY_PERSON_CREATION_TEAMS: string // dump profiles to disk, covering the first N seconds of runtime STARTUP_PROFILE_DURATION_SECONDS: number @@ -296,6 +298,7 @@ export interface Hub extends PluginsServerConfig { pluginConfigsToSkipElementsParsing: ValueMatcher poeEmbraceJoinForTeams: ValueMatcher poeWritesExcludeTeams: ValueMatcher + lazyPersonCreationTeams: ValueMatcher // lookups eventsToDropByToken: Map } @@ -620,6 +623,7 @@ interface BaseEvent { export type ISOTimestamp = Brand export type ClickHouseTimestamp = Brand export type ClickHouseTimestampSecondPrecision = Brand +export type PersonMode = 'full' | 'propertyless' | 'force_upgrade' /** Raw event row from ClickHouse. */ export interface RawClickHouseEvent extends BaseEvent { @@ -639,7 +643,7 @@ export interface RawClickHouseEvent extends BaseEvent { group2_created_at?: ClickHouseTimestamp group3_created_at?: ClickHouseTimestamp group4_created_at?: ClickHouseTimestamp - person_mode: 'full' | 'propertyless' + person_mode: PersonMode } /** Parsed event row from ClickHouse. 
*/ @@ -660,7 +664,7 @@ export interface ClickHouseEvent extends BaseEvent { group2_created_at?: DateTime | null group3_created_at?: DateTime | null group4_created_at?: DateTime | null - person_mode: 'full' | 'propertyless' + person_mode: PersonMode } /** Event in a database-agnostic shape, AKA an ingestion event. @@ -736,11 +740,24 @@ export interface RawPerson extends BasePerson { } /** Usable Person model. */ -export interface Person extends BasePerson { +export interface InternalPerson extends BasePerson { created_at: DateTime version: number } +/** Person model exposed outside of person-specific DB logic. */ +export interface Person { + team_id: number + properties: Properties + uuid: string + created_at: DateTime + + // Set to `true` when an existing person row was found for this `distinct_id`, but the event was + // sent with `$process_person_profile=false`. This is an unexpected branch that we want to flag + // for debugging and billing purposes, and typically means a misconfigured SDK. + force_upgrade?: boolean +} + /** Clickhouse Person model. */ export interface ClickHousePerson { id: string @@ -1103,3 +1120,26 @@ export type RRWebEvent = Record & { export interface ValueMatcher { (value: T): boolean } + +export type RawClickhouseHeatmapEvent = { + /** + * session id lets us offer example recordings on high traffic parts of the page, + * and could let us offer more advanced filtering of heatmap data + * we will break the relationship between particular sessions and clicks in aggregating this data + * it should always be treated as an exemplar and not as concrete values + */ + session_id: string + distinct_id: string + viewport_width: number + viewport_height: number + pointer_target_fixed: boolean + current_url: string + // x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x: number + // y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y: number + scale_factor: 16 // in the future we may support other values + timestamp: string + type: string + team_id: number +} diff --git a/plugin-server/src/utils/db/db.ts b/plugin-server/src/utils/db/db.ts index c7b6ce86a895a..92a4deeb3bddc 100644 --- a/plugin-server/src/utils/db/db.ts +++ b/plugin-server/src/utils/db/db.ts @@ -26,8 +26,8 @@ import { GroupKey, GroupTypeIndex, GroupTypeToColumnIndex, + InternalPerson, OrganizationMembershipLevel, - Person, PersonDistinctId, Plugin, PluginConfig, @@ -550,7 +550,7 @@ export class DB { } } - private toPerson(row: RawPerson): Person { + private toPerson(row: RawPerson): InternalPerson { return { ...row, created_at: DateTime.fromISO(row.created_at).toUTC(), @@ -558,9 +558,9 @@ export class DB { } } - public async fetchPersons(database?: Database.Postgres): Promise + public async fetchPersons(database?: Database.Postgres): Promise public async fetchPersons(database: Database.ClickHouse): Promise - public async fetchPersons(database: Database = Database.Postgres): Promise { + public async fetchPersons(database: Database = Database.Postgres): Promise { if (database === Database.ClickHouse) { const query = ` SELECT id, team_id, is_identified, ts as _timestamp, properties, created_at, is_del as is_deleted, _offset @@ -595,8 +595,12 @@ export class DB { public async fetchPerson( teamId: number, distinctId: string, - options: { forUpdate?: boolean } = {} - ): Promise { + options: { forUpdate?: boolean; useReadReplica?: boolean } = {} + ): Promise { + if (options.forUpdate && 
options.useReadReplica) { + throw new Error("can't enable both forUpdate and useReadReplica in db::fetchPerson") + } + let queryString = `SELECT posthog_person.id, posthog_person.uuid, @@ -621,7 +625,7 @@ export class DB { const values = [teamId, distinctId] const { rows } = await this.postgres.query( - PostgresUse.COMMON_WRITE, + options.useReadReplica ? PostgresUse.COMMON_READ : PostgresUse.COMMON_WRITE, queryString, values, 'fetchPerson' @@ -641,10 +645,10 @@ export class DB { isUserId: number | null, isIdentified: boolean, uuid: string, - distinctIds?: string[] - ): Promise { + distinctIds?: string[], + version = 0 + ): Promise { distinctIds ||= [] - const version = 0 // We're creating the person now! const { rows } = await this.postgres.query( PostgresUse.COMMON_WRITE, @@ -713,10 +717,10 @@ export class DB { // Currently in use, but there are various problems with this function public async updatePersonDeprecated( - person: Person, - update: Partial, + person: InternalPerson, + update: Partial, tx?: TransactionClient - ): Promise<[Person, ProducerRecord[]]> { + ): Promise<[InternalPerson, ProducerRecord[]]> { const updateValues = Object.values(unparsePersonPartial(update)) // short circuit if there are no updates to be made @@ -770,7 +774,7 @@ export class DB { return [updatedPerson, kafkaMessages] } - public async deletePerson(person: Person, tx?: TransactionClient): Promise { + public async deletePerson(person: InternalPerson, tx?: TransactionClient): Promise { const { rows } = await this.postgres.query<{ version: string }>( tx ?? PostgresUse.COMMON_WRITE, 'DELETE FROM posthog_person WHERE team_id = $1 AND id = $2 RETURNING version', @@ -789,10 +793,13 @@ export class DB { // PersonDistinctId // testutil - public async fetchDistinctIds(person: Person, database?: Database.Postgres): Promise - public async fetchDistinctIds(person: Person, database: Database.ClickHouse): Promise + public async fetchDistinctIds(person: InternalPerson, database?: Database.Postgres): Promise + public async fetchDistinctIds( + person: InternalPerson, + database: Database.ClickHouse + ): Promise public async fetchDistinctIds( - person: Person, + person: InternalPerson, database: Database = Database.Postgres ): Promise { if (database === Database.ClickHouse) { @@ -821,33 +828,36 @@ export class DB { } } - public async fetchDistinctIdValues(person: Person, database: Database = Database.Postgres): Promise { + public async fetchDistinctIdValues( + person: InternalPerson, + database: Database = Database.Postgres + ): Promise { const personDistinctIds = await this.fetchDistinctIds(person, database as any) return personDistinctIds.map((pdi) => pdi.distinct_id) } - public async addDistinctId(person: Person, distinctId: string): Promise { - const kafkaMessages = await this.addDistinctIdPooled(person, distinctId) + public async addDistinctId(person: InternalPerson, distinctId: string, version: number): Promise { + const kafkaMessages = await this.addDistinctIdPooled(person, distinctId, version) if (kafkaMessages.length) { await this.kafkaProducer.queueMessages({ kafkaMessages, waitForAck: true }) } } public async addDistinctIdPooled( - person: Person, + person: InternalPerson, distinctId: string, + version: number, tx?: TransactionClient ): Promise { const insertResult = await this.postgres.query( tx ?? 
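
A usage sketch (not part of the diff) of how the two `fetchPerson` options interact after this change; `db`, `teamId` and `distinctId` are assumed bindings and the import path is illustrative:

import { DB } from '../../utils/db/db'

async function fetchPersonExample(db: DB, teamId: number, distinctId: string) {
    // Reads from the Postgres read replica (PostgresUse.COMMON_READ) - this is what the new
    // lazy / personless fast path uses, since it only needs a recent-enough snapshot.
    const person = await db.fetchPerson(teamId, distinctId, { useReadReplica: true })

    // This combination throws "can't enable both forUpdate and useReadReplica in db::fetchPerson":
    // a SELECT ... FOR UPDATE row lock can only be taken against the writer.
    // await db.fetchPerson(teamId, distinctId, { forUpdate: true, useReadReplica: true })

    return person
}
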
PostgresUse.COMMON_WRITE, // NOTE: Keep this in sync with the posthog_persondistinctid INSERT in `createPerson` - 'INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, 0) RETURNING *', - [distinctId, person.id, person.team_id], + 'INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) VALUES ($1, $2, $3, $4) RETURNING *', + [distinctId, person.id, person.team_id, version], 'addDistinctIdPooled' ) - const { id, version: versionStr, ...personDistinctIdCreated } = insertResult.rows[0] as PersonDistinctId - const version = Number(versionStr || 0) + const { id, ...personDistinctIdCreated } = insertResult.rows[0] as PersonDistinctId const messages = [ { topic: KAFKA_PERSON_DISTINCT_ID, @@ -867,7 +877,11 @@ export class DB { return messages } - public async moveDistinctIds(source: Person, target: Person, tx?: TransactionClient): Promise { + public async moveDistinctIds( + source: InternalPerson, + target: InternalPerson, + tx?: TransactionClient + ): Promise { let movedDistinctIdResult: QueryResult | null = null try { movedDistinctIdResult = await this.postgres.query( @@ -955,7 +969,7 @@ export class DB { public async addPersonToCohort( cohortId: number, - personId: Person['id'], + personId: InternalPerson['id'], version: number | null ): Promise { const insertResult = await this.postgres.query( @@ -969,8 +983,8 @@ export class DB { public async updateCohortsAndFeatureFlagsForMerge( teamID: Team['id'], - sourcePersonID: Person['id'], - targetPersonID: Person['id'], + sourcePersonID: InternalPerson['id'], + targetPersonID: InternalPerson['id'], tx?: TransactionClient ): Promise { // When personIDs change, update places depending on a person_id foreign key diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts index 5daabce8a4cf5..ae629f066444a 100644 --- a/plugin-server/src/utils/db/hub.ts +++ b/plugin-server/src/utils/db/hub.ts @@ -209,6 +209,7 @@ export async function createHub( pluginConfigsToSkipElementsParsing: buildIntegerMatcher(process.env.SKIP_ELEMENTS_PARSING_PLUGINS, true), poeEmbraceJoinForTeams: buildIntegerMatcher(process.env.POE_EMBRACE_JOIN_FOR_TEAMS, true), poeWritesExcludeTeams: buildIntegerMatcher(process.env.POE_WRITES_EXCLUDE_TEAMS, false), + lazyPersonCreationTeams: buildIntegerMatcher(process.env.LAZY_PERSON_CREATION_TEAMS, true), eventsToDropByToken: createEventsToDropByToken(process.env.DROP_EVENTS_BY_TOKEN_DISTINCT_ID), } diff --git a/plugin-server/src/utils/db/utils.ts b/plugin-server/src/utils/db/utils.ts index dcf9c9af50720..1ecd1786f875a 100644 --- a/plugin-server/src/utils/db/utils.ts +++ b/plugin-server/src/utils/db/utils.ts @@ -8,7 +8,7 @@ import { KAFKA_PERSON } from '../../config/kafka-topics' import { BasePerson, ClickHousePerson, - Person, + InternalPerson, PluginLogEntryType, PluginLogLevel, RawPerson, @@ -17,7 +17,7 @@ import { import { status } from '../../utils/status' import { castTimestampOrNow } from '../../utils/utils' -export function unparsePersonPartial(person: Partial): Partial { +export function unparsePersonPartial(person: Partial): Partial { return { ...(person as BasePerson), ...(person.created_at ? 
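
For reference, the new `LAZY_PERSON_CREATION_TEAMS` setting goes through the same `buildIntegerMatcher` helper as the other per-team rollout flags above. A rough, self-contained sketch of the matcher semantics implied by that call (comma-separated team ids, with a '*' wildcard allowed because the second argument is `true`); this is an illustrative stand-in, not the real helper:

type ValueMatcher<T> = (value: T) => boolean

function lazyPersonCreationMatcher(raw: string | undefined): ValueMatcher<number> {
    if (!raw) {
        return () => false
    }
    if (raw.trim() === '*') {
        return () => true // wildcard: enabled for every team
    }
    const teamIds = new Set(
        raw.split(',').map((part) => parseInt(part.trim(), 10)).filter((n) => !isNaN(n))
    )
    return (teamId) => teamIds.has(teamId)
}

const matcher = lazyPersonCreationMatcher('2,7') // e.g. LAZY_PERSON_CREATION_TEAMS='2,7'
matcher(2) // -> true
matcher(3) // -> false
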
{ created_at: person.created_at.toISO() } : {}) } } @@ -116,7 +116,7 @@ export function personInitialAndUTMProperties(properties: Properties): Propertie return propertiesCopy } -export function generateKafkaPersonUpdateMessage(person: Person, isDeleted = false): ProducerRecord { +export function generateKafkaPersonUpdateMessage(person: InternalPerson, isDeleted = false): ProducerRecord { return { topic: KAFKA_PERSON, messages: [ diff --git a/plugin-server/src/worker/ingestion/event-pipeline/extractHeatmapDataStep.ts b/plugin-server/src/worker/ingestion/event-pipeline/extractHeatmapDataStep.ts new file mode 100644 index 0000000000000..3b16437da81f2 --- /dev/null +++ b/plugin-server/src/worker/ingestion/event-pipeline/extractHeatmapDataStep.ts @@ -0,0 +1,126 @@ +import { URL } from 'url' + +import { PreIngestionEvent, RawClickhouseHeatmapEvent, TimestampFormat } from '../../../types' +import { castTimestampOrNow } from '../../../utils/utils' +import { captureIngestionWarning } from '../utils' +import { EventPipelineRunner } from './runner' + +// This represents the scale factor for the heatmap data. Essentially how much we are reducing the resolution by. +const SCALE_FACTOR = 16 + +type HeatmapDataItem = { + x: number + y: number + target_fixed: boolean + type: string +} + +type HeatmapData = Record + +export function extractHeatmapDataStep( + runner: EventPipelineRunner, + event: PreIngestionEvent +): Promise<[PreIngestionEvent, Promise[]]> { + const { eventUuid, teamId } = event + + let acks: Promise[] = [] + + try { + const heatmapEvents = extractScrollDepthHeatmapData(event) ?? [] + + // eslint-disable-next-line @typescript-eslint/no-floating-promises + acks = heatmapEvents.map((rawEvent) => { + return runner.hub.kafkaProducer.produce({ + topic: runner.hub.CLICKHOUSE_HEATMAPS_KAFKA_TOPIC, + key: eventUuid, + value: Buffer.from(JSON.stringify(rawEvent)), + waitForAck: true, + }) + }) + } catch (e) { + acks.push( + captureIngestionWarning(runner.hub.kafkaProducer, teamId, 'invalid_heatmap_data', { + eventUuid, + }) + ) + } + + // We don't want to ingest this data to the events table + delete event.properties['$heatmap_data'] + + return Promise.resolve([event, acks]) +} + +function replacePathInUrl(url: string, newPath: string): string { + const parsedUrl = new URL(url) + parsedUrl.pathname = newPath + return parsedUrl.toString() +} + +function extractScrollDepthHeatmapData(event: PreIngestionEvent): RawClickhouseHeatmapEvent[] { + const { teamId, timestamp, properties } = event + const { + $viewport_height, + $viewport_width, + $session_id, + distinct_id, + $prev_pageview_pathname, + $prev_pageview_max_scroll, + $current_url, + $heatmap_data, + } = properties || {} + + let heatmapData = $heatmap_data as HeatmapData | null + + if ($prev_pageview_pathname && $current_url) { + // We are going to add the scroll depth info derived from the previous pageview to the current pageview's heatmap data + if (!heatmapData) { + heatmapData = {} + } + + const previousUrl = replacePathInUrl($current_url, $prev_pageview_pathname) + heatmapData[previousUrl] = heatmapData[previousUrl] || [] + heatmapData[previousUrl].push({ + x: 0, + y: $prev_pageview_max_scroll, + target_fixed: false, + type: 'scrolldepth', + }) + } + + let heatmapEvents: RawClickhouseHeatmapEvent[] = [] + + if (!heatmapData) { + return [] + } + + Object.entries(heatmapData).forEach(([url, items]) => { + if (Array.isArray(items)) { + heatmapEvents = heatmapEvents.concat( + (items as any[]).map( + (hme: { + x: number + y: number + 
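
A small worked example (illustrative values) of the scroll-depth handling in extractScrollDepthHeatmapData above: the previous pageview's URL is rebuilt by swapping only the path on `$current_url`, so the query string is kept, and a synthetic `scrolldepth` item is appended under that URL:

import { URL } from 'url'

// Mirrors replacePathInUrl() from the step above, with made-up inputs.
const currentUrl = 'http://localhost:3000/pricing?tab=cloud' // $current_url
const prevPathname = '/docs'                                 // $prev_pageview_pathname

const parsed = new URL(currentUrl)
parsed.pathname = prevPathname
const previousUrl = parsed.toString() // -> 'http://localhost:3000/docs?tab=cloud'

// heatmapData[previousUrl] then receives an entry shaped like:
// { x: 0, y: $prev_pageview_max_scroll, target_fixed: false, type: 'scrolldepth' }
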
target_fixed: boolean + type: string + }): RawClickhouseHeatmapEvent => ({ + type: hme.type, + x: Math.round(hme.x / SCALE_FACTOR), + y: Math.round(hme.y / SCALE_FACTOR), + pointer_target_fixed: hme.target_fixed, + viewport_height: Math.round($viewport_height / SCALE_FACTOR), + viewport_width: Math.round($viewport_width / SCALE_FACTOR), + current_url: url, + session_id: $session_id, + scale_factor: SCALE_FACTOR, + timestamp: castTimestampOrNow(timestamp ?? null, TimestampFormat.ClickHouse), + team_id: teamId, + distinct_id: distinct_id, + }) + ) + ) + } + }) + + return heatmapEvents +} diff --git a/plugin-server/src/worker/ingestion/event-pipeline/processPersonsStep.ts b/plugin-server/src/worker/ingestion/event-pipeline/processPersonsStep.ts index 3cc38242736d6..a0978497d7e34 100644 --- a/plugin-server/src/worker/ingestion/event-pipeline/processPersonsStep.ts +++ b/plugin-server/src/worker/ingestion/event-pipeline/processPersonsStep.ts @@ -23,6 +23,7 @@ export async function processPersonsStep( timestamp, processPerson, runner.hub.db, + runner.hub.lazyPersonCreationTeams(event.team_id), overridesWriter ).update() diff --git a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts index 9ad11391f0c2f..52e762949a924 100644 --- a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts +++ b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts @@ -10,6 +10,7 @@ import { normalizeProcessPerson } from '../../../utils/event' import { status } from '../../../utils/status' import { captureIngestionWarning, generateEventDeadLetterQueueMessage } from '../utils' import { createEventStep } from './createEventStep' +import { extractHeatmapDataStep } from './extractHeatmapDataStep' import { eventProcessedAndIngestedCounter, pipelineLastStepCounter, @@ -216,9 +217,19 @@ export class EventPipelineRunner { event.team_id ) + const [preparedEventWithoutHeatmaps, heatmapKafkaAcks] = await this.runStep( + extractHeatmapDataStep, + [this, preparedEvent], + event.team_id + ) + + if (heatmapKafkaAcks.length > 0) { + kafkaAcks.push(...heatmapKafkaAcks) + } + const [rawClickhouseEvent, eventAck] = await this.runStep( createEventStep, - [this, preparedEvent, person, processPerson], + [this, preparedEventWithoutHeatmaps, person, processPerson], event.team_id ) diff --git a/plugin-server/src/worker/ingestion/person-state.ts b/plugin-server/src/worker/ingestion/person-state.ts index b7cd3b8b6afcc..7d605bc707092 100644 --- a/plugin-server/src/worker/ingestion/person-state.ts +++ b/plugin-server/src/worker/ingestion/person-state.ts @@ -4,10 +4,9 @@ import { ProducerRecord } from 'kafkajs' import { DateTime } from 'luxon' import { Counter } from 'prom-client' import { KafkaProducerWrapper } from 'utils/db/kafka-producer-wrapper' -import { parse as parseUuid, v5 as uuidv5 } from 'uuid' import { KAFKA_PERSON_OVERRIDE } from '../../config/kafka-topics' -import { Person, PropertyUpdateOperation, TimestampFormat } from '../../types' +import { InternalPerson, Person, PropertyUpdateOperation, TimestampFormat } from '../../types' import { DB } from '../../utils/db/db' import { PostgresRouter, PostgresUse, TransactionClient } from '../../utils/db/postgres' import { timeoutGuard } from '../../utils/db/utils' @@ -15,6 +14,7 @@ import { PeriodicTask } from '../../utils/periodic-task' import { promiseRetry } from '../../utils/retries' import { status } from '../../utils/status' import { castTimestampOrNow } from '../../utils/utils' +import { 
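
To make the coordinate handling above concrete (made-up numbers): every x/y and viewport dimension is divided by `SCALE_FACTOR = 16` and rounded, so raw pixel positions are bucketed onto a 16x16-pixel grid, and `scale_factor: 16` is written alongside so consumers can undo the bucketing:

const SCALE_FACTOR = 16 // as defined in extractHeatmapDataStep.ts

const rawItem = { x: 1034, y: 371 }            // illustrative pointer position in page pixels
const viewport = { width: 1280, height: 720 }  // illustrative viewport in pixels

const stored = {
    x: Math.round(rawItem.x / SCALE_FACTOR),                     // 65
    y: Math.round(rawItem.y / SCALE_FACTOR),                     // 23
    viewport_width: Math.round(viewport.width / SCALE_FACTOR),   // 80
    viewport_height: Math.round(viewport.height / SCALE_FACTOR), // 45
    scale_factor: SCALE_FACTOR,
}
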
uuidFromDistinctId } from './person-uuid' import { captureIngestionWarning } from './utils' export const mergeFinalFailuresCounter = new Counter({ @@ -34,15 +34,6 @@ export const mergeTxnSuccessCounter = new Counter({ labelNames: ['call', 'oldPersonIdentified', 'newPersonIdentified', 'poEEmbraceJoin'], }) -// UUIDv5 requires a namespace, which is itself a UUID. This was a randomly generated UUIDv4 -// that must be used to deterministrically generate UUIDv5s for Person rows. -const PERSON_UUIDV5_NAMESPACE = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22') - -function uuidFromDistinctId(teamId: number, distinctId: string): string { - // Deterministcally create a UUIDv5 based on the (team_id, distinct_id) pair. - return uuidv5(`${teamId}:${distinctId}`, PERSON_UUIDV5_NAMESPACE) -} - // used to prevent identify from being used with generic IDs // that we can safely assume stem from a bug or mistake // used to prevent identify from being used with generic IDs @@ -101,6 +92,7 @@ export class PersonState { private timestamp: DateTime, private processPerson: boolean, // $process_person_profile flag from the event private db: DB, + private lazyPersonCreation: boolean, private personOverrideWriter?: DeferredPersonOverrideWriter ) { this.eventProperties = event.properties! @@ -112,21 +104,46 @@ export class PersonState { async update(): Promise { if (!this.processPerson) { - // We don't need to handle any properties for `processPerson=false` events, so we can - // short circuit by just finding or creating a person and returning early. - // - // In the future, we won't even get or create a real Person for these events, and so - // the `processPerson` boolean can be removed from this class altogether, as this class - // shouldn't even need to be invoked. - const [person, _] = await promiseRetry(() => this.createOrGetPerson(), 'get_person_personless') + if (this.lazyPersonCreation) { + const existingPerson = await this.db.fetchPerson(this.teamId, this.distinctId, { useReadReplica: true }) + if (existingPerson) { + const person = existingPerson as Person - // Ensure person properties don't propagate elsewhere, such as onto the event itself. - person.properties = {} + // Ensure person properties don't propagate elsewhere, such as onto the event itself. + person.properties = {} - return person + // See documentation on the field. + person.force_upgrade = true + + return person + } + + // We need a value from the `person_created_column` in ClickHouse. This should be + // hidden from users for events without a real person, anyway. It's slightly offset + // from the 0 date (by 5 seconds) in order to assist in debugging by being + // harmlessly distinct from Unix UTC "0". + const createdAt = DateTime.utc(1970, 1, 1, 0, 0, 5) + + const fakePerson: Person = { + team_id: this.teamId, + properties: {}, + uuid: uuidFromDistinctId(this.teamId, this.distinctId), + created_at: createdAt, + } + return fakePerson + } else { + // We don't need to handle any properties for `processPerson=false` events, so we can + // short circuit by just finding or creating a person and returning early. + const [person, _] = await promiseRetry(() => this.createOrGetPerson(), 'get_person_personless') + + // Ensure person properties don't propagate elsewhere, such as onto the event itself. 
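
One detail of the lazy branch above worth spelling out (a sketch, using Luxon exactly as the code does): the placeholder `created_at` for the fake person is five seconds after the Unix epoch, so it is recognisably not a real creation time while still being distinct from an all-zero timestamp when debugging:

import { DateTime } from 'luxon'

// The sentinel created_at used for the fake, propertyless person above.
const createdAt = DateTime.utc(1970, 1, 1, 0, 0, 5)
createdAt.toISO() // -> '1970-01-01T00:00:05.000Z'
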
+ person.properties = {} + + return person + } } - const person: Person | undefined = await this.handleIdentifyOrAlias() // TODO: make it also return a boolean for if we can exit early here + const person: InternalPerson | undefined = await this.handleIdentifyOrAlias() // TODO: make it also return a boolean for if we can exit early here if (person) { // try to shortcut if we have the person from identify or alias try { @@ -139,7 +156,7 @@ export class PersonState { return await this.handleUpdate() } - async handleUpdate(): Promise { + async handleUpdate(): Promise { // There are various reasons why update can fail: // - anothe thread created the person during a race // - the person might have been merged between start of processing and now @@ -147,7 +164,7 @@ export class PersonState { return await promiseRetry(() => this.updateProperties(), 'update_person') } - async updateProperties(): Promise { + async updateProperties(): Promise { const [person, propertiesHandled] = await this.createOrGetPerson() if (propertiesHandled) { return person @@ -158,7 +175,7 @@ export class PersonState { /** * @returns [Person, boolean that indicates if properties were already handled or not] */ - private async createOrGetPerson(): Promise<[Person, boolean]> { + private async createOrGetPerson(): Promise<[InternalPerson, boolean]> { let person = await this.db.fetchPerson(this.teamId, this.distinctId) if (person) { return [person, false] @@ -193,8 +210,9 @@ export class PersonState { isUserId: number | null, isIdentified: boolean, creatorEventUuid: string, - distinctIds: string[] - ): Promise { + distinctIds: string[], + version = 0 + ): Promise { if (distinctIds.length < 1) { throw new Error('at least 1 distinctId is required in `createPerson`') } @@ -221,14 +239,15 @@ export class PersonState { isUserId, isIdentified, uuid, - distinctIds + distinctIds, + version ) } - private async updatePersonProperties(person: Person): Promise { + private async updatePersonProperties(person: InternalPerson): Promise { person.properties ||= {} - const update: Partial = {} + const update: Partial = {} if (this.applyEventPropertyUpdates(person.properties)) { update.properties = person.properties } @@ -280,7 +299,7 @@ export class PersonState { // Alias & merge - async handleIdentifyOrAlias(): Promise { + async handleIdentifyOrAlias(): Promise { /** * strategy: * - if the two distinct ids passed don't match and aren't illegal, then mark `is_identified` to be true for the `distinct_id` person @@ -332,7 +351,7 @@ export class PersonState { mergeIntoDistinctId: string, teamId: number, timestamp: DateTime - ): Promise { + ): Promise { // No reason to alias person against itself. Done by posthog-node when updating user properties if (mergeIntoDistinctId === otherPersonDistinctId) { return undefined @@ -376,17 +395,36 @@ export class PersonState { mergeIntoDistinctId: string, teamId: number, timestamp: DateTime - ): Promise { + ): Promise { this.updateIsIdentified = true const otherPerson = await this.db.fetchPerson(teamId, otherPersonDistinctId) const mergeIntoPerson = await this.db.fetchPerson(teamId, mergeIntoDistinctId) + // Historically, we always INSERT-ed new `posthog_persondistinctid` rows with `version=0`. 
+ // Overrides are only created when the version is > 0, see: + // https://github.com/PostHog/posthog/blob/92e17ce307a577c4233d4ab252eebc6c2207a5ee/posthog/models/person/sql.py#L269-L287 + // + // With the addition of optional person processing, we are now rolling out a change to + // lazily create `posthog_persondistinctid` and `posthog_person` rows. This means that: + // 1. At merge time, it's possible this `distinct_id` and its deterministically generated + // `person.uuid` has already been used for events in ClickHouse, but they have no + // corresponding rows in the `posthog_persondistinctid` or `posthog_person` tables + // 2. We need to assume the `distinct_id`/`person.uuid` have been used before (by + // `$process_person_profile=false` events) and create an override row for this + // `distinct_id` even though we're just now INSERT-ing it into Postgres/ClickHouse. We do + // this by starting with `version=1`, as if we had just deleted the old user and were + // updating the `distinct_id` row as part of the merge + let addDistinctIdVersion = 0 + if (this.lazyPersonCreation) { + addDistinctIdVersion = 1 + } + if (otherPerson && !mergeIntoPerson) { - await this.db.addDistinctId(otherPerson, mergeIntoDistinctId) + await this.db.addDistinctId(otherPerson, mergeIntoDistinctId, addDistinctIdVersion) return otherPerson } else if (!otherPerson && mergeIntoPerson) { - await this.db.addDistinctId(mergeIntoPerson, otherPersonDistinctId) + await this.db.addDistinctId(mergeIntoPerson, otherPersonDistinctId, addDistinctIdVersion) return mergeIntoPerson } else if (otherPerson && mergeIntoPerson) { if (otherPerson.id == mergeIntoPerson.id) { @@ -410,7 +448,8 @@ export class PersonState { null, true, this.event.uuid, - [mergeIntoDistinctId, otherPersonDistinctId] + [mergeIntoDistinctId, otherPersonDistinctId], + addDistinctIdVersion ) } @@ -420,11 +459,11 @@ export class PersonState { otherPerson, otherPersonDistinctId, }: { - mergeInto: Person + mergeInto: InternalPerson mergeIntoDistinctId: string - otherPerson: Person + otherPerson: InternalPerson otherPersonDistinctId: string - }): Promise { + }): Promise { const olderCreatedAt = DateTime.min(mergeInto.created_at, otherPerson.created_at) const mergeAllowed = this.isMergeAllowed(otherPerson) @@ -472,18 +511,18 @@ export class PersonState { return mergedPerson } - private isMergeAllowed(mergeFrom: Person): boolean { + private isMergeAllowed(mergeFrom: InternalPerson): boolean { // $merge_dangerously has no restrictions // $create_alias and $identify will not merge a user who's already identified into anyone else return this.event.event === '$merge_dangerously' || !mergeFrom.is_identified } private async handleMergeTransaction( - mergeInto: Person, - otherPerson: Person, + mergeInto: InternalPerson, + otherPerson: InternalPerson, createdAt: DateTime, properties: Properties - ): Promise<[ProducerRecord[], Person]> { + ): Promise<[ProducerRecord[], InternalPerson]> { mergeTxnAttemptCounter .labels({ call: this.event.event, // $identify, $create_alias or $merge_dangerously @@ -493,7 +532,7 @@ export class PersonState { }) .inc() - const result: [ProducerRecord[], Person] = await this.db.postgres.transaction( + const result: [ProducerRecord[], InternalPerson] = await this.db.postgres.transaction( PostgresUse.COMMON_WRITE, 'mergePeople', async (tx) => { @@ -557,7 +596,11 @@ type PersonOverrideDetails = { oldest_event: DateTime } -function getPersonOverrideDetails(teamId: number, oldPerson: Person, overridePerson: Person): PersonOverrideDetails { +function 
getPersonOverrideDetails(
+    teamId: number,
+    oldPerson: InternalPerson,
+    overridePerson: InternalPerson
+): PersonOverrideDetails {
     if (teamId != oldPerson.team_id || teamId != overridePerson.team_id) {
         throw new Error('cannot merge persons across different teams')
     }
diff --git a/plugin-server/src/worker/ingestion/person-uuid.ts b/plugin-server/src/worker/ingestion/person-uuid.ts
new file mode 100644
index 0000000000000..78ff92eaca8ef
--- /dev/null
+++ b/plugin-server/src/worker/ingestion/person-uuid.ts
@@ -0,0 +1,10 @@
+import { parse as parseUuid, v5 as uuidv5 } from 'uuid'
+
+// UUIDv5 requires a namespace, which is itself a UUID. This was a randomly generated UUIDv4
+// that must be used to deterministically generate UUIDv5s for Person rows.
+const PERSON_UUIDV5_NAMESPACE = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22')
+
+export function uuidFromDistinctId(teamId: number, distinctId: string): string {
+    // Deterministically create a UUIDv5 based on the (team_id, distinct_id) pair.
+    return uuidv5(`${teamId}:${distinctId}`, PERSON_UUIDV5_NAMESPACE)
+}
diff --git a/plugin-server/src/worker/ingestion/process-event.ts b/plugin-server/src/worker/ingestion/process-event.ts
index 8b57e26190046..d3f49b85cfd71 100644
--- a/plugin-server/src/worker/ingestion/process-event.ts
+++ b/plugin-server/src/worker/ingestion/process-event.ts
@@ -11,6 +11,7 @@ import {
     Hub,
     ISOTimestamp,
     Person,
+    PersonMode,
     PreIngestionEvent,
     RawClickHouseEvent,
     Team,
@@ -239,6 +240,13 @@ export class EventsProcessor {
         // TODO: Remove Redis caching for person that's not used anymore

+        let personMode: PersonMode = 'full'
+        if (person.force_upgrade) {
+            personMode = 'force_upgrade'
+        } else if (!processPerson) {
+            personMode = 'propertyless'
+        }
+
         const rawEvent: RawClickHouseEvent = {
             uuid,
             event: safeClickhouseString(event),
@@ -251,7 +259,7 @@ export class EventsProcessor {
             person_id: person.uuid,
             person_properties: eventPersonProperties,
             person_created_at: castTimestampOrNow(person.created_at, TimestampFormat.ClickHouseSecondPrecision),
-            person_mode: processPerson ?
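
A quick illustration (a sketch, reusing the same `uuid` package and namespace shown in person-uuid.ts above) of why the helper is now shared: the UUID depends only on the `(team_id, distinct_id)` pair, so the propertyless/lazy path and a later real person row can independently arrive at the same `person.uuid`:

import { parse as parseUuid, v5 as uuidv5 } from 'uuid'

const PERSON_UUIDV5_NAMESPACE = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22')
const uuidFromDistinctId = (teamId: number, distinctId: string): string =>
    uuidv5(`${teamId}:${distinctId}`, PERSON_UUIDV5_NAMESPACE)

uuidFromDistinctId(1, 'user-123') === uuidFromDistinctId(1, 'user-123') // -> true, always the same value
uuidFromDistinctId(2, 'user-123') === uuidFromDistinctId(1, 'user-123') // -> false, namespaced by team
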
'full' : 'propertyless', + person_mode: personMode, ...groupsColumns, } diff --git a/plugin-server/tests/helpers/sql.ts b/plugin-server/tests/helpers/sql.ts index 8564250118550..db22c661fc7cc 100644 --- a/plugin-server/tests/helpers/sql.ts +++ b/plugin-server/tests/helpers/sql.ts @@ -3,7 +3,7 @@ import { DateTime } from 'luxon' import { defaultConfig } from '../../src/config/config' import { Hub, - Person, + InternalPerson, Plugin, PluginAttachmentDB, PluginConfig, @@ -403,6 +403,6 @@ export async function fetchPostgresPersons(db: DB, teamId: number) { ...rawPerson, created_at: DateTime.fromISO(rawPerson.created_at).toUTC(), version: Number(rawPerson.version || 0), - } as Person) + } as InternalPerson) ) } diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/extractHeatmapDataStep.test.ts.snap b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/extractHeatmapDataStep.test.ts.snap new file mode 100644 index 0000000000000..12238bd7ab350 --- /dev/null +++ b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/extractHeatmapDataStep.test.ts.snap @@ -0,0 +1,5208 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`extractHeatmapDataStep() parses and ingests correct $heatmap_data 2`] = ` +Array [ + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 109, + 111, + 117, + 115, + 101, + 109, + 111, + 118, + 101, + 34, + 44, + 34, + 120, + 34, + 58, + 54, + 52, + 44, + 34, + 121, + 34, + 58, + 50, + 51, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": 
"018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, 
+ 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 114, + 97, + 103, + 101, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, 
+ 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 
58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 
115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 109, + 111, + 117, + 115, + 101, + 109, + 111, + 118, + 101, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 
34, + 109, + 111, + 117, + 115, + 101, + 109, + 111, + 118, + 101, + 34, + 44, + 34, + 120, + 34, + 58, + 54, + 54, + 44, + 34, + 121, + 34, + 58, + 52, + 50, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 55, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 
101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 55, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 114, + 97, + 103, + 101, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 55, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 
44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 48, + 44, + 34, + 121, + 34, + 58, + 50, + 55, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 
45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 52, + 53, + 44, + 34, + 121, + 34, + 58, + 49, + 55, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], + Array [ + Object { + "key": "018eebf3-cb48-750b-bfad-36409ea6f2b2", + "topic": undefined, + "value": Object { + "data": Array [ + 123, + 34, + 116, + 121, + 112, + 101, + 34, + 58, + 34, + 99, + 108, + 105, + 99, + 107, + 34, + 44, + 34, + 120, + 34, + 58, + 55, + 44, + 34, + 121, + 34, + 58, + 57, + 44, + 34, + 112, + 111, + 105, + 110, + 116, + 101, + 114, + 95, + 116, + 97, + 114, + 103, + 101, + 116, + 95, + 102, + 105, + 120, + 101, + 100, + 34, + 58, + 102, + 97, + 108, + 115, + 101, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 104, + 101, + 105, + 103, + 104, + 116, + 34, + 58, + 56, + 51, + 44, + 34, + 118, + 105, + 101, + 119, + 112, + 111, + 114, + 116, + 95, + 119, + 105, + 100, + 116, + 104, + 34, + 58, + 54, + 55, + 44, + 34, + 99, + 117, + 114, + 114, + 101, + 110, + 116, + 95, + 117, + 114, + 108, + 34, + 58, + 34, + 104, + 116, + 116, + 112, + 58, + 47, + 47, + 108, + 111, + 99, + 97, + 108, + 104, + 111, + 115, + 116, + 58, + 51, + 48, + 48, + 48, + 47, + 34, + 44, + 34, + 115, + 101, + 115, + 115, + 105, + 111, + 110, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 
45, + 55, + 57, + 99, + 100, + 45, + 55, + 48, + 100, + 97, + 45, + 56, + 57, + 53, + 102, + 45, + 98, + 54, + 99, + 102, + 51, + 53, + 50, + 98, + 100, + 54, + 56, + 56, + 34, + 44, + 34, + 115, + 99, + 97, + 108, + 101, + 95, + 102, + 97, + 99, + 116, + 111, + 114, + 34, + 58, + 49, + 54, + 44, + 34, + 116, + 105, + 109, + 101, + 115, + 116, + 97, + 109, + 112, + 34, + 58, + 34, + 50, + 48, + 50, + 52, + 45, + 48, + 52, + 45, + 49, + 55, + 32, + 49, + 50, + 58, + 48, + 54, + 58, + 52, + 54, + 46, + 56, + 54, + 49, + 34, + 44, + 34, + 116, + 101, + 97, + 109, + 95, + 105, + 100, + 34, + 58, + 49, + 44, + 34, + 100, + 105, + 115, + 116, + 105, + 110, + 99, + 116, + 95, + 105, + 100, + 34, + 58, + 34, + 48, + 49, + 56, + 101, + 101, + 98, + 102, + 51, + 45, + 55, + 57, + 98, + 49, + 45, + 55, + 48, + 56, + 50, + 45, + 97, + 55, + 99, + 54, + 45, + 101, + 101, + 98, + 53, + 54, + 97, + 51, + 54, + 48, + 48, + 50, + 102, + 34, + 125, + ], + "type": "Buffer", + }, + "waitForAck": true, + }, + ], +] +`; diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap index 9cd0d244500ae..fd5ccf04db02d 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap +++ b/plugin-server/tests/worker/ingestion/event-pipeline/__snapshots__/runner.test.ts.snap @@ -80,6 +80,21 @@ Array [ true, ], ], + Array [ + "extractHeatmapDataStep", + Array [ + Object { + "distinctId": "my_id", + "elementsList": Array [], + "event": "$pageview", + "eventUuid": "uuid1", + "ip": "127.0.0.1", + "properties": Object {}, + "teamId": 2, + "timestamp": "2020-02-23T02:15:00.000Z", + }, + ], + ], Array [ "createEventStep", Array [ diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/extractHeatmapDataStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/extractHeatmapDataStep.test.ts new file mode 100644 index 0000000000000..ef654deb0df36 --- /dev/null +++ b/plugin-server/tests/worker/ingestion/event-pipeline/extractHeatmapDataStep.test.ts @@ -0,0 +1,226 @@ +import { ISOTimestamp, PreIngestionEvent } from '../../../../src/types' +import { cloneObject } from '../../../../src/utils/utils' +import { extractHeatmapDataStep } from '../../../../src/worker/ingestion/event-pipeline/extractHeatmapDataStep' + +jest.mock('../../../../src/worker/plugins/run') + +const preIngestionEvent: PreIngestionEvent = { + eventUuid: '018eebf3-cb48-750b-bfad-36409ea6f2b2', + event: 'Clicked button', + distinctId: '018eebf3-79b1-7082-a7c6-eeb56a36002f', + properties: { + $current_url: 'http://localhost:3000/', + $host: 'localhost:3000', + $pathname: '/', + $viewport_height: 1328, + $viewport_width: 1071, + distinct_id: '018eebf3-79b1-7082-a7c6-eeb56a36002f', + $device_id: '018eebf3-79b1-7082-a7c6-eeb56a36002f', + $session_id: '018eebf3-79cd-70da-895f-b6cf352bd688', + $window_id: '018eebf3-79cd-70da-895f-b6d09add936a', + $heatmap_data: { + 'http://localhost:3000/': [ + { + x: 1020, + y: 363, + target_fixed: false, + type: 'mousemove', + }, + { + x: 634, + y: 460, + target_fixed: false, + type: 'click', + }, + { + x: 634, + y: 460, + target_fixed: false, + type: 'click', + }, + { + x: 634, + y: 460, + target_fixed: false, + type: 'rageclick', + }, + { + x: 634, + y: 460, + target_fixed: false, + type: 'click', + }, + { + x: 634, + y: 460, + target_fixed: false, + type: 'click', + }, + { + x: 634, + y: 460, + target_fixed: false, + type: 'click', + }, + { + x: 634, + y: 
460, + target_fixed: false, + type: 'click', + }, + { + x: 634, + y: 460, + target_fixed: false, + type: 'mousemove', + }, + { + x: 1052, + y: 665, + target_fixed: false, + type: 'mousemove', + }, + { + x: 632, + y: 436, + target_fixed: false, + type: 'click', + }, + { + x: 632, + y: 436, + target_fixed: false, + type: 'click', + }, + { + x: 632, + y: 436, + target_fixed: false, + type: 'rageclick', + }, + { + x: 632, + y: 436, + target_fixed: false, + type: 'click', + }, + { + x: 713, + y: 264, + target_fixed: false, + type: 'click', + }, + { + x: 119, + y: 143, + target_fixed: false, + type: 'click', + }, + ], + }, + }, + timestamp: '2024-04-17T12:06:46.861Z' as ISOTimestamp, + teamId: 1, +} + +describe('extractHeatmapDataStep()', () => { + let runner: any + let event: PreIngestionEvent + + beforeEach(() => { + event = cloneObject(preIngestionEvent) + runner = { + hub: { + kafkaProducer: { + produce: jest.fn((e) => Promise.resolve(e)), + }, + }, + nextStep: (...args: any[]) => args, + } + }) + + it('parses and ingests correct $heatmap_data', async () => { + const response = await extractHeatmapDataStep(runner, event) + expect(response[0]).toEqual(event) + expect(response[0].properties.$heatmap_data).toBeUndefined() + expect(response[1]).toHaveLength(16) + expect(runner.hub.kafkaProducer.produce).toBeCalledTimes(16) + + const firstProduceCall = runner.hub.kafkaProducer.produce.mock.calls[0][0] + + const parsed = JSON.parse(firstProduceCall.value.toString()) + + expect(parsed).toMatchInlineSnapshot(` + Object { + "current_url": "http://localhost:3000/", + "distinct_id": "018eebf3-79b1-7082-a7c6-eeb56a36002f", + "pointer_target_fixed": false, + "scale_factor": 16, + "session_id": "018eebf3-79cd-70da-895f-b6cf352bd688", + "team_id": 1, + "timestamp": "2024-04-17 12:06:46.861", + "type": "mousemove", + "viewport_height": 83, + "viewport_width": 67, + "x": 64, + "y": 23, + } + `) + + // The rest we can just compare the buffers + expect(runner.hub.kafkaProducer.produce.mock.calls).toMatchSnapshot() + }) + + it('ignores events without $heatmap_data', async () => { + event.properties.$heatmap_data = null + const response = await extractHeatmapDataStep(runner, event) + expect(response).toEqual([event, []]) + expect(response[0].properties.$heatmap_data).toBeUndefined() + }) + + it('ignores events with bad $heatmap_data', async () => { + event.properties.$heatmap_data = 'wat' + const response = await extractHeatmapDataStep(runner, event) + expect(response).toEqual([event, []]) + expect(response[0].properties.$heatmap_data).toBeUndefined() + }) + + it('additionally parses ', async () => { + event.properties = { + ...event.properties, + $prev_pageview_pathname: '/test', + $prev_pageview_max_scroll: 225, + $prev_pageview_last_content: 1445, + $prev_pageview_max_content: 1553, + } + + const response = await extractHeatmapDataStep(runner, event) + // We do delete heatmap data + expect(response[0].properties.$heatmap_data).toBeUndefined() + // We don't delete scroll properties + expect(response[0].properties.$prev_pageview_max_scroll).toEqual(225) + + expect(response[1]).toHaveLength(17) + + const allParsedMessages = runner.hub.kafkaProducer.produce.mock.calls.map((call) => + JSON.parse(call[0].value.toString()) + ) + + expect(allParsedMessages.find((x) => x.type === 'scrolldepth')).toMatchInlineSnapshot(` + Object { + "current_url": "http://localhost:3000/test", + "distinct_id": "018eebf3-79b1-7082-a7c6-eeb56a36002f", + "pointer_target_fixed": false, + "scale_factor": 16, + "session_id": 
"018eebf3-79cd-70da-895f-b6cf352bd688", + "team_id": 1, + "timestamp": "2024-04-17 12:06:46.861", + "type": "scrolldepth", + "viewport_height": 83, + "viewport_width": 67, + "x": 0, + "y": 14, + } + `) + }) +}) diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts index 818d96f2656dc..0cf0fcdb08cd5 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts +++ b/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts @@ -120,6 +120,7 @@ describe('EventPipelineRunner', () => { 'normalizeEventStep', 'processPersonsStep', 'prepareEventStep', + 'extractHeatmapDataStep', 'createEventStep', ]) expect(runner.stepsWithArgs).toMatchSnapshot() @@ -147,6 +148,7 @@ describe('EventPipelineRunner', () => { 'normalizeEventStep', 'processPersonsStep', 'prepareEventStep', + 'extractHeatmapDataStep', 'createEventStep', ]) }) @@ -169,7 +171,7 @@ describe('EventPipelineRunner', () => { const result = await runner.runEventPipeline(pipelineEvent) expect(result.error).toBeUndefined() - expect(pipelineStepMsSummarySpy).toHaveBeenCalledTimes(6) + expect(pipelineStepMsSummarySpy).toHaveBeenCalledTimes(7) expect(pipelineLastStepCounterSpy).toHaveBeenCalledTimes(1) expect(eventProcessedAndIngestedCounterSpy).toHaveBeenCalledTimes(1) expect(pipelineStepMsSummarySpy).toHaveBeenCalledWith('createEventStep') diff --git a/plugin-server/tests/worker/ingestion/person-state.test.ts b/plugin-server/tests/worker/ingestion/person-state.test.ts index 9b634cd59fa1e..0d8aa9b509c95 100644 --- a/plugin-server/tests/worker/ingestion/person-state.test.ts +++ b/plugin-server/tests/worker/ingestion/person-state.test.ts @@ -1,9 +1,8 @@ import { PluginEvent } from '@posthog/plugin-scaffold' import { DateTime } from 'luxon' -import { parse as parseUuid, v5 as uuidv5 } from 'uuid' import { waitForExpect } from '../../../functional_tests/expectations' -import { Database, Hub, Person } from '../../../src/types' +import { Database, Hub, InternalPerson } from '../../../src/types' import { DependencyUnavailableError } from '../../../src/utils/db/error' import { createHub } from '../../../src/utils/db/hub' import { PostgresUse } from '../../../src/utils/db/postgres' @@ -15,6 +14,7 @@ import { FlatPersonOverrideWriter, PersonState, } from '../../../src/worker/ingestion/person-state' +import { uuidFromDistinctId } from '../../../src/worker/ingestion/person-uuid' import { delayUntilEventIngested } from '../../helpers/clickhouse' import { WaitEvent } from '../../helpers/promises' import { createOrganization, createTeam, fetchPostgresPersons, insertRow } from '../../helpers/sql' @@ -34,17 +34,6 @@ interface PersonOverridesMode { ): Promise> } -function uuidFromDistinctId(teamId: number, distinctId: string): string { - // The UUID generation code here is deliberately copied from `person-state` rather than imported, - // so that someone can't accidentally change how `person-state` UUID generation works and still - // have the tests pass. - // - // It is very important that Person UUIDs are deterministically generated and that this format - // doesn't change without a lot of thought and planning about side effects! 
- const namespace = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22') - return uuidv5(`${teamId}:${distinctId}`, namespace) -} - const PersonOverridesModes: Record = { disabled: undefined, 'deferred, without mappings (flat)': { @@ -118,7 +107,12 @@ describe('PersonState.update()', () => { await hub.db.clickhouseQuery('SYSTEM START MERGES') }) - function personState(event: Partial, customHub?: Hub, processPerson = true) { + function personState( + event: Partial, + customHub?: Hub, + processPerson = true, + lazyPersonCreation = false + ) { const fullEvent = { team_id: teamId, properties: {}, @@ -132,6 +126,7 @@ describe('PersonState.update()', () => { timestamp, processPerson, customHub ? customHub.db : hub.db, + lazyPersonCreation, overridesMode?.getWriter(customHub ?? hub) ) } @@ -145,12 +140,17 @@ describe('PersonState.update()', () => { return (await hub.db.clickhouseQuery(query)).data } + async function fetchOverridesForDistinctId(distinctId: string) { + const query = `SELECT * FROM person_distinct_id_overrides_mv FINAL WHERE team_id = ${teamId} AND distinct_id = '${distinctId}'` + return (await hub.db.clickhouseQuery(query)).data + } + async function fetchPersonsRowsWithVersionHigerEqualThan(version = 1) { const query = `SELECT * FROM person FINAL WHERE team_id = ${teamId} AND version >= ${version}` return (await hub.db.clickhouseQuery(query)).data } - async function fetchDistinctIdsClickhouse(person: Person) { + async function fetchDistinctIdsClickhouse(person: InternalPerson) { return hub.db.fetchDistinctIdValues(person, Database.ClickHouse) } @@ -184,6 +184,107 @@ describe('PersonState.update()', () => { expect(personPrimaryTeam.uuid).not.toEqual(personOtherTeam.uuid) }) + it('returns an ephemeral user object when lazy creation is enabled and $process_person_profile=false', async () => { + const event_uuid = new UUIDT().toString() + + const hubParam = undefined + const processPerson = false + const lazyPersonCreation = true + const fakePerson = await personState( + { + event: '$pageview', + distinct_id: newUserDistinctId, + uuid: event_uuid, + properties: { $set: { should_be_dropped: 100 } }, + }, + hubParam, + processPerson, + lazyPersonCreation + ).update() + await hub.db.kafkaProducer.flush() + + expect(fakePerson).toEqual( + expect.objectContaining({ + team_id: teamId, + uuid: newUserUuid, // deterministic even though no user rows were created + properties: {}, // empty even though there was a $set attempted + created_at: DateTime.utc(1970, 1, 1, 0, 0, 5), // fake person created_at + }) + ) + expect(fakePerson.force_upgrade).toBeUndefined() + + // verify there is no Postgres person + const persons = await fetchPostgresPersonsH() + expect(persons.length).toEqual(0) + + // verify there are no Postgres distinct_ids + const distinctIds = await hub.db.fetchDistinctIdValues(fakePerson as InternalPerson) + expect(distinctIds).toEqual(expect.arrayContaining([])) + }) + + it('merging with lazy person creation creates an override and force_upgrade works', async () => { + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [oldUserDistinctId]) + + const hubParam = undefined + let processPerson = true + const lazyPersonCreation = true + await personState( + { + event: '$identify', + distinct_id: newUserDistinctId, + properties: { + $anon_distinct_id: oldUserDistinctId, + }, + }, + hubParam, + processPerson, + lazyPersonCreation + ).update() + await hub.db.kafkaProducer.flush() + + await delayUntilEventIngested(() => 
fetchOverridesForDistinctId(newUserDistinctId)) + const chOverrides = await fetchOverridesForDistinctId(newUserDistinctId) + expect(chOverrides.length).toEqual(1) + + // Override created for Person that never existed in the DB + expect(chOverrides).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + distinct_id: newUserDistinctId, + person_id: oldUserUuid, + version: 1, + }), + ]) + ) + + // Using the `distinct_id` again with `processPerson=false` results in + // `force_upgrade=true` and real Person `uuid` and `created_at` + processPerson = false + const event_uuid = new UUIDT().toString() + const fakePerson = await personState( + { + event: '$pageview', + distinct_id: newUserDistinctId, + uuid: event_uuid, + properties: { $set: { should_be_dropped: 100 } }, + }, + hubParam, + processPerson, + lazyPersonCreation + ).update() + await hub.db.kafkaProducer.flush() + + expect(fakePerson).toEqual( + expect.objectContaining({ + team_id: teamId, + uuid: oldUserUuid, // *old* user, because it existed before the merge + properties: {}, // empty even though there was a $set attempted + created_at: timestamp, // *not* the fake person created_at + force_upgrade: true, + }) + ) + }) + it('creates person if they are new', async () => { const event_uuid = new UUIDT().toString() const person = await personState({ @@ -587,7 +688,7 @@ describe('PersonState.update()', () => { it('handles race condition when person provided has been merged', async () => { // TODO: we don't handle this currently person having been changed / updated properties can get overridden // Pass in a person, but another thread merges it - we shouldn't error in this case, but instead if we couldn't update we should retry? - const mergeDeletedPerson: Person = { + const mergeDeletedPerson: InternalPerson = { created_at: timestamp, version: 0, id: 0, @@ -1097,7 +1198,7 @@ describe('PersonState.update()', () => { uuidFromDistinctId(teamId, distinctId), [distinctId] ) - await hub.db.addDistinctId(person, distinctId) // this throws + await hub.db.addDistinctId(person, distinctId, 0) // this throws }) const person = await personState({ @@ -1599,7 +1700,7 @@ describe('PersonState.update()', () => { }) it(`postgres and clickhouse get updated`, async () => { - const first: Person = await hub.db.createPerson( + const first: InternalPerson = await hub.db.createPerson( timestamp, {}, {}, @@ -1610,7 +1711,7 @@ describe('PersonState.update()', () => { firstUserUuid, [firstUserDistinctId] ) - const second: Person = await hub.db.createPerson( + const second: InternalPerson = await hub.db.createPerson( timestamp, {}, {}, @@ -1691,7 +1792,7 @@ describe('PersonState.update()', () => { }) it(`throws if postgres unavailable`, async () => { - const first: Person = await hub.db.createPerson( + const first: InternalPerson = await hub.db.createPerson( timestamp, {}, {}, @@ -1702,7 +1803,7 @@ describe('PersonState.update()', () => { firstUserUuid, [firstUserDistinctId] ) - const second: Person = await hub.db.createPerson( + const second: InternalPerson = await hub.db.createPerson( timestamp, {}, {}, @@ -1863,7 +1964,7 @@ describe('PersonState.update()', () => { if (!overridesMode?.supportsSyncTransaction) { return } - const first: Person = await hub.db.createPerson( + const first: InternalPerson = await hub.db.createPerson( timestamp, {}, {}, @@ -1874,7 +1975,7 @@ describe('PersonState.update()', () => { firstUserUuid, [firstUserDistinctId] ) - const second: Person = await hub.db.createPerson( + const second: InternalPerson = await 
hub.db.createPerson( timestamp, {}, {}, @@ -1990,7 +2091,7 @@ describe('PersonState.update()', () => { }) it(`handles a chain of overrides being applied concurrently`, async () => { - const first: Person = await hub.db.createPerson( + const first: InternalPerson = await hub.db.createPerson( timestamp, { first: true }, {}, @@ -2001,7 +2102,7 @@ describe('PersonState.update()', () => { firstUserUuid, [firstUserDistinctId] ) - const second: Person = await hub.db.createPerson( + const second: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 2 }), { second: true }, {}, @@ -2012,7 +2113,7 @@ describe('PersonState.update()', () => { secondUserUuid, [secondUserDistinctId] ) - const third: Person = await hub.db.createPerson( + const third: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 5 }), { third: true }, {}, @@ -2135,7 +2236,7 @@ describe('PersonState.update()', () => { }) it(`handles a chain of overrides being applied out of order`, async () => { - const first: Person = await hub.db.createPerson( + const first: InternalPerson = await hub.db.createPerson( timestamp, { first: true }, {}, @@ -2146,7 +2247,7 @@ describe('PersonState.update()', () => { firstUserUuid, [firstUserDistinctId] ) - const second: Person = await hub.db.createPerson( + const second: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 2 }), { second: true }, {}, @@ -2157,7 +2258,7 @@ describe('PersonState.update()', () => { secondUserUuid, [secondUserDistinctId] ) - const third: Person = await hub.db.createPerson( + const third: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 5 }), { third: true }, {}, diff --git a/plugin-server/tests/worker/ingestion/person-uuid.test.ts b/plugin-server/tests/worker/ingestion/person-uuid.test.ts new file mode 100644 index 0000000000000..c27106dc4e1f2 --- /dev/null +++ b/plugin-server/tests/worker/ingestion/person-uuid.test.ts @@ -0,0 +1,11 @@ +import { uuidFromDistinctId } from '../../../src/worker/ingestion/person-uuid' + +jest.setTimeout(5000) // 5 sec timeout + +describe('uuidFromDistinctId', () => { + it('generates deterministic UUIDs', () => { + expect(uuidFromDistinctId(1, 'test')).toMatchInlineSnapshot(`"246f7a43-5507-564f-b687-793ee3c2dd79"`) + expect(uuidFromDistinctId(2, 'test')).toMatchInlineSnapshot(`"00ce873a-549c-548e-bbec-cc804a385dd8"`) + expect(uuidFromDistinctId(1, 'test2')).toMatchInlineSnapshot(`"45c17302-ee44-5596-916a-0eba21f4b638"`) + }) +}) diff --git a/plugin-server/tests/worker/ingestion/postgres-parity.test.ts b/plugin-server/tests/worker/ingestion/postgres-parity.test.ts index 5c764e5809b40..142b7c6938bd6 100644 --- a/plugin-server/tests/worker/ingestion/postgres-parity.test.ts +++ b/plugin-server/tests/worker/ingestion/postgres-parity.test.ts @@ -287,7 +287,7 @@ describe('postgres parity', () => { // add 'anotherOne' to person - await hub.db.addDistinctId(postgresPerson, 'anotherOne') + await hub.db.addDistinctId(postgresPerson, 'anotherOne', 0) await delayUntilEventIngested(() => hub.db.fetchDistinctIdValues(postgresPerson, Database.ClickHouse), 2) diff --git a/plugin-server/tests/worker/ingestion/process-event.test.ts b/plugin-server/tests/worker/ingestion/process-event.test.ts index 19f822fbb0b6e..b9947bb7eec74 100644 --- a/plugin-server/tests/worker/ingestion/process-event.test.ts +++ b/plugin-server/tests/worker/ingestion/process-event.test.ts @@ -171,6 +171,36 @@ describe('EventsProcessor#createEvent()', () => { ) }) + it('force_upgrade persons are recorded as such', async 
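person-uuid.test.ts pins the deterministic person-UUID scheme with known-answer snapshots, and the helper this diff removes from person-state.test.ts documents the derivation it had been duplicating: a UUIDv5 of `${teamId}:${distinctId}` under a fixed namespace. A sketch of that derivation, assuming the extracted uuidFromDistinctId keeps the same namespace and formula as the removed copy:

import { parse as parseUuid, v5 as uuidv5 } from 'uuid'

// Namespace copied from the helper removed from person-state.test.ts in this diff.
const PERSON_UUID_NAMESPACE = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22')

// Deterministic: the same (teamId, distinctId) pair always maps to the same UUID,
// which is what lets lazy person creation hand out a person UUID before any row exists.
function uuidFromDistinctId(teamId: number, distinctId: string): string {
    return uuidv5(`${teamId}:${distinctId}`, PERSON_UUID_NAMESPACE)
}

uuidFromDistinctId(1, 'test') // '246f7a43-5507-564f-b687-793ee3c2dd79', matching the inline snapshot
uuidFromDistinctId(1, 'test2') // a different distinct_id yields a different UUID

Keeping these known-answer snapshots in a dedicated test preserves the point of the removed comment: the derivation must not change without planning for side effects, since every person UUID derived from a distinct_id would silently change with it.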
() => { + const processPerson = false + person.force_upgrade = true + await eventsProcessor.createEvent( + { ...preIngestionEvent, properties: { $group_0: 'group_key' } }, + person, + processPerson + ) + + await eventsProcessor.kafkaProducer.flush() + + const events = await delayUntilEventIngested(() => hub.db.fetchEvents()) + expect(events.length).toEqual(1) + expect(events[0]).toEqual( + expect.objectContaining({ + uuid: eventUuid, + event: '$pageview', + properties: {}, // $group_0 is removed + timestamp: expect.any(DateTime), + team_id: 2, + distinct_id: 'my_id', + elements_chain: null, + created_at: expect.any(DateTime), + person_id: personUuid, + person_properties: {}, + person_mode: 'force_upgrade', + }) + ) + }) + it('handles the person no longer existing', async () => { // This person is never in the DB, but createEvent gets a Person object and should use that const uuid = new UUIDT().toString() diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6a945ef52e6b4..4c8b10e662c7a 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,9 +8,12 @@ overrides: playwright: 1.41.2 patchedDependencies: - rrweb@2.0.0-alpha.12: - hash: t3xxecww6aodjl4qopwv6jdxmq - path: patches/rrweb@2.0.0-alpha.12.patch + heatmap.js@2.0.5: + hash: gydrxrztd4ruyhouu6tu7zh43e + path: patches/heatmap.js@2.0.5.patch + rrweb@2.0.0-alpha.13: + hash: awo7hmdoglvq4pl5b43lreupza + path: patches/rrweb@2.0.0-alpha.13.patch dependencies: '@ant-design/icons': @@ -56,11 +59,11 @@ dependencies: specifier: ^2.1.2 version: 2.1.2(react@18.2.0) '@rrweb/types': - specifier: 2.0.0-alpha.12 - version: 2.0.0-alpha.12 + specifier: 2.0.0-alpha.13 + version: 2.0.0-alpha.13 '@sentry/react': - specifier: 7.22.0 - version: 7.22.0(react@18.2.0) + specifier: 7.112.1 + version: 7.112.1(react@18.2.0) '@tailwindcss/container-queries': specifier: ^0.1.1 version: 0.1.1(tailwindcss@3.4.0) @@ -196,6 +199,9 @@ dependencies: fuse.js: specifier: ^6.6.2 version: 6.6.2 + heatmap.js: + specifier: ^2.0.5 + version: 2.0.5(patch_hash=gydrxrztd4ruyhouu6tu7zh43e) husky: specifier: ^7.0.4 version: 7.0.4 @@ -254,11 +260,11 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.128.2 - version: 1.128.2 + specifier: 1.130.0 + version: 1.130.0 posthog-js-lite: - specifier: 2.5.0 - version: 2.5.0 + specifier: 3.0.0 + version: 3.0.0 prettier: specifier: ^2.8.8 version: 2.8.8 @@ -326,8 +332,8 @@ dependencies: specifier: ^1.5.1 version: 1.5.1 rrweb: - specifier: 2.0.0-alpha.12 - version: 2.0.0-alpha.12(patch_hash=t3xxecww6aodjl4qopwv6jdxmq) + specifier: 2.0.0-alpha.13 + version: 2.0.0-alpha.13(patch_hash=awo7hmdoglvq4pl5b43lreupza) sass: specifier: ^1.26.2 version: 1.56.0 @@ -387,8 +393,8 @@ devDependencies: specifier: 1.41.2 version: 1.41.2 '@sentry/types': - specifier: 7.22.0 - version: 7.22.0 + specifier: 7.112.1 + version: 7.112.1 '@storybook/addon-a11y': specifier: ^7.6.4 version: 7.6.4 @@ -467,6 +473,9 @@ devDependencies: '@types/dompurify': specifier: ^3.0.3 version: 3.0.3 + '@types/heatmap.js': + specifier: ^2.0.41 + version: 2.0.41 '@types/image-blob-reduce': specifier: ^4.1.1 version: 4.1.1 @@ -584,6 +593,9 @@ devDependencies: eslint-plugin-storybook: specifier: ^0.6.15 version: 0.6.15(eslint@8.52.0)(typescript@4.9.5) + eslint-plugin-unused-imports: + specifier: ^3.1.0 + version: 3.1.0(@typescript-eslint/eslint-plugin@7.1.1)(eslint@8.52.0) file-loader: specifier: ^6.1.0 version: 6.2.0(webpack@5.88.2) @@ -664,7 +676,7 @@ devDependencies: version: 7.6.4 storybook-addon-pseudo-states: specifier: 2.1.2 - version: 
2.1.2(@storybook/components@7.6.4)(@storybook/core-events@7.6.4)(@storybook/manager-api@7.6.17)(@storybook/preview-api@7.6.17)(@storybook/theming@7.6.4)(react-dom@18.2.0)(react@18.2.0) + version: 2.1.2(@storybook/components@7.6.4)(@storybook/core-events@7.6.4)(@storybook/manager-api@7.6.18)(@storybook/preview-api@7.6.18)(@storybook/theming@7.6.4)(react-dom@18.2.0)(react@18.2.0) style-loader: specifier: ^2.0.0 version: 2.0.0(webpack@5.88.2) @@ -5880,55 +5892,105 @@ packages: type-fest: 2.19.0 dev: false - /@rrweb/types@2.0.0-alpha.12: - resolution: {integrity: sha512-dHFMvVBA9JD8MBV+waHtJs8lIeTPCVeT3h+WZ5s8a+EX42TQ08t+ObFMK0OYEszvWgJMJ5WZi5lXTvF46P27Hg==} + /@rrweb/types@2.0.0-alpha.13: + resolution: {integrity: sha512-ytq+MeVm/vP2ybw+gTAN3Xvt7HN2yS+wlbfnwHpQMftxrwzq0kEZHdw+Jp5WUvvpONWzXriNAUU9dW0qLGkzNg==} + dependencies: + rrweb-snapshot: 2.0.0-alpha.13 + dev: false + + /@sentry-internal/feedback@7.112.1: + resolution: {integrity: sha512-ejE4eRXLqv5emxVWudBkRQCv5Q7s21thei7gqSxGLBXe8AUrCjTiD0qA1ToJAKcleIyRRf/TQvGb/T7U6vwAAw==} + engines: {node: '>=12'} + dependencies: + '@sentry/core': 7.112.1 + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 + dev: false + + /@sentry-internal/replay-canvas@7.112.1: + resolution: {integrity: sha512-+xDd/LEiJZGk4PQKs4xcAWKJFzFKpuNF64DFW/JWuJ5FDnKB+t7w198nQyAZKGjupN7LixLb49Z8O2Gda7fHQQ==} + engines: {node: '>=12'} dependencies: - rrweb-snapshot: 2.0.0-alpha.12 + '@sentry/core': 7.112.1 + '@sentry/replay': 7.112.1 + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 dev: false - /@sentry/browser@7.22.0: - resolution: {integrity: sha512-8MA+f46+T3G7fb4BYYX9Wl3bMDloG5a3Ng0GWdBeq6DE2tXVHeCvba8Yrrcnn1qFHpmnOn5Nz4xWBUDs3uBFxA==} + /@sentry-internal/tracing@7.112.1: + resolution: {integrity: sha512-pZVIOB6+t4HlgU3mCRtIbvo//t8uQY9tnBjbJJ2nEv8nTu8A7/dZ5ebrLOWStV3bNp/+uCqLuLuuimJeNNn6vQ==} engines: {node: '>=8'} dependencies: - '@sentry/core': 7.22.0 - '@sentry/types': 7.22.0 - '@sentry/utils': 7.22.0 - tslib: 1.14.1 + '@sentry/core': 7.112.1 + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 dev: false - /@sentry/core@7.22.0: - resolution: {integrity: sha512-qYJiJrL1mfQQln84mNunBRUkXq7xDGQQoNh4Sz9VYP5698va51zmS5BLYRCZ5CkPwRYNuhUqlUXN7bpYGYOOIA==} + /@sentry/browser@7.112.1: + resolution: {integrity: sha512-NRTo3mJbhiCd9GEFEWL8SplFJhTCPjiAlOhjUw8MnJb7pkxWm2xhC7PVi6SUE8hF/g1rrEwgUr9SX5v8+xwK6g==} engines: {node: '>=8'} dependencies: - '@sentry/types': 7.22.0 - '@sentry/utils': 7.22.0 - tslib: 1.14.1 + '@sentry-internal/feedback': 7.112.1 + '@sentry-internal/replay-canvas': 7.112.1 + '@sentry-internal/tracing': 7.112.1 + '@sentry/core': 7.112.1 + '@sentry/integrations': 7.112.1 + '@sentry/replay': 7.112.1 + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 + dev: false + + /@sentry/core@7.112.1: + resolution: {integrity: sha512-ZhOxt4sZVLqHurWqIY1ExWYZ20ViFTbqgW2GdJGHz4XwJhBln0ZVpHD+tKXy3GBEY+2Ee4qoqHi6tDrFgPvJqw==} + engines: {node: '>=8'} + dependencies: + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 dev: false - /@sentry/react@7.22.0(react@18.2.0): - resolution: {integrity: sha512-nbZD+bobhV65r/4mpfRgGct1nrYWEmnNzTYZM4PQyPyImuk/VmNNdnzP3BLWqAnV4LvbVWEkgZIcquN8yA098g==} + /@sentry/integrations@7.112.1: + resolution: {integrity: sha512-jIgXT+ahUS7zmhDMAzsgQHCNA6ZwZAp0Bwjoz0tcuGzNcv7mOCnjHz5YooJVQgXuREV653RmEuGGTklrpn6S2w==} + engines: {node: '>=8'} + dependencies: + '@sentry/core': 7.112.1 + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 + localforage: 1.10.0 + dev: false + + /@sentry/react@7.112.1(react@18.2.0): + resolution: {integrity: 
sha512-q0fDW3omq/NPaL7yRqWA1USxGtEAcdFZOngIMsr9Bc4fJBGXDO+xLwPWjo1MIVvdDBJJYL/9Z56ppqTb3kiGXw==} engines: {node: '>=8'} peerDependencies: react: 15.x || 16.x || 17.x || 18.x dependencies: - '@sentry/browser': 7.22.0 - '@sentry/types': 7.22.0 - '@sentry/utils': 7.22.0 + '@sentry/browser': 7.112.1 + '@sentry/core': 7.112.1 + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 hoist-non-react-statics: 3.3.2 react: 18.2.0 - tslib: 1.14.1 dev: false - /@sentry/types@7.22.0: - resolution: {integrity: sha512-LhCL+wb1Jch+OesB2CIt6xpfO1Ab6CRvoNYRRzVumWPLns1T3ZJkarYfhbLaOEIb38EIbPgREdxn2AJT560U4Q==} + /@sentry/replay@7.112.1: + resolution: {integrity: sha512-4lobxfgmbB2C7ZHk1inWt9IRIvlQa2Sczau5ngE4Qd4mZSKIgIYGtIJC52uOuGvBcP8gHiIbA7ACihkd7834Ew==} + engines: {node: '>=12'} + dependencies: + '@sentry-internal/tracing': 7.112.1 + '@sentry/core': 7.112.1 + '@sentry/types': 7.112.1 + '@sentry/utils': 7.112.1 + dev: false + + /@sentry/types@7.112.1: + resolution: {integrity: sha512-5dLIxWZfCXH5kExrsWc+R6loMr3RR6OQuonVNL3Fa8Dw37Q7aExCrjRmocOHeQKhHwNBd3QhYm7phjnbxS6Oaw==} engines: {node: '>=8'} - /@sentry/utils@7.22.0: - resolution: {integrity: sha512-1GiNw1opIngxg0nktCTc9wibh4/LV12kclrnB9dAOHrqazZXHXZRAkjqrhQphKcMpT+3By91W6EofjaDt5a/hg==} + /@sentry/utils@7.112.1: + resolution: {integrity: sha512-/AMGDD6OMvT2cpfL5KuDC10oTS8yOt7BAPomXJNS/xn1TRcEEEZ1TWbYZiGT5ijggQEL1OXSojpeQU8XEW8dcQ==} engines: {node: '>=8'} dependencies: - '@sentry/types': 7.22.0 - tslib: 1.14.1 + '@sentry/types': 7.112.1 dev: false /@sideway/address@4.1.4: @@ -6283,11 +6345,11 @@ packages: tiny-invariant: 1.3.1 dev: true - /@storybook/channels@7.6.17: - resolution: {integrity: sha512-GFG40pzaSxk1hUr/J/TMqW5AFDDPUSu+HkeE/oqSWJbOodBOLJzHN6CReJS6y1DjYSZLNFt1jftPWZZInG/XUA==} + /@storybook/channels@7.6.18: + resolution: {integrity: sha512-ayMJ6GJot81URJySXcwZG1mLacblUVdLgAMIhU7oSW1K1v4KvQPxv3FqjNN+48g/1s+2A9UraCDqN0qzO3wznQ==} dependencies: - '@storybook/client-logger': 7.6.17 - '@storybook/core-events': 7.6.17 + '@storybook/client-logger': 7.6.18 + '@storybook/core-events': 7.6.18 '@storybook/global': 5.0.0 qs: 6.12.1 telejson: 7.2.0 @@ -6374,8 +6436,8 @@ packages: '@storybook/global': 5.0.0 dev: true - /@storybook/client-logger@7.6.17: - resolution: {integrity: sha512-6WBYqixAXNAXlSaBWwgljWpAu10tPRBJrcFvx2gPUne58EeMM20Gi/iHYBz2kMCY+JLAgeIH7ZxInqwO8vDwiQ==} + /@storybook/client-logger@7.6.18: + resolution: {integrity: sha512-/mSKa968G++M7RTW1XLM0jgNMUATxKv/vggLyQ9Oo2UpQhRaXX8dKRl7GVu2yFDRm9sDKs7rg+KSsstrEjQcSg==} dependencies: '@storybook/global': 5.0.0 dev: true @@ -6511,8 +6573,8 @@ packages: ts-dedent: 2.2.0 dev: true - /@storybook/core-events@7.6.17: - resolution: {integrity: sha512-AriWMCm/k1cxlv10f+jZ1wavThTRpLaN3kY019kHWbYT9XgaSuLU67G7GPr3cGnJ6HuA6uhbzu8qtqVCd6OfXA==} + /@storybook/core-events@7.6.18: + resolution: {integrity: sha512-K4jrHedFRfokvkIfKfNtQTcguPzeWF3oiuyXQR4gv4bnMCndCoiSRKfCE5zesgGmfml/Krt2zb4nNz/UPLbDeA==} dependencies: ts-dedent: 2.2.0 dev: true @@ -6646,8 +6708,8 @@ packages: type-fest: 2.19.0 dev: true - /@storybook/csf@0.1.4: - resolution: {integrity: sha512-B9UI/lsQMjF+oEfZCI6YXNoeuBcGZoOP5x8yKbe2tIEmsMjSztFKkpPzi5nLCnBk/MBtl6QJeI3ksJnbsWPkOw==} + /@storybook/csf@0.1.5: + resolution: {integrity: sha512-pW7Dtk/bE2JGrAe/KuBY4Io02NBe/2CLP2DkgVgWlSwvEVdm/rbQyiwy8RaL0lQlJCv9CsGBY+n9HQG8d4bZjQ==} dependencies: type-fest: 2.19.0 dev: true @@ -6675,17 +6737,17 @@ packages: resolution: {integrity: sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ==} dev: true - 
/@storybook/manager-api@7.6.17(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-IJIV1Yc6yw1dhCY4tReHCfBnUKDqEBnMyHp3mbXpsaHxnxJZrXO45WjRAZIKlQKhl/Ge1CrnznmHRCmYgqmrWg==} + /@storybook/manager-api@7.6.18(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-4c2japUMjnHiel38wQoNWh5RVac6ATMcWxvzPhOKx3I19gbSoUF1CcDg+1piRMWuSyzUBIBlIrBB3s4/02gnnA==} dependencies: - '@storybook/channels': 7.6.17 - '@storybook/client-logger': 7.6.17 - '@storybook/core-events': 7.6.17 - '@storybook/csf': 0.1.4 + '@storybook/channels': 7.6.18 + '@storybook/client-logger': 7.6.18 + '@storybook/core-events': 7.6.18 + '@storybook/csf': 0.1.5 '@storybook/global': 5.0.0 - '@storybook/router': 7.6.17 - '@storybook/theming': 7.6.17(react-dom@18.2.0)(react@18.2.0) - '@storybook/types': 7.6.17 + '@storybook/router': 7.6.18 + '@storybook/theming': 7.6.18(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.6.18 dequal: 2.0.3 lodash: 4.17.21 memoizerific: 1.11.3 @@ -6790,15 +6852,15 @@ packages: - webpack-plugin-serve dev: true - /@storybook/preview-api@7.6.17: - resolution: {integrity: sha512-wLfDdI9RWo1f2zzFe54yRhg+2YWyxLZvqdZnSQ45mTs4/7xXV5Wfbv3QNTtcdw8tT3U5KRTrN1mTfTCiRJc0Kw==} + /@storybook/preview-api@7.6.18: + resolution: {integrity: sha512-X3r3MnoLJWUhHTVFggJcfHzDLCKSOdHNOpXXRNkdG2WXFcCZAlTdm0KqThCvQmdqS4OAOJMfn4pHqtxPG8yfyg==} dependencies: - '@storybook/channels': 7.6.17 - '@storybook/client-logger': 7.6.17 - '@storybook/core-events': 7.6.17 - '@storybook/csf': 0.1.4 + '@storybook/channels': 7.6.18 + '@storybook/client-logger': 7.6.18 + '@storybook/core-events': 7.6.18 + '@storybook/csf': 0.1.5 '@storybook/global': 5.0.0 - '@storybook/types': 7.6.17 + '@storybook/types': 7.6.18 '@types/qs': 6.9.15 dequal: 2.0.3 lodash: 4.17.21 @@ -6958,10 +7020,10 @@ packages: - supports-color dev: true - /@storybook/router@7.6.17: - resolution: {integrity: sha512-GnyC0j6Wi5hT4qRhSyT8NPtJfGmf82uZw97LQRWeyYu5gWEshUdM7aj40XlNiScd5cZDp0owO1idduVF2k2l2A==} + /@storybook/router@7.6.18: + resolution: {integrity: sha512-Kw6nAPWRAFE9DM//pnyjL7Xnxt+yQIONdERDnPrdEmHG5mErXGtO18aFMsb/7GiAD50J/i5ObTp7FJsWffAnbg==} dependencies: - '@storybook/client-logger': 7.6.17 + '@storybook/client-logger': 7.6.18 memoizerific: 1.11.3 qs: 6.12.1 dev: true @@ -7044,14 +7106,14 @@ packages: - ts-node dev: true - /@storybook/theming@7.6.17(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-ZbaBt3KAbmBtfjNqgMY7wPMBshhSJlhodyMNQypv+95xLD/R+Az6aBYbpVAOygLaUQaQk4ar7H/Ww6lFIoiFbA==} + /@storybook/theming@7.6.18(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-5nwqV/rAVzS8wZ6DbsX5/ugDLV189hn2m3K9JlJmhVW9b2mSDYW5i1cTjpoChh1t9gMZl82VPnEhgPRMx5bXgw==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: '@emotion/use-insertion-effect-with-fallbacks': 1.0.1(react@18.2.0) - '@storybook/client-logger': 7.6.17 + '@storybook/client-logger': 7.6.18 '@storybook/global': 5.0.0 memoizerific: 1.11.3 react: 18.2.0 @@ -7081,10 +7143,10 @@ packages: file-system-cache: 2.3.0 dev: true - /@storybook/types@7.6.17: - resolution: {integrity: sha512-GRY0xEJQ0PrL7DY2qCNUdIfUOE0Gsue6N+GBJw9ku1IUDFLJRDOF+4Dx2BvYcVCPI5XPqdWKlEyZdMdKjiQN7Q==} + /@storybook/types@7.6.18: + resolution: {integrity: sha512-W7/8kUtMhEopZhwXFMOKlXwQCrz0PBJ5wQwmJNZ4i0YPTVfFzb+/6pgpkzUNtbXiTp6dfxi3ERoAF9wz9Zyt7w==} dependencies: - '@storybook/channels': 7.6.17 + '@storybook/channels': 7.6.18 '@types/babel__core': 7.20.5 '@types/express': 4.17.21 file-system-cache: 2.3.0 @@ -7734,7 +7796,7 @@ 
packages: resolution: {integrity: sha512-C3zfBrhHZvrpAAK3YXqLWVAGo87A4SvJ83Q/zVJ8rFWJdKejUnDYaWZPkA8K84kb2vDA/g90LTQAz7etXcgoQQ==} dependencies: '@types/d3-array': 3.0.3 - '@types/geojson': 7946.0.10 + '@types/geojson': 7946.0.12 dev: true /@types/d3-delaunay@6.0.1: @@ -7776,7 +7838,7 @@ packages: /@types/d3-geo@3.0.2: resolution: {integrity: sha512-DbqK7MLYA8LpyHQfv6Klz0426bQEf7bRTvhMy44sNGVyZoWn//B0c+Qbeg8Osi2Obdc9BLLXYAKpyWege2/7LQ==} dependencies: - '@types/geojson': 7946.0.10 + '@types/geojson': 7946.0.12 dev: true /@types/d3-hierarchy@3.1.0: @@ -7985,13 +8047,8 @@ packages: resolution: {integrity: sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw==} dev: true - /@types/geojson@7946.0.10: - resolution: {integrity: sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==} - dev: true - /@types/geojson@7946.0.12: resolution: {integrity: sha512-uK2z1ZHJyC0nQRbuovXFt4mzXDwf27vQeUWNhfKGwRcWW429GOhP8HxUHlM6TLH4bzmlv/HlEjpvJh3JfmGsAA==} - dev: false /@types/google.maps@3.55.4: resolution: {integrity: sha512-Ip3IfRs3RZjeC88V8FGnWQTQXeS5gkJedPSosN6DMi9Xs8buGTpsPq6UhREoZsGH+62VoQ6jiRBUR8R77If69w==} @@ -8009,6 +8066,12 @@ packages: '@types/unist': 2.0.6 dev: false + /@types/heatmap.js@2.0.41: + resolution: {integrity: sha512-3oHffxC+N+1EKXjeC65klk1kHnLJ5i6tEKFNb/04J+qSfQuCliacsNBWDpt59JfG2vBXRRn+ICbzRZj48j6HfQ==} + dependencies: + '@types/leaflet': 0.7.40 + dev: true + /@types/hogan.js@3.0.5: resolution: {integrity: sha512-/uRaY3HGPWyLqOyhgvW9Aa43BNnLZrNeQxl2p8wqId4UHMfPKolSB+U7BlZyO1ng7MkLnyEAItsBzCG0SDhqrA==} dev: false @@ -8078,6 +8141,12 @@ packages: resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} dev: true + /@types/leaflet@0.7.40: + resolution: {integrity: sha512-R2UwXOKwnKZi9zNm37WbPTAVuqHmysE6NVihkc5DUrovTirUxFSbZzvXrlwv0n5sibe0w8VF1bWu0ta4kZlAaA==} + dependencies: + '@types/geojson': 7946.0.12 + dev: true + /@types/less@3.0.6: resolution: {integrity: sha512-PecSzorDGdabF57OBeQO/xFbAkYWo88g4Xvnsx7LRwqLC17I7OoKtA3bQB9uXkY6UkMWCOsA8HSVpaoitscdXw==} dev: false @@ -12084,6 +12153,26 @@ packages: - typescript dev: true + /eslint-plugin-unused-imports@3.1.0(@typescript-eslint/eslint-plugin@7.1.1)(eslint@8.52.0): + resolution: {integrity: sha512-9l1YFCzXKkw1qtAru1RWUtG2EVDZY0a0eChKXcL+EZ5jitG7qxdctu4RnvhOJHv4xfmUf7h+JJPINlVpGhZMrw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + '@typescript-eslint/eslint-plugin': 6 - 7 + eslint: '8' + peerDependenciesMeta: + '@typescript-eslint/eslint-plugin': + optional: true + dependencies: + '@typescript-eslint/eslint-plugin': 7.1.1(@typescript-eslint/parser@7.1.1)(eslint@8.52.0)(typescript@4.9.5) + eslint: 8.52.0 + eslint-rule-composer: 0.3.0 + dev: true + + /eslint-rule-composer@0.3.0: + resolution: {integrity: sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==} + engines: {node: '>=4.0.0'} + dev: true + /eslint-scope@5.1.1: resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} engines: {node: '>=8.0.0'} @@ -13273,6 +13362,11 @@ packages: resolution: {integrity: sha512-tUCGvt191vNSQgttSyJoibR+VO+I6+iCHIUdhzEMJKE+EAL8BwCN7fUOZlY4ofOelNHsK+gEjxB/B+9N3EWtdA==} dev: true + /heatmap.js@2.0.5(patch_hash=gydrxrztd4ruyhouu6tu7zh43e): + resolution: {integrity: sha512-CG2gYFP5Cv9IQCXEg3ZRxnJDyAilhWnQlAuHYGuWVzv6mFtQelS1bR9iN80IyDmFECbFPbg6I0LR5uAFHgCthw==} + dev: false + 
patched: true + /helpertypes@0.0.19: resolution: {integrity: sha512-J00e55zffgi3yVnUp0UdbMztNkr2PnizEkOe9URNohnrNhW5X0QpegkuLpOmFQInpi93Nb8MCjQRHAiCDF42NQ==} engines: {node: '>=10.0.0'} @@ -13539,6 +13633,10 @@ packages: requiresBuild: true optional: true + /immediate@3.0.6: + resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} + dev: false + /immutable@4.1.0: resolution: {integrity: sha512-oNkuqVTA8jqG1Q6c+UglTOD1xhC1BtjKI7XkCXRkZHrN5m18/XsnUp8Q89GkQO/z+0WjonSvl0FLhDYftp46nQ==} @@ -15229,6 +15327,12 @@ packages: type-check: 0.4.0 dev: true + /lie@3.1.1: + resolution: {integrity: sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==} + dependencies: + immediate: 3.0.6 + dev: false + /lilconfig@2.1.0: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} engines: {node: '>=10'} @@ -15331,6 +15435,12 @@ packages: json5: 2.2.3 dev: true + /localforage@1.10.0: + resolution: {integrity: sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==} + dependencies: + lie: 3.1.1 + dev: false + /locate-path@3.0.0: resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} engines: {node: '>=6'} @@ -17453,12 +17563,12 @@ packages: picocolors: 1.0.0 source-map-js: 1.0.2 - /posthog-js-lite@2.5.0: - resolution: {integrity: sha512-Urvlp0Vu9h3td0BVFWt0QXFJDoOZcaAD83XM9d91NKMKTVPZtfU0ysoxstIf5mw/ce9ZfuMgpWPaagrZI4rmSg==} + /posthog-js-lite@3.0.0: + resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.128.2: - resolution: {integrity: sha512-uKjCfRZKCs346bXNEyyjHkbkaTASUcRSWtA3u1yJTDElUuuiGkCKUrbRz6fG1+2019Y8ebuXvwVfqQQwODks7A==} + /posthog-js@1.130.0: + resolution: {integrity: sha512-bCrw5HunoXLybO20Q1bYEg68i5WCZWKxhStYJK4OR/9jrm7GwZ53GDrN78p8apFi0EH5ay4YZGbLFSkg+SsZWQ==} dependencies: fflate: 0.4.8 preact: 10.20.2 @@ -19157,27 +19267,27 @@ packages: resolution: {integrity: sha512-85aZYCxweiD5J8yTEbw+E6A27zSnLPNDL0WfPdw3YYodq7WjnTKo0q4dtyQ2gz23iPT8Q9CUyJtAaUNcTxRf5Q==} dev: false - /rrdom@2.0.0-alpha.12: - resolution: {integrity: sha512-xdl51tE/ruN+zO0iEalEyyaQtbb4lb2HLULA6ug/zyApQjIVevaDc3BJnyXbDhd0eYgY9flBXTZ2gM64htUwHQ==} + /rrdom@2.0.0-alpha.13: + resolution: {integrity: sha512-GJD3L2MPbIg3+VgCwwfujB4HRXyMfDdg8o3djPjSB9rMX2b52Hx2tBUmwmdnWfgWKtYGDJ2wVX9Dng3tZEBHVA==} dependencies: - rrweb-snapshot: 2.0.0-alpha.12 + rrweb-snapshot: 2.0.0-alpha.13 dev: false - /rrweb-snapshot@2.0.0-alpha.12: - resolution: {integrity: sha512-i4sz9469dbsEGFiBzCkq+7I7M+imPeC3NrKgrrdJ2tXu9H+/eegNe4SrQgCsLBeSZHZDHU0o9L5rxTAiapWbGg==} + /rrweb-snapshot@2.0.0-alpha.13: + resolution: {integrity: sha512-slbhNBCYjxLGCeH95a67ECCy5a22nloXp1F5wF7DCzUNw80FN7tF9Lef1sRGLNo32g3mNqTc2sWLATlKejMxYw==} dev: false - /rrweb@2.0.0-alpha.12(patch_hash=t3xxecww6aodjl4qopwv6jdxmq): - resolution: {integrity: sha512-lUGwBV7gmbwz1dIgzo9EEayIVyxoTIF6NBF6+Jctqs4Uy45QkyARtikpQlCUfxVCGTCQ0FOee9jeVYsG39oq1g==} + /rrweb@2.0.0-alpha.13(patch_hash=awo7hmdoglvq4pl5b43lreupza): + resolution: {integrity: sha512-a8GXOCnzWHNaVZPa7hsrLZtNZ3CGjiL+YrkpLo0TfmxGLhjNZbWY2r7pE06p+FcjFNlgUVTmFrSJbK3kO7yxvw==} dependencies: - '@rrweb/types': 2.0.0-alpha.12 + '@rrweb/types': 2.0.0-alpha.13 '@types/css-font-loading-module': 0.0.7 '@xstate/fsm': 1.6.5 base64-arraybuffer: 1.0.2 fflate: 0.4.8 mitt: 3.0.0 - rrdom: 
2.0.0-alpha.12 - rrweb-snapshot: 2.0.0-alpha.12 + rrdom: 2.0.0-alpha.13 + rrweb-snapshot: 2.0.0-alpha.13 dev: false patched: true @@ -19879,7 +19989,7 @@ packages: resolution: {integrity: sha512-4QcZ+yx7nzEFiV4BMLnr/pRa5HYzNITX2ri0Zh6sT9EyQHbBHacC6YigllUPU9X3D0f/22QCgfokpKs52YRrUg==} dev: true - /storybook-addon-pseudo-states@2.1.2(@storybook/components@7.6.4)(@storybook/core-events@7.6.4)(@storybook/manager-api@7.6.17)(@storybook/preview-api@7.6.17)(@storybook/theming@7.6.4)(react-dom@18.2.0)(react@18.2.0): + /storybook-addon-pseudo-states@2.1.2(@storybook/components@7.6.4)(@storybook/core-events@7.6.4)(@storybook/manager-api@7.6.18)(@storybook/preview-api@7.6.18)(@storybook/theming@7.6.4)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-AHv6q1JiQEUnMyZE3729iV6cNmBW7bueeytc4Lga4+8W1En8YNea5VjqAdrDNJhXVU0QEEIGtxkD3EoC9aVWLw==} peerDependencies: '@storybook/components': ^7.4.6 @@ -19897,8 +20007,8 @@ packages: dependencies: '@storybook/components': 7.6.4(@types/react-dom@18.2.14)(@types/react@17.0.52)(react-dom@18.2.0)(react@18.2.0) '@storybook/core-events': 7.6.4 - '@storybook/manager-api': 7.6.17(react-dom@18.2.0)(react@18.2.0) - '@storybook/preview-api': 7.6.17 + '@storybook/manager-api': 7.6.18(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.6.18 '@storybook/theming': 7.6.4(react-dom@18.2.0)(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) @@ -20764,6 +20874,7 @@ packages: /tslib@1.14.1: resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + dev: true /tslib@2.4.1: resolution: {integrity: sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==} diff --git a/posthog/admin/inlines/action_inline.py b/posthog/admin/inlines/action_inline.py index 47b8b9b7600dd..ed75369ecb0c0 100644 --- a/posthog/admin/inlines/action_inline.py +++ b/posthog/admin/inlines/action_inline.py @@ -8,3 +8,4 @@ class ActionInline(admin.TabularInline): model = Action classes = ("collapse",) autocomplete_fields = ("created_by",) + exclude = ("events",) diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py index e233519351638..453a2c4417d82 100644 --- a/posthog/api/__init__.py +++ b/posthog/api/__init__.py @@ -4,6 +4,8 @@ from posthog.batch_exports import http as batch_exports from posthog.settings import EE_AVAILABLE from posthog.warehouse.api import external_data_source, saved_query, table, view_link, external_data_schema +from ..heatmaps.heatmaps_api import LegacyHeatmapViewSet, HeatmapViewSet +from .session import SessionViewSet from ..session_recordings.session_recording_api import SessionRecordingViewSet from . 
import ( activity_log, @@ -316,7 +318,7 @@ def api_not_found(request): # Legacy endpoints CH (to be removed eventually) router.register(r"cohort", LegacyCohortViewSet, basename="cohort") router.register(r"element", LegacyElementViewSet, basename="element") -router.register(r"element", LegacyElementViewSet, basename="element") +router.register(r"heatmap", LegacyHeatmapViewSet, basename="heatmap") router.register(r"event", LegacyEventViewSet, basename="event") # Nested endpoints CH @@ -331,6 +333,8 @@ def api_not_found(request): "project_session_recordings", ["team_id"], ) +projects_router.register(r"heatmaps", HeatmapViewSet, "project_heatmaps", ["team_id"]) +projects_router.register(r"sessions", SessionViewSet, "project_sessions", ["team_id"]) if EE_AVAILABLE: from ee.clickhouse.views.experiments import ClickhouseExperimentsViewSet diff --git a/posthog/api/action.py b/posthog/api/action.py index 437f0227c817f..38eb33d10745e 100644 --- a/posthog/api/action.py +++ b/posthog/api/action.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, cast +from typing import Any, cast from django.db.models import Count, Prefetch from rest_framework import request, serializers, viewsets @@ -123,7 +123,7 @@ def create(self, validated_data: Any) -> Any: return instance - def update(self, instance: Any, validated_data: Dict[str, Any]) -> Any: + def update(self, instance: Any, validated_data: dict[str, Any]) -> Any: steps = validated_data.pop("steps", None) # If there's no steps property at all we just ignore it # If there is a step property but it's an empty array [], we'll delete all the steps @@ -182,7 +182,7 @@ def get_queryset(self): def list(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: actions = self.get_queryset() - actions_list: List[Dict[Any, Any]] = self.serializer_class( + actions_list: list[dict[Any, Any]] = self.serializer_class( actions, many=True, context={"request": request} ).data # type: ignore return Response({"results": actions_list}) diff --git a/posthog/api/activity_log.py b/posthog/api/activity_log.py index fefa2554d19a3..35ff30d5703a6 100644 --- a/posthog/api/activity_log.py +++ b/posthog/api/activity_log.py @@ -1,5 +1,5 @@ import time -from typing import Any, Optional, Dict +from typing import Any, Optional from django.db.models import Q, QuerySet @@ -49,7 +49,7 @@ class ActivityLogPagination(pagination.CursorPagination): # context manager for gathering a sequence of server timings class ServerTimingsGathered: # Class level dictionary to store timings - timings_dict: Dict[str, float] = {} + timings_dict: dict[str, float] = {} def __call__(self, name): self.name = name diff --git a/posthog/api/annotation.py b/posthog/api/annotation.py index 4806d5a632f25..7216efe6cd643 100644 --- a/posthog/api/annotation.py +++ b/posthog/api/annotation.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from django.db.models import Q, QuerySet from django.db.models.signals import post_save @@ -40,11 +40,11 @@ class Meta: "updated_at", ] - def update(self, instance: Annotation, validated_data: Dict[str, Any]) -> Annotation: + def update(self, instance: Annotation, validated_data: dict[str, Any]) -> Annotation: instance.team_id = self.context["team_id"] return super().update(instance, validated_data) - def create(self, validated_data: Dict[str, Any], *args: Any, **kwargs: Any) -> Annotation: + def create(self, validated_data: dict[str, Any], *args: Any, **kwargs: Any) -> Annotation: request = self.context["request"] team = self.context["get_team"]() 
annotation = Annotation.objects.create( diff --git a/posthog/api/app_metrics.py b/posthog/api/app_metrics.py index d054e0cc37a97..3afdda18ef853 100644 --- a/posthog/api/app_metrics.py +++ b/posthog/api/app_metrics.py @@ -1,10 +1,14 @@ -from typing import Any +import datetime as dt import uuid +from typing import Any +from django.db.models import Q, Sum +from django.db.models.functions import Coalesce, TruncDay from rest_framework import mixins, request, response, viewsets from rest_framework.decorators import action from posthog.api.routing import TeamAndOrgViewSetMixin +from posthog.models import BatchExportRun from posthog.models.plugin import PluginConfig from posthog.queries.app_metrics.app_metrics import ( AppMetricsErrorDetailsQuery, @@ -19,6 +23,7 @@ AppMetricsErrorsRequestSerializer, AppMetricsRequestSerializer, ) +from posthog.utils import relative_date_parse class AppMetricsViewSet(TeamAndOrgViewSetMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): @@ -27,28 +32,24 @@ class AppMetricsViewSet(TeamAndOrgViewSetMixin, mixins.RetrieveModelMixin, views def retrieve(self, request: request.Request, *args: Any, **kwargs: Any) -> response.Response: try: - # probe if we have a valid uuid, and thus are requesting metrics for a batch export - uuid.UUID(kwargs["pk"]) + rows = self.get_batch_export_runs_app_metrics_queryset(batch_export_id=kwargs["pk"]) + + dates = [row["dates"].strftime("%Y-%m-%d") for row in rows] + successes = [row["successes"] for row in rows] + failures = [row["failures"] for row in rows] return response.Response( { - "metrics": [ - { - "dates": [ - "2024-01-04", - "2024-01-05", - "2024-01-06", - "2024-01-07", - "2024-01-08", - "2024-01-09", - "2024-01-10", - "2024-01-11", - ], - "successes": [0, 0, 0, 0, 0, 0, 9379, 6237], - "successes_on_retry": [0, 0, 0, 0, 0, 0, 0, 0], - "failures": [0, 0, 0, 0, 0, 0, 665, 0], - "totals": {"successes": 15616, "successes_on_retry": 0, "failures": 665}, - } - ], + "metrics": { + "dates": dates, + "successes": successes, + "successes_on_retry": [0] * len(dates), + "failures": failures, + "totals": { + "successes": sum(successes), + "successes_on_retry": 0, + "failures": sum(failures), + }, + }, "errors": None, } ) @@ -74,6 +75,60 @@ def error_details(self, request: request.Request, *args: Any, **kwargs: Any) -> error_details = AppMetricsErrorDetailsQuery(self.team, plugin_config.pk, filter).run() return response.Response({"result": error_details}) + def get_batch_export_runs_app_metrics_queryset(self, batch_export_id: str): + """Use the Django ORM to fetch app metrics for batch export runs. + + Attempts to (roughly) match the following (much more readable) query: + ``` + select + date_trunc('day', last_updated_at) as dates, + sum(case when status = 'Completed' then coalesce(records_total_count, 0) else 0) as successes, + sum(case when status != 'Completed' then coalesce(records_total_count, 0) else 0) as failures + from + posthog_batchexportrun + where + batch_export_id = :batch_export_id + and last_updated_at between :date_from and :date_to + and status != 'Running' + group by + date_trunc('day', last_updated_at) + order by + dates + ``` + + A truncated 'last_updated_at' is used as the grouping date as it reflects when a particular run + was last updated. It feels easier to explain to users that if they see metrics for today, those + correspond to runs that happened today, even if the runs themselves exported data from a year ago + (because it was a backfill). 
+ + Raises: + ValueError: If provided 'batch_export_id' is not a valid UUID. + """ + batch_export_uuid = uuid.UUID(batch_export_id) + + after = self.request.GET.get("date_from", "-30d") + before = self.request.GET.get("date_to", None) + after_datetime = relative_date_parse(after, self.team.timezone_info) + before_datetime = ( + relative_date_parse(before, self.team.timezone_info) if before else dt.datetime.now(dt.timezone.utc) + ) + date_range = (after_datetime, before_datetime) + return ( + BatchExportRun.objects.filter(batch_export_id=batch_export_uuid, last_updated_at__range=date_range) + .annotate(dates=TruncDay("last_updated_at")) + .values("dates") + .annotate( + successes=Sum( + Coalesce("records_total_count", 0), filter=Q(status=BatchExportRun.Status.COMPLETED), default=0 + ), + failures=Sum( + Coalesce("records_total_count", 0), filter=~Q(status=BatchExportRun.Status.COMPLETED), default=0 + ), + ) + .order_by("dates") + .all() + ) + class HistoricalExportsAppMetricsViewSet( TeamAndOrgViewSetMixin, diff --git a/posthog/api/authentication.py b/posthog/api/authentication.py index 069acac50c95f..d7911059506ed 100644 --- a/posthog/api/authentication.py +++ b/posthog/api/authentication.py @@ -1,6 +1,6 @@ import datetime import time -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast from uuid import uuid4 from django.conf import settings @@ -92,7 +92,7 @@ class LoginSerializer(serializers.Serializer): email = serializers.EmailField() password = serializers.CharField() - def to_representation(self, instance: Any) -> Dict[str, Any]: + def to_representation(self, instance: Any) -> dict[str, Any]: return {"success": True} def _check_if_2fa_required(self, user: User) -> bool: @@ -113,7 +113,7 @@ def _check_if_2fa_required(self, user: User) -> bool: pass return True - def create(self, validated_data: Dict[str, str]) -> Any: + def create(self, validated_data: dict[str, str]) -> Any: # Check SSO enforcement (which happens at the domain level) sso_enforcement = OrganizationDomain.objects.get_sso_enforcement_for_email_address(validated_data["email"]) if sso_enforcement: @@ -159,10 +159,10 @@ def create(self, validated_data: Dict[str, str]) -> Any: class LoginPrecheckSerializer(serializers.Serializer): email = serializers.EmailField() - def to_representation(self, instance: Dict[str, str]) -> Dict[str, Any]: + def to_representation(self, instance: dict[str, str]) -> dict[str, Any]: return instance - def create(self, validated_data: Dict[str, str]) -> Any: + def create(self, validated_data: dict[str, str]) -> Any: email = validated_data.get("email", "") # TODO: Refactor methods below to remove duplicate queries return { diff --git a/posthog/api/capture.py b/posthog/api/capture.py index 31592e90e790d..aaa3998213ce8 100644 --- a/posthog/api/capture.py +++ b/posthog/api/capture.py @@ -18,7 +18,8 @@ from sentry_sdk.api import capture_exception, start_span from statshog.defaults.django import statsd from token_bucket import Limiter, MemoryStorage -from typing import Any, Dict, Iterator, List, Optional, Tuple, Set +from typing import Any, Optional +from collections.abc import Iterator from ee.billing.quota_limiting import QuotaLimitingCaches from posthog.api.utils import get_data, get_token, safe_clickhouse_string @@ -129,12 +130,12 @@ def build_kafka_event_data( distinct_id: str, ip: Optional[str], site_url: str, - data: Dict, + data: dict, now: datetime, sent_at: Optional[datetime], event_uuid: UUIDT, token: str, -) -> Dict: +) -> dict: 
logger.debug("build_kafka_event_data", token=token) return { "uuid": str(event_uuid), @@ -168,10 +169,10 @@ def _kafka_topic(event_name: str, historical: bool = False, overflowing: bool = def log_event( - data: Dict, + data: dict, event_name: str, partition_key: Optional[str], - headers: Optional[List] = None, + headers: Optional[list] = None, historical: bool = False, overflowing: bool = False, ) -> FutureRecordMetadata: @@ -205,7 +206,7 @@ def _datetime_from_seconds_or_millis(timestamp: str) -> datetime: return datetime.fromtimestamp(timestamp_number, timezone.utc) -def _get_sent_at(data, request) -> Tuple[Optional[datetime], Any]: +def _get_sent_at(data, request) -> tuple[Optional[datetime], Any]: try: if request.GET.get("_"): # posthog-js sent_at = request.GET["_"] @@ -253,7 +254,7 @@ def _check_token_shape(token: Any) -> Optional[str]: return None -def get_distinct_id(data: Dict[str, Any]) -> str: +def get_distinct_id(data: dict[str, Any]) -> str: raw_value: Any = "" try: raw_value = data["$distinct_id"] @@ -274,12 +275,12 @@ def get_distinct_id(data: Dict[str, Any]) -> str: return str(raw_value)[0:200] -def drop_performance_events(events: List[Any]) -> List[Any]: +def drop_performance_events(events: list[Any]) -> list[Any]: cleaned_list = [event for event in events if event.get("event") != "$performance_event"] return cleaned_list -def drop_events_over_quota(token: str, events: List[Any]) -> List[Any]: +def drop_events_over_quota(token: str, events: list[Any]) -> list[Any]: if not settings.EE_AVAILABLE: return events @@ -381,7 +382,7 @@ def get_event(request): structlog.contextvars.bind_contextvars(token=token) - replay_events: List[Any] = [] + replay_events: list[Any] = [] historical = token in settings.TOKENS_HISTORICAL_DATA with start_span(op="request.process"): @@ -437,7 +438,7 @@ def get_event(request): generate_exception_response("capture", f"Invalid payload: {e}", code="invalid_payload"), ) - futures: List[FutureRecordMetadata] = [] + futures: list[FutureRecordMetadata] = [] with start_span(op="kafka.produce") as span: span.set_tag("event.count", len(processed_events)) @@ -536,7 +537,7 @@ def get_event(request): return cors_response(request, JsonResponse({"status": 1})) -def preprocess_events(events: List[Dict[str, Any]]) -> Iterator[Tuple[Dict[str, Any], UUIDT, str]]: +def preprocess_events(events: list[dict[str, Any]]) -> Iterator[tuple[dict[str, Any], UUIDT, str]]: for event in events: event_uuid = UUIDT() distinct_id = get_distinct_id(event) @@ -580,7 +581,7 @@ def capture_internal( event_uuid=None, token=None, historical=False, - extra_headers: List[Tuple[str, str]] | None = None, + extra_headers: list[tuple[str, str]] | None = None, ): if event_uuid is None: event_uuid = UUIDT() @@ -624,7 +625,11 @@ def capture_internal( ): kafka_partition_key = None else: - kafka_partition_key = hashlib.sha256(candidate_partition_key.encode()).hexdigest() + if settings.CAPTURE_SKIP_KEY_HASHING: + kafka_partition_key = candidate_partition_key + else: + # TODO: remove after progressive rollout of the option + kafka_partition_key = hashlib.sha256(candidate_partition_key.encode()).hexdigest() return log_event(parsed_event, event["event"], partition_key=kafka_partition_key, historical=historical) @@ -680,7 +685,7 @@ def is_randomly_partitioned(candidate_partition_key: str) -> bool: @cache_for(timedelta(seconds=30), background_refresh=True) -def _list_overflowing_keys(input_type: InputType) -> Set[str]: +def _list_overflowing_keys(input_type: InputType) -> set[str]: """Retrieve the active 
overflows from Redis with caching and pre-fetching cache_for will keep the old value if Redis is temporarily unavailable. diff --git a/posthog/api/cohort.py b/posthog/api/cohort.py index 64eb30db9b0ba..af85769fd0ff1 100644 --- a/posthog/api/cohort.py +++ b/posthog/api/cohort.py @@ -18,7 +18,7 @@ from posthog.metrics import LABEL_TEAM_ID from posthog.renderers import SafeJSONRenderer from datetime import datetime -from typing import Any, Dict, cast, Optional +from typing import Any, cast, Optional from django.conf import settings from django.db.models import QuerySet, Prefetch, prefetch_related_objects, OuterRef, Subquery @@ -133,7 +133,7 @@ class Meta: "experiment_set", ] - def _handle_static(self, cohort: Cohort, context: Dict, validated_data: Dict) -> None: + def _handle_static(self, cohort: Cohort, context: dict, validated_data: dict) -> None: request = self.context["request"] if request.FILES.get("csv"): self._calculate_static_by_csv(request.FILES["csv"], cohort) @@ -149,7 +149,7 @@ def _handle_static(self, cohort: Cohort, context: Dict, validated_data: Dict) -> if filter_data: insert_cohort_from_insight_filter.delay(cohort.pk, filter_data) - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Cohort: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Cohort: request = self.context["request"] validated_data["created_by"] = request.user @@ -176,7 +176,7 @@ def _calculate_static_by_csv(self, file, cohort: Cohort) -> None: distinct_ids_and_emails = [row[0] for row in reader if len(row) > 0 and row] calculate_cohort_from_list.delay(cohort.pk, distinct_ids_and_emails) - def validate_query(self, query: Optional[Dict]) -> Optional[Dict]: + def validate_query(self, query: Optional[dict]) -> Optional[dict]: if not query: return None if not isinstance(query, dict): @@ -186,7 +186,7 @@ def validate_query(self, query: Optional[Dict]) -> Optional[Dict]: ActorsQuery.model_validate(query) return query - def validate_filters(self, request_filters: Dict): + def validate_filters(self, request_filters: dict): if isinstance(request_filters, dict) and "properties" in request_filters: if self.context["request"].method == "PATCH": parsed_filter = Filter(data=request_filters) @@ -225,7 +225,7 @@ def validate_filters(self, request_filters: Dict): else: raise ValidationError("Filters must be a dictionary with a 'properties' key.") - def update(self, cohort: Cohort, validated_data: Dict, *args: Any, **kwargs: Any) -> Cohort: # type: ignore + def update(self, cohort: Cohort, validated_data: dict, *args: Any, **kwargs: Any) -> Cohort: # type: ignore request = self.context["request"] user = cast(User, request.user) @@ -498,7 +498,7 @@ def insert_cohort_query_actors_into_ch(cohort: Cohort): insert_actors_into_cohort_by_query(cohort, query, {}, context) -def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: Dict): +def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict): from_existing_cohort_id = filter_data.get("from_cohort_id") context: HogQLContext @@ -561,7 +561,7 @@ def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: Dict): insert_actors_into_cohort_by_query(cohort, query, params, context) -def insert_actors_into_cohort_by_query(cohort: Cohort, query: str, params: Dict[str, Any], context: HogQLContext): +def insert_actors_into_cohort_by_query(cohort: Cohort, query: str, params: dict[str, Any], context: HogQLContext): try: sync_execute( INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID.format(cohort_table=PERSON_STATIC_COHORT_TABLE, query=query), @@ 
-600,7 +600,7 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int, cohort = Cohort.objects.get(pk=cohort_id, team_id=team_id) matcher_cache = FlagsMatcherCache(team_id) uuids_to_add_to_cohort = [] - cohorts_cache: Dict[int, CohortOrEmpty] = {} + cohorts_cache: dict[int, CohortOrEmpty] = {} if feature_flag.uses_cohorts: # TODO: Consider disabling flags with cohorts for creating static cohorts @@ -709,7 +709,7 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int, capture_exception(err) -def get_default_person_property(prop: Property, cohorts_cache: Dict[int, CohortOrEmpty]): +def get_default_person_property(prop: Property, cohorts_cache: dict[int, CohortOrEmpty]): default_person_properties = {} if prop.operator not in ("is_set", "is_not_set") and prop.type == "person": @@ -725,7 +725,7 @@ def get_default_person_property(prop: Property, cohorts_cache: Dict[int, CohortO return default_person_properties -def get_default_person_properties_for_cohort(cohort: Cohort, cohorts_cache: Dict[int, CohortOrEmpty]) -> Dict[str, str]: +def get_default_person_properties_for_cohort(cohort: Cohort, cohorts_cache: dict[int, CohortOrEmpty]) -> dict[str, str]: """ Returns a dictionary of default person properties to use when evaluating a feature flag """ diff --git a/posthog/api/comments.py b/posthog/api/comments.py index 8b9a9174dda61..63ef5d1d33a16 100644 --- a/posthog/api/comments.py +++ b/posthog/api/comments.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, cast +from typing import Any, cast from django.db import transaction from django.db.models import QuerySet @@ -40,7 +40,7 @@ def create(self, validated_data: Any) -> Any: validated_data["team_id"] = self.context["team_id"] return super().create(validated_data) - def update(self, instance: Comment, validated_data: Dict, **kwargs) -> Comment: + def update(self, instance: Comment, validated_data: dict, **kwargs) -> Comment: request = self.context["request"] with transaction.atomic(): diff --git a/posthog/api/dashboards/dashboard.py b/posthog/api/dashboards/dashboard.py index 100e8745b8db1..850e29b52a4e3 100644 --- a/posthog/api/dashboards/dashboard.py +++ b/posthog/api/dashboards/dashboard.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict, List, Optional, Type, cast +from typing import Any, Optional, cast import structlog from django.db.models import Prefetch, QuerySet @@ -155,13 +155,13 @@ class Meta: ] read_only_fields = ["creation_mode", "effective_restriction_level", "is_shared"] - def validate_filters(self, value) -> Dict: + def validate_filters(self, value) -> dict: if not isinstance(value, dict): raise serializers.ValidationError("Filters must be a dictionary") return value - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Dashboard: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Dashboard: request = self.context["request"] validated_data["created_by"] = request.user team_id = self.context["team_id"] @@ -260,7 +260,7 @@ def _deep_duplicate_tiles(self, dashboard: Dashboard, existing_tile: DashboardTi color=existing_tile.color, ) - def update(self, instance: Dashboard, validated_data: Dict, *args: Any, **kwargs: Any) -> Dashboard: + def update(self, instance: Dashboard, validated_data: dict, *args: Any, **kwargs: Any) -> Dashboard: can_user_restrict = self.user_permissions.dashboard(instance).can_restrict if "restriction_level" in validated_data and not can_user_restrict: raise exceptions.PermissionDenied( @@ -292,11 +292,11 @@ def 
update(self, instance: Dashboard, validated_data: Dict, *args: Any, **kwargs return instance @staticmethod - def _update_tiles(instance: Dashboard, tile_data: Dict, user: User) -> None: + def _update_tiles(instance: Dashboard, tile_data: dict, user: User) -> None: tile_data.pop("is_cached", None) # read only field if tile_data.get("text", None): - text_json: Dict = tile_data.get("text", {}) + text_json: dict = tile_data.get("text", {}) created_by_json = text_json.get("created_by", None) if created_by_json: last_modified_by = user @@ -348,7 +348,7 @@ def _undo_delete_related_tiles(instance: Dashboard) -> None: insights_to_undelete.append(tile.insight) Insight.objects.bulk_update(insights_to_undelete, ["deleted"]) - def get_tiles(self, dashboard: Dashboard) -> Optional[List[ReturnDict]]: + def get_tiles(self, dashboard: Dashboard) -> Optional[list[ReturnDict]]: if self.context["view"].action == "list": return None @@ -398,23 +398,25 @@ class DashboardsViewSet( viewsets.ModelViewSet, ): scope_object = "dashboard" - queryset = Dashboard.objects.order_by("name") + queryset = Dashboard.objects_including_soft_deleted.order_by("name") permission_classes = [CanEditDashboard] - def get_serializer_class(self) -> Type[BaseSerializer]: + def get_serializer_class(self) -> type[BaseSerializer]: return DashboardBasicSerializer if self.action == "list" else DashboardSerializer def get_queryset(self) -> QuerySet: - if ( + queryset = super().get_queryset() + + include_deleted = ( self.action == "partial_update" and "deleted" in self.request.data and not self.request.data.get("deleted") and len(self.request.data) == 1 - ): + ) + + if not include_deleted: # a dashboard can be un-deleted by patching {"deleted": False} - queryset = Dashboard.objects_including_soft_deleted - else: - queryset = super().get_queryset() + queryset = queryset.filter(deleted=False) queryset = queryset.prefetch_related("sharingconfiguration_set").select_related( "team__organization", @@ -510,7 +512,7 @@ def create_from_template_json(self, request: Request, *args: Any, **kwargs: Any) class LegacyDashboardsViewSet(DashboardsViewSet): derive_current_team_from_user_only = True - def get_parents_query_dict(self) -> Dict[str, Any]: + def get_parents_query_dict(self) -> dict[str, Any]: if not self.request.user.is_authenticated or "share_token" in self.request.GET: return {} return {"team_id": self.team_id} diff --git a/posthog/api/dashboards/dashboard_template_json_schema_parser.py b/posthog/api/dashboards/dashboard_template_json_schema_parser.py index 3463601514e01..8f9149cd84d11 100644 --- a/posthog/api/dashboards/dashboard_template_json_schema_parser.py +++ b/posthog/api/dashboards/dashboard_template_json_schema_parser.py @@ -15,9 +15,7 @@ class DashboardTemplateCreationJSONSchemaParser(JSONParser): The template is sent in the "template" key""" def parse(self, stream, media_type=None, parser_context=None): - data = super(DashboardTemplateCreationJSONSchemaParser, self).parse( - stream, media_type or "application/json", parser_context - ) + data = super().parse(stream, media_type or "application/json", parser_context) try: template = data["template"] jsonschema.validate(template, dashboard_template_schema) diff --git a/posthog/api/dashboards/dashboard_templates.py b/posthog/api/dashboards/dashboard_templates.py index 6e8752e0cbd39..03740b06ebd6b 100644 --- a/posthog/api/dashboards/dashboard_templates.py +++ b/posthog/api/dashboards/dashboard_templates.py @@ -1,6 +1,5 @@ import json from pathlib import Path -from typing import Dict import 
structlog from django.db.models import Q @@ -50,7 +49,7 @@ class Meta: "scope", ] - def create(self, validated_data: Dict, *args, **kwargs) -> DashboardTemplate: + def create(self, validated_data: dict, *args, **kwargs) -> DashboardTemplate: if not validated_data["tiles"]: raise ValidationError(detail="You need to provide tiles for the template.") @@ -61,7 +60,7 @@ def create(self, validated_data: Dict, *args, **kwargs) -> DashboardTemplate: validated_data["team_id"] = self.context["team_id"] return super().create(validated_data, *args, **kwargs) - def update(self, instance: DashboardTemplate, validated_data: Dict, *args, **kwargs) -> DashboardTemplate: + def update(self, instance: DashboardTemplate, validated_data: dict, *args, **kwargs) -> DashboardTemplate: # if the original request was to make the template scope to team only, and the template is none then deny the request if validated_data.get("scope") == "team" and instance.scope == "global" and not instance.team_id: raise ValidationError(detail="The original templates cannot be made private as they would be lost.") diff --git a/posthog/api/dashboards/test/test_dashboard_templates.py b/posthog/api/dashboards/test/test_dashboard_templates.py index f07610ba90351..e562b3798d895 100644 --- a/posthog/api/dashboards/test/test_dashboard_templates.py +++ b/posthog/api/dashboards/test/test_dashboard_templates.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, List +from typing import Optional from rest_framework import status @@ -510,7 +510,7 @@ def test_filter_template_list_by_scope(self): assert flag_response.status_code == status.HTTP_200_OK assert [(r["id"], r["scope"]) for r in flag_response.json()["results"]] == [(flag_template_id, "feature_flag")] - def create_template(self, overrides: Dict[str, str | List[str]], team_id: Optional[int] = None) -> str: + def create_template(self, overrides: dict[str, str | list[str]], team_id: Optional[int] = None) -> str: template = {**variable_template, **overrides} response = self.client.post( f"/api/projects/{team_id or self.team.pk}/dashboard_templates", diff --git a/posthog/api/dead_letter_queue.py b/posthog/api/dead_letter_queue.py index 93e2b09370b0e..2bab687543568 100644 --- a/posthog/api/dead_letter_queue.py +++ b/posthog/api/dead_letter_queue.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Any, List, Optional, Union +from typing import Any, Optional, Union from rest_framework import mixins, permissions, serializers, viewsets @@ -65,7 +65,7 @@ class DeadLetterQueueMetric: key: str = "" metric: str = "" value: Union[str, bool, int, None] = None - subrows: Optional[List[Any]] = None + subrows: Optional[list[Any]] = None def __init__(self, **kwargs): for field in ("key", "metric", "value", "subrows"): @@ -138,7 +138,7 @@ def get_dead_letter_queue_events_last_24h() -> int: )[0][0] -def get_dead_letter_queue_events_per_error(offset: Optional[int] = 0) -> List[Union[str, int]]: +def get_dead_letter_queue_events_per_error(offset: Optional[int] = 0) -> list[Union[str, int]]: return sync_execute( f""" SELECT error, count(*) AS c @@ -151,7 +151,7 @@ def get_dead_letter_queue_events_per_error(offset: Optional[int] = 0) -> List[Un ) -def get_dead_letter_queue_events_per_location(offset: Optional[int] = 0) -> List[Union[str, int]]: +def get_dead_letter_queue_events_per_location(offset: Optional[int] = 0) -> list[Union[str, int]]: return sync_execute( f""" SELECT error_location, count(*) AS c @@ -164,7 +164,7 @@ def get_dead_letter_queue_events_per_location(offset: Optional[int] = 0) 
-> List ) -def get_dead_letter_queue_events_per_day(offset: Optional[int] = 0) -> List[Union[str, int]]: +def get_dead_letter_queue_events_per_day(offset: Optional[int] = 0) -> list[Union[str, int]]: return sync_execute( f""" SELECT toDate(error_timestamp) as day, count(*) AS c @@ -177,7 +177,7 @@ def get_dead_letter_queue_events_per_day(offset: Optional[int] = 0) -> List[Unio ) -def get_dead_letter_queue_events_per_tag(offset: Optional[int] = 0) -> List[Union[str, int]]: +def get_dead_letter_queue_events_per_tag(offset: Optional[int] = 0) -> list[Union[str, int]]: return sync_execute( f""" SELECT arrayJoin(tags) as tag, count(*) as c from events_dead_letter_queue diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 3a6e08bc7a7a0..d2c61a8924bb9 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -1,6 +1,6 @@ import re from random import random -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from urllib.parse import urlparse import structlog @@ -56,7 +56,7 @@ def on_permitted_recording_domain(team: Team, request: HttpRequest) -> bool: return is_authorized_web_client or is_authorized_mobile_client -def hostname_in_allowed_url_list(allowed_url_list: Optional[List[str]], hostname: Optional[str]) -> bool: +def hostname_in_allowed_url_list(allowed_url_list: Optional[list[str]], hostname: Optional[str]) -> bool: if not hostname: return False @@ -182,7 +182,7 @@ def get_decide(request: HttpRequest): if geoip_enabled: property_overrides = get_geoip_properties(get_ip_address(request)) - all_property_overrides: Dict[str, Union[str, int]] = { + all_property_overrides: dict[str, Union[str, int]] = { **property_overrides, **(data.get("person_properties") or {}), } @@ -254,6 +254,7 @@ def get_decide(request: HttpRequest): response["sessionRecording"] = _session_recording_config_response(request, team) response["surveys"] = True if team.surveys_opt_in else False + response["heatmaps"] = True if team.heatmaps_opt_in else False site_apps = [] # errors mean the database is unavailable, bail in this case @@ -296,8 +297,8 @@ def get_decide(request: HttpRequest): return cors_response(request, JsonResponse(response)) -def _session_recording_config_response(request: HttpRequest, team: Team) -> bool | Dict: - session_recording_config_response: bool | Dict = False +def _session_recording_config_response(request: HttpRequest, team: Team) -> bool | dict: + session_recording_config_response: bool | dict = False try: if team.session_recording_opt_in and ( @@ -312,7 +313,7 @@ def _session_recording_config_response(request: HttpRequest, team: Team) -> bool linked_flag = None linked_flag_config = team.session_recording_linked_flag or None - if isinstance(linked_flag_config, Dict): + if isinstance(linked_flag_config, dict): linked_flag_key = linked_flag_config.get("key", None) linked_flag_variant = linked_flag_config.get("variant", None) if linked_flag_variant is not None: @@ -330,7 +331,7 @@ def _session_recording_config_response(request: HttpRequest, team: Team) -> bool "networkPayloadCapture": team.session_recording_network_payload_capture_config or None, } - if isinstance(team.session_replay_config, Dict): + if isinstance(team.session_replay_config, dict): record_canvas = team.session_replay_config.get("record_canvas", False) session_recording_config_response.update( { diff --git a/posthog/api/documentation.py b/posthog/api/documentation.py index 47820a9cb2203..3cae48fcdb006 100644 --- a/posthog/api/documentation.py +++ 
b/posthog/api/documentation.py @@ -1,5 +1,5 @@ import re -from typing import Dict, get_args +from typing import get_args from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import ( @@ -215,7 +215,7 @@ def preprocess_exclude_path_format(endpoints, **kwargs): def custom_postprocessing_hook(result, generator, request, public): all_tags = [] - paths: Dict[str, Dict] = {} + paths: dict[str, dict] = {} for path, methods in result["paths"].items(): paths[path] = {} diff --git a/posthog/api/early_access_feature.py b/posthog/api/early_access_feature.py index 911c860a75a16..57885666fde7d 100644 --- a/posthog/api/early_access_feature.py +++ b/posthog/api/early_access_feature.py @@ -1,5 +1,3 @@ -from typing import Type - from django.http import JsonResponse from rest_framework.response import Response from posthog.api.feature_flag import FeatureFlagSerializer, MinimalFeatureFlagSerializer @@ -221,7 +219,7 @@ class EarlyAccessFeatureViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): scope_object = "early_access_feature" queryset = EarlyAccessFeature.objects.select_related("feature_flag").all() - def get_serializer_class(self) -> Type[serializers.Serializer]: + def get_serializer_class(self) -> type[serializers.Serializer]: if self.request.method == "POST": return EarlyAccessFeatureSerializerCreateOnly else: diff --git a/posthog/api/element.py b/posthog/api/element.py index d7b721dee8195..b617ea8be28b5 100644 --- a/posthog/api/element.py +++ b/posthog/api/element.py @@ -1,4 +1,4 @@ -from typing import Literal, Tuple +from typing import Literal from rest_framework import request, response, serializers, viewsets from rest_framework.decorators import action @@ -128,8 +128,8 @@ def stats(self, request: request.Request, **kwargs) -> response.Response: else: return response.Response(serialized_elements) - def _events_filter(self, request) -> Tuple[Literal["$autocapture", "$rageclick"], ...]: - event_to_filter: Tuple[Literal["$autocapture", "$rageclick"], ...] = () + def _events_filter(self, request) -> tuple[Literal["$autocapture", "$rageclick"], ...]: + event_to_filter: tuple[Literal["$autocapture", "$rageclick"], ...] = () # when multiple includes are sent expects them as separate parameters # e.g. ?include=a&include=b events_to_include = request.query_params.getlist("include", []) diff --git a/posthog/api/event.py b/posthog/api/event.py index 6366ee866f657..5c642a2612973 100644 --- a/posthog/api/event.py +++ b/posthog/api/event.py @@ -1,7 +1,7 @@ import json import urllib from datetime import datetime -from typing import Any, Dict, List, Optional, Union +from typing import Any, List, Optional, Union # noqa: UP035 from django.db.models.query import Prefetch from drf_spectacular.types import OpenApiTypes @@ -94,7 +94,7 @@ def _build_next_url( self, request: request.Request, last_event_timestamp: datetime, - order_by: List[str], + order_by: list[str], ) -> str: params = request.GET.dict() reverse = "-timestamp" in order_by @@ -175,7 +175,7 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response. team = self.team filter = Filter(request=request, team=self.team) - order_by: List[str] = ( + order_by: list[str] = ( list(json.loads(request.GET["orderBy"])) if request.GET.get("orderBy") else ["-timestamp"] ) @@ -217,11 +217,11 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response. 
capture_exception(ex) raise ex - def _get_people(self, query_result: List[Dict], team: Team) -> Dict[str, Any]: + def _get_people(self, query_result: List[dict], team: Team) -> dict[str, Any]: # noqa: UP006 distinct_ids = [event["distinct_id"] for event in query_result] persons = get_persons_by_distinct_ids(team.pk, distinct_ids) persons = persons.prefetch_related(Prefetch("persondistinctid_set", to_attr="distinct_ids_cache")) - distinct_to_person: Dict[str, Person] = {} + distinct_to_person: dict[str, Person] = {} for person in persons: for distinct_id in person.distinct_ids: distinct_to_person[distinct_id] = person diff --git a/posthog/api/event_definition.py b/posthog/api/event_definition.py index 82a9c0617bd74..76314578fb98f 100644 --- a/posthog/api/event_definition.py +++ b/posthog/api/event_definition.py @@ -1,4 +1,4 @@ -from typing import Any, Literal, Tuple, Type, cast +from typing import Any, Literal, cast from django.db.models import Manager, Prefetch from rest_framework import ( @@ -117,7 +117,7 @@ def get_queryset(self): def _ordering_params_from_request( self, - ) -> Tuple[str, Literal["ASC", "DESC"]]: + ) -> tuple[str, Literal["ASC", "DESC"]]: order_direction: Literal["ASC", "DESC"] ordering = self.request.GET.get("ordering") @@ -154,7 +154,7 @@ def get_object(self): return EventDefinition.objects.get(id=id, team_id=self.team_id) - def get_serializer_class(self) -> Type[serializers.ModelSerializer]: + def get_serializer_class(self) -> type[serializers.ModelSerializer]: serializer_class = self.serializer_class if EE_AVAILABLE and self.request.user.organization.is_feature_available( # type: ignore AvailableFeature.INGESTION_TAXONOMY diff --git a/posthog/api/exports.py b/posthog/api/exports.py index 2099b2f169e2e..9fbaea35df3c2 100644 --- a/posthog/api/exports.py +++ b/posthog/api/exports.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Any, Dict +from typing import Any import structlog from django.http import HttpResponse @@ -40,7 +40,7 @@ class Meta: ] read_only_fields = ["id", "created_at", "has_content", "filename"] - def validate(self, data: Dict) -> Dict: + def validate(self, data: dict) -> dict: if not data.get("export_format"): raise ValidationError("Must provide export format") @@ -61,13 +61,13 @@ def validate(self, data: Dict) -> Dict: def synthetic_create(self, reason: str, *args: Any, **kwargs: Any) -> ExportedAsset: return self._create_asset(self.validated_data, user=None, reason=reason) - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> ExportedAsset: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> ExportedAsset: request = self.context["request"] return self._create_asset(validated_data, user=request.user, reason=None) def _create_asset( self, - validated_data: Dict, + validated_data: dict, user: User | None, reason: str | None, ) -> ExportedAsset: diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index e09e70c01b6f1..bd53f02955252 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast from datetime import datetime from django.db.models import QuerySet, Q, deletion @@ -145,12 +145,12 @@ def get_is_simple_flag(self, feature_flag: FeatureFlag) -> bool: and feature_flag.aggregation_group_type_index is None ) - def get_features(self, feature_flag: FeatureFlag) -> Dict: + def get_features(self, feature_flag: FeatureFlag) -> dict: from 
posthog.api.early_access_feature import MinimalEarlyAccessFeatureSerializer return MinimalEarlyAccessFeatureSerializer(feature_flag.features, many=True).data - def get_surveys(self, feature_flag: FeatureFlag) -> Dict: + def get_surveys(self, feature_flag: FeatureFlag) -> dict: from posthog.api.survey import SurveyAPISerializer return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data @@ -241,6 +241,14 @@ def properties_all_match(predicate): detail=f"Invalid date value: {prop.value}", code="invalid_date" ) + # make sure regex and icontains properties have string values + if prop.operator in ["regex", "icontains", "not_regex", "not_icontains"] and not isinstance( + prop.value, str + ): + raise serializers.ValidationError( + detail=f"Invalid value for operator {prop.operator}: {prop.value}", code="invalid_value" + ) + payloads = filters.get("payloads", {}) if not isinstance(payloads, dict): @@ -255,7 +263,7 @@ def properties_all_match(predicate): return filters - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> FeatureFlag: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> FeatureFlag: request = self.context["request"] validated_data["created_by"] = request.user validated_data["team_id"] = self.context["team_id"] @@ -291,7 +299,7 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> FeatureFlag return instance - def update(self, instance: FeatureFlag, validated_data: Dict, *args: Any, **kwargs: Any) -> FeatureFlag: + def update(self, instance: FeatureFlag, validated_data: dict, *args: Any, **kwargs: Any) -> FeatureFlag: if "deleted" in validated_data and validated_data["deleted"] is True and instance.features.count() > 0: raise exceptions.ValidationError( "Cannot delete a feature flag that is in use with early access features. Please delete the early access feature before deleting the flag." @@ -488,13 +496,11 @@ def my_flags(self, request: request.Request, **kwargs): feature_flags, many=True, context=self.get_serializer_context() ).data return Response( - ( - { - "feature_flag": feature_flag, - "value": matches.get(feature_flag["key"], False), - } - for feature_flag in all_serialized_flags - ) + { + "feature_flag": feature_flag, + "value": matches.get(feature_flag["key"], False), + } + for feature_flag in all_serialized_flags ) @action( @@ -508,7 +514,7 @@ def local_evaluation(self, request: request.Request, **kwargs): should_send_cohorts = "send_cohorts" in request.GET cohorts = {} - seen_cohorts_cache: Dict[int, CohortOrEmpty] = {} + seen_cohorts_cache: dict[int, CohortOrEmpty] = {} if should_send_cohorts: seen_cohorts_cache = { diff --git a/posthog/api/geoip.py b/posthog/api/geoip.py index d3d029cdd3f33..7a749c0b294c2 100644 --- a/posthog/api/geoip.py +++ b/posthog/api/geoip.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional +from typing import Optional import structlog from django.contrib.gis.geoip2 import GeoIP2 @@ -27,7 +27,7 @@ ] -def get_geoip_properties(ip_address: Optional[str]) -> Dict[str, str]: +def get_geoip_properties(ip_address: Optional[str]) -> dict[str, str]: """ Returns a dictionary of geoip properties for the given ip address. 
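(Editor's note, not part of the patch: the app_metrics.py hunk near the top of this diff replaces the hand-written SQL shown in its docstring with a Django ORM query that uses conditional aggregation. A minimal sketch of that pattern is below; `Run` and `daily_success_failure_counts` are hypothetical stand-ins for BatchExportRun and the new queryset helper, assuming the same field names.)

```python
from django.db import models
from django.db.models import Q, Sum
from django.db.models.functions import Coalesce, TruncDay


class Run(models.Model):
    # Hypothetical stand-in for BatchExportRun, with only the fields the query touches.
    status = models.CharField(max_length=64)
    records_total_count = models.IntegerField(null=True)
    last_updated_at = models.DateTimeField()


def daily_success_failure_counts(queryset):
    """Group runs by day and sum record counts into successes vs. failures,
    mirroring the `sum(case when status = 'Completed' ...)` SQL in the docstring."""
    return (
        queryset
        .annotate(dates=TruncDay("last_updated_at"))  # date_trunc('day', last_updated_at)
        .values("dates")                              # GROUP BY dates
        .annotate(
            # One Sum() per outcome, each restricted with filter=Q(...);
            # Coalesce treats runs with no record count as 0.
            successes=Sum(
                Coalesce("records_total_count", 0), filter=Q(status="Completed"), default=0
            ),
            failures=Sum(
                Coalesce("records_total_count", 0), filter=~Q(status="Completed"), default=0
            ),
        )
        .order_by("dates")
    )
```

Each resulting row is a dict like `{"dates": ..., "successes": ..., "failures": ...}`, which is why the `retrieve` method above can build its response lists with a simple comprehension over the rows.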
diff --git a/posthog/api/insight.py b/posthog/api/insight.py index 2bd16bc432e4a..a2fe0c53edc2c 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -1,6 +1,6 @@ import json from functools import lru_cache -from typing import Any, Dict, List, Optional, Type, Union, cast +from typing import Any, Optional, Union, cast import structlog from django.db import transaction @@ -54,7 +54,7 @@ from posthog.hogql.errors import ExposedHogQLError from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.apply_dashboard_filters import DATA_TABLE_LIKE_NODE_KINDS -from posthog.hogql_queries.legacy_compatibility.feature_flag import hogql_insights_enabled +from posthog.hogql_queries.legacy_compatibility.feature_flag import should_use_hogql_backend_in_insight_serialization from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query from posthog.kafka_client.topics import KAFKA_METRICS_TIME_TO_SEE_DATA from posthog.models import DashboardTile, Filter, Insight, User @@ -118,7 +118,7 @@ def log_insight_activity( team_id: int, user: User, was_impersonated: bool, - changes: Optional[List[Change]] = None, + changes: Optional[list[Change]] = None, ) -> None: """ Insight id and short_id are passed separately as some activities (like delete) alter the Insight instance @@ -148,7 +148,7 @@ class QuerySchemaParser(JSONParser): """ def parse(self, stream, media_type=None, parser_context=None): - data = super(QuerySchemaParser, self).parse(stream, media_type, parser_context) + data = super().parse(stream, media_type, parser_context) try: query = data.get("query", None) if query: @@ -197,7 +197,7 @@ class Meta: ] read_only_fields = ("short_id", "updated_at", "last_refresh", "refreshing") - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Any: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Any: raise NotImplementedError() def to_representation(self, instance): @@ -306,7 +306,7 @@ class Meta: "is_cached", ) - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Insight: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Insight: request = self.context["request"] tags = validated_data.pop("tags", None) # tags are created separately as global tag relationships team_id = self.context["team_id"] @@ -345,8 +345,8 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Insight: return insight - def update(self, instance: Insight, validated_data: Dict, **kwargs) -> Insight: - dashboards_before_change: List[Union[str, Dict]] = [] + def update(self, instance: Insight, validated_data: dict, **kwargs) -> Insight: + dashboards_before_change: list[Union[str, dict]] = [] try: # since it is possible to be undeleting a soft deleted insight # the state captured before the update has to include soft deleted insights @@ -411,7 +411,7 @@ def _log_insight_update(self, before_update, dashboards_before_change, updated_i changes=changes, ) - def _synthetic_dashboard_changes(self, dashboards_before_change: List[Dict]) -> List[Change]: + def _synthetic_dashboard_changes(self, dashboards_before_change: list[dict]) -> list[Change]: artificial_dashboard_changes = self.context.get("after_dashboard_changes", []) if artificial_dashboard_changes: return [ @@ -426,7 +426,7 @@ def _synthetic_dashboard_changes(self, dashboards_before_change: List[Dict]) -> return [] - def _update_insight_dashboards(self, dashboards: List[Dashboard], instance: Insight) -> None: + def _update_insight_dashboards(self, 
dashboards: list[Dashboard], instance: Insight) -> None: old_dashboard_ids = [tile.dashboard_id for tile in instance.dashboard_tiles.all()] new_dashboard_ids = [d.id for d in dashboards if not d.deleted] @@ -530,8 +530,8 @@ def insight_result(self, insight: Insight) -> InsightResult: except ExposedHogQLError as e: raise ValidationError(str(e)) - if not self.context["request"].user.is_anonymous and hogql_insights_enabled( - self.context["request"].user, insight.filters.get("insight", schema.InsightType.TRENDS) + if not self.context["request"].user.is_anonymous and should_use_hogql_backend_in_insight_serialization( + self.context["request"].user ): # TRICKY: As running `filters`-based insights on the HogQL-based engine is a transitional mechanism, # we fake the insight being properly `query`-based. @@ -598,14 +598,14 @@ class InsightViewSet( parser_classes = (QuerySchemaParser,) - def get_serializer_class(self) -> Type[serializers.BaseSerializer]: + def get_serializer_class(self) -> type[serializers.BaseSerializer]: if (self.action == "list" or self.action == "retrieve") and str_to_bool( self.request.query_params.get("basic", "0") ): return InsightBasicSerializer return super().get_serializer_class() - def get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: context = super().get_serializer_context() context["is_shared"] = isinstance(self.request.successful_authenticator, SharingAccessTokenAuthentication) return context @@ -867,7 +867,7 @@ def trend(self, request: request.Request, *args: Any, **kwargs: Any): return Response({**result, "next": next}) @cached_by_filters - def calculate_trends(self, request: request.Request) -> Dict[str, Any]: + def calculate_trends(self, request: request.Request) -> dict[str, Any]: team = self.team filter = Filter(request=request, team=self.team) @@ -919,7 +919,7 @@ def funnel(self, request: request.Request, *args: Any, **kwargs: Any) -> Respons return Response(funnel) @cached_by_filters - def calculate_funnel(self, request: request.Request) -> Dict[str, Any]: + def calculate_funnel(self, request: request.Request) -> dict[str, Any]: team = self.team filter = Filter(request=request, data={"insight": INSIGHT_FUNNELS}, team=self.team) @@ -959,7 +959,7 @@ def retention(self, request: request.Request, *args: Any, **kwargs: Any) -> Resp return Response(result) @cached_by_filters - def calculate_retention(self, request: request.Request) -> Dict[str, Any]: + def calculate_retention(self, request: request.Request) -> dict[str, Any]: team = self.team data = {} if not request.GET.get("date_from") and not request.data.get("date_from"): @@ -989,7 +989,7 @@ def path(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: return Response(result) @cached_by_filters - def calculate_path(self, request: request.Request) -> Dict[str, Any]: + def calculate_path(self, request: request.Request) -> dict[str, Any]: team = self.team filter = PathFilter(request=request, data={"insight": INSIGHT_PATHS}, team=self.team) diff --git a/posthog/api/instance_settings.py b/posthog/api/instance_settings.py index dc0b41e5cb1da..13c1461ba5655 100644 --- a/posthog/api/instance_settings.py +++ b/posthog/api/instance_settings.py @@ -1,5 +1,5 @@ import re -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any, Optional, Union from rest_framework import exceptions, mixins, permissions, serializers, viewsets @@ -50,7 +50,7 @@ def __init__(self, **kwargs): setattr(self, field, kwargs.get(field, None)) -def 
get_instance_setting(key: str, setting_config: Optional[Tuple] = None) -> InstanceSettingHelper: +def get_instance_setting(key: str, setting_config: Optional[tuple] = None) -> InstanceSettingHelper: setting_config = setting_config or CONSTANCE_CONFIG[key] is_secret = key in SECRET_SETTINGS value = get_instance_setting_raw(key) @@ -73,7 +73,7 @@ class InstanceSettingsSerializer(serializers.Serializer): editable = serializers.BooleanField(read_only=True) is_secret = serializers.BooleanField(read_only=True) - def update(self, instance: InstanceSettingHelper, validated_data: Dict[str, Any]) -> InstanceSettingHelper: + def update(self, instance: InstanceSettingHelper, validated_data: dict[str, Any]) -> InstanceSettingHelper: if instance.key not in SETTINGS_ALLOWING_API_OVERRIDE: raise serializers.ValidationError("This setting cannot be updated from the API.", code="no_api_override") diff --git a/posthog/api/instance_status.py b/posthog/api/instance_status.py index c0dff3a3e4a1c..1e001b74703be 100644 --- a/posthog/api/instance_status.py +++ b/posthog/api/instance_status.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Union +from typing import Any, Union from django.conf import settings from django.db import connection @@ -40,7 +40,7 @@ def list(self, request: Request) -> Response: redis_alive = is_redis_alive() postgres_alive = is_postgres_alive() - metrics: List[Dict[str, Union[str, bool, int, float, Dict[str, Any]]]] = [] + metrics: list[dict[str, Union[str, bool, int, float, dict[str, Any]]]] = [] metrics.append( {"key": "posthog_git_sha", "metric": "PostHog Git SHA", "value": get_git_commit_short() or "unknown"} diff --git a/posthog/api/mixins.py b/posthog/api/mixins.py index 69b83d3469e01..a326eb3d1d2cd 100644 --- a/posthog/api/mixins.py +++ b/posthog/api/mixins.py @@ -1,4 +1,4 @@ -from typing import TypeVar, Type +from typing import TypeVar from pydantic import BaseModel, ValidationError @@ -9,7 +9,7 @@ class PydanticModelMixin: - def get_model(self, data: dict, model: Type[T]) -> T: + def get_model(self, data: dict, model: type[T]) -> T: try: return model.model_validate(data) except ValidationError as exc: diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py index 5910af4948c38..4125b79dd6551 100644 --- a/posthog/api/notebook.py +++ b/posthog/api/notebook.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Any, Type +from typing import Optional, Any from django.db.models import Q import structlog from django.db import transaction @@ -58,7 +58,7 @@ def log_notebook_activity( team_id: int, user: User, was_impersonated: bool, - changes: Optional[List[Change]] = None, + changes: Optional[list[Change]] = None, ) -> None: short_id = str(notebook.short_id) @@ -118,7 +118,7 @@ class Meta: "last_modified_by", ] - def create(self, validated_data: Dict, *args, **kwargs) -> Notebook: + def create(self, validated_data: dict, *args, **kwargs) -> Notebook: request = self.context["request"] team = self.context["get_team"]() @@ -141,7 +141,7 @@ def create(self, validated_data: Dict, *args, **kwargs) -> Notebook: return notebook - def update(self, instance: Notebook, validated_data: Dict, **kwargs) -> Notebook: + def update(self, instance: Notebook, validated_data: dict, **kwargs) -> Notebook: try: before_update = Notebook.objects.get(pk=instance.id) except Notebook.DoesNotExist: @@ -240,7 +240,7 @@ class NotebookViewSet(TeamAndOrgViewSetMixin, ForbidDestroyModel, viewsets.Model filterset_fields = ["short_id"] lookup_field = "short_id" - def get_serializer_class(self) -> 
Type[BaseSerializer]: + def get_serializer_class(self) -> type[BaseSerializer]: return NotebookMinimalSerializer if self.action == "list" else NotebookSerializer def get_queryset(self) -> QuerySet: @@ -298,8 +298,8 @@ def _filter_request(self, request: Request, queryset: QuerySet) -> QuerySet: if target: # the JSONB query requires a specific structure - basic_structure = List[Dict[str, Any]] - nested_structure = basic_structure | List[Dict[str, basic_structure]] + basic_structure = list[dict[str, Any]] + nested_structure = basic_structure | list[dict[str, basic_structure]] presence_match_structure: basic_structure | nested_structure = [{"type": f"ph-{target}"}] diff --git a/posthog/api/organization.py b/posthog/api/organization.py index ea1a9f31615b1..f528d5413190a 100644 --- a/posthog/api/organization.py +++ b/posthog/api/organization.py @@ -1,5 +1,5 @@ from functools import cached_property -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, Optional, Union, cast from django.db.models import Model, QuerySet from django.shortcuts import get_object_or_404 @@ -108,7 +108,7 @@ class Meta: }, # slug is not required here as it's generated automatically for new organizations } - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Organization: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Organization: serializers.raise_errors_on_nested_writes("create", self, validated_data) user = self.context["request"].user organization, _, _ = Organization.objects.bootstrap(user, **validated_data) @@ -119,11 +119,11 @@ def get_membership_level(self, organization: Organization) -> Optional[Organizat membership = self.user_permissions.organization_memberships.get(organization.pk) return membership.level if membership is not None else None - def get_teams(self, instance: Organization) -> List[Dict[str, Any]]: + def get_teams(self, instance: Organization) -> list[dict[str, Any]]: visible_teams = instance.teams.filter(id__in=self.user_permissions.team_ids_visible_for_user) return TeamBasicSerializer(visible_teams, context=self.context, many=True).data # type: ignore - def get_metadata(self, instance: Organization) -> Dict[str, Union[str, int, object]]: + def get_metadata(self, instance: Organization) -> dict[str, Union[str, int, object]]: return { "instance_tag": settings.INSTANCE_TAG, } @@ -210,7 +210,7 @@ def perform_destroy(self, organization: Organization): ignore_conflicts=True, ) - def get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: return { **super().get_serializer_context(), "user_permissions": UserPermissions(cast(User, self.request.user)), diff --git a/posthog/api/organization_domain.py b/posthog/api/organization_domain.py index b3a4ada0b4e06..81b8c8efad8b7 100644 --- a/posthog/api/organization_domain.py +++ b/posthog/api/organization_domain.py @@ -1,5 +1,5 @@ import re -from typing import Any, Dict, cast +from typing import Any, cast from rest_framework import exceptions, request, response, serializers from rest_framework.decorators import action @@ -38,7 +38,7 @@ class Meta: "has_saml": {"read_only": True}, } - def create(self, validated_data: Dict[str, Any]) -> OrganizationDomain: + def create(self, validated_data: dict[str, Any]) -> OrganizationDomain: validated_data["organization"] = self.context["view"].organization validated_data.pop( "jit_provisioning_enabled", None @@ -56,7 +56,7 @@ def validate_domain(self, domain: str) -> str: raise 
serializers.ValidationError("Please enter a valid domain or subdomain name.") return domain - def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]: + def validate(self, attrs: dict[str, Any]) -> dict[str, Any]: instance = cast(OrganizationDomain, self.instance) if instance and not instance.verified_at: diff --git a/posthog/api/organization_feature_flag.py b/posthog/api/organization_feature_flag.py index 0ed25ada28eef..d2468cb07ce12 100644 --- a/posthog/api/organization_feature_flag.py +++ b/posthog/api/organization_feature_flag.py @@ -1,4 +1,3 @@ -from typing import Dict from django.core.exceptions import ObjectDoesNotExist from rest_framework.response import Response from rest_framework.decorators import action @@ -95,13 +94,13 @@ def copy_flags(self, request, *args, **kwargs): continue # get all linked cohorts, sorted by creation order - seen_cohorts_cache: Dict[int, CohortOrEmpty] = {} + seen_cohorts_cache: dict[int, CohortOrEmpty] = {} sorted_cohort_ids = flag_to_copy.get_cohort_ids( seen_cohorts_cache=seen_cohorts_cache, sort_by_topological_order=True ) # destination cohort id is different from original cohort id - create mapping - name_to_dest_cohort_id: Dict[str, int] = {} + name_to_dest_cohort_id: dict[str, int] = {} # create cohorts in the destination project if len(sorted_cohort_ids): for cohort_id in sorted_cohort_ids: diff --git a/posthog/api/organization_invite.py b/posthog/api/organization_invite.py index 6a8140479a950..961f2cddba27d 100644 --- a/posthog/api/organization_invite.py +++ b/posthog/api/organization_invite.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, cast +from typing import Any, cast from rest_framework import ( exceptions, @@ -49,7 +49,7 @@ def validate_target_email(self, email: str): local_part, domain = email.split("@") return f"{local_part}@{domain.lower()}" - def create(self, validated_data: Dict[str, Any], *args: Any, **kwargs: Any) -> OrganizationInvite: + def create(self, validated_data: dict[str, Any], *args: Any, **kwargs: Any) -> OrganizationInvite: if OrganizationMembership.objects.filter( organization_id=self.context["organization_id"], user__email=validated_data["target_email"], diff --git a/posthog/api/person.py b/posthog/api/person.py index 942f07e9a9ef8..bedec209168d9 100644 --- a/posthog/api/person.py +++ b/posthog/api/person.py @@ -1,18 +1,17 @@ import json import posthoganalytics +from posthog.models.person.missing_person import MissingPerson from posthog.renderers import SafeJSONRenderer from datetime import datetime -from typing import ( +from typing import ( # noqa: UP035 Any, - Callable, - Dict, List, Optional, - Tuple, - Type, TypeVar, + Union, cast, ) +from collections.abc import Callable from django.db.models import Prefetch from django.shortcuts import get_object_or_404 @@ -176,10 +175,20 @@ def get_name(self, person: Person) -> str: team = self.context["get_team"]() return get_person_name(team, person) - def to_representation(self, instance: Person) -> Dict[str, Any]: - representation = super().to_representation(instance) - representation["distinct_ids"] = sorted(representation["distinct_ids"], key=is_anonymous_id) - return representation + def to_representation(self, instance: Union[Person, MissingPerson]) -> dict[str, Any]: + if isinstance(instance, Person): + representation = super().to_representation(instance) + representation["distinct_ids"] = sorted(representation["distinct_ids"], key=is_anonymous_id) + return representation + elif isinstance(instance, MissingPerson): + return { + "id": None, + "name": None, + 
"distinct_ids": [instance.distinct_id], + "properties": instance.properties, + "created_at": None, + "uuid": instance.uuid, + } # person distinct ids can grow to be a very large list @@ -192,7 +201,7 @@ def get_distinct_ids(self, person): def get_funnel_actor_class(filter: Filter) -> Callable: - funnel_actor_class: Type[ActorBaseQuery] + funnel_actor_class: type[ActorBaseQuery] if filter.correlation_person_entity and EE_AVAILABLE: if EE_AVAILABLE: @@ -678,7 +687,7 @@ def _set_properties(self, properties, user): ) # PRAGMA: Methods for getting Persons via clickhouse queries - def _respond_with_cached_results(self, results_package: Dict[str, Tuple[List, Optional[str], Optional[str], int]]): + def _respond_with_cached_results(self, results_package: dict[str, tuple[List, Optional[str], Optional[str], int]]): # noqa: UP006 if not results_package: return response.Response(data=[]) @@ -705,7 +714,7 @@ def funnel(self, request: request.Request, **kwargs) -> response.Response: @cached_by_filters def calculate_funnel_persons( self, request: request.Request - ) -> Dict[str, Tuple[List, Optional[str], Optional[str], int]]: + ) -> dict[str, tuple[List, Optional[str], Optional[str], int]]: # noqa: UP006 filter = Filter(request=request, data={"insight": INSIGHT_FUNNELS}, team=self.team) filter = prepare_actor_query_filter(filter) funnel_actor_class = get_funnel_actor_class(filter) @@ -734,7 +743,7 @@ def path(self, request: request.Request, **kwargs) -> response.Response: @cached_by_filters def calculate_path_persons( self, request: request.Request - ) -> Dict[str, Tuple[List, Optional[str], Optional[str], int]]: + ) -> dict[str, tuple[List, Optional[str], Optional[str], int]]: # noqa: UP006 filter = PathFilter(request=request, data={"insight": INSIGHT_PATHS}, team=self.team) filter = prepare_actor_query_filter(filter) @@ -769,7 +778,7 @@ def trends(self, request: request.Request, *args: Any, **kwargs: Any) -> Respons @cached_by_filters def calculate_trends_persons( self, request: request.Request - ) -> Dict[str, Tuple[List, Optional[str], Optional[str], int]]: + ) -> dict[str, tuple[List, Optional[str], Optional[str], int]]: # noqa: UP006 filter = Filter(request=request, team=self.team) filter = prepare_actor_query_filter(filter) entity = get_target_entity(filter) diff --git a/posthog/api/plugin.py b/posthog/api/plugin.py index 2a6e00f325451..7a4dea1a8d7a9 100644 --- a/posthog/api/plugin.py +++ b/posthog/api/plugin.py @@ -2,7 +2,7 @@ import os import re import subprocess -from typing import Any, Dict, List, Optional, Set, cast, Literal +from typing import Any, Optional, cast, Literal import requests from dateutil.relativedelta import relativedelta @@ -64,8 +64,8 @@ def _update_plugin_attachments(request: request.Request, plugin_config: PluginCo def get_plugin_config_changes( - old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=None -) -> List[Change]: + old_config: dict[str, Any], new_config: dict[str, Any], secret_fields=None +) -> list[Change]: if secret_fields is None: secret_fields = [] config_changes = dict_changes_between("Plugin", old_config, new_config) @@ -103,8 +103,8 @@ def log_enabled_change_activity( def log_config_update_activity( new_plugin_config: PluginConfig, - old_config: Dict[str, Any], - secret_fields: Set[str], + old_config: dict[str, Any], + secret_fields: set[str], old_enabled: bool, user: User, was_impersonated: bool, @@ -280,7 +280,7 @@ def get_latest_tag(self, plugin: Plugin) -> Optional[str]: def get_organization_name(self, plugin: Plugin) -> str: return 
plugin.organization.name - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Plugin: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Plugin: validated_data["url"] = self.initial_data.get("url", None) validated_data["organization_id"] = self.context["organization_id"] validated_data["updated_at"] = now() @@ -291,7 +291,7 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Plugin: return plugin - def update(self, plugin: Plugin, validated_data: Dict, *args: Any, **kwargs: Any) -> Plugin: # type: ignore + def update(self, plugin: Plugin, validated_data: dict, *args: Any, **kwargs: Any) -> Plugin: # type: ignore context_organization = self.context["get_organization"]() if ( "is_global" in validated_data @@ -387,7 +387,7 @@ def check_for_updates(self, request: request.Request, **kwargs): @action(methods=["GET"], detail=True) def source(self, request: request.Request, **kwargs): plugin = self.get_plugin_with_permissions(reason="source editing") - response: Dict[str, str] = {} + response: dict[str, str] = {} for source in PluginSourceFile.objects.filter(plugin=plugin): response[source.filename] = source.source return Response(response) @@ -395,7 +395,7 @@ def source(self, request: request.Request, **kwargs): @action(methods=["PATCH"], detail=True) def update_source(self, request: request.Request, **kwargs): plugin = self.get_plugin_with_permissions(reason="source editing") - sources: Dict[str, PluginSourceFile] = {} + sources: dict[str, PluginSourceFile] = {} performed_changes = False for plugin_source_file in PluginSourceFile.objects.filter(plugin=plugin): sources[plugin_source_file.filename] = plugin_source_file @@ -438,7 +438,7 @@ def update_source(self, request: request.Request, **kwargs): sources[key].error = error sources[key].save() - response: Dict[str, str] = {} + response: dict[str, str] = {} for _, source in sources.items(): response[source.filename] = source.source @@ -476,7 +476,7 @@ def upgrade(self, request: request.Request, **kwargs): Plugin.PluginType.SOURCE, Plugin.PluginType.LOCAL, ): - validated_data: Dict[str, Any] = {} + validated_data: dict[str, Any] = {} plugin_json = update_validated_data_from_url(validated_data, plugin.url) with transaction.atomic(): serializer.update(plugin, validated_data) @@ -647,7 +647,7 @@ def get_error(self, plugin_config: PluginConfig) -> None: # error details instead. 
return None - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> PluginConfig: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> PluginConfig: if not can_configure_plugins(self.context["get_organization"]()): raise ValidationError("Plugin configuration is not available for the current organization!") validated_data["team_id"] = self.context["team_id"] @@ -682,7 +682,7 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> PluginConfi def update( # type: ignore self, plugin_config: PluginConfig, - validated_data: Dict, + validated_data: dict, *args: Any, **kwargs: Any, ) -> PluginConfig: @@ -731,7 +731,7 @@ def get_queryset(self): queryset = queryset.filter(deleted=False) return queryset.order_by("order", "plugin_id") - def get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: context = super().get_serializer_context() if context["view"].action in ("retrieve", "list"): context["delivery_rates_1d"] = TeamPluginsDeliveryRateQuery(self.team).run() @@ -856,7 +856,7 @@ def frontend(self, request: request.Request, **kwargs): content = plugin_source.transpiled or "" return HttpResponse(content, content_type="application/javascript; charset=UTF-8") - obj: Dict[str, Any] = {} + obj: dict[str, Any] = {} if not plugin_source: obj = {"no_frontend": True} elif plugin_source.status is None or plugin_source.status == PluginSourceFile.Status.LOCKED: @@ -868,7 +868,7 @@ def frontend(self, request: request.Request, **kwargs): return HttpResponse(content, content_type="application/javascript; charset=UTF-8") -def _get_secret_fields_for_plugin(plugin: Plugin) -> Set[str]: +def _get_secret_fields_for_plugin(plugin: Plugin) -> set[str]: # A set of keys for config fields that have secret = true secret_fields = {field["key"] for field in plugin.config_schema if isinstance(field, dict) and field.get("secret")} return secret_fields diff --git a/posthog/api/property_definition.py b/posthog/api/property_definition.py index 584644f902b33..7db63497d5a9d 100644 --- a/posthog/api/property_definition.py +++ b/posthog/api/property_definition.py @@ -1,6 +1,6 @@ import dataclasses import json -from typing import Any, Dict, List, Optional, Type, cast +from typing import Any, Optional, cast from django.db import connection from django.db.models import Prefetch @@ -35,7 +35,7 @@ class PropertyDefinitionQuerySerializer(serializers.Serializer): ) type = serializers.ChoiceField( - choices=["event", "person", "group"], + choices=["event", "person", "group", "session"], help_text="What property definitions to return", default="event", ) @@ -125,7 +125,7 @@ class QueryContext: posthog_eventproperty_table_join_alias = "check_for_matching_event_property" - params: Dict = dataclasses.field(default_factory=dict) + params: dict = dataclasses.field(default_factory=dict) def with_properties_to_filter(self, properties_to_filter: Optional[str]) -> "QueryContext": if properties_to_filter: @@ -192,6 +192,16 @@ def with_type_filter(self, type: str, group_type_index: Optional[int]): "group_type_index": group_type_index, }, ) + elif type == "session": + return dataclasses.replace( + self, + should_join_event_property=False, + params={ + **self.params, + "type": PropertyDefinition.Type.SESSION, + "group_type_index": -1, + }, + ) def with_event_property_filter( self, event_names: Optional[str], filter_by_event_names: Optional[bool] @@ -219,7 +229,7 @@ def with_event_property_filter( params={**self.params, "event_names": list(map(str, event_names or 
[]))}, ) - def with_search(self, search_query: str, search_kwargs: Dict) -> "QueryContext": + def with_search(self, search_query: str, search_kwargs: dict) -> "QueryContext": return dataclasses.replace( self, search_query=search_query, @@ -443,7 +453,7 @@ def get_count(self, queryset) -> int: return self.count - def paginate_queryset(self, queryset, request, view=None) -> Optional[List[Any]]: + def paginate_queryset(self, queryset, request, view=None) -> Optional[list[Any]]: """ Assumes the queryset has already had pagination applied """ @@ -570,7 +580,7 @@ def get_queryset(self): return queryset.raw(query_context.as_sql(order_by_verified), params=query_context.params) - def get_serializer_class(self) -> Type[serializers.ModelSerializer]: + def get_serializer_class(self) -> type[serializers.ModelSerializer]: serializer_class = self.serializer_class if self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY): try: diff --git a/posthog/api/query.py b/posthog/api/query.py index 2712b8e6619f0..197fe79f18e1f 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -79,7 +79,9 @@ def create(self, request, *args, **kwargs) -> Response: result = process_query_model( self.team, data.query, - execution_mode=ExecutionMode.CALCULATION_REQUESTED if data.refresh else ExecutionMode.CACHE_ONLY, + execution_mode=ExecutionMode.CALCULATION_ALWAYS + if data.refresh + else ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE, ) return Response(result) except (ExposedHogQLError, ExposedCHQueryError) as e: diff --git a/posthog/api/routing.py b/posthog/api/routing.py index b768538c05d50..02654051a3f12 100644 --- a/posthog/api/routing.py +++ b/posthog/api/routing.py @@ -1,5 +1,5 @@ from functools import cached_property, lru_cache -from typing import TYPE_CHECKING, Any, Dict, Optional, cast +from typing import TYPE_CHECKING, Any, Optional, cast from rest_framework.exceptions import AuthenticationFailed, NotFound, ValidationError from rest_framework.permissions import IsAuthenticated @@ -50,7 +50,7 @@ class TeamAndOrgViewSetMixin(_GenericViewSet): # Rewrite filter queries, so that for example foreign keys can be accessed # Example: {"team_id": "foo__team_id"} will make the viewset filtered by obj.foo.team_id instead of obj.team_id - filter_rewrite_rules: Dict[str, str] = {} + filter_rewrite_rules: dict[str, str] = {} authentication_classes = [] permission_classes = [] @@ -170,7 +170,7 @@ def filter_queryset_by_parents_lookups(self, queryset): return queryset @cached_property - def parents_query_dict(self) -> Dict[str, Any]: + def parents_query_dict(self) -> dict[str, Any]: # used to override the last visited project if there's a token in the request team_from_request = self._get_team_from_request() @@ -213,7 +213,7 @@ def parents_query_dict(self) -> Dict[str, Any]: result[query_lookup] = query_value return result - def get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: serializer_context = super().get_serializer_context() if hasattr(super(), "get_serializer_context") else {} serializer_context.update(self.parents_query_dict) # The below are lambdas for lazy evaluation (i.e. 
we only query Postgres for team/org if actually needed) diff --git a/posthog/api/scheduled_change.py b/posthog/api/scheduled_change.py index 5d1878ebfe4fb..2100f6b7bdc7d 100644 --- a/posthog/api/scheduled_change.py +++ b/posthog/api/scheduled_change.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from rest_framework import ( serializers, viewsets, @@ -29,7 +29,7 @@ class Meta: ] read_only_fields = ["id", "created_at", "created_by", "updated_at"] - def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> ScheduledChange: + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> ScheduledChange: request = self.context["request"] validated_data["created_by"] = request.user validated_data["team_id"] = self.context["team_id"] diff --git a/posthog/api/services/query.py b/posthog/api/services/query.py index 2387791e99d98..20029c00bd589 100644 --- a/posthog/api/services/query.py +++ b/posthog/api/services/query.py @@ -11,7 +11,7 @@ from posthog.hogql.autocomplete import get_hogql_autocomplete from posthog.hogql.metadata import get_hogql_metadata from posthog.hogql.modifiers import create_default_modifiers_for_team -from posthog.hogql_queries.query_runner import ExecutionMode, get_query_runner +from posthog.hogql_queries.query_runner import CacheMissResponse, ExecutionMode, get_query_runner from posthog.models import Team from posthog.queries.time_to_see_data.serializers import SessionEventsQuerySerializer, SessionsQuerySerializer from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions @@ -41,7 +41,7 @@ logger = structlog.get_logger(__name__) -QUERY_WITH_RUNNER = ( +QUERY_WITH_RUNNER_USING_CACHE = ( TrendsQuery | FunnelsQuery | RetentionQuery @@ -61,7 +61,7 @@ def process_query( query_json: dict, *, limit_context: Optional[LimitContext] = None, - execution_mode: ExecutionMode = ExecutionMode.CALCULATION_ONLY_IF_STALE, + execution_mode: ExecutionMode = ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE, ) -> dict: model = QuerySchemaRoot.model_validate(query_json) tag_queries(query=query_json) @@ -78,45 +78,52 @@ def process_query_model( query: BaseModel, # mypy has problems with unions and isinstance *, limit_context: Optional[LimitContext] = None, - execution_mode: ExecutionMode = ExecutionMode.CALCULATION_ONLY_IF_STALE, + execution_mode: ExecutionMode = ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE, ) -> dict: result: dict | BaseModel - if isinstance(query, QUERY_WITH_RUNNER): # type: ignore - query_runner = get_query_runner(query, team, limit_context=limit_context) - result = query_runner.run(execution_mode=execution_mode) - elif isinstance(query, QUERY_WITH_RUNNER_NO_CACHE): # type: ignore - query_runner = get_query_runner(query, team, limit_context=limit_context) - result = query_runner.calculate() - elif isinstance(query, HogQLAutocomplete): - result = get_hogql_autocomplete(query=query, team=team) - elif isinstance(query, HogQLMetadata): - metadata_query = HogQLMetadata.model_validate(query) - metadata_response = get_hogql_metadata(query=metadata_query, team=team) - result = metadata_response - elif isinstance(query, DatabaseSchemaQuery): - database = create_hogql_database(team.pk, modifiers=create_default_modifiers_for_team(team)) - context = HogQLContext(team_id=team.pk, team=team, database=database) - result = serialize_database(context) - elif isinstance(query, TimeToSeeDataSessionsQuery): - sessions_query_serializer = SessionsQuerySerializer(data=query) - sessions_query_serializer.is_valid(raise_exception=True) 
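The process_query_model rework in this hunk changes the default execution mode from CALCULATION_ONLY_IF_STALE to RECENT_CACHE_CALCULATE_IF_STALE, renames QUERY_WITH_RUNNER to QUERY_WITH_RUNNER_USING_CACHE, and adds an early cache-miss path for CACHE_ONLY_NEVER_CALCULATE requests on query kinds that have no caching runner (the /query endpoint earlier in the diff now maps refresh=true to CALCULATION_ALWAYS). A minimal Python sketch of that dispatch shape, reusing names from this diff; run_query_sketch itself is illustrative and omits most of the real branches:

```python
# Minimal sketch of the reworked dispatch, not the full implementation.
from pydantic import BaseModel

from posthog.api.services.query import QUERY_WITH_RUNNER_USING_CACHE
from posthog.hogql_queries.query_runner import CacheMissResponse, ExecutionMode, get_query_runner


def run_query_sketch(team, query, execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE) -> dict:
    result: dict | BaseModel
    if execution_mode == ExecutionMode.CACHE_ONLY_NEVER_CALCULATE and not isinstance(
        query, QUERY_WITH_RUNNER_USING_CACHE
    ):
        # Cache-only requests for query kinds without a caching runner short-circuit
        # with a cache miss instead of calculating.
        result = CacheMissResponse(cache_key=None)
    elif isinstance(query, QUERY_WITH_RUNNER_USING_CACHE):
        # Cached query kinds go through the query runner's caching layer.
        result = get_query_runner(query, team).run(execution_mode=execution_mode)
    else:
        # The real function handles many more kinds (metadata, autocomplete, ...);
        # here they are collapsed into a direct, non-cached calculation.
        result = get_query_runner(query, team).calculate()
    return result.model_dump() if isinstance(result, BaseModel) else result
```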
- result = {"results": get_sessions(sessions_query_serializer).data} - elif isinstance(query, TimeToSeeDataQuery): - serializer = SessionEventsQuerySerializer( - data={ - "team_id": team.pk, - "session_start": query.sessionStart, - "session_end": query.sessionEnd, - "session_id": query.sessionId, - } - ) - serializer.is_valid(raise_exception=True) - result = get_session_events(serializer) or {} - elif hasattr(query, "source") and isinstance(query.source, BaseModel): - result = process_query_model(team, query.source) + if execution_mode == ExecutionMode.CACHE_ONLY_NEVER_CALCULATE and not isinstance( + query, + QUERY_WITH_RUNNER_USING_CACHE, # type: ignore + ): + result = CacheMissResponse(cache_key=None) else: - raise ValidationError(f"Unsupported query kind: {query.__class__.__name__}") + if isinstance(query, QUERY_WITH_RUNNER_USING_CACHE): # type: ignore + query_runner = get_query_runner(query, team, limit_context=limit_context) + result = query_runner.run(execution_mode=execution_mode) + elif isinstance(query, QUERY_WITH_RUNNER_NO_CACHE): # type: ignore + # TODO: These queries should be using the QueryRunner caching layer too + query_runner = get_query_runner(query, team, limit_context=limit_context) + result = query_runner.calculate() + elif isinstance(query, HogQLAutocomplete): + result = get_hogql_autocomplete(query=query, team=team) + elif isinstance(query, HogQLMetadata): + metadata_query = HogQLMetadata.model_validate(query) + metadata_response = get_hogql_metadata(query=metadata_query, team=team) + result = metadata_response + elif isinstance(query, DatabaseSchemaQuery): + database = create_hogql_database(team.pk, modifiers=create_default_modifiers_for_team(team)) + context = HogQLContext(team_id=team.pk, team=team, database=database) + result = serialize_database(context) + elif isinstance(query, TimeToSeeDataSessionsQuery): + sessions_query_serializer = SessionsQuerySerializer(data=query) + sessions_query_serializer.is_valid(raise_exception=True) + result = {"results": get_sessions(sessions_query_serializer).data} + elif isinstance(query, TimeToSeeDataQuery): + serializer = SessionEventsQuerySerializer( + data={ + "team_id": team.pk, + "session_start": query.sessionStart, + "session_end": query.sessionEnd, + "session_id": query.sessionId, + } + ) + serializer.is_valid(raise_exception=True) + result = get_session_events(serializer) or {} + elif hasattr(query, "source") and isinstance(query.source, BaseModel): + result = process_query_model(team, query.source) + else: + raise ValidationError(f"Unsupported query kind: {query.__class__.__name__}") if isinstance(result, BaseModel): return result.model_dump() diff --git a/posthog/api/session.py b/posthog/api/session.py new file mode 100644 index 0000000000000..b4c79600d1999 --- /dev/null +++ b/posthog/api/session.py @@ -0,0 +1,56 @@ +import json + +from rest_framework import request, response, viewsets +from rest_framework.decorators import action +from rest_framework.exceptions import ValidationError + +from posthog.api.routing import TeamAndOrgViewSetMixin +from posthog.hogql.database.schema.sessions import get_lazy_session_table_properties, get_lazy_session_table_values +from posthog.rate_limit import ( + ClickHouseBurstRateThrottle, + ClickHouseSustainedRateThrottle, +) +from posthog.utils import convert_property_value, flatten + + +class SessionViewSet( + TeamAndOrgViewSetMixin, + viewsets.ViewSet, +): + scope_object = "query" + throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle] + + 
@action(methods=["GET"], detail=False) + def values(self, request: request.Request, **kwargs) -> response.Response: + team = self.team + + key = request.GET.get("key") + search_term = request.GET.get("value") + + if not key: + raise ValidationError(detail=f"Key not provided") + + result = get_lazy_session_table_values(key, search_term=search_term, team=team) + + flattened = [] + for value in result: + try: + # Try loading as json for dicts or arrays + flattened.append(json.loads(value[0])) + except json.decoder.JSONDecodeError: + flattened.append(value[0]) + return response.Response([{"name": convert_property_value(value)} for value in flatten(flattened)]) + + @action(methods=["GET"], detail=False) + def property_definitions(self, request: request.Request, **kwargs) -> response.Response: + search = request.GET.get("search") + + # unlike e.g. event properties, there's a very limited number of session properties, + # so we can just return them all + results = get_lazy_session_table_properties(search) + return response.Response( + { + "count": len(results), + "results": results, + } + ) diff --git a/posthog/api/sharing.py b/posthog/api/sharing.py index c7ab40fb0f89d..3d4a2d693749c 100644 --- a/posthog/api/sharing.py +++ b/posthog/api/sharing.py @@ -1,6 +1,6 @@ import json from datetime import timedelta -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast from urllib.parse import urlparse, urlunparse from django.core.serializers.json import DjangoJSONEncoder @@ -87,7 +87,7 @@ class SharingConfigurationViewSet(TeamAndOrgViewSetMixin, mixins.ListModelMixin, def get_serializer_context( self, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: context = super().get_serializer_context() dashboard_id = context.get("dashboard_id") @@ -113,7 +113,7 @@ def get_serializer_context( return context - def _get_sharing_configuration(self, context: Dict[str, Any]): + def _get_sharing_configuration(self, context: dict[str, Any]): """ Gets but does not create a SharingConfiguration. 
Only once enabled do we actually store it """ @@ -247,7 +247,7 @@ def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Any: "user_permissions": UserPermissions(cast(User, request.user), resource.team), "is_shared": True, } - exported_data: Dict[str, Any] = {"type": "embed" if embedded else "scene"} + exported_data: dict[str, Any] = {"type": "embed" if embedded else "scene"} if isinstance(resource, SharingConfiguration) and request.path.endswith(f".png"): exported_data["accessToken"] = resource.access_token diff --git a/posthog/api/signup.py b/posthog/api/signup.py index c31f37b891eb3..8385dc7759798 100644 --- a/posthog/api/signup.py +++ b/posthog/api/signup.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, Union, cast +from typing import Any, Optional, Union, cast from urllib.parse import urlencode import structlog @@ -71,7 +71,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.is_social_signup = False - def get_fields(self) -> Dict[str, serializers.Field]: + def get_fields(self) -> dict[str, serializers.Field]: fields = super().get_fields() if settings.DEMO: # There's no password in the demo env @@ -156,7 +156,7 @@ def enter_demo(self, validated_data) -> User: def create_team(self, organization: Organization, user: User) -> Team: return Team.objects.create_with_data(user=user, organization=organization) - def to_representation(self, instance) -> Dict: + def to_representation(self, instance) -> dict: data = UserBasicSerializer(instance=instance).data data["redirect_url"] = get_redirect_url(data["uuid"], data["is_email_verified"]) return data @@ -185,7 +185,7 @@ def to_representation(self, instance): data["redirect_url"] = get_redirect_url(data["uuid"], data["is_email_verified"]) return data - def validate(self, data: Dict[str, Any]) -> Dict[str, Any]: + def validate(self, data: dict[str, Any]) -> dict[str, Any]: if "request" not in self.context or not self.context["request"].user.is_authenticated: # If there's no authenticated user and we're creating a new one, attributes are required. 
@@ -469,7 +469,7 @@ def social_create_user( return {"is_new": False} backend_processor = "social_create_user" - email = details["email"][0] if isinstance(details["email"], (list, tuple)) else details["email"] + email = details["email"][0] if isinstance(details["email"], list | tuple) else details["email"] full_name = ( details.get("fullname") or f"{details.get('first_name') or ''} {details.get('last_name') or ''}".strip() diff --git a/posthog/api/survey.py b/posthog/api/survey.py index cb991a5f95abe..3ffce982b8981 100644 --- a/posthog/api/survey.py +++ b/posthog/api/survey.py @@ -1,5 +1,4 @@ from contextlib import contextmanager -from typing import Type from django.http import JsonResponse @@ -271,7 +270,7 @@ class SurveyViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): scope_object = "survey" queryset = Survey.objects.select_related("linked_flag", "targeting_flag").all() - def get_serializer_class(self) -> Type[serializers.Serializer]: + def get_serializer_class(self) -> type[serializers.Serializer]: if self.request.method == "POST" or self.request.method == "PATCH": return SurveySerializerCreateUpdateOnly else: diff --git a/posthog/api/tagged_item.py b/posthog/api/tagged_item.py index 85aa08323a04c..d73275523b639 100644 --- a/posthog/api/tagged_item.py +++ b/posthog/api/tagged_item.py @@ -50,7 +50,7 @@ def _attempt_set_tags(self, tags, obj, force_create=False): obj.prefetched_tags = tagged_item_objects def to_representation(self, obj): - ret = super(TaggedItemSerializerMixin, self).to_representation(obj) + ret = super().to_representation(obj) ret["tags"] = [] if self._is_licensed(): if hasattr(obj, "prefetched_tags"): @@ -61,12 +61,12 @@ def to_representation(self, obj): def create(self, validated_data): validated_data.pop("tags", None) - instance = super(TaggedItemSerializerMixin, self).create(validated_data) + instance = super().create(validated_data) self._attempt_set_tags(self.initial_data.get("tags"), instance) return instance def update(self, instance, validated_data): - instance = super(TaggedItemSerializerMixin, self).update(instance, validated_data) + instance = super().update(instance, validated_data) self._attempt_set_tags(self.initial_data.get("tags"), instance) return instance @@ -96,7 +96,7 @@ def prefetch_tagged_items_if_available(self, queryset: QuerySet) -> QuerySet: return queryset def get_queryset(self): - queryset = super(TaggedItemViewSetMixin, self).get_queryset() + queryset = super().get_queryset() return self.prefetch_tagged_items_if_available(queryset) diff --git a/posthog/api/team.py b/posthog/api/team.py index c8b2513b6798c..bdf225924ded2 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -1,6 +1,6 @@ import json from functools import cached_property -from typing import Any, Dict, List, Optional, Type, cast +from typing import Any, Optional, cast from django.core.cache import cache from django.shortcuts import get_object_or_404 @@ -111,6 +111,7 @@ class Meta: "recording_domains", "inject_web_apps", "surveys_opt_in", + "heatmaps_opt_in", ] @@ -166,6 +167,7 @@ class Meta: "extra_settings", "has_completed_onboarding_for", "surveys_opt_in", + "heatmaps_opt_in", ) read_only_fields = ( "id", @@ -190,11 +192,11 @@ def get_has_group_types(self, team: Team) -> bool: def get_groups_on_events_querying_enabled(self, team: Team) -> bool: return groups_on_events_querying_enabled() - def validate_session_recording_linked_flag(self, value) -> Dict | None: + def validate_session_recording_linked_flag(self, value) -> dict | None: if value is None: return None - 
if not isinstance(value, Dict): + if not isinstance(value, dict): raise exceptions.ValidationError("Must provide a dictionary or None.") received_keys = value.keys() valid_keys = [ @@ -208,11 +210,11 @@ def validate_session_recording_linked_flag(self, value) -> Dict | None: return value - def validate_session_recording_network_payload_capture_config(self, value) -> Dict | None: + def validate_session_recording_network_payload_capture_config(self, value) -> dict | None: if value is None: return None - if not isinstance(value, Dict): + if not isinstance(value, dict): raise exceptions.ValidationError("Must provide a dictionary or None.") if not all(key in ["recordHeaders", "recordBody"] for key in value.keys()): @@ -222,11 +224,11 @@ def validate_session_recording_network_payload_capture_config(self, value) -> Di return value - def validate_session_replay_config(self, value) -> Dict | None: + def validate_session_replay_config(self, value) -> dict | None: if value is None: return None - if not isinstance(value, Dict): + if not isinstance(value, dict): raise exceptions.ValidationError("Must provide a dictionary or None.") known_keys = ["record_canvas", "ai_config"] @@ -240,9 +242,9 @@ def validate_session_replay_config(self, value) -> Dict | None: return value - def validate_session_replay_ai_summary_config(self, value: Dict | None) -> Dict | None: + def validate_session_replay_ai_summary_config(self, value: dict | None) -> dict | None: if value is not None: - if not isinstance(value, Dict): + if not isinstance(value, dict): raise exceptions.ValidationError("Must provide a dictionary or None.") allowed_keys = [ @@ -294,7 +296,7 @@ def validate(self, attrs: Any) -> Any: ) return super().validate(attrs) - def create(self, validated_data: Dict[str, Any], **kwargs) -> Team: + def create(self, validated_data: dict[str, Any], **kwargs) -> Team: serializers.raise_errors_on_nested_writes("create", self, validated_data) request = self.context["request"] organization = self.context["view"].organization # Use the org we used to validate permissions @@ -337,7 +339,7 @@ def _handle_timezone_update(self, team: Team) -> None: hashes = InsightCachingState.objects.filter(team=team).values_list("cache_key", flat=True) cache.delete_many(hashes) - def update(self, instance: Team, validated_data: Dict[str, Any]) -> Team: + def update(self, instance: Team, validated_data: dict[str, Any]) -> Team: before_update = instance.__dict__.copy() if "timezone" in validated_data and validated_data["timezone"] != instance.timezone: @@ -406,13 +408,13 @@ def get_queryset(self): visible_teams_ids = UserPermissions(cast(User, self.request.user)).team_ids_visible_for_user return super().get_queryset().filter(id__in=visible_teams_ids) - def get_serializer_class(self) -> Type[serializers.BaseSerializer]: + def get_serializer_class(self) -> type[serializers.BaseSerializer]: if self.action == "list": return TeamBasicSerializer return super().get_serializer_class() # NOTE: Team permissions are somewhat complex so we override the underlying viewset's get_permissions method - def get_permissions(self) -> List: + def get_permissions(self) -> list: """ Special permissions handling for create requests as the organization is inferred from the current user. 
""" diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr index a92d37c6b730b..3807733141070 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -58,6 +58,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -220,6 +221,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -563,6 +565,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr index af842be0643e0..65d33cbda4fa6 100644 --- a/posthog/api/test/__snapshots__/test_annotation.ambr +++ b/posthog/api/test/__snapshots__/test_annotation.ambr @@ -58,6 +58,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -142,6 +143,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -479,6 +481,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr index b4a5bb2780673..37bb741043ef9 100644 --- a/posthog/api/test/__snapshots__/test_api_docs.ambr +++ b/posthog/api/test/__snapshots__/test_api_docs.ambr @@ -84,6 +84,8 @@ '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: could not resolve field on model with path "viewed". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: SessionRecording has no field named \'viewed\')', '/home/runner/work/posthog/posthog/posthog/api/person.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer > MinimalPersonSerializer]: unable to resolve type hint for function "get_distinct_ids". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: unable to resolve type hint for function "storage". Consider using a type hint or @extend_schema_field. 
Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/session.py: Error [SessionViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.', + '/home/runner/work/posthog/posthog/posthog/api/session.py: Warning [SessionViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.subscription.Subscription" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet > SubscriptionSerializer]: unable to resolve type hint for function "summary". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/api/survey.py: Warning [SurveyViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feedback.survey.Survey" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index b60f0660121b9..ecb47575bd0f9 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -61,6 +61,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -311,6 +312,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -473,6 +475,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -628,6 +631,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index 3838fa80136f0..8f8e801bcaf33 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -27,6 +27,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -164,6 +165,7 @@ 
"posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr index b849de02d476e..a71e9b802c72d 100644 --- a/posthog/api/test/__snapshots__/test_element.ambr +++ b/posthog/api/test/__snapshots__/test_element.ambr @@ -58,6 +58,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index f3d637637aec4..26c0b9679f69d 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -467,6 +467,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -670,6 +671,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1032,6 +1034,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1169,6 +1172,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1463,6 +1467,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1554,6 +1559,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1643,6 +1649,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1703,6 +1710,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index 9e1f952147f44..bce2a60ffa967 100644 --- a/posthog/api/test/__snapshots__/test_insight.ambr +++ 
b/posthog/api/test/__snapshots__/test_insight.ambr @@ -697,6 +697,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -750,6 +751,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -879,6 +881,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1118,6 +1121,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1267,6 +1271,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1401,6 +1406,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1514,6 +1520,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1644,6 +1651,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1732,6 +1740,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1819,6 +1828,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1879,6 +1889,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index 2bce2f52b4920..345b64664cdd9 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -107,6 +107,7 @@ "posthog_team"."capture_console_log_opt_in", 
"posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -215,6 +216,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -303,6 +305,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -515,6 +518,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -644,6 +648,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -793,6 +798,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -881,6 +887,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1099,6 +1106,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1228,6 +1236,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1281,6 +1290,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1430,6 +1440,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1706,6 +1717,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr index 09c2089e7e46b..bf052e9b031c7 100644 --- a/posthog/api/test/__snapshots__/test_preflight.ambr +++ 
b/posthog/api/test/__snapshots__/test_preflight.ambr @@ -69,6 +69,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_query.ambr b/posthog/api/test/__snapshots__/test_query.ambr index f176a89a53e2b..80e5aa23cde82 100644 --- a/posthog/api/test/__snapshots__/test_query.ambr +++ b/posthog/api/test/__snapshots__/test_query.ambr @@ -13,7 +13,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_event_property_filter.1 @@ -30,7 +31,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_event_property_filter.2 @@ -47,7 +49,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_event_property_filter_materialized @@ -64,7 +67,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_event_property_filter_materialized.1 @@ -81,7 +85,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_event_property_filter_materialized.2 @@ -98,7 +103,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_events_query_all_time_date @@ -111,7 +117,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_events_query_all_time_date.1 @@ -124,7 +131,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_events_query_all_time_date.2 @@ -137,7 +145,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_full_hogql_query @@ -152,7 +161,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_full_hogql_query_async @@ -192,7 +202,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_full_hogql_query_view @@ -207,7 +218,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestQuery.test_full_hogql_query_view.1 @@ -226,7 +238,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter @@ -243,7 +256,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter.1 @@ -260,7 +274,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter.2 @@ -277,7 +292,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter.3 @@ -294,7 +310,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter_materialized @@ -311,7 +328,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter_materialized.1 @@ -328,7 +346,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter_materialized.2 @@ -345,7 +364,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_hogql_property_filter_materialized.3 @@ -362,7 +382,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_person_property_filter @@ -397,7 +418,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_person_property_filter_materialized @@ -432,7 +454,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_property_filter_aggregations @@ -447,7 +470,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_property_filter_aggregations.1 @@ -463,7 +487,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_property_filter_aggregations_materialized @@ -478,7 +503,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestQuery.test_property_filter_aggregations_materialized.1 @@ -494,7 +520,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_select_event_person @@ -509,7 +536,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_select_hogql_expressions @@ -525,7 +553,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_select_hogql_expressions.1 @@ -539,7 +568,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_select_hogql_expressions.2 @@ -554,7 +584,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestQuery.test_select_hogql_expressions.3 @@ -569,6 +600,7 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index 6798d297f4652..22dc514280297 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -137,6 +137,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/batch_exports/operations.py b/posthog/api/test/batch_exports/operations.py index ff45b9b5b7cbd..5ac814deab1e2 100644 --- a/posthog/api/test/batch_exports/operations.py +++ b/posthog/api/test/batch_exports/operations.py @@ -2,7 +2,7 @@ from rest_framework import status -def create_batch_export(client: TestClient, team_id: int, batch_export_data: dict): +def create_batch_export(client: TestClient, team_id: int, batch_export_data: dict | str): return client.post( f"/api/projects/{team_id}/batch_exports", batch_export_data, @@ -10,7 +10,7 @@ def create_batch_export(client: TestClient, team_id: int, batch_export_data: dic ) -def create_batch_export_ok(client: TestClient, team_id: int, batch_export_data: dict): +def create_batch_export_ok(client: TestClient, team_id: int, batch_export_data: dict | str): response = create_batch_export(client, team_id, batch_export_data) assert response.status_code == status.HTTP_201_CREATED, response.json() return response.json() diff --git a/posthog/api/test/dashboards/__init__.py b/posthog/api/test/dashboards/__init__.py index 79d1e435e64ec..ad6505b5a61a7 100644 --- a/posthog/api/test/dashboards/__init__.py +++ b/posthog/api/test/dashboards/__init__.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Literal, Optional, Tuple +from typing import Any, Literal, Optional from rest_framework import status @@ -15,7 +15,7 @@ def soft_delete( self, model_id: int, model_type: Literal["insights", "dashboards"], - extra_data: Optional[Dict] = 
None, + extra_data: Optional[dict] = None, expected_get_status: int = status.HTTP_404_NOT_FOUND, ) -> None: if extra_data is None: @@ -33,10 +33,10 @@ def soft_delete( def create_dashboard( self, - data: Dict[str, Any], + data: dict[str, Any], team_id: Optional[int] = None, expected_status: int = status.HTTP_201_CREATED, - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: if team_id is None: team_id = self.team.id response = self.client.post(f"/api/projects/{team_id}/dashboards/", data) @@ -49,10 +49,10 @@ def create_dashboard( def update_dashboard( self, dashboard_id: int, - data: Dict[str, Any], + data: dict[str, Any], team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: if team_id is None: team_id = self.team.id response = self.client.patch(f"/api/projects/{team_id}/dashboards/{dashboard_id}", data) @@ -67,8 +67,8 @@ def get_dashboard( dashboard_id: int, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - query_params: Optional[Dict[str, Any]] = None, - ) -> Dict[str, Any]: + query_params: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: if team_id is None: team_id = self.team.id @@ -82,8 +82,8 @@ def list_dashboards( self, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - query_params: Optional[Dict] = None, - ) -> Dict: + query_params: Optional[dict] = None, + ) -> dict: if team_id is None: team_id = self.team.id @@ -100,8 +100,8 @@ def list_insights( self, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - query_params: Optional[Dict] = None, - ) -> Dict: + query_params: Optional[dict] = None, + ) -> dict: if team_id is None: team_id = self.team.id @@ -122,8 +122,8 @@ def get_insight( insight_id: int, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - query_params: Optional[Dict[str, Any]] = None, - ) -> Dict[str, Any]: + query_params: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: if team_id is None: team_id = self.team.id @@ -138,10 +138,10 @@ def get_insight( def create_insight( self, - data: Dict[str, Any], + data: dict[str, Any], team_id: Optional[int] = None, expected_status: int = status.HTTP_201_CREATED, - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: if team_id is None: team_id = self.team.id @@ -160,10 +160,10 @@ def create_insight( def update_insight( self, insight_id: int, - data: Dict[str, Any], + data: dict[str, Any], team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: if team_id is None: team_id = self.team.id @@ -177,10 +177,10 @@ def create_text_tile( self, dashboard_id: int, text: str = "I AM TEXT!", - extra_data: Optional[Dict] = None, + extra_data: Optional[dict] = None, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: if team_id is None: team_id = self.team.id @@ -218,10 +218,10 @@ def get_insight_activity( def update_text_tile( self, dashboard_id: int, - tile: Dict, + tile: dict, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: if team_id is None: team_id = self.team.id @@ -271,7 +271,7 @@ def set_tile_layout(self, dashboard_id: int, expected_tiles_to_update: int) -> N def add_insight_to_dashboard( self, - dashboard_ids: List[int], + 
dashboard_ids: list[int], insight_id: int, expected_status: int = status.HTTP_200_OK, ): diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index 2f3da77b65e0e..b4dee42a46c6a 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -58,6 +58,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -188,6 +189,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -315,6 +317,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -390,8 +393,8 @@ INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") INNER JOIN "posthog_organization" ON ("posthog_team"."organization_id" = "posthog_organization"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_dashboard"."team_id" = 2 + WHERE ("posthog_dashboard"."team_id" = 2 + AND NOT "posthog_dashboard"."deleted" AND "posthog_dashboard"."id" = 2) LIMIT 21 ''' @@ -535,6 +538,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -708,6 +712,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -891,6 +896,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1065,6 +1071,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1336,6 +1343,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1396,6 +1404,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1558,6 +1567,7 @@ "posthog_team"."capture_console_log_opt_in", 
"posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1671,6 +1681,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1731,6 +1742,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1889,6 +1901,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2018,6 +2031,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2283,6 +2297,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2528,6 +2543,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2615,8 +2631,8 @@ ''' SELECT COUNT(*) AS "__count" FROM "posthog_dashboard" - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_dashboard"."team_id" = 2) + WHERE ("posthog_dashboard"."team_id" = 2 + AND NOT "posthog_dashboard"."deleted") ''' # --- # name: TestDashboard.test_listing_dashboards_is_not_nplus1.55 @@ -2663,6 +2679,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2738,8 +2755,8 @@ INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") INNER JOIN "posthog_organization" ON ("posthog_team"."organization_id" = "posthog_organization"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_dashboard"."team_id" = 2) + WHERE ("posthog_dashboard"."team_id" = 2 + AND NOT "posthog_dashboard"."deleted") ORDER BY "posthog_dashboard"."name" ASC LIMIT 300 ''' @@ -2808,6 +2825,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2932,6 +2950,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", 
"posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3034,6 +3053,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3187,6 +3207,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3286,6 +3307,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3416,6 +3438,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3542,6 +3565,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3679,6 +3703,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4006,6 +4031,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4169,6 +4195,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4310,6 +4337,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4398,6 +4426,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4562,6 +4591,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4622,6 +4652,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4748,6 +4779,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", 
"posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4903,6 +4935,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5335,6 +5368,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5483,6 +5517,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5571,6 +5606,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5697,6 +5733,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5784,6 +5821,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5844,6 +5882,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5970,6 +6009,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -6113,6 +6153,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -6278,6 +6319,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -6696,6 +6738,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -6854,6 +6897,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", 
@@ -6929,8 +6973,8 @@ INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") INNER JOIN "posthog_organization" ON ("posthog_team"."organization_id" = "posthog_organization"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_dashboard"."team_id" = 2 + WHERE ("posthog_dashboard"."team_id" = 2 + AND NOT "posthog_dashboard"."deleted" AND "posthog_dashboard"."id" = 2) LIMIT 21 ''' @@ -7042,6 +7086,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -7215,6 +7260,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -7359,6 +7405,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -7451,6 +7498,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -7625,6 +7673,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -8272,6 +8321,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -8536,6 +8586,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -8700,6 +8751,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -8760,6 +8812,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -8886,6 +8939,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -9041,6 +9095,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", 
"posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -9167,6 +9222,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -9305,6 +9361,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -9448,6 +9505,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -9762,6 +9820,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -9921,6 +9980,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -10030,6 +10090,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -10164,6 +10225,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -10474,6 +10536,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -10612,6 +10675,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -10687,8 +10751,8 @@ INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") INNER JOIN "posthog_organization" ON ("posthog_team"."organization_id" = "posthog_organization"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_dashboard"."team_id" = 2 + WHERE ("posthog_dashboard"."team_id" = 2 + AND NOT "posthog_dashboard"."deleted" AND "posthog_dashboard"."id" = 2) LIMIT 21 ''' @@ -10800,6 +10864,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -10966,6 +11031,7 @@ "posthog_team"."capture_console_log_opt_in", 
"posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -11072,6 +11138,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -11241,6 +11308,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -11428,6 +11496,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -11541,6 +11610,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -11710,6 +11780,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -11797,8 +11868,8 @@ ''' SELECT COUNT(*) AS "__count" FROM "posthog_dashboard" - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_dashboard"."team_id" = 2) + WHERE ("posthog_dashboard"."team_id" = 2 + AND NOT "posthog_dashboard"."deleted") ''' # --- # name: TestDashboard.test_retrieve_dashboard_list.3 @@ -11856,6 +11927,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -11931,8 +12003,8 @@ INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") INNER JOIN "posthog_organization" ON ("posthog_team"."organization_id" = "posthog_organization"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_dashboard"."team_id" = 2) + WHERE ("posthog_dashboard"."team_id" = 2 + AND NOT "posthog_dashboard"."deleted") ORDER BY "posthog_dashboard"."name" ASC LIMIT 100 ''' @@ -12093,6 +12165,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/dashboards/test_dashboard.py b/posthog/api/test/dashboards/test_dashboard.py index e4c91f45149a1..7e8f3fafc87cf 100644 --- a/posthog/api/test/dashboards/test_dashboard.py +++ b/posthog/api/test/dashboards/test_dashboard.py @@ -1,5 +1,4 @@ import json -from typing import Dict from unittest import mock from unittest.mock import ANY, MagicMock, patch @@ -21,7 +20,7 @@ from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries, FuzzyInt from 
posthog.utils import generate_cache_key -valid_template: Dict = { +valid_template: dict = { "template_name": "Sign up conversion template with variables", "dashboard_description": "Use this template to see how many users sign up after visiting your pricing page.", "dashboard_filters": {}, @@ -1186,7 +1185,7 @@ def test_create_from_template_json(self, mock_capture) -> None: ) def test_create_from_template_json_must_provide_at_least_one_tile(self) -> None: - template: Dict = {**valid_template, "tiles": []} + template: dict = {**valid_template, "tiles": []} response = self.client.post( f"/api/projects/{self.team.id}/dashboards/create_from_template_json", @@ -1194,8 +1193,8 @@ def test_create_from_template_json_must_provide_at_least_one_tile(self) -> None: ) assert response.status_code == 400, response.json() - def test_create_from_template_json_cam_provide_text_tile(self) -> None: - template: Dict = { + def test_create_from_template_json_can_provide_text_tile(self) -> None: + template: dict = { **valid_template, "tiles": [{"type": "TEXT", "body": "hello world", "layouts": {}}], } @@ -1225,8 +1224,8 @@ def test_create_from_template_json_cam_provide_text_tile(self) -> None: }, ] - def test_create_from_template_json_cam_provide_query_tile(self) -> None: - template: Dict = { + def test_create_from_template_json_can_provide_query_tile(self) -> None: + template: dict = { **valid_template, # client provides an incorrect "empty" filter alongside a query "tiles": [ @@ -1288,11 +1287,25 @@ def test_create_from_template_json_cam_provide_query_tile(self) -> None: "kind": "DataTableNode", "columns": ["person", "id", "created_at", "person.$delete"], "source": { + "actionId": None, + "after": None, + "before": None, + "event": None, + "filterTestAccounts": None, + "fixedProperties": None, "kind": "EventsQuery", + "limit": None, + "modifiers": None, + "offset": None, + "orderBy": None, + "personId": None, + "properties": None, + "response": None, "select": ["*"], + "where": None, }, }, - "result": [], + "result": None, "saved": False, "short_id": ANY, "tags": [], diff --git a/posthog/api/test/dashboards/test_dashboard_duplication.py b/posthog/api/test/dashboards/test_dashboard_duplication.py index dbfa572e9c014..f477f9f1e0598 100644 --- a/posthog/api/test/dashboards/test_dashboard_duplication.py +++ b/posthog/api/test/dashboards/test_dashboard_duplication.py @@ -1,5 +1,3 @@ -from typing import Dict, List - from posthog.api.test.dashboards import DashboardAPI from posthog.test.base import APIBaseTest, QueryMatchingTest @@ -85,7 +83,7 @@ def test_duplicating_dashboard_without_duplicating_tiles(self) -> None: ] @staticmethod - def _tile_child_ids_from(dashboard_json: Dict) -> List[int]: + def _tile_child_ids_from(dashboard_json: dict) -> list[int]: return [ (tile.get("insight", None) or {}).get("id", None) or (tile.get("text", None) or {}).get("id", None) for tile in dashboard_json["tiles"] diff --git a/posthog/api/test/dashboards/test_dashboard_text_tiles.py b/posthog/api/test/dashboards/test_dashboard_text_tiles.py index 34b9366da5aeb..d3f899d72d284 100644 --- a/posthog/api/test/dashboards/test_dashboard_text_tiles.py +++ b/posthog/api/test/dashboards/test_dashboard_text_tiles.py @@ -1,5 +1,5 @@ import datetime -from typing import Dict, Optional, Union +from typing import Optional, Union from unittest import mock from freezegun import freeze_time @@ -16,7 +16,7 @@ def setUp(self) -> None: self.dashboard_api = DashboardAPI(self.client, self.team, self.assertEqual) @staticmethod - def _serialised_user(user: 
Optional[User]) -> Optional[Dict[str, Optional[Union[int, str]]]]: + def _serialised_user(user: Optional[User]) -> Optional[dict[str, Optional[Union[int, str]]]]: if user is None: return None @@ -37,7 +37,7 @@ def _expected_text( last_modified_by: Optional[User] = None, text_id: Optional[int] = None, last_modified_at: str = "2022-04-01T12:45:00Z", - ) -> Dict: + ) -> dict: if not created_by: created_by = self.user @@ -62,7 +62,7 @@ def _expected_tile_with_text( text_id: Optional[int] = None, color: Optional[str] = None, last_modified_at: str = "2022-04-01T12:45:00Z", - ) -> Dict: + ) -> dict: if not tile_id: tile_id = mock.ANY return { @@ -82,7 +82,7 @@ def _expected_tile_with_text( } @staticmethod - def _tile_layout(lg: Optional[Dict] = None) -> Dict: + def _tile_layout(lg: Optional[dict] = None) -> dict: if lg is None: lg = {"x": "0", "y": "0", "w": "6", "h": "5"} diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index e8aca8271e9d7..6655a327edee5 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -58,6 +58,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -195,6 +196,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -419,6 +421,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -527,6 +530,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py index 2779f1a226c78..f01d8fd6bc694 100644 --- a/posthog/api/test/notebooks/test_notebook.py +++ b/posthog/api/test/notebooks/test_notebook.py @@ -1,4 +1,3 @@ -from typing import List, Dict from unittest import mock from freezegun import freeze_time @@ -11,7 +10,7 @@ class TestNotebooks(APIBaseTest, QueryMatchingTest): - def created_activity(self, item_id: str, short_id: str) -> Dict: + def created_activity(self, item_id: str, short_id: str) -> dict: return { "activity": "created", "created_at": mock.ANY, @@ -30,11 +29,11 @@ def created_activity(self, item_id: str, short_id: str) -> Dict: }, } - def assert_notebook_activity(self, expected: List[Dict]) -> None: + def assert_notebook_activity(self, expected: list[dict]) -> None: activity_response = self.client.get(f"/api/projects/{self.team.id}/notebooks/activity") assert activity_response.status_code == status.HTTP_200_OK - activity: List[Dict] = activity_response.json()["results"] + activity: list[dict] = activity_response.json()["results"] self.maxDiff = None assert activity == expected @@ -78,7 +77,7 @@ def test_cannot_list_deleted_notebook(self) -> None: ), ] ) - def 
test_create_a_notebook(self, _, content: Dict | None, text_content: str | None) -> None: + def test_create_a_notebook(self, _, content: dict | None, text_content: str | None) -> None: response = self.client.post( f"/api/projects/{self.team.id}/notebooks", data={"content": content, "text_content": text_content}, diff --git a/posthog/api/test/notebooks/test_notebook_filtering.py b/posthog/api/test/notebooks/test_notebook_filtering.py index bbe191892d8e8..06b543deca4bc 100644 --- a/posthog/api/test/notebooks/test_notebook_filtering.py +++ b/posthog/api/test/notebooks/test_notebook_filtering.py @@ -1,4 +1,4 @@ -from typing import Dict, Any, List +from typing import Any from parameterized import parameterized from rest_framework import status @@ -59,7 +59,7 @@ class TestNotebooksFiltering(APIBaseTest, QueryMatchingTest): - def _create_notebook_with_content(self, inner_content: List[Dict[str, Any]], title: str = "the title") -> str: + def _create_notebook_with_content(self, inner_content: list[dict[str, Any]], title: str = "the title") -> str: response = self.client.post( f"/api/projects/{self.team.id}/notebooks", data={ @@ -83,7 +83,7 @@ def _create_notebook_with_content(self, inner_content: List[Dict[str, Any]], tit ["random", []], ] ) - def test_filters_based_on_title(self, search_text: str, expected_match_indexes: List[int]) -> None: + def test_filters_based_on_title(self, search_text: str, expected_match_indexes: list[int]) -> None: notebook_ids = [ self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="i ride around on a pony"), self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="my hobby is to fish around"), @@ -108,7 +108,7 @@ def test_filters_based_on_title(self, search_text: str, expected_match_indexes: ["neither", []], ] ) - def test_filters_based_on_text_content(self, search_text: str, expected_match_indexes: List[int]) -> None: + def test_filters_based_on_text_content(self, search_text: str, expected_match_indexes: list[int]) -> None: notebook_ids = [ # will match both pony and ponies self._create_notebook_with_content([BASIC_TEXT("you may ride a pony")], title="never matches"), diff --git a/posthog/api/test/openapi_validation.py b/posthog/api/test/openapi_validation.py index e86bf5198bb53..20d2fb1e1a603 100644 --- a/posthog/api/test/openapi_validation.py +++ b/posthog/api/test/openapi_validation.py @@ -1,7 +1,7 @@ import gzip import json from io import BytesIO -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast from urllib.parse import parse_qs import lzstring @@ -11,7 +11,7 @@ from jsonschema import validate -def validate_response(openapi_spec: Dict[str, Any], response: Any, path_override: Optional[str] = None): +def validate_response(openapi_spec: dict[str, Any], response: Any, path_override: Optional[str] = None): # Validates are response against the OpenAPI spec. If `path_override` is # provided, the path in the response will be overridden with the provided # value. This is useful for validating responses from e.g. 
the /batch diff --git a/posthog/api/test/test_activity_log.py b/posthog/api/test/test_activity_log.py index a7573f10cabd3..c386d30de6cfd 100644 --- a/posthog/api/test/test_activity_log.py +++ b/posthog/api/test/test_activity_log.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory, StepTickTimeFactory @@ -9,7 +9,7 @@ from posthog.test.base import APIBaseTest, QueryMatchingTest -def _feature_flag_json_payload(key: str) -> Dict: +def _feature_flag_json_payload(key: str) -> dict: return { "key": key, "name": "", @@ -103,7 +103,7 @@ def _create_and_edit_things(self): def _edit_them_all( self, - created_insights: List[int], + created_insights: list[int], flag_one: str, flag_two: str, notebook_short_id: str, @@ -269,10 +269,10 @@ def test_reading_notifications_marks_them_unread(self): def _create_insight( self, - data: Dict[str, Any], + data: dict[str, Any], team_id: Optional[int] = None, expected_status: int = status.HTTP_201_CREATED, - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: if team_id is None: team_id = self.team.id diff --git a/posthog/api/test/test_app_metrics.py b/posthog/api/test/test_app_metrics.py index 32ae14f01edc6..dd9c01ba023b6 100644 --- a/posthog/api/test/test_app_metrics.py +++ b/posthog/api/test/test_app_metrics.py @@ -1,12 +1,18 @@ +import datetime as dt +import json from unittest import mock from freezegun.api import freeze_time from rest_framework import status +from posthog.api.test.batch_exports.conftest import start_test_worker +from posthog.api.test.batch_exports.operations import create_batch_export_ok +from posthog.batch_exports.models import BatchExportRun from posthog.models.activity_logging.activity_log import Detail, Trigger, log_activity from posthog.models.plugin import Plugin, PluginConfig from posthog.models.utils import UUIDT from posthog.queries.app_metrics.test.test_app_metrics import create_app_metric +from posthog.temporal.common.client import sync_connect from posthog.test.base import APIBaseTest, ClickhouseTestMixin SAMPLE_PAYLOAD = {"dateRange": ["2021-06-10", "2022-06-12"], "parallelism": 1} @@ -72,6 +78,153 @@ def test_retrieve(self): }, ) + def test_retrieve_batch_export_runs_app_metrics(self): + """Test batch export metrics returned by app metrics endpoint.""" + destination_data = { + "type": "S3", + "config": { + "bucket_name": "my-production-s3-bucket", + "region": "us-east-1", + "prefix": "posthog-events/", + "aws_access_key_id": "abc123", + "aws_secret_access_key": "secret", + }, + } + + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": "hour", + } + + temporal = sync_connect() + + now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc) + with start_test_worker(temporal): + response = create_batch_export_ok( + self.client, + self.team.pk, + json.dumps(batch_export_data), + ) + + batch_export_id = response["id"] + for days_ago in range(0, 7): + last_updated_at = now - dt.timedelta(days=days_ago) + + with freeze_time(last_updated_at): + # Since 'last_updated_at' uses 'auto_now', passing the argument is ignored. + # We have to re-freeze time to get each run created on a single date. 
+ BatchExportRun.objects.create( + batch_export_id=batch_export_id, + data_interval_end=last_updated_at, + data_interval_start=last_updated_at - dt.timedelta(hours=1), + status=BatchExportRun.Status.COMPLETED, + records_completed=3, + records_total_count=3, + ) + + BatchExportRun.objects.create( + batch_export_id=batch_export_id, + data_interval_end=last_updated_at - dt.timedelta(hours=2), + data_interval_start=last_updated_at - dt.timedelta(hours=3), + status=BatchExportRun.Status.FAILED, + records_completed=0, + records_total_count=5, + ) + + response = self.client.get(f"/api/projects/@current/app_metrics/{batch_export_id}?date_from=-7d") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual( + response.json(), + { + "metrics": { + "dates": [ + "2021-11-29", + "2021-11-30", + "2021-12-01", + "2021-12-02", + "2021-12-03", + "2021-12-04", + "2021-12-05", + ], + "successes": [3, 3, 3, 3, 3, 3, 3], + "successes_on_retry": [0, 0, 0, 0, 0, 0, 0], + "failures": [5, 5, 5, 5, 5, 5, 5], + "totals": {"successes": 21, "successes_on_retry": 0, "failures": 35}, + }, + "errors": None, + }, + ) + + def test_retrieve_batch_export_runs_app_metrics_defaults_to_zero(self): + """Test batch export metrics returned by app metrics endpoint.""" + destination_data = { + "type": "S3", + "config": { + "bucket_name": "my-production-s3-bucket", + "region": "us-east-1", + "prefix": "posthog-events/", + "aws_access_key_id": "abc123", + "aws_secret_access_key": "secret", + }, + } + + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": "hour", + } + + temporal = sync_connect() + now = dt.datetime(2021, 12, 5, 13, 23, 0, tzinfo=dt.timezone.utc) + + with start_test_worker(temporal): + response = create_batch_export_ok( + self.client, + self.team.pk, + json.dumps(batch_export_data), + ) + batch_export_id = response["id"] + + for days_ago in range(0, 7): + last_updated_at = now - dt.timedelta(days=days_ago) + + with freeze_time(last_updated_at): + # Since 'last_updated_at' uses 'auto_now', passing the argument is ignored. + # We have to re-freeze time to get each run created on a single date. 
+ BatchExportRun.objects.create( + batch_export_id=batch_export_id, + data_interval_end=last_updated_at, + data_interval_start=last_updated_at - dt.timedelta(hours=1), + status=BatchExportRun.Status.COMPLETED, + records_completed=1, + records_total_count=1, + ) + + response = self.client.get(f"/api/projects/@current/app_metrics/{batch_export_id}?date_from=-7d") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual( + response.json(), + { + "metrics": { + "dates": [ + "2021-11-29", + "2021-11-30", + "2021-12-01", + "2021-12-02", + "2021-12-03", + "2021-12-04", + "2021-12-05", + ], + "successes": [1, 1, 1, 1, 1, 1, 1], + "successes_on_retry": [0, 0, 0, 0, 0, 0, 0], + "failures": [0, 0, 0, 0, 0, 0, 0], + "totals": {"successes": 7, "successes_on_retry": 0, "failures": 0}, + }, + "errors": None, + }, + ) + def test_list_historical_exports(self): self._create_activity_log( activity="job_triggered", diff --git a/posthog/api/test/test_capture.py b/posthog/api/test/test_capture.py index f771aca99b39d..1beb4e9724b39 100644 --- a/posthog/api/test/test_capture.py +++ b/posthog/api/test/test_capture.py @@ -25,7 +25,7 @@ from prance import ResolvingParser from rest_framework import status from token_bucket import Limiter, MemoryStorage -from typing import Any, Dict, List, Union, cast +from typing import Any, Union, cast from unittest.mock import ANY, MagicMock, call, patch from urllib.parse import quote @@ -60,7 +60,7 @@ def mocked_get_ingest_context_from_token(_: Any) -> None: url=str(pathlib.Path(__file__).parent / "../../../openapi/capture.yaml"), strict=True, ) -openapi_spec = cast(Dict[str, Any], parser.specification) +openapi_spec = cast(dict[str, Any], parser.specification) large_data_array = [ {"key": "".join(random.choice(string.ascii_letters) for _ in range(512 * 1024))} @@ -162,7 +162,7 @@ def setUp(self): # it is really important to know that /capture is CSRF exempt. 
Enforce checking in the client self.client = Client(enforce_csrf_checks=True) - def _to_json(self, data: Union[Dict, List]) -> str: + def _to_json(self, data: Union[dict, list]) -> str: return json.dumps(data) def _dict_to_b64(self, data: dict) -> str: @@ -188,7 +188,7 @@ def _to_arguments(self, patch_process_event_with_plugins: Any) -> dict: def _send_original_version_session_recording_event( self, number_of_events: int = 1, - event_data: Dict | None = None, + event_data: dict | None = None, snapshot_source=3, snapshot_type=1, session_id="abc123", @@ -229,7 +229,7 @@ def _send_original_version_session_recording_event( def _send_august_2023_version_session_recording_event( self, number_of_events: int = 1, - event_data: Dict | List[Dict] | None = None, + event_data: dict | list[dict] | None = None, session_id="abc123", window_id="def456", distinct_id="ghi789", @@ -241,7 +241,7 @@ def _send_august_2023_version_session_recording_event( # event_data is an array of RRWeb events event_data = [{"type": 3, "data": {"source": 1}}, {"type": 3, "data": {"source": 2}}] - if isinstance(event_data, Dict): + if isinstance(event_data, dict): event_data = [event_data] event = { @@ -260,7 +260,7 @@ def _send_august_2023_version_session_recording_event( "distinct_id": distinct_id, } - post_data: List[Dict[str, Any]] | Dict[str, Any] + post_data: list[dict[str, Any]] | dict[str, Any] if content_type == "application/json": post_data = [{**event, "api_key": self.team.api_token} for _ in range(number_of_events)] @@ -1254,7 +1254,7 @@ def test_js_library_underscore_sent_at(self, kafka_produce): } self.client.get( - "/e/?_=%s&data=%s" % (int(tomorrow_sent_at.timestamp()), quote(self._to_json(data))), + "/e/?_={}&data={}".format(int(tomorrow_sent_at.timestamp()), quote(self._to_json(data))), content_type="application/json", HTTP_ORIGIN="https://localhost", ) @@ -1283,7 +1283,7 @@ def test_long_distinct_id(self, kafka_produce): } self.client.get( - "/e/?_=%s&data=%s" % (int(tomorrow_sent_at.timestamp()), quote(self._to_json(data))), + "/e/?_={}&data={}".format(int(tomorrow_sent_at.timestamp()), quote(self._to_json(data))), content_type="application/json", HTTP_ORIGIN="https://localhost", ) @@ -1526,7 +1526,7 @@ def test_handle_invalid_snapshot(self): ), ] ) - def test_cors_allows_tracing_headers(self, _: str, path: str, headers: List[str]) -> None: + def test_cors_allows_tracing_headers(self, _: str, path: str, headers: list[str]) -> None: expected_headers = ",".join(["X-Requested-With", "Content-Type", *headers]) presented_headers = ",".join([*headers, "someotherrandomheader"]) response = self.client.options( diff --git a/posthog/api/test/test_cohort.py b/posthog/api/test/test_cohort.py index 0b1971f8f2cbb..4e1a3da2d526d 100644 --- a/posthog/api/test/test_cohort.py +++ b/posthog/api/test/test_cohort.py @@ -1,6 +1,6 @@ import json from datetime import datetime, timedelta -from typing import Any, Dict, List +from typing import Any from unittest.mock import patch from django.core.files.uploadedfile import SimpleUploadedFile @@ -1493,11 +1493,11 @@ def test_async_deletion_of_cohort_with_race_condition_multiple_updates(self, pat self.assertEqual(async_deletion.delete_verified_at is not None, True) -def create_cohort(client: Client, team_id: int, name: str, groups: List[Dict[str, Any]]): +def create_cohort(client: Client, team_id: int, name: str, groups: list[dict[str, Any]]): return client.post(f"/api/projects/{team_id}/cohorts", {"name": name, "groups": json.dumps(groups)}) -def create_cohort_ok(client: Client, 
team_id: int, name: str, groups: List[Dict[str, Any]]): +def create_cohort_ok(client: Client, team_id: int, name: str, groups: list[dict[str, Any]]): response = create_cohort(client=client, team_id=team_id, name=name, groups=groups) assert response.status_code == 201, response.content return response.json() diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index e89fb0b3c1270..c4c4b9d96358c 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -456,6 +456,16 @@ def test_user_autocapture_opt_out(self, *args): response = self._post_decide().json() self.assertEqual(response["autocapture_opt_out"], True) + def test_user_heatmaps_opt_in(self, *args): + # :TRICKY: Test for regression around caching + response = self._post_decide().json() + self.assertEqual(response["heatmaps"], False) + + self._update_team({"heatmaps_opt_in": True}) + + response = self._post_decide().json() + self.assertEqual(response["heatmaps"], True) + def test_user_session_recording_allowed_when_no_permitted_domains_are_set(self, *args): self._update_team({"session_recording_opt_in": True, "recording_domains": []}) @@ -3457,9 +3467,11 @@ def test_decide_doesnt_error_out_when_database_is_down_and_database_check_isnt_c # remove database check cache values postgres_healthcheck.cache_clear() - with connection.execute_wrapper(QueryTimeoutWrapper()), snapshot_postgres_queries_context( - self - ), self.assertNumQueries(1): + with ( + connection.execute_wrapper(QueryTimeoutWrapper()), + snapshot_postgres_queries_context(self), + self.assertNumQueries(1), + ): response = self._post_decide(api_version=3, origin="https://random.example.com").json() response = self._post_decide(api_version=3, origin="https://random.example.com").json() response = self._post_decide(api_version=3, origin="https://random.example.com").json() @@ -3607,8 +3619,10 @@ def test_healthcheck_uses_read_replica(self): self.organization, self.team, self.user = org, team, user # this create fills up team cache^ - with freeze_time("2021-01-01T00:00:00Z"), self.assertNumQueries(1, using="replica"), self.assertNumQueries( - 1, using="default" + with ( + freeze_time("2021-01-01T00:00:00Z"), + self.assertNumQueries(1, using="replica"), + self.assertNumQueries(1, using="default"), ): response = self._post_decide() # Replica queries: @@ -4031,9 +4045,11 @@ def test_feature_flags_v3_consistent_flags(self, mock_is_connected): # now main database is down, but does not affect replica - with connections["default"].execute_wrapper(QueryTimeoutWrapper()), self.assertNumQueries( - 13, using="replica" - ), self.assertNumQueries(0, using="default"): + with ( + connections["default"].execute_wrapper(QueryTimeoutWrapper()), + self.assertNumQueries(13, using="replica"), + self.assertNumQueries(0, using="default"), + ): # Replica queries: # E 1. SET LOCAL statement_timeout = 300 # E 2. 
WITH some CTEs, diff --git a/posthog/api/test/test_element.py b/posthog/api/test/test_element.py index 72a97ea2b9b43..25cd01df35398 100644 --- a/posthog/api/test/test_element.py +++ b/posthog/api/test/test_element.py @@ -1,6 +1,5 @@ import json from datetime import timedelta -from typing import Dict, List from django.test import override_settings from freezegun import freeze_time @@ -17,7 +16,7 @@ snapshot_postgres_queries, ) -expected_autocapture_data_response_results: List[Dict] = [ +expected_autocapture_data_response_results: list[dict] = [ { "count": 3, "hash": None, @@ -78,7 +77,7 @@ }, ] -expected_rage_click_data_response_results: List[Dict] = [ +expected_rage_click_data_response_results: list[dict] = [ { "count": 1, "hash": None, diff --git a/posthog/api/test/test_event_definition.py b/posthog/api/test/test_event_definition.py index aa2a2c05a2428..c530708886b70 100644 --- a/posthog/api/test/test_event_definition.py +++ b/posthog/api/test/test_event_definition.py @@ -1,6 +1,6 @@ import dataclasses from datetime import datetime -from typing import Any, Dict, List, Optional +from typing import Any, Optional from unittest.mock import ANY, patch from uuid import uuid4 @@ -20,7 +20,7 @@ class TestEventDefinitionAPI(APIBaseTest): demo_team: Team = None # type: ignore - EXPECTED_EVENT_DEFINITIONS: List[Dict[str, Any]] = [ + EXPECTED_EVENT_DEFINITIONS: list[dict[str, Any]] = [ {"name": "installed_app"}, {"name": "rated_app"}, {"name": "purchase"}, @@ -54,7 +54,7 @@ def test_list_event_definitions(self): self.assertEqual(len(response.json()["results"]), len(self.EXPECTED_EVENT_DEFINITIONS)) for item in self.EXPECTED_EVENT_DEFINITIONS: - response_item: Dict[str, Any] = next( + response_item: dict[str, Any] = next( (_i for _i in response.json()["results"] if _i["name"] == item["name"]), {}, ) @@ -199,7 +199,7 @@ class EventData: team_id: int distinct_id: str timestamp: datetime - properties: Dict[str, Any] + properties: dict[str, Any] def capture_event(event: EventData): @@ -222,7 +222,7 @@ def capture_event(event: EventData): ) -def create_event_definitions(event_definition: Dict, team_id: int) -> EventDefinition: +def create_event_definitions(event_definition: dict, team_id: int) -> EventDefinition: """ Create event definition for a team. 
""" diff --git a/posthog/api/test/test_exports.py b/posthog/api/test/test_exports.py index eead54e9055de..5e80486620693 100644 --- a/posthog/api/test/test_exports.py +++ b/posthog/api/test/test_exports.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional +from typing import Optional from unittest.mock import patch from datetime import datetime, timedelta import celery @@ -435,10 +435,10 @@ def _get_insight_activity(self, insight_id: int, expected_status: int = status.H self.assertEqual(activity.status_code, expected_status) return activity.json() - def _assert_logs_the_activity(self, insight_id: int, expected: List[Dict]) -> None: + def _assert_logs_the_activity(self, insight_id: int, expected: list[dict]) -> None: activity_response = self._get_insight_activity(insight_id) - activity: List[Dict] = activity_response["results"] + activity: list[dict] = activity_response["results"] self.maxDiff = None self.assertEqual(activity, expected) @@ -463,7 +463,7 @@ def test_can_list_exports(self) -> None: class TestExportMixin(APIBaseTest): - def _get_export_output(self, path: str) -> List[str]: + def _get_export_output(self, path: str) -> list[str]: """ Use this function to test the CSV output of exports in other tests """ diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index 770883a191490..4c353b98124df 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Dict, List, Optional +from typing import Optional from unittest.mock import call, patch from django.core.cache import cache @@ -83,6 +83,166 @@ def test_cant_create_flag_with_duplicate_key(self): ) self.assertEqual(FeatureFlag.objects.count(), count) + def test_cant_create_flag_with_invalid_filters(self): + count = FeatureFlag.objects.count() + + response = self.client.post( + f"/api/projects/{self.team.id}/feature_flags", + { + "name": "Beta feature", + "key": "beta-x", + "filters": { + "groups": [ + { + "rollout_percentage": 65, + "properties": [ + { + "key": "email", + "type": "person", + "value": ["@posthog.com"], + "operator": "icontains", + } + ], + } + ] + }, + }, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.json(), + { + "type": "validation_error", + "code": "invalid_value", + "detail": "Invalid value for operator icontains: ['@posthog.com']", + "attr": "filters", + }, + ) + + response = self.client.post( + f"/api/projects/{self.team.id}/feature_flags", + { + "name": "Beta feature", + "key": "beta-x", + "filters": { + "groups": [ + { + "rollout_percentage": 65, + "properties": [ + { + "key": "email", + "type": "person", + "value": ["@posthog.com"], + "operator": "regex", + } + ], + } + ] + }, + }, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.json(), + { + "type": "validation_error", + "code": "invalid_value", + "detail": "Invalid value for operator regex: ['@posthog.com']", + "attr": "filters", + }, + ) + + response = self.client.post( + f"/api/projects/{self.team.id}/feature_flags", + { + "name": "Beta feature", + "key": "beta-x", + "filters": { + "groups": [ + { + "rollout_percentage": 65, + "properties": [ + { + "key": "email", + "type": "person", + "value": ["@posthog.com"], + "operator": "not_icontains", + } + ], + } + ] + }, + }, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.json(), + { + "type": 
"validation_error", + "code": "invalid_value", + "detail": "Invalid value for operator not_icontains: ['@posthog.com']", + "attr": "filters", + }, + ) + + response = self.client.post( + f"/api/projects/{self.team.id}/feature_flags", + { + "name": "Beta feature", + "key": "beta-x", + "filters": { + "groups": [ + { + "rollout_percentage": 65, + "properties": [ + { + "key": "email", + "type": "person", + "value": ["@posthog.com"], + "operator": "not_regex", + } + ], + } + ] + }, + }, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.json(), + { + "type": "validation_error", + "code": "invalid_value", + "detail": "Invalid value for operator not_regex: ['@posthog.com']", + "attr": "filters", + }, + ) + self.assertEqual(FeatureFlag.objects.count(), count) + + response = self.client.post( + f"/api/projects/{self.team.id}/feature_flags", + { + "name": "Beta feature", + "key": "beta-x", + "filters": { + "groups": [ + { + "rollout_percentage": 65, + "properties": [ + { + "key": "email", + "type": "person", + "value": '["@posthog.com"]', # fine as long as a string + "operator": "not_regex", + } + ], + } + ] + }, + }, + ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + def test_cant_update_flag_with_duplicate_key(self): another_feature_flag = FeatureFlag.objects.create( team=self.team, @@ -3497,10 +3657,10 @@ def _get_feature_flag_activity( self.assertEqual(activity.status_code, expected_status) return activity.json() - def assert_feature_flag_activity(self, flag_id: Optional[int], expected: List[Dict]): + def assert_feature_flag_activity(self, flag_id: Optional[int], expected: list[dict]): activity_response = self._get_feature_flag_activity(flag_id) - activity: List[Dict] = activity_response["results"] + activity: list[dict] = activity_response["results"] self.maxDiff = None assert activity == expected @@ -3738,7 +3898,7 @@ def test_feature_flag_dashboard(self): self.assertEqual(response.status_code, status.HTTP_200_OK) response_json = response.json() - self.assertEquals(len(response_json["analytics_dashboards"]), 1) + self.assertEqual(len(response_json["analytics_dashboards"]), 1) # check deleting the dashboard doesn't delete flag, but deletes the relationship dashboard.delete() @@ -3768,7 +3928,7 @@ def test_feature_flag_dashboard_patch(self): self.assertEqual(response.status_code, status.HTTP_200_OK) response_json = response.json() - self.assertEquals(len(response_json["analytics_dashboards"]), 1) + self.assertEqual(len(response_json["analytics_dashboards"]), 1) def test_feature_flag_dashboard_already_exists(self): another_feature_flag = FeatureFlag.objects.create( @@ -3794,7 +3954,7 @@ def test_feature_flag_dashboard_already_exists(self): self.assertEqual(response.status_code, status.HTTP_200_OK) response_json = response.json() - self.assertEquals(len(response_json["analytics_dashboards"]), 1) + self.assertEqual(len(response_json["analytics_dashboards"]), 1) @freeze_time("2021-01-01") @snapshot_clickhouse_queries @@ -3828,8 +3988,11 @@ def test_creating_static_cohort(self): ) flush_persons_and_events() - with snapshot_postgres_queries_context(self), self.settings( - CELERY_TASK_ALWAYS_EAGER=True, PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False + with ( + snapshot_postgres_queries_context(self), + self.settings( + CELERY_TASK_ALWAYS_EAGER=True, PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False + ), ): response = self.client.post( 
f"/api/projects/{self.team.id}/feature_flags/{flag.id}/create_static_cohort_for_flag", @@ -5168,9 +5331,13 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): self.assertFalse(errors) # now db is slow and times out - with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", - 500, + with ( + snapshot_postgres_queries_context(self), + connection.execute_wrapper(slow_query), + patch( + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, + ), ): mock_postgres_check.return_value = False all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id") @@ -5263,10 +5430,15 @@ def test_feature_flags_v3_with_skip_database_setting(self, mock_postgres_check): self.assertTrue(errors) # db is slow and times out, but shouldn't matter to us - with self.assertNumQueries(0), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", - 500, - ), self.settings(DECIDE_SKIP_POSTGRES_FLAGS=True): + with ( + self.assertNumQueries(0), + connection.execute_wrapper(slow_query), + patch( + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, + ), + self.settings(DECIDE_SKIP_POSTGRES_FLAGS=True), + ): mock_postgres_check.return_value = False all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id") @@ -5376,10 +5548,15 @@ def test_feature_flags_v3_with_slow_db_doesnt_try_to_compute_conditions_again(se self.assertFalse(errors) # now db is slow and times out - with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", - 500, - ), self.assertNumQueries(4): + with ( + snapshot_postgres_queries_context(self), + connection.execute_wrapper(slow_query), + patch( + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, + ), + self.assertNumQueries(4), + ): # no extra queries to get person properties for the second flag after first one failed all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id") @@ -5467,9 +5644,13 @@ def test_feature_flags_v3_with_group_properties_and_slow_db(self, mock_counter, self.assertFalse(errors) # now db is slow - with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", - 500, + with ( + snapshot_postgres_queries_context(self), + connection.execute_wrapper(slow_query), + patch( + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, + ), ): with self.assertNumQueries(4): all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id", groups={"organization": "org:1"}) @@ -5577,9 +5758,13 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ self.assertFalse(errors) # db is slow and times out - with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", - 500, + with ( + snapshot_postgres_queries_context(self), + connection.execute_wrapper(slow_query), + patch( + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, + ), ): all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id", hash_key_override="random") diff --git 
a/posthog/api/test/test_feature_flag_utils.py b/posthog/api/test/test_feature_flag_utils.py index 53369794dfe4f..c13bf04b6708a 100644 --- a/posthog/api/test/test_feature_flag_utils.py +++ b/posthog/api/test/test_feature_flag_utils.py @@ -1,4 +1,3 @@ -from typing import Set from posthog.models.cohort.cohort import CohortOrEmpty from posthog.test.base import ( APIBaseTest, @@ -68,7 +67,7 @@ def create_cohort(name): self.assertEqual(topologically_sorted_cohort_ids, destination_creation_order) def test_empty_cohorts_set(self): - cohort_ids: Set[int] = set() + cohort_ids: set[int] = set() seen_cohorts_cache: dict[int, CohortOrEmpty] = {} topologically_sorted_cohort_ids = sort_cohorts_topologically(cohort_ids, seen_cohorts_cache) self.assertEqual(topologically_sorted_cohort_ids, []) diff --git a/posthog/api/test/test_ingestion_warnings.py b/posthog/api/test/test_ingestion_warnings.py index bdf3996955909..05e893babfa3e 100644 --- a/posthog/api/test/test_ingestion_warnings.py +++ b/posthog/api/test/test_ingestion_warnings.py @@ -1,5 +1,4 @@ import json -from typing import Dict from freezegun.api import freeze_time from rest_framework import status @@ -13,7 +12,7 @@ from posthog.utils import cast_timestamp_or_now -def create_ingestion_warning(team_id: int, type: str, details: Dict, timestamp: str, source=""): +def create_ingestion_warning(team_id: int, type: str, details: dict, timestamp: str, source=""): timestamp = cast_timestamp_or_now(timestamp) data = { "team_id": team_id, diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py index f707f0330b7fb..07404b07db16f 100644 --- a/posthog/api/test/test_insight.py +++ b/posthog/api/test/test_insight.py @@ -1,6 +1,6 @@ import json from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional +from typing import Any, Optional from unittest import mock from unittest.case import skip from unittest.mock import patch @@ -34,6 +34,7 @@ DateRange, EventPropertyFilter, EventsNode, + EventsQuery, HogQLFilters, HogQLQuery, TrendsQuery, @@ -342,7 +343,7 @@ def test_basic_results(self) -> None: @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) @snapshot_postgres_queries def test_listing_insights_does_not_nplus1(self) -> None: - query_counts: List[int] = [] + query_counts: list[int] = [] queries = [] for i in range(5): @@ -1159,9 +1160,9 @@ def test_insight_refreshing_query(self, spy_execute_hogql_query) -> None: series=[ EventsNode( event="$pageview", - properties=[EventPropertyFilter(key="another", value="never_return_this", operator="is_not")], ) - ] + ], + properties=[EventPropertyFilter(key="another", value="never_return_this", operator="is_not")], ).model_dump() with freeze_time("2012-01-15T04:01:34.000Z"): @@ -1245,12 +1246,13 @@ def test_insight_refreshing_query(self, spy_execute_hogql_query) -> None: #  Test property filter - dashboard = Dashboard.objects.get(pk=dashboard_id) - dashboard.filters = { - "properties": [{"key": "prop", "value": "val"}], - "date_from": "-14d", - } - dashboard.save() + Dashboard.objects.update( + id=dashboard_id, + filters={ + "properties": [{"key": "prop", "value": "val"}], + "date_from": "-14d", + }, + ) with freeze_time("2012-01-16T05:01:34.000Z"): response = self.client.get( f"/api/projects/{self.team.id}/insights/{insight_id}/?refresh=true&from_dashboard={dashboard_id}" @@ -1278,7 +1280,7 @@ def test_insight_refreshing_query(self, spy_execute_hogql_query) -> None: ], ) - def test_dashboard_filters_applied_to_data_table_node(self): + def 
test_dashboard_filters_applied_to_sql_data_table_node(self): dashboard_id, _ = self.dashboard_api.create_dashboard( {"name": "the dashboard", "filters": {"date_from": "-180d"}} ) @@ -1328,6 +1330,29 @@ def test_dashboard_filters_applied_to_data_visualization_node(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json()["query"]["source"]["filters"]["dateRange"]["date_from"], "-180d") + def test_dashboard_filters_applied_to_events_query_data_table_node(self): + dashboard_id, _ = self.dashboard_api.create_dashboard( + {"name": "the dashboard", "filters": {"date_from": "-180d"}} + ) + query = DataTableNode( + source=EventsQuery(select=["uuid", "event", "timestamp"], after="-3d").model_dump(), + ).model_dump() + insight_id, _ = self.dashboard_api.create_insight( + {"query": query, "name": "insight", "dashboards": [dashboard_id]} + ) + + response = self.client.get(f"/api/projects/{self.team.id}/insights/{insight_id}/") + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json()["query"], query) + + response = self.client.get( + f"/api/projects/{self.team.id}/insights/{insight_id}/?refresh=true&from_dashboard={dashboard_id}" + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json()["query"]["source"]["after"], "-180d") + # BASIC TESTING OF ENDPOINTS. /queries as in depth testing for each insight def test_insight_trends_basic(self) -> None: @@ -2035,7 +2060,7 @@ def test_insight_trends_allowed_if_project_open_and_org_member(self) -> None: ) self.assertEqual(response.status_code, status.HTTP_200_OK) - def _create_one_person_cohort(self, properties: List[Dict[str, Any]]) -> int: + def _create_one_person_cohort(self, properties: list[dict[str, Any]]) -> int: Person.objects.create(team=self.team, properties=properties) cohort_one_id = self.client.post( f"/api/projects/{self.team.id}/cohorts", @@ -2402,7 +2427,7 @@ def test_soft_delete_can_be_reversed_by_patch(self) -> None: # assert that undeletes end up in the activity log activity_response = self.dashboard_api.get_insight_activity(insight_id) - activity: List[Dict] = activity_response["results"] + activity: list[dict] = activity_response["results"] # we will have three logged activities (in reverse order) undelete, delete, create assert [a["activity"] for a in activity] == ["updated", "updated", "created"] undelete_change_log = activity[0]["detail"]["changes"][0] @@ -2454,10 +2479,10 @@ def _get_insight_with_client_query_id(self, client_query_id: str) -> None: query_params = f"?events={json.dumps([{'id': '$pageview', }])}&client_query_id={client_query_id}" self.client.get(f"/api/projects/{self.team.id}/insights/trend/{query_params}").json() - def assert_insight_activity(self, insight_id: Optional[int], expected: List[Dict]): + def assert_insight_activity(self, insight_id: Optional[int], expected: list[dict]): activity_response = self.dashboard_api.get_insight_activity(insight_id) - activity: List[Dict] = activity_response["results"] + activity: list[dict] = activity_response["results"] self.maxDiff = None assert activity == expected diff --git a/posthog/api/test/test_insight_funnels.py b/posthog/api/test/test_insight_funnels.py index b02ebfec558da..3b4c1403a58ac 100644 --- a/posthog/api/test/test_insight_funnels.py +++ b/posthog/api/test/test_insight_funnels.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Any, Dict, List, Union +from typing import Any, Union from django.test.client import Client from 
rest_framework import status @@ -1004,7 +1004,7 @@ def test_multi_property_breakdown(self): self.assertEqual(["Chrome", "95"], result[1][1]["breakdown_value"]) @staticmethod - def as_result(breakdown_properties: Union[str, List[str]]) -> Dict[str, Any]: + def as_result(breakdown_properties: Union[str, list[str]]) -> dict[str, Any]: return { "action_id": "$pageview", "name": "$pageview", diff --git a/posthog/api/test/test_insight_query.py b/posthog/api/test/test_insight_query.py index 6279999bbefcb..19044cd937bfd 100644 --- a/posthog/api/test/test_insight_query.py +++ b/posthog/api/test/test_insight_query.py @@ -1,5 +1,3 @@ -from typing import List - from rest_framework import status from ee.api.test.base import LicensedTestMixin @@ -213,7 +211,7 @@ def test_listing_insights_by_default_does_not_include_those_with_only_queries(se }, ) - created_insights: List[Insight] = list(Insight.objects.all()) + created_insights: list[Insight] = list(Insight.objects.all()) assert len(created_insights) == 2 listed_insights = self.dashboard_api.list_insights(query_params={"include_query_insights": False}) @@ -236,7 +234,7 @@ def test_can_list_insights_including_those_with_only_queries(self) -> None: }, ) - created_insights: List[Insight] = list(Insight.objects.all()) + created_insights: list[Insight] = list(Insight.objects.all()) assert len(created_insights) == 2 listed_insights = self.dashboard_api.list_insights(query_params={"include_query_insights": True}) diff --git a/posthog/api/test/test_kafka_inspector.py b/posthog/api/test/test_kafka_inspector.py index 6a42741a47ff1..b9a02d0464e14 100644 --- a/posthog/api/test/test_kafka_inspector.py +++ b/posthog/api/test/test_kafka_inspector.py @@ -1,5 +1,5 @@ import json -from typing import Dict, List, Union +from typing import Union from unittest.mock import patch from rest_framework import status @@ -14,7 +14,7 @@ def setUp(self): self.user.is_staff = True self.user.save() - def _to_json(self, data: Union[Dict, List]) -> str: + def _to_json(self, data: Union[dict, list]) -> str: return json.dumps(data) @patch( diff --git a/posthog/api/test/test_organization_feature_flag.py b/posthog/api/test/test_organization_feature_flag.py index 41960032ca8b7..f1ad4ba26fb06 100644 --- a/posthog/api/test/test_organization_feature_flag.py +++ b/posthog/api/test/test_organization_feature_flag.py @@ -11,7 +11,7 @@ from posthog.models.early_access_feature import EarlyAccessFeature from posthog.api.dashboards.dashboard import Dashboard from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries -from typing import Any, Dict +from typing import Any class TestOrganizationFeatureFlagGet(APIBaseTest, QueryMatchingTest): @@ -382,7 +382,7 @@ def test_copy_feature_flag_update_override_deleted(self): def test_copy_feature_flag_missing_fields(self): url = f"/api/organizations/{self.organization.id}/feature_flags/copy_flags" - data: Dict[str, Any] = {} + data: dict[str, Any] = {} response = self.client.post(url, data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/posthog/api/test/test_person.py b/posthog/api/test/test_person.py index 815f38c472978..a97e9d25de095 100644 --- a/posthog/api/test/test_person.py +++ b/posthog/api/test/test_person.py @@ -1,5 +1,5 @@ import json -from typing import Dict, List, Optional, cast +from typing import Optional, cast from unittest import mock from unittest.mock import patch, Mock @@ -982,10 +982,10 @@ def _get_person_activity( self.assertEqual(activity.status_code, expected_status) return 
activity.json() - def _assert_person_activity(self, person_id: Optional[str], expected: List[Dict]): + def _assert_person_activity(self, person_id: Optional[str], expected: list[dict]): activity_response = self._get_person_activity(person_id) - activity: List[Dict] = activity_response["results"] + activity: list[dict] = activity_response["results"] self.maxDiff = None self.assertCountEqual(activity, expected) diff --git a/posthog/api/test/test_plugin.py b/posthog/api/test/test_plugin.py index 16e0fc4c0d1d0..06642b460980f 100644 --- a/posthog/api/test/test_plugin.py +++ b/posthog/api/test/test_plugin.py @@ -1,7 +1,7 @@ import base64 import json from datetime import datetime -from typing import Dict, List, cast +from typing import cast from unittest import mock from unittest.mock import ANY, patch @@ -52,10 +52,10 @@ def _get_plugin_activity(self, expected_status: int = status.HTTP_200_OK): self.assertEqual(activity.status_code, expected_status) return activity.json() - def assert_plugin_activity(self, expected: List[Dict]): + def assert_plugin_activity(self, expected: list[dict]): activity_response = self._get_plugin_activity() - activity: List[Dict] = activity_response["results"] + activity: list[dict] = activity_response["results"] self.maxDiff = None self.assertEqual(activity, expected) @@ -586,7 +586,7 @@ def test_create_plugin_version_range_eq_next_minor(self, mock_get, mock_reload): ) self.assertEqual(response.status_code, 400) self.assertEqual( - cast(Dict[str, str], response.json())["detail"], + cast(dict[str, str], response.json())["detail"], f'Currently running PostHog version {FROZEN_POSTHOG_VERSION} does not match this plugin\'s semantic version requirement "{FROZEN_POSTHOG_VERSION.next_minor()}".', ) @@ -608,7 +608,7 @@ def test_create_plugin_version_range_gt_next_major(self, mock_get, mock_reload): ) self.assertEqual(response.status_code, 400) self.assertEqual( - cast(Dict[str, str], response.json())["detail"], + cast(dict[str, str], response.json())["detail"], f'Currently running PostHog version {FROZEN_POSTHOG_VERSION} does not match this plugin\'s semantic version requirement ">= {FROZEN_POSTHOG_VERSION.next_major()}".', ) @@ -620,7 +620,7 @@ def test_create_plugin_version_range_lt_current(self, mock_get, mock_reload): ) self.assertEqual(response.status_code, 400) self.assertEqual( - cast(Dict[str, str], response.json())["detail"], + cast(dict[str, str], response.json())["detail"], f'Currently running PostHog version {FROZEN_POSTHOG_VERSION} does not match this plugin\'s semantic version requirement "< {FROZEN_POSTHOG_VERSION}".', ) @@ -642,7 +642,7 @@ def test_create_plugin_version_range_lt_invalid(self, mock_get, mock_reload): ) self.assertEqual(response.status_code, 400) self.assertEqual( - cast(Dict[str, str], response.json())["detail"], + cast(dict[str, str], response.json())["detail"], 'Invalid PostHog semantic version requirement "< ..."!', ) diff --git a/posthog/api/test/test_properties_timeline.py b/posthog/api/test/test_properties_timeline.py index 5243151c27e09..d8b8a11e9099a 100644 --- a/posthog/api/test/test_properties_timeline.py +++ b/posthog/api/test/test_properties_timeline.py @@ -1,7 +1,7 @@ import json import random import uuid -from typing import Any, Dict, Literal, Optional +from typing import Any, Literal, Optional from freezegun.api import freeze_time from rest_framework import status @@ -52,7 +52,7 @@ def _create_actor(self, properties: dict) -> str: return group.group_key def _create_event(self, event: str, timestamp: str, actor_properties: dict): - 
create_event_kwargs: Dict[str, Any] = {} + create_event_kwargs: dict[str, Any] = {} if actor_type == "person": create_event_kwargs["person_id"] = main_actor_id create_event_kwargs["person_properties"] = actor_properties diff --git a/posthog/api/test/test_property_definition.py b/posthog/api/test/test_property_definition.py index 77dca5e833076..378f66d7884a5 100644 --- a/posthog/api/test/test_property_definition.py +++ b/posthog/api/test/test_property_definition.py @@ -1,5 +1,5 @@ import json -from typing import Dict, List, Optional, Union +from typing import Optional, Union from unittest.mock import ANY, patch from rest_framework import status @@ -17,7 +17,7 @@ class TestPropertyDefinitionAPI(APIBaseTest): - EXPECTED_PROPERTY_DEFINITIONS: List[Dict[str, Union[str, Optional[int], bool]]] = [ + EXPECTED_PROPERTY_DEFINITIONS: list[dict[str, Union[str, Optional[int], bool]]] = [ {"name": "$browser", "is_numerical": False}, {"name": "$current_url", "is_numerical": False}, {"name": "$lib", "is_numerical": False}, @@ -69,7 +69,7 @@ def test_list_property_definitions(self): self.assertEqual(len(response.json()["results"]), len(self.EXPECTED_PROPERTY_DEFINITIONS)) for item in self.EXPECTED_PROPERTY_DEFINITIONS: - response_item: Dict = next( + response_item: dict = next( (_i for _i in response.json()["results"] if _i["name"] == item["name"]), {}, ) diff --git a/posthog/api/test/test_session.py b/posthog/api/test/test_session.py new file mode 100644 index 0000000000000..46fcafabd7c13 --- /dev/null +++ b/posthog/api/test/test_session.py @@ -0,0 +1,137 @@ +import uuid + +from rest_framework import status + +from posthog.models.event.util import create_event +from posthog.test.base import APIBaseTest + + +class TestSessionsAPI(APIBaseTest): + def setUp(self) -> None: + super().setUp() + + create_event( + team=self.team, + event="$pageview", + distinct_id="d1", + properties={"$session_id": "s1", "utm_source": "google"}, + event_uuid=(uuid.uuid4()), + ) + create_event( + team=self.team, + event="$pageview", + distinct_id="d1", + properties={"$session_id": "s1", "utm_source": "youtube"}, + event_uuid=(uuid.uuid4()), + ) + + def test_expected_session_properties(self): + response = self.client.get(f"/api/projects/{self.team.pk}/sessions/property_definitions/") + self.assertEqual(response.status_code, status.HTTP_200_OK) + actual_properties = {entry["name"] for entry in response.json()["results"]} + expected_properties = { + "$autocapture_count", + "$channel_type", + "$end_timestamp", + "$entry_url", + "$exit_url", + "$initial_gad_source", + "$initial_gclid", + "$initial_referring_domain", + "$initial_utm_campaign", + "$initial_utm_content", + "$initial_utm_medium", + "$initial_utm_source", + "$initial_utm_term", + "$pageview_count", + "$session_duration", + "$start_timestamp", + } + assert actual_properties == expected_properties + + def test_search_session_properties(self): + response = self.client.get(f"/api/projects/{self.team.pk}/sessions/property_definitions/?search=utm") + self.assertEqual(response.status_code, status.HTTP_200_OK) + actual_properties = {entry["name"] for entry in response.json()["results"]} + expected_properties = { + "$initial_utm_campaign", + "$initial_utm_content", + "$initial_utm_medium", + "$initial_utm_source", + "$initial_utm_term", + } + assert actual_properties == expected_properties + + def test_empty_search_session_properties(self): + response = self.client.get(f"/api/projects/{self.team.pk}/sessions/property_definitions/?search=doesnotexist") + 
self.assertEqual(response.status_code, status.HTTP_200_OK) + assert len(response.json()["results"]) == 0 + + def test_list_channel_type_values(self): + response = self.client.get(f"/api/projects/{self.team.pk}/sessions/values/?key=$channel_type") + self.assertEqual(response.status_code, status.HTTP_200_OK) + actual_values = {entry["name"] for entry in response.json()} + expected_values = { + "Affiliate", + "Audio", + "Cross Network", + "Direct", + "Email", + "Organic Search", + "Organic Shopping", + "Organic Video", + "Other", + "Paid Other", + "Paid Search", + "Paid Shopping", + "Paid Video", + "Push", + "Referral", + "SMS", + } + assert actual_values == expected_values + + def test_search_channel_type_values(self): + response = self.client.get(f"/api/projects/{self.team.pk}/sessions/values/?key=$channel_type&value=paid") + self.assertEqual(response.status_code, status.HTTP_200_OK) + actual_values = {entry["name"] for entry in response.json()} + expected_values = { + "Paid Other", + "Paid Search", + "Paid Shopping", + "Paid Video", + } + assert actual_values == expected_values + + def test_list_session_property_values(self): + response = self.client.get(f"/api/projects/{self.team.pk}/sessions/values/?key=$initial_utm_source") + self.assertEqual(response.status_code, status.HTTP_200_OK) + actual_values = {entry["name"] for entry in response.json()} + expected_values = { + "google", + "youtube", + } + assert actual_values == expected_values + + def test_search_session_property_values(self): + response = self.client.get(f"/api/projects/{self.team.pk}/sessions/values/?key=$initial_utm_source&value=tub") + self.assertEqual(response.status_code, status.HTTP_200_OK) + actual_values = {entry["name"] for entry in response.json()} + expected_values = { + "youtube", + } + assert actual_values == expected_values + + def test_search_session_property_no_matching_values(self): + response = self.client.get( + f"/api/projects/{self.team.pk}/sessions/values/?key=$initial_utm_source&value=doesnotexist" + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + assert len(response.json()) == 0 + + def test_search_missing_session_property_values(self): + response = self.client.get( + f"/api/projects/{self.team.pk}/sessions/values/?key=$initial_utm_source&value=doesnotexist" + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + assert len(response.json()) == 0 diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py index 1587c0b365e9e..532f7b945e7ca 100644 --- a/posthog/api/test/test_signup.py +++ b/posthog/api/test/test_signup.py @@ -1,6 +1,6 @@ import datetime import uuid -from typing import Dict, Optional, cast +from typing import Optional, cast from unittest import mock from unittest.mock import ANY, patch from zoneinfo import ZoneInfo @@ -294,7 +294,7 @@ def test_cant_sign_up_with_required_attributes_null(self): required_attributes = ["first_name", "email"] for attribute in required_attributes: - body: Dict[str, Optional[str]] = { + body: dict[str, Optional[str]] = { "first_name": "Jane", "email": "invalid@posthog.com", "password": "notsecure", diff --git a/posthog/api/test/test_site_app.py b/posthog/api/test/test_site_app.py index 82823ac4cf4ed..9a428774c6ea7 100644 --- a/posthog/api/test/test_site_app.py +++ b/posthog/api/test/test_site_app.py @@ -1,5 +1,3 @@ -from typing import List - from django.test.client import Client from rest_framework import status @@ -44,7 +42,7 @@ def test_site_app(self): ) def test_get_site_config_from_schema(self): - schema: 
List[dict] = [{"key": "in_site", "site": True}, {"key": "not_in_site"}] + schema: list[dict] = [{"key": "in_site", "site": True}, {"key": "not_in_site"}] config = {"in_site": "123", "not_in_site": "12345"} self.assertEqual(get_site_config_from_schema(schema, config), {"in_site": "123"}) self.assertEqual(get_site_config_from_schema(None, None), {}) diff --git a/posthog/api/test/test_stickiness.py b/posthog/api/test/test_stickiness.py index 56d610c205e65..b3942414d5459 100644 --- a/posthog/api/test/test_stickiness.py +++ b/posthog/api/test/test_stickiness.py @@ -1,7 +1,7 @@ import uuid from dataclasses import dataclass from datetime import datetime, timedelta -from typing import Any, Dict, Optional, Union +from typing import Any, Optional, Union from dateutil.relativedelta import relativedelta from django.test import override_settings @@ -20,26 +20,26 @@ from posthog.utils import encode_get_request_params -def get_stickiness(client: Client, team: Team, request: Dict[str, Any]): +def get_stickiness(client: Client, team: Team, request: dict[str, Any]): return client.get(f"/api/projects/{team.pk}/insights/trend/", data=request) -def get_stickiness_ok(client: Client, team: Team, request: Dict[str, Any]): +def get_stickiness_ok(client: Client, team: Team, request: dict[str, Any]): response = get_stickiness(client=client, team=team, request=encode_get_request_params(data=request)) assert response.status_code == 200, response.content return response.json() -def get_stickiness_time_series_ok(client: Client, team: Team, request: Dict[str, Any]): +def get_stickiness_time_series_ok(client: Client, team: Team, request: dict[str, Any]): data = get_stickiness_ok(client=client, request=request, team=team) return get_time_series_ok(data) -def get_stickiness_people(client: Client, team_id: int, request: Dict[str, Any]): +def get_stickiness_people(client: Client, team_id: int, request: dict[str, Any]): return client.get("/api/person/stickiness/", data=request) -def get_stickiness_people_ok(client: Client, team_id: int, request: Dict[str, Any]): +def get_stickiness_people_ok(client: Client, team_id: int, request: dict[str, Any]): response = get_stickiness_people(client=client, team_id=team_id, request=encode_get_request_params(data=request)) assert response.status_code == 200 return response.json() diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py index d23efe81cf7d8..0cae63e3b60b2 100644 --- a/posthog/api/test/test_team.py +++ b/posthog/api/test/test_team.py @@ -1,6 +1,6 @@ import json import uuid -from typing import List, cast, Dict, Optional, Any +from typing import cast, Optional, Any from unittest import mock from unittest.mock import MagicMock, call, patch, ANY @@ -27,7 +27,7 @@ class TestTeamAPI(APIBaseTest): - def _assert_activity_log(self, expected: List[Dict], team_id: Optional[int] = None) -> None: + def _assert_activity_log(self, expected: list[dict], team_id: Optional[int] = None) -> None: if not team_id: team_id = self.team.pk @@ -35,7 +35,7 @@ def _assert_activity_log(self, expected: List[Dict], team_id: Optional[int] = No assert starting_log_response.status_code == 200 assert starting_log_response.json()["results"] == expected - def _assert_organization_activity_log(self, expected: List[Dict]) -> None: + def _assert_organization_activity_log(self, expected: list[dict]) -> None: starting_log_response = self.client.get(f"/api/organizations/{self.organization.pk}/activity") assert starting_log_response.status_code == 200 assert starting_log_response.json()["results"] == 
expected @@ -95,7 +95,7 @@ def test_cant_retrieve_project_from_another_org(self): @patch("posthog.api.team.get_geoip_properties") def test_ip_location_is_used_for_new_project_week_day_start(self, get_geoip_properties_mock: MagicMock): - self.organization.available_features = cast(List[str], [AvailableFeature.ORGANIZATIONS_PROJECTS]) + self.organization.available_features = cast(list[str], [AvailableFeature.ORGANIZATIONS_PROJECTS]) self.organization.save() self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() @@ -1039,7 +1039,7 @@ def test_can_set_replay_configs_patch_session_replay_config_one_level_deep(self) # and the existing second level nesting is not preserved self._assert_replay_config_is({"ai_config": {"opt_in": None, "included_event_properties": ["and another"]}}) - def _assert_replay_config_is(self, expected: Dict[str, Any] | None) -> HttpResponse: + def _assert_replay_config_is(self, expected: dict[str, Any] | None) -> HttpResponse: get_response = self.client.get("/api/projects/@current/") assert get_response.status_code == status.HTTP_200_OK, get_response.json() assert get_response.json()["session_replay_config"] == expected @@ -1047,7 +1047,7 @@ def _assert_replay_config_is(self, expected: Dict[str, Any] | None) -> HttpRespo return get_response def _patch_session_replay_config( - self, config: Dict[str, Any] | None, expected_status: int = status.HTTP_200_OK + self, config: dict[str, Any] | None, expected_status: int = status.HTTP_200_OK ) -> HttpResponse: patch_response = self.client.patch( "/api/projects/@current/", @@ -1057,13 +1057,13 @@ def _patch_session_replay_config( return patch_response - def _assert_linked_flag_config(self, expected_config: Dict | None) -> HttpResponse: + def _assert_linked_flag_config(self, expected_config: dict | None) -> HttpResponse: response = self.client.get("/api/projects/@current/") assert response.status_code == status.HTTP_200_OK assert response.json()["session_recording_linked_flag"] == expected_config return response - def _patch_linked_flag_config(self, config: Dict | None, expected_status: int = status.HTTP_200_OK) -> HttpResponse: + def _patch_linked_flag_config(self, config: dict | None, expected_status: int = status.HTTP_200_OK) -> HttpResponse: response = self.client.patch("/api/projects/@current/", {"session_recording_linked_flag": config}) assert response.status_code == expected_status, response.json() return response diff --git a/posthog/api/test/test_user.py b/posthog/api/test/test_user.py index 7113d50e5f7b5..4b682b4095e7f 100644 --- a/posthog/api/test/test_user.py +++ b/posthog/api/test/test_user.py @@ -1,6 +1,6 @@ import datetime import uuid -from typing import Dict, List, cast +from typing import cast from unittest import mock from unittest.mock import ANY, Mock, patch from urllib.parse import quote @@ -326,7 +326,7 @@ def test_set_scene_personalisation_for_user(self, _mock_capture, _mock_identify_ ) def _assert_set_scene_choice( - self, scene: str, dashboard: Dashboard, user: User, expected_choices: List[Dict] + self, scene: str, dashboard: Dashboard, user: User, expected_choices: list[dict] ) -> None: response = self.client.post( "/api/users/@me/scene_personalisation", diff --git a/posthog/api/uploaded_media.py b/posthog/api/uploaded_media.py index d4cea157c69b0..aba0384caf861 100644 --- a/posthog/api/uploaded_media.py +++ b/posthog/api/uploaded_media.py @@ -1,5 +1,5 @@ from io import BytesIO -from typing import Dict, Optional +from typing import Optional import 
structlog from django.http import HttpResponse @@ -149,7 +149,7 @@ def create(self, request, *args, **kwargs) -> Response: detail="Object storage must be available to allow media uploads.", ) - def get_success_headers(self, location: str) -> Dict: + def get_success_headers(self, location: str) -> dict: try: return {"Location": location} except (TypeError, KeyError): diff --git a/posthog/api/utils.py b/posthog/api/utils.py index d34530cda14cc..ed1a571e6e446 100644 --- a/posthog/api/utils.py +++ b/posthog/api/utils.py @@ -4,7 +4,7 @@ import urllib.parse from enum import Enum, auto from ipaddress import ip_address -from typing import List, Literal, Optional, Union, Tuple +from typing import Literal, Optional, Union from uuid import UUID import structlog @@ -64,7 +64,7 @@ def get_target_entity(filter: Union[Filter, StickinessFilter]) -> Entity: raise ValidationError("An entity must be provided for target entity to be determined") -def entity_from_order(order: Optional[str], entities: List[Entity]) -> Optional[Entity]: +def entity_from_order(order: Optional[str], entities: list[Entity]) -> Optional[Entity]: if not order: return None @@ -78,8 +78,8 @@ def retrieve_entity_from( entity_id: Optional[str], entity_type: Optional[str], entity_math: MathType, - events: List[Entity], - actions: List[Entity], + events: list[Entity], + actions: list[Entity], ) -> Optional[Entity]: """ Retrieves the entity from the events and actions. @@ -251,7 +251,7 @@ def create_event_definitions_sql( event_type: EventDefinitionType, is_enterprise: bool = False, conditions: str = "", - order_expressions: Optional[List[Tuple[str, Literal["ASC", "DESC"]]]] = None, + order_expressions: Optional[list[tuple[str, Literal["ASC", "DESC"]]]] = None, ) -> str: if order_expressions is None: order_expressions = [] @@ -305,7 +305,7 @@ def get_pk_or_uuid(queryset: QuerySet, key: Union[int, str]) -> QuerySet: return queryset.filter(pk=key) -def parse_bool(value: Union[str, List[str]]) -> bool: +def parse_bool(value: Union[str, list[str]]) -> bool: if value == "true": return True return False diff --git a/posthog/asgi.py b/posthog/asgi.py index 22912a0c7b76e..38b9b00ec5c51 100644 --- a/posthog/asgi.py +++ b/posthog/asgi.py @@ -1,8 +1,22 @@ import os from django.core.asgi import get_asgi_application +from django.http.response import HttpResponse os.environ.setdefault("DJANGO_SETTINGS_MODULE", "posthog.settings") os.environ.setdefault("SERVER_GATEWAY_INTERFACE", "ASGI") -application = get_asgi_application() + +# Django doesn't support lifetime requests and raises an exception +# when it receives them. 
This creates a lot of noise in sentry so +# intercept these requests and return a 501 error without raising an exception +def lifetime_wrapper(func): + async def inner(scope, receive, send): + if scope["type"] != "http": + return HttpResponse(status=501) + return await func(scope, receive, send) + + return inner + + +application = lifetime_wrapper(get_asgi_application()) diff --git a/posthog/async_migrations/definition.py b/posthog/async_migrations/definition.py index 859b8af08819d..52a53164bc770 100644 --- a/posthog/async_migrations/definition.py +++ b/posthog/async_migrations/definition.py @@ -1,13 +1,10 @@ from typing import ( TYPE_CHECKING, Any, - Callable, - Dict, - List, Optional, - Tuple, Union, ) +from collections.abc import Callable from posthog.constants import AnalyticsDBMS from posthog.models.utils import sane_repr @@ -36,9 +33,9 @@ def __init__( self, *, sql: str, - sql_settings: Optional[Dict] = None, + sql_settings: Optional[dict] = None, rollback: Optional[str], - rollback_settings: Optional[Dict] = None, + rollback_settings: Optional[dict] = None, database: AnalyticsDBMS = AnalyticsDBMS.CLICKHOUSE, timeout_seconds: int = ASYNC_MIGRATIONS_DEFAULT_TIMEOUT_SECONDS, per_shard: bool = False, @@ -58,7 +55,7 @@ def rollback_fn(self, query_id: str): if self.rollback is not None: self._execute_op(query_id, self.rollback, self.rollback_settings) - def _execute_op(self, query_id: str, sql: str, settings: Optional[Dict]): + def _execute_op(self, query_id: str, sql: str, settings: Optional[dict]): from posthog.async_migrations.utils import ( execute_op_clickhouse, execute_op_postgres, @@ -91,16 +88,16 @@ class AsyncMigrationDefinition: description = "" # list of versions accepted for the services the migration relies on e.g. ClickHouse, Postgres - service_version_requirements: List[ServiceVersionRequirement] = [] + service_version_requirements: list[ServiceVersionRequirement] = [] # list of operations the migration will perform _in order_ - operations: List[AsyncMigrationOperation] = [] + operations: list[AsyncMigrationOperation] = [] # name of async migration this migration depends on depends_on: Optional[str] = None # optional parameters for this async migration. 
Shown in the UI when starting the migration - parameters: Dict[str, Tuple[(Optional[Union[int, str]], str, Callable[[Any], Any])]] = {} + parameters: dict[str, tuple[(Optional[Union[int, str]], str, Callable[[Any], Any])]] = {} def __init__(self, name: str): self.name = name @@ -111,11 +108,11 @@ def is_required(self) -> bool: return True # run before starting the migration - def precheck(self) -> Tuple[bool, Optional[str]]: + def precheck(self) -> tuple[bool, Optional[str]]: return (True, None) # run at a regular interval while the migration is being executed - def healthcheck(self) -> Tuple[bool, Optional[str]]: + def healthcheck(self) -> tuple[bool, Optional[str]]: return (True, None) # return an int between 0-100 to specify how far along this migration is diff --git a/posthog/async_migrations/migrations/0001_events_sample_by.py b/posthog/async_migrations/migrations/0001_events_sample_by.py index 4098fd38f32a1..1d8fced273c1b 100644 --- a/posthog/async_migrations/migrations/0001_events_sample_by.py +++ b/posthog/async_migrations/migrations/0001_events_sample_by.py @@ -1,5 +1,3 @@ -from typing import List - from posthog.async_migrations.definition import ( AsyncMigrationDefinition, AsyncMigrationOperation, @@ -17,7 +15,7 @@ class Migration(AsyncMigrationDefinition): posthog_max_version = "1.33.9" - operations: List[AsyncMigrationOperation] = [] + operations: list[AsyncMigrationOperation] = [] def is_required(self): return False diff --git a/posthog/async_migrations/migrations/0002_events_sample_by.py b/posthog/async_migrations/migrations/0002_events_sample_by.py index 7038975b2afbb..2157c380f2ddd 100644 --- a/posthog/async_migrations/migrations/0002_events_sample_by.py +++ b/posthog/async_migrations/migrations/0002_events_sample_by.py @@ -1,5 +1,4 @@ from functools import cached_property -from typing import List from django.conf import settings @@ -76,7 +75,7 @@ def operations(self): # Note: This _should_ be impossible but hard to ensure. 
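Nearly every hunk in this part of the diff applies the same mechanical cleanup: the typing aliases deprecated by PEP 585 (Dict, List, Tuple, Set) become builtin generics, and Callable/Sequence move to collections.abc, while Optional/Union are left as-is. A minimal before/after sketch of the pattern, using hypothetical names rather than code from this diff:

# before: aliases imported from typing (deprecated since Python 3.9)
from typing import Callable, Dict, List, Optional, Tuple

def summarize(rows: List[Dict[str, int]], label: Callable[[Dict[str, int]], str]) -> Tuple[int, Optional[str]]:
    return len(rows), label(rows[0]) if rows else None

# after: builtin generics plus collections.abc, as applied throughout these files
from typing import Optional
from collections.abc import Callable

def summarize(rows: list[dict[str, int]], label: Callable[[dict[str, int]], str]) -> tuple[int, Optional[str]]:
    return len(rows), label(rows[0]) if rows else None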
raise RuntimeError("Cannot run the migration as `events` table is already Distributed engine.") - create_table_op: List[AsyncMigrationOperation] = [ + create_table_op: list[AsyncMigrationOperation] = [ AsyncMigrationOperationSQL( database=AnalyticsDBMS.CLICKHOUSE, sql=f""" diff --git a/posthog/async_migrations/migrations/0005_person_replacing_by_version.py b/posthog/async_migrations/migrations/0005_person_replacing_by_version.py index 276d6c54abed3..8740456c5e1f7 100644 --- a/posthog/async_migrations/migrations/0005_person_replacing_by_version.py +++ b/posthog/async_migrations/migrations/0005_person_replacing_by_version.py @@ -1,6 +1,5 @@ import json from functools import cached_property -from typing import Dict, List, Tuple import structlog from django.conf import settings @@ -238,9 +237,9 @@ def _copy_batch_from_postgres(self, query_id: str) -> bool: ) return True - def _persons_insert_query(self, persons: List[Person]) -> Tuple[str, Dict]: + def _persons_insert_query(self, persons: list[Person]) -> tuple[str, dict]: values = [] - params: Dict = {} + params: dict = {} for i, person in enumerate(persons): created_at = person.created_at.strftime("%Y-%m-%d %H:%M:%S") # :TRICKY: We use a custom _timestamp to identify rows migrated during this migration diff --git a/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py index 62f539f333481..75c5510c9ef49 100644 --- a/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py +++ b/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py @@ -1,5 +1,3 @@ -from typing import List - from posthog.async_migrations.definition import ( AsyncMigrationDefinition, AsyncMigrationOperation, @@ -19,7 +17,7 @@ class Migration(AsyncMigrationDefinition): depends_on = "0005_person_replacing_by_version" - operations: List[AsyncMigrationOperation] = [] + operations: list[AsyncMigrationOperation] = [] def is_required(self): return False diff --git a/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py index 99216ee936b12..f51d171dfe855 100644 --- a/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py +++ b/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py @@ -1,5 +1,5 @@ from functools import cached_property -from typing import Dict, Tuple, Union +from typing import Union import structlog from django.conf import settings @@ -289,7 +289,7 @@ def _postcheck(self, _: str): self._check_person_data() self._check_groups_data() - def _where_clause(self) -> Tuple[str, Dict[str, Union[str, int]]]: + def _where_clause(self) -> tuple[str, dict[str, Union[str, int]]]: team_id = self.get_parameter("TEAM_ID") team_id_filter = f" AND team_id = %(team_id)s" if team_id else "" where_clause = f"WHERE timestamp > toDateTime(%(timestamp_lower_bound)s) AND timestamp < toDateTime(%(timestamp_upper_bound)s) {team_id_filter}" diff --git a/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py b/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py index 9b4c64c9af869..d679643b8a538 100644 --- a/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py +++ b/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py @@ -1,5 +1,3 @@ -from typing import 
List - from posthog.async_migrations.definition import ( AsyncMigrationDefinition, AsyncMigrationOperation, @@ -16,4 +14,4 @@ class Migration(AsyncMigrationDefinition): def is_required(self): return False - operations: List[AsyncMigrationOperation] = [] + operations: list[AsyncMigrationOperation] = [] diff --git a/posthog/async_migrations/runner.py b/posthog/async_migrations/runner.py index 78f2afcf21201..05946cfd3c98a 100644 --- a/posthog/async_migrations/runner.py +++ b/posthog/async_migrations/runner.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Tuple +from typing import Optional import structlog from semantic_version.base import SimpleSpec @@ -281,7 +281,7 @@ def run_next_migration(candidate: str): trigger_migration(migration_instance) -def is_migration_dependency_fulfilled(migration_name: str) -> Tuple[bool, str]: +def is_migration_dependency_fulfilled(migration_name: str) -> tuple[bool, str]: dependency = get_async_migration_dependency(migration_name) dependency_ok: bool = ( @@ -292,8 +292,8 @@ def is_migration_dependency_fulfilled(migration_name: str) -> Tuple[bool, str]: def check_service_version_requirements( - service_version_requirements: List[ServiceVersionRequirement], -) -> Tuple[bool, str]: + service_version_requirements: list[ServiceVersionRequirement], +) -> tuple[bool, str]: for service_version_requirement in service_version_requirements: in_range, version = service_version_requirement.is_service_in_accepted_version() if not in_range: diff --git a/posthog/async_migrations/setup.py b/posthog/async_migrations/setup.py index 4493f137bd2a2..acc27b495431d 100644 --- a/posthog/async_migrations/setup.py +++ b/posthog/async_migrations/setup.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional +from typing import Optional from django.core.exceptions import ImproperlyConfigured from infi.clickhouse_orm.utils import import_submodules @@ -19,12 +19,12 @@ def reload_migration_definitions(): ALL_ASYNC_MIGRATIONS[name] = module.Migration(name) -ALL_ASYNC_MIGRATIONS: Dict[str, AsyncMigrationDefinition] = {} +ALL_ASYNC_MIGRATIONS: dict[str, AsyncMigrationDefinition] = {} -ASYNC_MIGRATION_TO_DEPENDENCY: Dict[str, Optional[str]] = {} +ASYNC_MIGRATION_TO_DEPENDENCY: dict[str, Optional[str]] = {} # inverted mapping of ASYNC_MIGRATION_TO_DEPENDENCY -DEPENDENCY_TO_ASYNC_MIGRATION: Dict[Optional[str], str] = {} +DEPENDENCY_TO_ASYNC_MIGRATION: dict[Optional[str], str] = {} ASYNC_MIGRATIONS_MODULE_PATH = "posthog.async_migrations.migrations" ASYNC_MIGRATIONS_EXAMPLE_MODULE_PATH = "posthog.async_migrations.examples" diff --git a/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py index 4e6588ad45920..9a35ed05c827f 100644 --- a/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py +++ b/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py @@ -1,5 +1,4 @@ import json -from typing import Dict, List from uuid import uuid4 import pytest @@ -31,7 +30,7 @@ MIGRATION_NAME = "0007_persons_and_groups_on_events_backfill" -uuid1, uuid2, uuid3 = [UUIDT() for _ in range(3)] +uuid1, uuid2, uuid3 = (UUIDT() for _ in range(3)) # Clickhouse leaves behind blank/zero values for non-filled columns, these are checked against these constants ZERO_UUID = UUIDT(uuid_str="00000000-0000-0000-0000-000000000000") ZERO_DATE = "1970-01-01T00:00:00Z" @@ -44,7 +43,7 @@ def run_migration(): return start_async_migration(MIGRATION_NAME, 
ignore_posthog_version=True) -def query_events() -> List[Dict]: +def query_events() -> list[dict]: return query_with_columns( """ SELECT @@ -351,7 +350,7 @@ def test_rollback(self): MIGRATION_DEFINITION.operations[-1].fn = old_fn def test_timestamp_boundaries(self): - _uuid1, _uuid2, _uuid3 = [UUIDT() for _ in range(3)] + _uuid1, _uuid2, _uuid3 = (UUIDT() for _ in range(3)) create_event( event_uuid=_uuid1, team=self.team, diff --git a/posthog/async_migrations/test/test_0010_move_old_partitions.py b/posthog/async_migrations/test/test_0010_move_old_partitions.py index d316f5f50e625..e249f17a43412 100644 --- a/posthog/async_migrations/test/test_0010_move_old_partitions.py +++ b/posthog/async_migrations/test/test_0010_move_old_partitions.py @@ -14,7 +14,7 @@ MIGRATION_NAME = "0010_move_old_partitions" -uuid1, uuid2, uuid3 = [UUIDT() for _ in range(3)] +uuid1, uuid2, uuid3 = (UUIDT() for _ in range(3)) MIGRATION_DEFINITION = get_async_migration_definition(MIGRATION_NAME) diff --git a/posthog/async_migrations/utils.py b/posthog/async_migrations/utils.py index 20ad64cf7d75b..ee7ecdbe4d2ed 100644 --- a/posthog/async_migrations/utils.py +++ b/posthog/async_migrations/utils.py @@ -1,6 +1,7 @@ import asyncio from datetime import datetime -from typing import Callable, Optional +from typing import Optional +from collections.abc import Callable import posthoganalytics import structlog diff --git a/posthog/auth.py b/posthog/auth.py index 6154ecb1ca0ba..f536ff30c200e 100644 --- a/posthog/auth.py +++ b/posthog/auth.py @@ -1,7 +1,7 @@ import functools import re from datetime import timedelta -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any, Optional, Union from urllib.parse import urlsplit import jwt @@ -57,9 +57,9 @@ class PersonalAPIKeyAuthentication(authentication.BaseAuthentication): def find_key_with_source( cls, request: Union[HttpRequest, Request], - request_data: Optional[Dict[str, Any]] = None, - extra_data: Optional[Dict[str, Any]] = None, - ) -> Optional[Tuple[str, str]]: + request_data: Optional[dict[str, Any]] = None, + extra_data: Optional[dict[str, Any]] = None, + ) -> Optional[tuple[str, str]]: """Try to find personal API key in request and return it along with where it was found.""" if "HTTP_AUTHORIZATION" in request.META: authorization_match = re.match(rf"^{cls.keyword}\s+(\S.+)$", request.META["HTTP_AUTHORIZATION"]) @@ -80,8 +80,8 @@ def find_key_with_source( def find_key( cls, request: Union[HttpRequest, Request], - request_data: Optional[Dict[str, Any]] = None, - extra_data: Optional[Dict[str, Any]] = None, + request_data: Optional[dict[str, Any]] = None, + extra_data: Optional[dict[str, Any]] = None, ) -> Optional[str]: """Try to find personal API key in request and return it.""" key_with_source = cls.find_key_with_source(request, request_data, extra_data) @@ -121,7 +121,7 @@ def validate_key(cls, personal_api_key_with_source): return personal_api_key_object - def authenticate(self, request: Union[HttpRequest, Request]) -> Optional[Tuple[Any, None]]: + def authenticate(self, request: Union[HttpRequest, Request]) -> Optional[tuple[Any, None]]: personal_api_key_with_source = self.find_key_with_source(request) if not personal_api_key_with_source: return None @@ -190,7 +190,7 @@ class JwtAuthentication(authentication.BaseAuthentication): keyword = "Bearer" @classmethod - def authenticate(cls, request: Union[HttpRequest, Request]) -> Optional[Tuple[Any, None]]: + def authenticate(cls, request: Union[HttpRequest, Request]) -> Optional[tuple[Any, None]]: if 
"HTTP_AUTHORIZATION" in request.META: authorization_match = re.match(rf"^Bearer\s+(\S.+)$", request.META["HTTP_AUTHORIZATION"]) if authorization_match: @@ -222,7 +222,7 @@ class SharingAccessTokenAuthentication(authentication.BaseAuthentication): sharing_configuration: SharingConfiguration - def authenticate(self, request: Union[HttpRequest, Request]) -> Optional[Tuple[Any, Any]]: + def authenticate(self, request: Union[HttpRequest, Request]) -> Optional[tuple[Any, Any]]: if sharing_access_token := request.GET.get("sharing_access_token"): if request.method not in ["GET", "HEAD"]: raise AuthenticationFailed(detail="Sharing access token can only be used for GET requests.") diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py index e39a01c9cbeec..eaca9da1218a3 100644 --- a/posthog/batch_exports/http.py +++ b/posthog/batch_exports/http.py @@ -95,35 +95,21 @@ class BatchExportRunViewSet(TeamAndOrgViewSetMixin, viewsets.ReadOnlyModelViewSe queryset = BatchExportRun.objects.all() serializer_class = BatchExportRunSerializer pagination_class = RunsCursorPagination + filter_rewrite_rules = {"team_id": "batch_export__team_id"} - def get_queryset(self, date_range: tuple[dt.datetime, dt.datetime] | None = None): - if not isinstance(self.request.user, User) or self.request.user.current_team is None: - raise NotAuthenticated() - - if date_range: - return self.queryset.filter( - batch_export_id=self.kwargs["parent_lookup_batch_export_id"], - created_at__range=date_range, - ).order_by("-created_at") - else: - return self.queryset.filter(batch_export_id=self.kwargs["parent_lookup_batch_export_id"]).order_by( - "-created_at" - ) - - def list(self, request: request.Request, *args, **kwargs) -> response.Response: - """Get all BatchExportRuns for a BatchExport.""" - if not isinstance(request.user, User) or request.user.team is None: - raise NotAuthenticated() + def get_queryset(self): + queryset = super().get_queryset() - after = self.request.query_params.get("after", "-7d") - before = self.request.query_params.get("before", None) - after_datetime = relative_date_parse(after, request.user.team.timezone_info) - before_datetime = relative_date_parse(before, request.user.team.timezone_info) if before else now() + after = self.request.GET.get("after", "-7d") + before = self.request.GET.get("before", None) + after_datetime = relative_date_parse(after, self.team.timezone_info) + before_datetime = relative_date_parse(before, self.team.timezone_info) if before else now() date_range = (after_datetime, before_datetime) - page = self.paginate_queryset(self.get_queryset(date_range=date_range)) - serializer = self.get_serializer(page, many=True) - return self.get_paginated_response(serializer.data) + queryset = queryset.filter(batch_export_id=self.kwargs["parent_lookup_batch_export_id"]) + queryset = queryset.filter(created_at__range=date_range) + + return queryset.order_by("-created_at") class BatchExportDestinationSerializer(serializers.ModelSerializer): @@ -342,9 +328,6 @@ class BatchExportViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): serializer_class = BatchExportSerializer def get_queryset(self): - if not isinstance(self.request.user, User): - raise NotAuthenticated() - return super().get_queryset().exclude(deleted=True).order_by("-created_at").prefetch_related("destination") @action(methods=["POST"], detail=True) diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py index 4ebe463e51f03..4089323202e50 100644 --- a/posthog/caching/calculate_results.py 
+++ b/posthog/caching/calculate_results.py @@ -1,6 +1,5 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Optional, Union -from posthog.api.services.query import ExecutionMode import structlog from sentry_sdk import capture_exception @@ -78,7 +77,7 @@ def get_cache_type_for_filter(cacheable: FilterType) -> CacheType: return CacheType.TRENDS -def get_cache_type_for_query(cacheable: Dict) -> CacheType: +def get_cache_type_for_query(cacheable: dict) -> CacheType: cache_type = None if cacheable.get("source"): @@ -93,7 +92,7 @@ def get_cache_type_for_query(cacheable: Dict) -> CacheType: return cache_type -def get_cache_type(cacheable: Optional[FilterType] | Optional[Dict]) -> CacheType: +def get_cache_type(cacheable: Optional[FilterType] | Optional[dict]) -> CacheType: if isinstance(cacheable, dict): return get_cache_type_for_query(cacheable) elif cacheable is not None: @@ -110,7 +109,7 @@ def get_cache_type(cacheable: Optional[FilterType] | Optional[Dict]) -> CacheTyp def calculate_for_query_based_insight( insight: Insight, *, dashboard: Optional[Dashboard] = None, refresh_requested: bool ) -> "InsightResult": - from posthog.api.services.query import process_query + from posthog.api.services.query import process_query, ExecutionMode from posthog.caching.fetch_from_cache import InsightResult, NothingInCacheResult tag_queries(team_id=insight.team_id, insight_id=insight.pk) @@ -123,7 +122,9 @@ def calculate_for_query_based_insight( response = process_query( insight.team, effective_query, - execution_mode=ExecutionMode.CALCULATION_REQUESTED if refresh_requested else ExecutionMode.CACHE_ONLY, + execution_mode=ExecutionMode.CALCULATION_ALWAYS + if refresh_requested + else ExecutionMode.CACHE_ONLY_NEVER_CALCULATE, ) if "results" not in response: @@ -145,7 +146,7 @@ def calculate_for_query_based_insight( def calculate_for_filter_based_insight( insight: Insight, dashboard: Optional[Dashboard] -) -> Tuple[str, str, List | Dict]: +) -> tuple[str, str, list | dict]: filter = get_filter(data=insight.dashboard_filters(dashboard), team=insight.team) cache_key = generate_insight_cache_key(insight, dashboard) cache_type = get_cache_type(filter) @@ -160,7 +161,7 @@ def calculate_for_filter_based_insight( return cache_key, cache_type, calculate_result_by_cache_type(cache_type, filter, insight.team) -def calculate_result_by_cache_type(cache_type: CacheType, filter: Filter, team: Team) -> List[Dict[str, Any]]: +def calculate_result_by_cache_type(cache_type: CacheType, filter: Filter, team: Team) -> list[dict[str, Any]]: if cache_type == CacheType.FUNNEL: return _calculate_funnel(filter, team) else: @@ -168,7 +169,7 @@ def calculate_result_by_cache_type(cache_type: CacheType, filter: Filter, team: @timed("update_cache_item_timer.calculate_by_filter") -def _calculate_by_filter(filter: FilterType, team: Team, cache_type: CacheType) -> List[Dict[str, Any]]: +def _calculate_by_filter(filter: FilterType, team: Team, cache_type: CacheType) -> list[dict[str, Any]]: insight_class = CACHE_TYPE_TO_INSIGHT_CLASS[cache_type] if cache_type == CacheType.PATHS: @@ -179,7 +180,7 @@ def _calculate_by_filter(filter: FilterType, team: Team, cache_type: CacheType) @timed("update_cache_item_timer.calculate_funnel") -def _calculate_funnel(filter: Filter, team: Team) -> List[Dict[str, Any]]: +def _calculate_funnel(filter: Filter, team: Team) -> list[dict[str, Any]]: if filter.funnel_viz_type == FunnelVizType.TRENDS: result = ClickhouseFunnelTrends(team=team, 
filter=filter).run() elif filter.funnel_viz_type == FunnelVizType.TIME_TO_CONVERT: @@ -192,7 +193,7 @@ def _calculate_funnel(filter: Filter, team: Team) -> List[Dict[str, Any]]: def cache_includes_latest_events( - payload: Dict, filter: Union[RetentionFilter, StickinessFilter, PathFilter, Filter] + payload: dict, filter: Union[RetentionFilter, StickinessFilter, PathFilter, Filter] ) -> bool: """ event_definition has last_seen_at timestamp @@ -217,7 +218,7 @@ def cache_includes_latest_events( return False -def _events_from_filter(filter: Union[RetentionFilter, StickinessFilter, PathFilter, Filter]) -> List[str]: +def _events_from_filter(filter: Union[RetentionFilter, StickinessFilter, PathFilter, Filter]) -> list[str]: """ If a filter only represents a set of events then we can use their last_seen_at to determine if the cache is up-to-date diff --git a/posthog/caching/fetch_from_cache.py b/posthog/caching/fetch_from_cache.py index fcbeb0b72e341..fe5d46ace3d51 100644 --- a/posthog/caching/fetch_from_cache.py +++ b/posthog/caching/fetch_from_cache.py @@ -1,6 +1,6 @@ from dataclasses import dataclass from datetime import datetime, timedelta -from typing import Any, List, Optional, Union +from typing import Any, Optional, Union from django.utils.timezone import now from prometheus_client import Counter @@ -27,7 +27,7 @@ class InsightResult: is_cached: bool timezone: Optional[str] next_allowed_client_refresh: Optional[datetime] = None - timings: Optional[List[QueryTiming]] = None + timings: Optional[list[QueryTiming]] = None @dataclass(frozen=True) diff --git a/posthog/caching/insight_cache.py b/posthog/caching/insight_cache.py index d73486234dfb1..97b5c691e4643 100644 --- a/posthog/caching/insight_cache.py +++ b/posthog/caching/insight_cache.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta from time import perf_counter -from typing import Any, List, Optional, Tuple, cast +from typing import Any, Optional, cast from uuid import UUID import structlog @@ -49,7 +49,7 @@ def schedule_cache_updates(): logger.warn("No caches were found to be updated") -def fetch_states_in_need_of_updating(limit: int) -> List[Tuple[int, str, UUID]]: +def fetch_states_in_need_of_updating(limit: int) -> list[tuple[int, str, UUID]]: current_time = now() with connection.cursor() as cursor: cursor.execute( @@ -162,7 +162,7 @@ def update_cached_state( ) -def _extract_insight_dashboard(caching_state: InsightCachingState) -> Tuple[Insight, Optional[Dashboard]]: +def _extract_insight_dashboard(caching_state: InsightCachingState) -> tuple[Insight, Optional[Dashboard]]: if caching_state.dashboard_tile is not None: assert caching_state.dashboard_tile.insight is not None diff --git a/posthog/caching/insight_caching_state.py b/posthog/caching/insight_caching_state.py index a8ae36c14f05a..ae3eb269425f0 100644 --- a/posthog/caching/insight_caching_state.py +++ b/posthog/caching/insight_caching_state.py @@ -1,7 +1,7 @@ from datetime import timedelta from enum import Enum from functools import cached_property -from typing import List, Optional, Union +from typing import Optional, Union import structlog from django.core.paginator import Paginator @@ -232,10 +232,10 @@ def _iterate_large_queryset(queryset, page_size): yield page.object_list -def _execute_insert(states: List[Optional[InsightCachingState]]): +def _execute_insert(states: list[Optional[InsightCachingState]]): from django.db import connection - models: List[InsightCachingState] = list(filter(None, states)) + models: list[InsightCachingState] = list(filter(None, 
states)) if len(models) == 0: return diff --git a/posthog/caching/insights_api.py b/posthog/caching/insights_api.py index 35a75cdf8a0b1..11760e2dc4108 100644 --- a/posthog/caching/insights_api.py +++ b/posthog/caching/insights_api.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta from math import ceil from time import sleep -from typing import Optional, Tuple, Union +from typing import Optional, Union import zoneinfo from rest_framework import request @@ -37,7 +37,7 @@ def should_refresh_insight( *, request: request.Request, is_shared=False, -) -> Tuple[bool, timedelta]: +) -> tuple[bool, timedelta]: """Return whether the insight should be refreshed now, and what's the minimum wait time between refreshes. If a refresh already is being processed somewhere else, this function will wait for that to finish (or time out). diff --git a/posthog/caching/test/test_insight_cache.py b/posthog/caching/test/test_insight_cache.py index 9de2053f6c2f1..b86ac56a3de99 100644 --- a/posthog/caching/test/test_insight_cache.py +++ b/posthog/caching/test/test_insight_cache.py @@ -1,5 +1,6 @@ from datetime import timedelta -from typing import Callable, Optional +from typing import Optional +from collections.abc import Callable from unittest.mock import call, patch import pytest diff --git a/posthog/caching/test/test_insight_caching_state.py b/posthog/caching/test/test_insight_caching_state.py index 03a3652555202..47465786fb17b 100644 --- a/posthog/caching/test/test_insight_caching_state.py +++ b/posthog/caching/test/test_insight_caching_state.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Any, Dict, Optional, Union, cast +from typing import Any, Optional, Union, cast from unittest.mock import patch import pytest @@ -42,7 +42,7 @@ def create_insight( is_shared=True, filters=filter_dict, deleted=False, - query: Optional[Dict] = None, + query: Optional[dict] = None, ) -> Insight: if mock_active_teams: mock_active_teams.return_value = {team.pk} if team_should_be_active else set() @@ -77,7 +77,7 @@ def create_tile( dashboard_tile_deleted=False, is_dashboard_shared=True, text_tile=False, - query: Optional[Dict] = None, + query: Optional[dict] = None, ) -> DashboardTile: if mock_active_teams: mock_active_teams.return_value = {team.pk} if team_should_be_active else set() @@ -295,7 +295,7 @@ def test_calculate_target_age( team: Team, user: User, create_item, - create_item_kw: Dict, + create_item_kw: dict, expected_target_age: TargetCacheAge, ): item = cast( diff --git a/posthog/caching/utils.py b/posthog/caching/utils.py index d0c6450cc7dba..c56d0f33571d5 100644 --- a/posthog/caching/utils.py +++ b/posthog/caching/utils.py @@ -1,6 +1,6 @@ from datetime import datetime from dateutil.parser import isoparse -from typing import Any, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Optional, Union from zoneinfo import ZoneInfo from dateutil.parser import parser @@ -32,7 +32,7 @@ def ensure_is_date(candidate: Optional[Union[str, datetime]]) -> Optional[dateti return parser().parse(candidate) -def active_teams() -> Set[int]: +def active_teams() -> set[int]: """ Teams are stored in a sorted set. [{team_id: score}, {team_id: score}]. Their "score" is the number of seconds since last event. @@ -43,7 +43,7 @@ def active_teams() -> Set[int]: This assumes that the list of active teams is small enough to reasonably load in one go. 
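    For illustration (an assumption about the returned shape, consistent with the annotation below): zrange with withscores=True yields (member, score) pairs in which the member is the team id as bytes, e.g.

        all_teams = redis.zrange(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, 0, -1, withscores=True)
        # e.g. [(b"2", 12.0), (b"7", 86400.0)] -> {2, 7}
        team_ids = {int(team_id) for team_id, _seconds_since_last_event in all_teams}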
""" redis = get_client() - all_teams: List[Tuple[bytes, float]] = redis.zrange(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, 0, -1, withscores=True) + all_teams: list[tuple[bytes, float]] = redis.zrange(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, 0, -1, withscores=True) if not all_teams: teams_by_recency = sync_execute( """ @@ -106,7 +106,7 @@ def is_stale(team: Team, date_to: datetime, interval: str, cached_result: Any) - return False last_refresh = ( - cached_result.get("last_refresh", None) if isinstance(cached_result, Dict) else cached_result.last_refresh + cached_result.get("last_refresh", None) if isinstance(cached_result, dict) else cached_result.last_refresh ) date_to = min([date_to, datetime.now(tz=ZoneInfo("UTC"))]) # can't be later than now diff --git a/posthog/celery.py b/posthog/celery.py index a78a7c94ad844..29c45c9b60729 100644 --- a/posthog/celery.py +++ b/posthog/celery.py @@ -1,6 +1,5 @@ import os import time -from typing import Dict from celery import Celery from celery.signals import ( @@ -71,7 +70,7 @@ app.steps["worker"].add(DjangoStructLogInitStep) -task_timings: Dict[str, float] = {} +task_timings: dict[str, float] = {} @setup_logging.connect diff --git a/posthog/clickhouse/client/connection.py b/posthog/clickhouse/client/connection.py index 31ae6cd291de0..35c72a305faea 100644 --- a/posthog/clickhouse/client/connection.py +++ b/posthog/clickhouse/client/connection.py @@ -1,6 +1,6 @@ from contextlib import contextmanager from enum import Enum -from functools import lru_cache +from functools import cache from clickhouse_driver import Client as SyncClient from clickhouse_pool import ChPool @@ -65,7 +65,7 @@ def default_client(): ) -@lru_cache(maxsize=None) +@cache def make_ch_pool(**overrides) -> ChPool: kwargs = { "host": settings.CLICKHOUSE_HOST, diff --git a/posthog/clickhouse/client/escape.py b/posthog/clickhouse/client/escape.py index 49e7b1047f372..c1a2ae1cf4197 100644 --- a/posthog/clickhouse/client/escape.py +++ b/posthog/clickhouse/client/escape.py @@ -89,6 +89,7 @@ def escape_param_for_clickhouse(param: Any) -> str: version_patch="placeholder server_info value", revision="placeholder server_info value", display_name="placeholder server_info value", + used_revision="placeholder server_info value", timezone="UTC", ) return escape_param(param, context=context) diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py index b588badfc07ea..17af5683a6f19 100644 --- a/posthog/clickhouse/client/execute.py +++ b/posthog/clickhouse/client/execute.py @@ -4,7 +4,8 @@ from contextlib import contextmanager from functools import lru_cache from time import perf_counter -from typing import Any, Dict, List, Optional, Sequence, Union +from typing import Any, Optional, Union +from collections.abc import Sequence import sqlparse from clickhouse_driver import Client as SyncClient @@ -19,7 +20,7 @@ from posthog.utils import generate_short_id, patchable InsertParams = Union[list, tuple, types.GeneratorType] -NonInsertParams = Dict[str, Any] +NonInsertParams = dict[str, Any] QueryArgs = Optional[Union[InsertParams, NonInsertParams]] thread_local_storage = threading.local() @@ -39,7 +40,7 @@ @lru_cache(maxsize=1) -def default_settings() -> Dict: +def default_settings() -> dict: return { "join_algorithm": "direct,parallel_hash", "distributed_replica_max_ignored_errors": 1000, @@ -131,11 +132,11 @@ def query_with_columns( query: str, args: Optional[QueryArgs] = None, columns_to_remove: Optional[Sequence[str]] = None, - columns_to_rename: Optional[Dict[str, str]] = None, + 
columns_to_rename: Optional[dict[str, str]] = None, *, workload: Workload = Workload.DEFAULT, team_id: Optional[int] = None, -) -> List[Dict]: +) -> list[dict]: if columns_to_remove is None: columns_to_remove = [] if columns_to_rename is None: @@ -184,7 +185,7 @@ def _prepare_query( below predicate. """ prepared_args: Any = QueryArgs - if isinstance(args, (list, tuple, types.GeneratorType)): + if isinstance(args, list | tuple | types.GeneratorType): # If we get one of these it means we have an insert, let the clickhouse # client handle substitution here. rendered_sql = query diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index e5b9e6d0c36bb..2a2e762d5aa56 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -106,7 +106,9 @@ def execute_process_query( team=team, query_json=query_json, limit_context=limit_context, - execution_mode=ExecutionMode.CALCULATION_REQUESTED if refresh_requested else ExecutionMode.CACHE_ONLY, + execution_mode=ExecutionMode.CALCULATION_ALWAYS + if refresh_requested + else ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE, ) logger.info("Got results for team %s query %s", team_id, query_id) query_status.complete = True diff --git a/posthog/clickhouse/client/migration_tools.py b/posthog/clickhouse/client/migration_tools.py index f71abd489fd64..aa3100b548bc0 100644 --- a/posthog/clickhouse/client/migration_tools.py +++ b/posthog/clickhouse/client/migration_tools.py @@ -1,4 +1,5 @@ -from typing import Callable, Union +from typing import Union +from collections.abc import Callable from infi.clickhouse_orm import migrations diff --git a/posthog/clickhouse/client/test/__snapshots__/test_execute_async.ambr b/posthog/clickhouse/client/test/__snapshots__/test_execute_async.ambr index 660abdacdd75b..5304b6569afe1 100644 --- a/posthog/clickhouse/client/test/__snapshots__/test_execute_async.ambr +++ b/posthog/clickhouse/client/test/__snapshots__/test_execute_async.ambr @@ -5,6 +5,7 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=600, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/clickhouse/materialized_columns/column.py b/posthog/clickhouse/materialized_columns/column.py index 70aca94511ac9..a206c051395cc 100644 --- a/posthog/clickhouse/materialized_columns/column.py +++ b/posthog/clickhouse/materialized_columns/column.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Dict, List, Literal, Tuple, Union +from typing import Literal, Union from posthog.cache_utils import cache_for from posthog.models.property import PropertyName, TableColumn, TableWithProperties @@ -12,7 +12,7 @@ @cache_for(timedelta(minutes=15)) def get_materialized_columns( table: TablesWithMaterializedColumns, -) -> Dict[Tuple[PropertyName, TableColumn], ColumnName]: +) -> dict[tuple[PropertyName, TableColumn], ColumnName]: return {} @@ -28,7 +28,7 @@ def materialize( def backfill_materialized_columns( table: TableWithProperties, - properties: List[Tuple[PropertyName, TableColumn]], + properties: list[tuple[PropertyName, TableColumn]], backfill_period: timedelta, test_settings=None, ) -> None: diff --git a/posthog/clickhouse/migrations/0046_ensure_kafa_session_replay_table_exists.py b/posthog/clickhouse/migrations/0046_ensure_kafa_session_replay_table_exists.py index 877139c155ee0..85d6664e475be 100644 --- a/posthog/clickhouse/migrations/0046_ensure_kafa_session_replay_table_exists.py +++ 
b/posthog/clickhouse/migrations/0046_ensure_kafa_session_replay_table_exists.py @@ -1,6 +1,4 @@ -from typing import List - -operations: List = [ +operations: list = [ # this migration has been amended to be entirely No-op # it has applied successfully in Prod US where it was a no-op # as all tables/columns it affected already existed diff --git a/posthog/clickhouse/migrations/0059_heatmaps_events.py b/posthog/clickhouse/migrations/0059_heatmaps_events.py new file mode 100644 index 0000000000000..3600483efb428 --- /dev/null +++ b/posthog/clickhouse/migrations/0059_heatmaps_events.py @@ -0,0 +1,16 @@ +from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions +from posthog.heatmaps.sql import ( + HEATMAPS_TABLE_MV_SQL, + KAFKA_HEATMAPS_TABLE_SQL, + HEATMAPS_TABLE_SQL, + DISTRIBUTED_HEATMAPS_TABLE_SQL, + WRITABLE_HEATMAPS_TABLE_SQL, +) + +operations = [ + run_sql_with_exceptions(WRITABLE_HEATMAPS_TABLE_SQL()), + run_sql_with_exceptions(DISTRIBUTED_HEATMAPS_TABLE_SQL()), + run_sql_with_exceptions(HEATMAPS_TABLE_SQL()), + run_sql_with_exceptions(KAFKA_HEATMAPS_TABLE_SQL()), + run_sql_with_exceptions(HEATMAPS_TABLE_MV_SQL()), +] diff --git a/posthog/clickhouse/migrations/0060_person_mode_force_upgrade.py b/posthog/clickhouse/migrations/0060_person_mode_force_upgrade.py new file mode 100644 index 0000000000000..7b3fa1b698740 --- /dev/null +++ b/posthog/clickhouse/migrations/0060_person_mode_force_upgrade.py @@ -0,0 +1,32 @@ +from infi.clickhouse_orm import migrations + +from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions +from posthog.client import sync_execute +from posthog.models.event.sql import ( + EVENTS_TABLE_JSON_MV_SQL, + KAFKA_EVENTS_TABLE_JSON_SQL, +) +from posthog.settings import CLICKHOUSE_CLUSTER + + +# Column was added in 0057_events_person_mode +ALTER_COLUMNS_BASE_SQL = """ +ALTER TABLE {table} +ON CLUSTER {cluster} +MODIFY COLUMN person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) +""" + + +def alter_columns_in_required_tables(_): + sync_execute(ALTER_COLUMNS_BASE_SQL.format(table="events", cluster=CLICKHOUSE_CLUSTER)) + sync_execute(ALTER_COLUMNS_BASE_SQL.format(table="writable_events", cluster=CLICKHOUSE_CLUSTER)) + sync_execute(ALTER_COLUMNS_BASE_SQL.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER)) + + +operations = [ + run_sql_with_exceptions(f"DROP TABLE IF EXISTS events_json_mv ON CLUSTER '{CLICKHOUSE_CLUSTER}'"), + run_sql_with_exceptions(f"DROP TABLE IF EXISTS kafka_events_json ON CLUSTER '{CLICKHOUSE_CLUSTER}'"), + migrations.RunPython(alter_columns_in_required_tables), + run_sql_with_exceptions(KAFKA_EVENTS_TABLE_JSON_SQL()), + run_sql_with_exceptions(EVENTS_TABLE_JSON_MV_SQL()), +] diff --git a/posthog/clickhouse/schema.py b/posthog/clickhouse/schema.py index 0c0b5656ba204..f482458fafc7a 100644 --- a/posthog/clickhouse/schema.py +++ b/posthog/clickhouse/schema.py @@ -1,6 +1,14 @@ # This file contains all CREATE TABLE queries, used to sync and test schema import re +from posthog.heatmaps.sql import ( + HEATMAPS_TABLE_SQL, + WRITABLE_HEATMAPS_TABLE_SQL, + DISTRIBUTED_HEATMAPS_TABLE_SQL, + KAFKA_HEATMAPS_TABLE_SQL, + HEATMAPS_TABLE_MV_SQL, +) + from posthog.clickhouse.dead_letter_queue import ( DEAD_LETTER_QUEUE_TABLE_SQL, DEAD_LETTER_QUEUE_TABLE_MV_SQL, @@ -117,6 +125,7 @@ SESSION_REPLAY_EVENTS_TABLE_SQL, CHANNEL_DEFINITION_TABLE_SQL, SESSIONS_TABLE_SQL, + HEATMAPS_TABLE_SQL, ) CREATE_DISTRIBUTED_TABLE_QUERIES = ( WRITABLE_EVENTS_TABLE_SQL, @@ -130,6 +139,8 @@ 
DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL, WRITABLE_SESSIONS_TABLE_SQL, DISTRIBUTED_SESSIONS_TABLE_SQL, + WRITABLE_HEATMAPS_TABLE_SQL, + DISTRIBUTED_HEATMAPS_TABLE_SQL, ) CREATE_KAFKA_TABLE_QUERIES = ( KAFKA_LOG_ENTRIES_TABLE_SQL, @@ -147,6 +158,7 @@ KAFKA_APP_METRICS_TABLE_SQL, KAFKA_PERFORMANCE_EVENTS_TABLE_SQL, KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL, + KAFKA_HEATMAPS_TABLE_SQL, ) CREATE_MV_TABLE_QUERIES = ( LOG_ENTRIES_TABLE_MV_SQL, @@ -165,6 +177,7 @@ PERFORMANCE_EVENTS_TABLE_MV_SQL, SESSION_REPLAY_EVENTS_TABLE_MV_SQL, SESSIONS_TABLE_MV_SQL, + HEATMAPS_TABLE_MV_SQL, ) CREATE_TABLE_QUERIES = ( diff --git a/posthog/clickhouse/system_status.py b/posthog/clickhouse/system_status.py index e04c6bf7597f1..eec283f3b5ab2 100644 --- a/posthog/clickhouse/system_status.py +++ b/posthog/clickhouse/system_status.py @@ -1,6 +1,6 @@ from datetime import timedelta from os.path import abspath, dirname, join -from typing import Dict, Generator, List, Tuple +from collections.abc import Generator from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta @@ -27,7 +27,7 @@ CLICKHOUSE_FLAMEGRAPH_EXECUTABLE = abspath(join(dirname(__file__), "bin", "clickhouse-flamegraph")) FLAMEGRAPH_PL = abspath(join(dirname(__file__), "bin", "flamegraph.pl")) -SystemStatusRow = Dict +SystemStatusRow = dict def system_status() -> Generator[SystemStatusRow, None, None]: @@ -179,7 +179,7 @@ def is_alive() -> bool: return False -def dead_letter_queue_ratio() -> Tuple[bool, int]: +def dead_letter_queue_ratio() -> tuple[bool, int]: dead_letter_queue_events_last_day = get_dead_letter_queue_events_last_24h() total_events_ingested_last_day = sync_execute( @@ -199,14 +199,14 @@ def dead_letter_queue_ratio_ok_cached() -> bool: return dead_letter_queue_ratio()[0] -def get_clickhouse_running_queries() -> List[Dict]: +def get_clickhouse_running_queries() -> list[dict]: return query_with_columns( "SELECT elapsed as duration, query, * FROM system.processes ORDER BY duration DESC", columns_to_remove=["address", "initial_address", "elapsed"], ) -def get_clickhouse_slow_log() -> List[Dict]: +def get_clickhouse_slow_log() -> list[dict]: return query_with_columns( f""" SELECT query_duration_ms as duration, query, * diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr index 5be2341abe4e7..851f3df420642 100644 --- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr +++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr @@ -25,7 +25,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) @@ -108,7 +108,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) @@ -133,6 +133,31 @@ ''' # --- +# name: test_create_kafka_table_with_different_kafka_host[kafka_heatmaps] + ''' + + CREATE TABLE IF NOT EXISTS kafka_heatmaps ON CLUSTER 'posthog' + ( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other 
resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String) + ) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_heatmap_events_test', 'group1', 'JSONEachRow') + + ''' +# --- # name: test_create_kafka_table_with_different_kafka_host[kafka_ingestion_warnings] ''' @@ -494,7 +519,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) , $group_0 VARCHAR COMMENT 'column_materializer::$group_0' , $group_1 VARCHAR COMMENT 'column_materializer::$group_1' @@ -648,6 +673,63 @@ ''' # --- +# name: test_create_table_query[heatmaps] + ''' + + CREATE TABLE IF NOT EXISTS heatmaps ON CLUSTER 'posthog' + ( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String), + _timestamp DateTime, + _offset UInt64, + _partition UInt64 + ) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_heatmaps', cityHash64(concat(toString(team_id), '-', session_id, '-', toString(toDate(timestamp))))) + + ''' +# --- +# name: test_create_table_query[heatmaps_mv] + ''' + + CREATE MATERIALIZED VIEW IF NOT EXISTS heatmaps_mv ON CLUSTER 'posthog' + TO posthog_test.writable_heatmaps + AS SELECT + session_id, + team_id, + distinct_id, + timestamp, + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y, + -- stored so that in future we can support other resolutions + scale_factor, + viewport_width, + viewport_height, + -- some elements move when the page scrolls, others do not + pointer_target_fixed, + current_url, + type, + _timestamp, + _offset, + _partition + FROM posthog_test.kafka_heatmaps + + ''' +# --- # name: test_create_table_query[ingestion_warnings] ''' @@ -758,7 +840,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) @@ -783,6 +865,31 @@ ''' # --- +# name: test_create_table_query[kafka_heatmaps] + ''' + + CREATE TABLE IF NOT EXISTS kafka_heatmaps ON CLUSTER 'posthog' + ( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + 
pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String) + ) ENGINE = Kafka('kafka:9092', 'clickhouse_heatmap_events_test', 'group1', 'JSONEachRow') + + ''' +# --- # name: test_create_table_query[kafka_ingestion_warnings] ''' @@ -1780,7 +1887,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) , $group_0 VARCHAR MATERIALIZED replaceRegexpAll(JSONExtractRaw(properties, '$group_0'), '^"|"$', '') COMMENT 'column_materializer::$group_0' , $group_1 VARCHAR MATERIALIZED replaceRegexpAll(JSONExtractRaw(properties, '$group_1'), '^"|"$', '') COMMENT 'column_materializer::$group_1' @@ -1811,6 +1918,49 @@ SAMPLE BY cityHash64(distinct_id) + ''' +# --- +# name: test_create_table_query[sharded_heatmaps] + ''' + + CREATE TABLE IF NOT EXISTS sharded_heatmaps ON CLUSTER 'posthog' + ( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String), + _timestamp DateTime, + _offset UInt64, + _partition UInt64 + ) ENGINE = ReplicatedMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.heatmaps', '{replica}') + + PARTITION BY toYYYYMM(timestamp) + -- almost always this is being queried by + -- * type, + -- * team_id, + -- * date range, + -- * URL (maybe matching wild cards), + -- * width + -- we'll almost never query this by session id + -- so from least to most cardinality that's + ORDER BY (type, team_id, toDate(timestamp), current_url, viewport_width) + -- I am purposefully not setting index granularity + -- the default is 8192, and we will be loading a lot of data + -- per query, we tend to copy this 512 around the place but + -- i don't think it applies here + ''' # --- # name: test_create_table_query[sharded_ingestion_warnings] @@ -2072,7 +2222,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) , _timestamp DateTime @@ -2083,6 +2233,34 @@ ''' # --- +# name: test_create_table_query[writable_heatmaps] + ''' + + CREATE TABLE IF NOT EXISTS writable_heatmaps ON CLUSTER 'posthog' + ( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String), + _timestamp DateTime, + _offset UInt64, + _partition UInt64 + ) ENGINE = 
Distributed('posthog', 'posthog_test', 'sharded_heatmaps', cityHash64(concat(toString(team_id), '-', session_id, '-', toString(toDate(timestamp))))) + + ''' +# --- # name: test_create_table_query[writable_session_recording_events] ''' @@ -2600,7 +2778,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) , $group_0 VARCHAR MATERIALIZED replaceRegexpAll(JSONExtractRaw(properties, '$group_0'), '^"|"$', '') COMMENT 'column_materializer::$group_0' , $group_1 VARCHAR MATERIALIZED replaceRegexpAll(JSONExtractRaw(properties, '$group_1'), '^"|"$', '') COMMENT 'column_materializer::$group_1' @@ -2633,6 +2811,49 @@ ''' # --- +# name: test_create_table_query_replicated_and_storage[sharded_heatmaps] + ''' + + CREATE TABLE IF NOT EXISTS sharded_heatmaps ON CLUSTER 'posthog' + ( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String), + _timestamp DateTime, + _offset UInt64, + _partition UInt64 + ) ENGINE = ReplicatedMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.heatmaps', '{replica}') + + PARTITION BY toYYYYMM(timestamp) + -- almost always this is being queried by + -- * type, + -- * team_id, + -- * date range, + -- * URL (maybe matching wild cards), + -- * width + -- we'll almost never query this by session id + -- so from least to most cardinality that's + ORDER BY (type, team_id, toDate(timestamp), current_url, viewport_width) + -- I am purposefully not setting index granularity + -- the default is 8192, and we will be loading a lot of data + -- per query, we tend to copy this 512 around the place but + -- i don't think it applies here + + ''' +# --- # name: test_create_table_query_replicated_and_storage[sharded_ingestion_warnings] ''' diff --git a/posthog/conftest.py b/posthog/conftest.py index 40f3d828c41a1..8f3f233358ca8 100644 --- a/posthog/conftest.py +++ b/posthog/conftest.py @@ -1,4 +1,4 @@ -from typing import Any, Tuple +from typing import Any import pytest from django.conf import settings @@ -22,7 +22,7 @@ def create_clickhouse_tables(num_tables: int): ) # REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY! - CREATE_TABLE_QUERIES: Tuple[Any, ...] = CREATE_MERGETREE_TABLE_QUERIES + CREATE_DISTRIBUTED_TABLE_QUERIES + CREATE_TABLE_QUERIES: tuple[Any, ...] = CREATE_MERGETREE_TABLE_QUERIES + CREATE_DISTRIBUTED_TABLE_QUERIES # Check if all the tables have already been created if num_tables == len(CREATE_TABLE_QUERIES): @@ -69,6 +69,7 @@ def reset_clickhouse_tables(): ) from posthog.models.channel_type.sql import TRUNCATE_CHANNEL_DEFINITION_TABLE_SQL from posthog.models.sessions.sql import TRUNCATE_SESSIONS_TABLE_SQL + from posthog.heatmaps.sql import TRUNCATE_HEATMAPS_TABLE_SQL # REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY! 
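
The sort key chosen for sharded_heatmaps above, ORDER BY (type, team_id, toDate(timestamp), current_url, viewport_width), runs from least to most cardinality to match the expected reads (by type, team, date range, URL and width, almost never by session id). As a rough illustration only, not part of the diff, a read that follows that key might look like the sketch below, with invented parameter values and the same x/viewport_width and y*scale_factor projections used by the snapshot queries further down:

```python
from posthog.client import sync_execute

# Illustrative sketch only: parameter values are invented, and the real product
# goes through HogQL (see heatmaps_api.py later in this diff) rather than raw SQL.
rows = sync_execute(
    """
    SELECT
        pointer_target_fixed,
        round(x / viewport_width, 2) AS pointer_relative_x,
        y * scale_factor AS client_y,
        count() AS cnt
    FROM heatmaps
    WHERE type = %(type)s
      AND team_id = %(team_id)s
      AND timestamp >= %(date_from)s
      AND current_url = %(url)s
      AND viewport_width >= %(min_width)s
    GROUP BY pointer_target_fixed, pointer_relative_x, client_y
    """,
    {
        "type": "click",
        "team_id": 1,
        "date_from": "2024-04-01",
        "url": "http://example.com",
        "min_width": 40,  # widths are stored bucketed; the API divides pixel widths by 16
    },
)
```
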
TABLES_TO_CREATE_DROP = [ @@ -86,6 +87,7 @@ def reset_clickhouse_tables(): TRUNCATE_PERFORMANCE_EVENTS_TABLE_SQL, TRUNCATE_CHANNEL_DEFINITION_TABLE_SQL, TRUNCATE_SESSIONS_TABLE_SQL(), + TRUNCATE_HEATMAPS_TABLE_SQL(), ] run_clickhouse_statement_in_parallel(TABLES_TO_CREATE_DROP) diff --git a/posthog/decorators.py b/posthog/decorators.py index 955bb9d085195..bb012701033e2 100644 --- a/posthog/decorators.py +++ b/posthog/decorators.py @@ -1,6 +1,7 @@ from enum import Enum from functools import wraps -from typing import Any, Callable, Dict, List, TypeVar, Union, cast +from typing import Any, TypeVar, Union, cast +from collections.abc import Callable from django.urls import resolve from django.utils.timezone import now @@ -25,7 +26,7 @@ class CacheType(str, Enum): PATHS = "Path" -ResultPackage = Union[Dict[str, Any], List[Dict[str, Any]]] +ResultPackage = Union[dict[str, Any], list[dict[str, Any]]] T = TypeVar("T", bound=ResultPackage) U = TypeVar("U", bound=GenericViewSet) diff --git a/posthog/demo/legacy/data_generator.py b/posthog/demo/legacy/data_generator.py index ccc9f163e6c3c..d507e65c31c67 100644 --- a/posthog/demo/legacy/data_generator.py +++ b/posthog/demo/legacy/data_generator.py @@ -1,4 +1,3 @@ -from typing import Dict, List from uuid import uuid4 from posthog.models import Person, PersonDistinctId, Team @@ -13,9 +12,9 @@ def __init__(self, team: Team, n_days=14, n_people=100): self.team = team self.n_days = n_days self.n_people = n_people - self.events: List[Dict] = [] - self.snapshots: List[Dict] = [] - self.distinct_ids: List[str] = [] + self.events: list[dict] = [] + self.snapshots: list[dict] = [] + self.distinct_ids: list[str] = [] def create(self, dashboards=True): self.create_missing_events_and_properties() diff --git a/posthog/demo/legacy/web_data_generator.py b/posthog/demo/legacy/web_data_generator.py index aa0836d3db732..811270092250f 100644 --- a/posthog/demo/legacy/web_data_generator.py +++ b/posthog/demo/legacy/web_data_generator.py @@ -1,7 +1,7 @@ import json import random from datetime import timedelta -from typing import Any, Dict, List +from typing import Any from dateutil.relativedelta import relativedelta from django.utils.timezone import now @@ -199,11 +199,11 @@ def make_person(self, index): return super().make_person(index) @cached_property - def demo_data(self) -> List[Dict[str, Any]]: - with open(get_absolute_path("demo/legacy/demo_people.json"), "r") as demo_data_file: + def demo_data(self) -> list[dict[str, Any]]: + with open(get_absolute_path("demo/legacy/demo_people.json")) as demo_data_file: return json.load(demo_data_file) @cached_property - def demo_recording(self) -> Dict[str, Any]: - with open(get_absolute_path("demo/legacy/hogflix_session_recording.json"), "r") as demo_session_file: + def demo_recording(self) -> dict[str, Any]: + with open(get_absolute_path("demo/legacy/hogflix_session_recording.json")) as demo_session_file: return json.load(demo_session_file) diff --git a/posthog/demo/matrix/manager.py b/posthog/demo/matrix/manager.py index 507ea09581d51..ce073a6126f8a 100644 --- a/posthog/demo/matrix/manager.py +++ b/posthog/demo/matrix/manager.py @@ -1,7 +1,7 @@ import datetime as dt import json from time import sleep -from typing import Any, Dict, List, Literal, Optional, Tuple, cast +from typing import Any, Literal, Optional, cast from django.conf import settings from django.core import exceptions @@ -55,13 +55,13 @@ def ensure_account_and_save( password: Optional[str] = None, is_staff: bool = False, disallow_collision: bool = False, - ) -> 
Tuple[Organization, Team, User]: + ) -> tuple[Organization, Team, User]: """If there's an email collision in signup in the demo environment, we treat it as a login.""" existing_user: Optional[User] = User.objects.filter(email=email).first() if existing_user is None: if self.print_steps: print(f"Creating demo organization, project, and user...") - organization_kwargs: Dict[str, Any] = {"name": organization_name} + organization_kwargs: dict[str, Any] = {"name": organization_name} if settings.DEMO: organization_kwargs["plugins_access_level"] = Organization.PluginsAccessLevel.INSTALL with transaction.atomic(): @@ -241,7 +241,7 @@ def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id ["team_id", "is_deleted", "_timestamp", "_offset", "_partition"], {"id": "uuid"}, ) - bulk_persons: Dict[str, Person] = {} + bulk_persons: dict[str, Person] = {} for row in clickhouse_persons: properties = json.loads(row.pop("properties", "{}")) bulk_persons[row["uuid"]] = Person(team_id=target_team_id, properties=properties, **row) @@ -317,7 +317,7 @@ def _save_sim_person(self, team: Team, subject: SimPerson): self._save_future_sim_events(team, subject.future_events) @staticmethod - def _save_past_sim_events(team: Team, events: List[SimEvent]): + def _save_past_sim_events(team: Team, events: list[SimEvent]): """Past events are saved into ClickHouse right away (via Kafka of course).""" from posthog.models.event.util import create_event @@ -346,7 +346,7 @@ def _save_past_sim_events(team: Team, events: List[SimEvent]): ) @staticmethod - def _save_future_sim_events(team: Team, events: List[SimEvent]): + def _save_future_sim_events(team: Team, events: list[SimEvent]): """Future events are not saved immediately, instead they're scheduled for ingestion via event buffer.""" # TODO: This used the plugin server's Graphile Worker-based event buffer, but the event buffer is no more @@ -356,7 +356,7 @@ def _save_sim_group( team: Team, type_index: Literal[0, 1, 2, 3, 4], key: str, - properties: Dict[str, Any], + properties: dict[str, Any], timestamp: dt.datetime, ): from posthog.models.group.util import raw_create_group_ch diff --git a/posthog/demo/matrix/matrix.py b/posthog/demo/matrix/matrix.py index c2d3a5f2eb4f4..382e70d85b78d 100644 --- a/posthog/demo/matrix/matrix.py +++ b/posthog/demo/matrix/matrix.py @@ -3,13 +3,7 @@ from collections import defaultdict, deque from typing import ( Any, - DefaultDict, - Deque, - Dict, - List, Optional, - Set, - Type, ) import mimesis @@ -38,7 +32,7 @@ class Cluster(ABC): end: timezone.datetime # End of the simulation (might be same as now or later) radius: int - people_matrix: List[List[SimPerson]] # Grid containing all people in the cluster + people_matrix: list[list[SimPerson]] # Grid containing all people in the cluster random: mimesis.random.Random properties_provider: PropertiesProvider @@ -52,7 +46,7 @@ class Cluster(ABC): _simulation_time: dt.datetime _reached_now: bool - _scheduled_effects: Deque[Effect] + _scheduled_effects: deque[Effect] def __init__(self, *, index: int, matrix: "Matrix") -> None: self.index = index @@ -98,7 +92,7 @@ def initiation_distribution(self) -> float: """Return a value between 0 and 1 determining how far into the overall simulation should this cluster be initiated.""" return self.random.random() - def list_neighbors(self, person: SimPerson) -> List[SimPerson]: + def list_neighbors(self, person: SimPerson) -> list[SimPerson]: """Return a list of neighbors of a person at (x, y).""" x, y = person.x, person.y neighbors = [] @@ 
-141,7 +135,7 @@ def _apply_due_effects(self, until: dt.datetime): while self._scheduled_effects and self._scheduled_effects[0].timestamp <= until: effect = self._scheduled_effects.popleft() self.simulation_time = effect.timestamp - resolved_targets: List[SimPerson] + resolved_targets: list[SimPerson] if effect.target == Effect.Target.SELF: resolved_targets = [effect.source] elif effect.target == Effect.Target.ALL_NEIGHBORS: @@ -155,7 +149,7 @@ def _apply_due_effects(self, until: dt.datetime): effect.callback(target) @property - def people(self) -> Set[SimPerson]: + def people(self) -> set[SimPerson]: return {person for row in self.people_matrix for person in row} @property @@ -198,17 +192,17 @@ class Matrix(ABC): """ PRODUCT_NAME: str - CLUSTER_CLASS: Type[Cluster] - PERSON_CLASS: Type[SimPerson] + CLUSTER_CLASS: type[Cluster] + PERSON_CLASS: type[SimPerson] start: dt.datetime now: dt.datetime end: dt.datetime group_type_index_offset: int # A mapping of groups. The first key is the group type, the second key is the group key. - groups: DefaultDict[str, DefaultDict[str, Dict[str, Any]]] - distinct_id_to_person: Dict[str, SimPerson] - clusters: List[Cluster] + groups: defaultdict[str, defaultdict[str, dict[str, Any]]] + distinct_id_to_person: dict[str, SimPerson] + clusters: list[Cluster] is_complete: Optional[bool] server_client: SimServerClient @@ -257,7 +251,7 @@ def __init__( self.is_complete = None @property - def people(self) -> List[SimPerson]: + def people(self) -> list[SimPerson]: return [person for cluster in self.clusters for person in cluster.people] @abstractmethod @@ -273,7 +267,7 @@ def simulate(self): cluster.simulate() self.is_complete = True - def _update_group(self, group_type: str, group_key: str, set_properties: Dict[str, Any]): + def _update_group(self, group_type: str, group_key: str, set_properties: dict[str, Any]): if len(self.groups) == GROUP_TYPES_LIMIT and group_type not in self.groups: raise Exception(f"Cannot add group type {group_type} to simulation, limit of {GROUP_TYPES_LIMIT} reached!") self.groups[group_type][group_key].update(set_properties) diff --git a/posthog/demo/matrix/models.py b/posthog/demo/matrix/models.py index e1698d7dd7b3b..c09fcae8cbb03 100644 --- a/posthog/demo/matrix/models.py +++ b/posthog/demo/matrix/models.py @@ -8,17 +8,12 @@ from typing import ( TYPE_CHECKING, Any, - Callable, - DefaultDict, - Dict, Generic, - Iterable, - List, Literal, Optional, - Set, TypeVar, ) +from collections.abc import Callable, Iterable from urllib.parse import urlparse, parse_qs from uuid import UUID @@ -77,7 +72,7 @@ class Target(Enum): "$referrer", } -Properties = Dict[str, Any] +Properties = dict[str, Any] class SimSessionIntent(Enum): @@ -330,23 +325,23 @@ class SimPerson(ABC): timezone: str # Exposed state - present - past_events: List[SimEvent] - future_events: List[SimEvent] + past_events: list[SimEvent] + future_events: list[SimEvent] # Exposed state - at `now` - distinct_ids_at_now: Set[str] + distinct_ids_at_now: set[str] properties_at_now: Properties first_seen_at: Optional[dt.datetime] last_seen_at: Optional[dt.datetime] # Internal state active_client: SimBrowserClient # Client being used by person - all_time_pageview_counts: DefaultDict[str, int] # Pageview count per URL across all time - session_pageview_counts: DefaultDict[str, int] # Pageview count per URL across the ongoing session + all_time_pageview_counts: defaultdict[str, int] # Pageview count per URL across all time + session_pageview_counts: defaultdict[str, int] # Pageview count per URL 
across the ongoing session active_session_intent: Optional[SimSessionIntent] wake_up_by: dt.datetime - _groups: Dict[str, str] - _distinct_ids: Set[str] + _groups: dict[str, str] + _distinct_ids: set[str] _properties: Properties def __init__(self, *, kernel: bool, cluster: "Cluster", x: int, y: int): @@ -397,7 +392,7 @@ def attempt_session(self): # Abstract methods - def decide_feature_flags(self) -> Dict[str, Any]: + def decide_feature_flags(self) -> dict[str, Any]: """Determine feature flags in force at present.""" return {} diff --git a/posthog/demo/matrix/randomization.py b/posthog/demo/matrix/randomization.py index ca6bcfd588640..d017c295321dc 100644 --- a/posthog/demo/matrix/randomization.py +++ b/posthog/demo/matrix/randomization.py @@ -1,10 +1,9 @@ from enum import Enum -from typing import Dict, List, Tuple import mimesis import mimesis.random -WeightedPool = Tuple[List[str], List[int]] +WeightedPool = tuple[list[str], list[int]] class Industry(str, Enum): @@ -27,12 +26,12 @@ class PropertiesProvider(mimesis.BaseProvider): ["Desktop", "Mobile", "Tablet"], [8, 1, 1], ) - OS_WEIGHTED_POOLS: Dict[str, WeightedPool] = { + OS_WEIGHTED_POOLS: dict[str, WeightedPool] = { "Desktop": (["Windows", "Mac OS X", "Linux", "Chrome OS"], [18, 16, 7, 1]), "Mobile": (["iOS", "Android"], [1, 1]), "Tablet": (["iOS", "Android"], [1, 1]), } - BROWSER_WEIGHTED_POOLS: Dict[str, WeightedPool] = { + BROWSER_WEIGHTED_POOLS: dict[str, WeightedPool] = { "Windows": ( ["Chrome", "Firefox", "Opera", "Microsoft Edge", "Internet Explorer"], [12, 4, 2, 1, 1], @@ -65,7 +64,7 @@ class PropertiesProvider(mimesis.BaseProvider): random: mimesis.random.Random - def device_type_os_browser(self) -> Tuple[str, str, str]: + def device_type_os_browser(self) -> tuple[str, str, str]: device_type_pool, device_type_weights = self.DEVICE_TYPE_WEIGHTED_POOL device_type = self.random.choices(device_type_pool, device_type_weights)[0] os_pool, os_weights = self.OS_WEIGHTED_POOLS[device_type] diff --git a/posthog/demo/matrix/taxonomy_inference.py b/posthog/demo/matrix/taxonomy_inference.py index cc5686de96b0b..e05dc67f33368 100644 --- a/posthog/demo/matrix/taxonomy_inference.py +++ b/posthog/demo/matrix/taxonomy_inference.py @@ -1,5 +1,5 @@ import json -from typing import Dict, List, Optional, Tuple +from typing import Optional from django.utils import timezone @@ -9,7 +9,7 @@ from posthog.models.property_definition import PropertyType -def infer_taxonomy_for_team(team_id: int) -> Tuple[int, int, int]: +def infer_taxonomy_for_team(team_id: int) -> tuple[int, int, int]: """Infer event and property definitions based on ClickHouse data. In production, the plugin server is responsible for this - but in demo data we insert directly to ClickHouse. 
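
Aside from the heatmaps feature, most of this diff is a mechanical typing sweep; the `# noqa: UP006` / `# noqa: UP035` markers that appear in heatmaps_api.py suggest it is driven by Ruff's pyupgrade (UP) rules. The recurring before/after pattern, summarised here as a sketch rather than an exhaustive list:

```python
# Before: typing-module generics, lru_cache(maxsize=None), tuple-based isinstance
from typing import Dict, List, Tuple
from functools import lru_cache

@lru_cache(maxsize=None)
def get_scores() -> Dict[str, List[Tuple[int, float]]]:
    return {}

def is_number(value) -> bool:
    return isinstance(value, (float, int))

# After: built-in generics (PEP 585), functools.cache, and a union type in
# isinstance (PEP 604, Python 3.10+); Callable/Iterable/Sequence/Generator
# imports move from typing to collections.abc, as in the hunks above.
from functools import cache

@cache
def get_scores() -> dict[str, list[tuple[int, float]]]:
    return {}

def is_number(value) -> bool:
    return isinstance(value, float | int)
```
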
@@ -55,13 +55,13 @@ def infer_taxonomy_for_team(team_id: int) -> Tuple[int, int, int]: return len(event_definitions), len(property_definitions), len(event_properties) -def _get_events_last_seen_at(team_id: int) -> Dict[str, timezone.datetime]: +def _get_events_last_seen_at(team_id: int) -> dict[str, timezone.datetime]: from posthog.client import sync_execute return dict(sync_execute(_GET_EVENTS_LAST_SEEN_AT, {"team_id": team_id})) -def _get_property_types(team_id: int) -> Dict[str, Optional[PropertyType]]: +def _get_property_types(team_id: int) -> dict[str, Optional[PropertyType]]: """Determine property types based on ClickHouse data.""" from posthog.client import sync_execute @@ -87,14 +87,14 @@ def _infer_property_type(sample_json_value: str) -> Optional[PropertyType]: parsed_value = json.loads(sample_json_value) if isinstance(parsed_value, bool): return PropertyType.Boolean - if isinstance(parsed_value, (float, int)): + if isinstance(parsed_value, float | int): return PropertyType.Numeric if isinstance(parsed_value, str): return PropertyType.String return None -def _get_event_property_pairs(team_id: int) -> List[Tuple[str, str]]: +def _get_event_property_pairs(team_id: int) -> list[tuple[str, str]]: """Determine which properties have been since with which events based on ClickHouse data.""" from posthog.client import sync_execute diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py index 324dc6b473762..dd694f64aac41 100644 --- a/posthog/demo/products/hedgebox/models.py +++ b/posthog/demo/products/hedgebox/models.py @@ -5,11 +5,7 @@ from typing import ( TYPE_CHECKING, Any, - Dict, - List, Optional, - Set, - Tuple, cast, ) from urllib.parse import urlencode, urlparse, urlunparse @@ -114,9 +110,9 @@ def __hash__(self) -> int: class HedgeboxAccount: id: str created_at: dt.datetime - team_members: Set["HedgeboxPerson"] + team_members: set["HedgeboxPerson"] plan: HedgeboxPlan - files: Set[HedgeboxFile] = field(default_factory=set) + files: set[HedgeboxFile] = field(default_factory=set) was_billing_scheduled: bool = field(default=False) @property @@ -247,7 +243,7 @@ def has_signed_up(self) -> bool: # Abstract methods - def decide_feature_flags(self) -> Dict[str, Any]: + def decide_feature_flags(self) -> dict[str, Any]: if ( self.cluster.simulation_time >= self.cluster.matrix.new_signup_page_experiment_start and self.cluster.simulation_time < self.cluster.matrix.new_signup_page_experiment_end @@ -292,7 +288,7 @@ def determine_session_intent(self) -> Optional[HedgeboxSessionIntent]: # Very low affinity users aren't interested # Non-kernel business users can't log in or sign up return None - possible_intents_with_weights: List[Tuple[HedgeboxSessionIntent, float]] = [] + possible_intents_with_weights: list[tuple[HedgeboxSessionIntent, float]] = [] if self.invite_to_use_id: possible_intents_with_weights.append((HedgeboxSessionIntent.JOIN_TEAM, 1)) elif self.file_to_view: @@ -342,8 +338,8 @@ def determine_session_intent(self) -> Optional[HedgeboxSessionIntent]: if possible_intents_with_weights: possible_intents, weights = zip(*possible_intents_with_weights) return self.cluster.random.choices( - cast(Tuple[HedgeboxSessionIntent], possible_intents), - cast(Tuple[float], weights), + cast(tuple[HedgeboxSessionIntent], possible_intents), + cast(tuple[float], weights), )[0] else: return None @@ -807,10 +803,10 @@ def log_out(self): self.advance_timer(self.cluster.random.uniform(0.1, 0.2)) @property - def invitable_neighbors(self) -> List["HedgeboxPerson"]: + def 
invitable_neighbors(self) -> list["HedgeboxPerson"]: return [ neighbor - for neighbor in cast(List[HedgeboxPerson], self.cluster.list_neighbors(self)) + for neighbor in cast(list[HedgeboxPerson], self.cluster.list_neighbors(self)) if neighbor.is_invitable ] diff --git a/posthog/email.py b/posthog/email.py index 61edb7ae593d2..3590723f7084b 100644 --- a/posthog/email.py +++ b/posthog/email.py @@ -1,5 +1,5 @@ import sys -from typing import Dict, List, Optional +from typing import Optional import lxml import toronado @@ -54,9 +54,9 @@ def is_email_available(with_absolute_urls: bool = False) -> bool: @shared_task(**EMAIL_TASK_KWARGS) def _send_email( campaign_key: str, - to: List[Dict[str, str]], + to: list[dict[str, str]], subject: str, - headers: Dict, + headers: dict, txt_body: str = "", html_body: str = "", reply_to: Optional[str] = None, @@ -65,8 +65,8 @@ def _send_email( Sends built email message asynchronously. """ - messages: List = [] - records: List = [] + messages: list = [] + records: list = [] with transaction.atomic(): for dest in to: @@ -135,8 +135,8 @@ def __init__( campaign_key: str, subject: str, template_name: str, - template_context: Optional[Dict] = None, - headers: Optional[Dict] = None, + template_context: Optional[dict] = None, + headers: Optional[dict] = None, reply_to: Optional[str] = None, ): if template_context is None: @@ -153,7 +153,7 @@ def __init__( self.html_body = inline_css(template.render(template_context)) self.txt_body = "" self.headers = headers if headers else {} - self.to: List[Dict[str, str]] = [] + self.to: list[dict[str, str]] = [] self.reply_to = reply_to def add_recipient(self, email: str, name: Optional[str] = None) -> None: diff --git a/posthog/errors.py b/posthog/errors.py index d028522a599c0..70b3d46dd3c31 100644 --- a/posthog/errors.py +++ b/posthog/errors.py @@ -1,6 +1,6 @@ from dataclasses import dataclass import re -from typing import Dict, Optional +from typing import Optional from clickhouse_driver.errors import ServerException @@ -91,7 +91,7 @@ def look_up_error_code_meta(error: ServerException) -> ErrorCodeMeta: # # Remember to add back the `user_safe` args though! CLICKHOUSE_UNKNOWN_EXCEPTION = ErrorCodeMeta("UNKNOWN_EXCEPTION") -CLICKHOUSE_ERROR_CODE_LOOKUP: Dict[int, ErrorCodeMeta] = { +CLICKHOUSE_ERROR_CODE_LOOKUP: dict[int, ErrorCodeMeta] = { 0: ErrorCodeMeta("OK"), 1: ErrorCodeMeta("UNSUPPORTED_METHOD"), 2: ErrorCodeMeta("UNSUPPORTED_PARAMETER"), diff --git a/posthog/event_usage.py b/posthog/event_usage.py index ae8432c6b2731..cf74b59936365 100644 --- a/posthog/event_usage.py +++ b/posthog/event_usage.py @@ -2,7 +2,7 @@ Module to centralize event reporting on the server-side. """ -from typing import Dict, List, Optional +from typing import Optional import posthoganalytics @@ -107,7 +107,7 @@ def report_user_logged_in( ) -def report_user_updated(user: User, updated_attrs: List[str]) -> None: +def report_user_updated(user: User, updated_attrs: list[str]) -> None: """ Reports a user has been updated. This includes current_team, current_organization & password. 
""" @@ -217,7 +217,7 @@ def report_user_organization_membership_level_changed( ) -def report_user_action(user: User, event: str, properties: Optional[Dict] = None, team: Optional[Team] = None): +def report_user_action(user: User, event: str, properties: Optional[dict] = None, team: Optional[Team] = None): if properties is None: properties = {} posthoganalytics.capture( @@ -254,8 +254,8 @@ def groups(organization: Optional[Organization] = None, team: Optional[Team] = N def report_team_action( team: Team, event: str, - properties: Optional[Dict] = None, - group_properties: Optional[Dict] = None, + properties: Optional[dict] = None, + group_properties: Optional[dict] = None, ): """ For capturing events where it is unclear which user was the core actor we can use the team instead @@ -271,8 +271,8 @@ def report_team_action( def report_organization_action( organization: Organization, event: str, - properties: Optional[Dict] = None, - group_properties: Optional[Dict] = None, + properties: Optional[dict] = None, + group_properties: Optional[dict] = None, ): """ For capturing events where it is unclear which user was the core actor we can use the organization instead diff --git a/posthog/filters.py b/posthog/filters.py index ac098dea92c68..911edcf4596d7 100644 --- a/posthog/filters.py +++ b/posthog/filters.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Tuple, TypeVar, Union +from typing import Optional, TypeVar, Union from django.db import models from django.db.models import Q @@ -19,7 +19,7 @@ class TermSearchFilterBackend(filters.BaseFilterBackend): # The URL query parameter used for the search. search_param = settings.api_settings.SEARCH_PARAM - def get_search_fields(self, view: APIView) -> Optional[List[str]]: + def get_search_fields(self, view: APIView) -> Optional[list[str]]: """ Search fields are obtained from the view. """ @@ -59,10 +59,10 @@ def filter_queryset( def term_search_filter_sql( - search_fields: List[str], + search_fields: list[str], search_terms: Optional[str] = "", search_extra: Optional[str] = "", -) -> Tuple[str, dict]: +) -> tuple[str, dict]: if not search_fields or not search_terms: return "", {} diff --git a/posthog/gzip_middleware.py b/posthog/gzip_middleware.py index 701f31b5dbe3d..cfd57eea0050b 100644 --- a/posthog/gzip_middleware.py +++ b/posthog/gzip_middleware.py @@ -1,5 +1,4 @@ import re -from typing import List from django.conf import settings from django.middleware.gzip import GZipMiddleware @@ -9,7 +8,7 @@ class InvalidGZipAllowList(Exception): pass -def allowed_path(path: str, allowed_paths: List) -> bool: +def allowed_path(path: str, allowed_paths: list) -> bool: return any(pattern.search(path) for pattern in allowed_paths) diff --git a/posthog/health.py b/posthog/health.py index 1ca35d6fe7308..72012928feb4e 100644 --- a/posthog/health.py +++ b/posthog/health.py @@ -17,7 +17,8 @@ # changes to them are deliberate, as otherwise we could introduce unexpected # behaviour in deployments. 
-from typing import Callable, Dict, List, Literal, cast, get_args +from typing import Literal, cast, get_args +from collections.abc import Callable from django.core.cache import cache from django.db import DEFAULT_DB_ALIAS @@ -35,7 +36,7 @@ ServiceRole = Literal["events", "web", "worker", "decide"] -service_dependencies: Dict[ServiceRole, List[str]] = { +service_dependencies: dict[ServiceRole, list[str]] = { "events": ["http", "kafka_connected"], "web": [ "http", @@ -66,7 +67,7 @@ # if atleast one of the checks is True, then the service is considered healthy # for the given role -service_conditional_dependencies: Dict[ServiceRole, List[str]] = { +service_conditional_dependencies: dict[ServiceRole, list[str]] = { "decide": ["cache", "postgres_flags"], } @@ -110,7 +111,7 @@ def readyz(request: HttpRequest): if role and role not in get_args(ServiceRole): return JsonResponse({"error": "InvalidRole"}, status=400) - available_checks: Dict[str, Callable] = { + available_checks: dict[str, Callable] = { "clickhouse": is_clickhouse_connected, "postgres": is_postgres_connected, "postgres_flags": lambda: is_postgres_connected(DATABASE_FOR_FLAG_MATCHING), diff --git a/posthog/heatmaps/heatmaps_api.py b/posthog/heatmaps/heatmaps_api.py new file mode 100644 index 0000000000000..e3fa68b5b4db3 --- /dev/null +++ b/posthog/heatmaps/heatmaps_api.py @@ -0,0 +1,215 @@ +from datetime import datetime, date +from typing import Any, List # noqa: UP035 + +from rest_framework import viewsets, request, response, serializers, status + +from posthog.api.routing import TeamAndOrgViewSetMixin +from posthog.auth import TemporaryTokenAuthentication +from posthog.hogql import ast +from posthog.hogql.ast import Constant +from posthog.hogql.base import Expr +from posthog.hogql.constants import LimitContext +from posthog.hogql.context import HogQLContext +from posthog.hogql.parser import parse_expr, parse_select +from posthog.hogql.query import execute_hogql_query +from posthog.rate_limit import ClickHouseSustainedRateThrottle, ClickHouseBurstRateThrottle +from posthog.schema import HogQLQueryResponse +from posthog.utils import relative_date_parse_with_delta_mapping + +DEFAULT_QUERY = """ + select pointer_target_fixed, pointer_relative_x, client_y, {aggregation_count} + from ( + select + distinct_id, + pointer_target_fixed, + round((x / viewport_width), 2) as pointer_relative_x, + y * scale_factor as client_y + from heatmaps + where {predicates} + ) + group by `pointer_target_fixed`, pointer_relative_x, client_y + """ + +SCROLL_DEPTH_QUERY = """ +SELECT + bucket, + cnt as bucket_count, + sum(cnt) OVER (ORDER BY bucket DESC) AS cumulative_count +FROM ( + SELECT + intDiv(scroll_y, 100) * 100 as bucket, + {aggregation_count} as cnt + FROM ( + SELECT + distinct_id, (y + viewport_height) * scale_factor as scroll_y + FROM heatmaps + WHERE {predicates} + ) + GROUP BY bucket +) +ORDER BY bucket +""" + + +class HeatmapsRequestSerializer(serializers.Serializer): + viewport_width_min = serializers.IntegerField(required=False) + viewport_width_max = serializers.IntegerField(required=False) + type = serializers.CharField(required=False, default="click") + date_from = serializers.CharField(required=False, default="-7d") + date_to = serializers.DateField(required=False) + url_exact = serializers.CharField(required=False) + url_pattern = serializers.CharField(required=False) + aggregation = serializers.ChoiceField( + required=False, + choices=["unique_visitors", "total_count"], + help_text="How to aggregate the response", + 
default="total_count", + ) + + def validate_date_from(self, value) -> date: + try: + if isinstance(value, str): + parsed_date, _, _ = relative_date_parse_with_delta_mapping(value, self.context["team"].timezone_info) + return parsed_date.date() + if isinstance(value, datetime): + return value.date() + if isinstance(value, date): + return value + else: + raise serializers.ValidationError("Invalid date_from provided: {}".format(value)) + except Exception: + raise serializers.ValidationError("Error parsing provided date_from: {}".format(value)) + + def validate(self, values) -> dict: + url_exact = values.get("url_exact", None) + url_pattern = values.get("url_pattern", None) + if isinstance(url_exact, str) and isinstance(url_pattern, str): + if url_exact == url_pattern: + values.pop("url_pattern") + else: + values.pop("url_exact") + + return values + + +class HeatmapResponseItemSerializer(serializers.Serializer): + count = serializers.IntegerField(required=True) + pointer_y = serializers.IntegerField(required=True) + pointer_relative_x = serializers.FloatField(required=True) + pointer_target_fixed = serializers.BooleanField(required=True) + + +class HeatmapsResponseSerializer(serializers.Serializer): + results = HeatmapResponseItemSerializer(many=True) + + +class HeatmapScrollDepthResponseItemSerializer(serializers.Serializer): + cumulative_count = serializers.IntegerField(required=True) + bucket_count = serializers.IntegerField(required=True) + scroll_depth_bucket = serializers.IntegerField(required=True) + + +class HeatmapsScrollDepthResponseSerializer(serializers.Serializer): + results = HeatmapScrollDepthResponseItemSerializer(many=True) + + +class HeatmapViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet): + scope_object = "INTERNAL" + + throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle] + serializer_class = HeatmapsResponseSerializer + + authentication_classes = [TemporaryTokenAuthentication] + + def get_queryset(self): + return None + + def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response.Response: + request_serializer = HeatmapsRequestSerializer(data=request.query_params, context={"team": self.team}) + request_serializer.is_valid(raise_exception=True) + + aggregation = request_serializer.validated_data.pop("aggregation") + placeholders: dict[str, Expr] = {k: Constant(value=v) for k, v in request_serializer.validated_data.items()} + is_scrolldepth_query = placeholders.get("type", None) == Constant(value="scrolldepth") + + raw_query = SCROLL_DEPTH_QUERY if is_scrolldepth_query else DEFAULT_QUERY + + aggregation_count = self._choose_aggregation(aggregation, is_scrolldepth_query) + exprs = self._predicate_expressions(placeholders) + + stmt = parse_select(raw_query, {"aggregation_count": aggregation_count, "predicates": ast.And(exprs=exprs)}) + context = HogQLContext(team_id=self.team.pk, limit_top_select=False) + results = execute_hogql_query(query=stmt, team=self.team, limit_context=LimitContext.HEATMAPS, context=context) + + if is_scrolldepth_query: + return self._return_scroll_depth_response(results) + else: + return self._return_heatmap_coordinates_response(results) + + def _choose_aggregation(self, aggregation, is_scrolldepth_query): + aggregation_value = "count(*) as cnt" if aggregation == "total_count" else "count(distinct distinct_id) as cnt" + if is_scrolldepth_query: + aggregation_value = "count(*)" if aggregation == "total_count" else "count(distinct distinct_id)" + aggregation_count = parse_expr(aggregation_value) + 
return aggregation_count + + @staticmethod + def _predicate_expressions(placeholders: dict[str, Expr]) -> List[ast.Expr]: # noqa: UP006 + predicate_expressions: list[ast.Expr] = [] + + predicate_mapping: dict[str, str] = { + # should always have values + "date_from": "timestamp >= {date_from}", + "type": "`type` = {type}", + # optional + "date_to": "timestamp <= {date_to} + interval 1 day", + "viewport_width_min": "viewport_width >= round({viewport_width_min} / 16)", + "viewport_width_max": "viewport_width <= round({viewport_width_max} / 16)", + "url_exact": "current_url = {url_exact}", + "url_pattern": "match(current_url, {url_pattern})", + } + + for predicate_key in placeholders.keys(): + predicate_expressions.append( + parse_expr(predicate_mapping[predicate_key], {predicate_key: placeholders[predicate_key]}) + ) + + if len(predicate_expressions) == 0: + raise serializers.ValidationError("must always generate some filter conditions") + + return predicate_expressions + + @staticmethod + def _return_heatmap_coordinates_response(query_response: HogQLQueryResponse) -> response.Response: + data = [ + { + "pointer_target_fixed": item[0], + "pointer_relative_x": item[1], + "pointer_y": item[2], + "count": item[3], + } + for item in query_response.results or [] + ] + + response_serializer = HeatmapsResponseSerializer(data={"results": data}) + response_serializer.is_valid(raise_exception=True) + return response.Response(response_serializer.data, status=status.HTTP_200_OK) + + @staticmethod + def _return_scroll_depth_response(query_response: HogQLQueryResponse) -> response.Response: + data = [ + { + "scroll_depth_bucket": item[0], + "bucket_count": item[1], + "cumulative_count": item[2], + } + for item in query_response.results or [] + ] + + response_serializer = HeatmapsScrollDepthResponseSerializer(data={"results": data}) + response_serializer.is_valid(raise_exception=True) + return response.Response(response_serializer.data, status=status.HTTP_200_OK) + + +class LegacyHeatmapViewSet(HeatmapViewSet): + derive_current_team_from_user_only = True diff --git a/posthog/heatmaps/sql.py b/posthog/heatmaps/sql.py new file mode 100644 index 0000000000000..778237e3b484a --- /dev/null +++ b/posthog/heatmaps/sql.py @@ -0,0 +1,166 @@ +from django.conf import settings + +from posthog.clickhouse.kafka_engine import kafka_engine +from posthog.clickhouse.table_engines import ( + Distributed, + ReplicationScheme, + MergeTreeEngine, +) +from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_HEATMAP_EVENTS + +HEATMAPS_DATA_TABLE = lambda: "sharded_heatmaps" + + +""" +We intend to send specific $heatmap events to build a heatmap instead of building from autocapture like the click map +We'll be storing individual clicks per url/team/session +And we'll be querying for those clicks at day level of granularity +And we'll be querying by URL exact or wildcard match +And we'll _sometimes_ be querying by width + +We _could_ aggregate this data by day, but we're hoping this will be small/fast enough not to bother +And can always add a materialized view for day (and week?) 
granularity driven by this data if needed + +We only add session_id so that we could offer example sessions for particular clicked areas in the toolbar +""" + +KAFKA_HEATMAPS_TABLE_BASE_SQL = """ +CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}' +( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String) +) ENGINE = {engine} +""" + +HEATMAPS_TABLE_BASE_SQL = """ +CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}' +( + session_id VARCHAR, + team_id Int64, + distinct_id VARCHAR, + timestamp DateTime64(6, 'UTC'), + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x Int16, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y Int16, + -- stored so that in future we can support other resolutions + scale_factor Int16, + viewport_width Int16, + viewport_height Int16, + -- some elements move when the page scrolls, others do not + pointer_target_fixed Bool, + current_url VARCHAR, + type LowCardinality(String), + _timestamp DateTime, + _offset UInt64, + _partition UInt64 +) ENGINE = {engine} +""" + +HEATMAPS_DATA_TABLE_ENGINE = lambda: MergeTreeEngine("heatmaps", replication_scheme=ReplicationScheme.SHARDED) + +HEATMAPS_TABLE_SQL = lambda: ( + HEATMAPS_TABLE_BASE_SQL + + """ + PARTITION BY toYYYYMM(timestamp) + -- almost always this is being queried by + -- * type, + -- * team_id, + -- * date range, + -- * URL (maybe matching wild cards), + -- * width + -- we'll almost never query this by session id + -- so from least to most cardinality that's + ORDER BY (type, team_id, toDate(timestamp), current_url, viewport_width) +-- I am purposefully not setting index granularity +-- the default is 8192, and we will be loading a lot of data +-- per query, we tend to copy this 512 around the place but +-- i don't think it applies here +""" +).format( + table_name=HEATMAPS_DATA_TABLE(), + cluster=settings.CLICKHOUSE_CLUSTER, + engine=HEATMAPS_DATA_TABLE_ENGINE(), +) + +KAFKA_HEATMAPS_TABLE_SQL = lambda: KAFKA_HEATMAPS_TABLE_BASE_SQL.format( + table_name="kafka_heatmaps", + cluster=settings.CLICKHOUSE_CLUSTER, + engine=kafka_engine(topic=KAFKA_CLICKHOUSE_HEATMAP_EVENTS), +) + +HEATMAPS_TABLE_MV_SQL = ( + lambda: """ +CREATE MATERIALIZED VIEW IF NOT EXISTS heatmaps_mv ON CLUSTER '{cluster}' +TO {database}.{target_table} +AS SELECT + session_id, + team_id, + distinct_id, + timestamp, + -- x is the x with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + x, + -- y is the y with resolution applied, the resolution converts high fidelity mouse positions into an NxN grid + y, + -- stored so that in future we can support other resolutions + scale_factor, + viewport_width, + viewport_height, + -- some elements move when the page scrolls, others do not + pointer_target_fixed, + current_url, + type, + _timestamp, + _offset, + _partition +FROM {database}.kafka_heatmaps +""".format( + 
target_table="writable_heatmaps", + cluster=settings.CLICKHOUSE_CLUSTER, + database=settings.CLICKHOUSE_DATABASE, + ) +) + +# Distributed engine tables are only created if CLICKHOUSE_REPLICATED + +# This table is responsible for writing to sharded_heatmaps based on a sharding key. +WRITABLE_HEATMAPS_TABLE_SQL = lambda: HEATMAPS_TABLE_BASE_SQL.format( + table_name="writable_heatmaps", + cluster=settings.CLICKHOUSE_CLUSTER, + engine=Distributed( + data_table=HEATMAPS_DATA_TABLE(), + sharding_key="cityHash64(concat(toString(team_id), '-', session_id, '-', toString(toDate(timestamp))))", + ), +) + +# This table is responsible for reading from heatmaps on a cluster setting +DISTRIBUTED_HEATMAPS_TABLE_SQL = lambda: HEATMAPS_TABLE_BASE_SQL.format( + table_name="heatmaps", + cluster=settings.CLICKHOUSE_CLUSTER, + engine=Distributed( + data_table=HEATMAPS_DATA_TABLE(), + sharding_key="cityHash64(concat(toString(team_id), '-', session_id, '-', toString(toDate(timestamp))))", + ), +) + +DROP_HEATMAPS_TABLE_SQL = lambda: ( + f"DROP TABLE IF EXISTS {HEATMAPS_DATA_TABLE()} ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'" +) + +TRUNCATE_HEATMAPS_TABLE_SQL = lambda: ( + f"TRUNCATE TABLE IF EXISTS {HEATMAPS_DATA_TABLE()} ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'" +) diff --git a/posthog/heatmaps/test/__snapshots__/test_heatmaps_api.ambr b/posthog/heatmaps/test/__snapshots__/test_heatmaps_api.ambr new file mode 100644 index 0000000000000..222d75a1bb0f4 --- /dev/null +++ b/posthog/heatmaps/test/__snapshots__/test_heatmaps_api.ambr @@ -0,0 +1,392 @@ +# serializer version: 1 +# name: TestSessionRecordings.test_can_filter_by_exact_url + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'rageclick'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')), equals(heatmaps.current_url, 'http://example.com'))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_filter_by_exact_url.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'rageclick'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')), equals(heatmaps.current_url, 'http://example.com/about'))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_filter_by_exact_url.2 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT 
pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'rageclick'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')), match(heatmaps.current_url, 'http://example.com*'))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_filter_by_viewport_0_min_150 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), ifNull(greaterOrEquals(heatmaps.viewport_width, round(divide(150, 16))), 0), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_filter_by_viewport_1_min_161 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), ifNull(greaterOrEquals(heatmaps.viewport_width, round(divide(161, 16))), 0), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_filter_by_viewport_2_min_177 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), ifNull(greaterOrEquals(heatmaps.viewport_width, round(divide(177, 16))), 0), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 
1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_filter_by_viewport_3_min_201 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), ifNull(greaterOrEquals(heatmaps.viewport_width, round(divide(201, 16))), 0), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_filter_by_viewport_4_min_161_and_max_192 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), ifNull(greaterOrEquals(heatmaps.viewport_width, round(divide(161, 16))), 0), ifNull(lessOrEquals(heatmaps.viewport_width, round(divide(192, 16))), 0), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_all_data_response + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_count_by_aggregation + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE 
and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_count_by_aggregation.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(DISTINCT distinct_id) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_empty_response + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2024-05-03')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_filter_by_click + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_filter_by_click.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), 
equals(heatmaps.type, 'rageclick'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_filter_by_date_from + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_filter_by_relative_date_from + ''' + /* user_id:0 request:_snapshot_ */ + SELECT pointer_target_fixed AS pointer_target_fixed, + pointer_relative_x AS pointer_relative_x, + client_y AS client_y, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + heatmaps.pointer_target_fixed AS pointer_target_fixed, + round(divide(heatmaps.x, heatmaps.viewport_width), 2) AS pointer_relative_x, + multiply(heatmaps.y, heatmaps.scale_factor) AS client_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'click'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-08')))) + GROUP BY pointer_target_fixed, + pointer_relative_x, + client_y + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- +# name: TestSessionRecordings.test_can_get_scrolldepth_counts + ''' + /* user_id:0 request:_snapshot_ */ + SELECT bucket AS bucket, + cnt AS bucket_count, + sum(cnt) OVER ( + ORDER BY bucket DESC) AS cumulative_count + FROM + (SELECT multiply(intDiv(scroll_y, 100), 100) AS bucket, + count(*) AS cnt + FROM + (SELECT heatmaps.distinct_id AS distinct_id, + multiply(plus(heatmaps.y, heatmaps.viewport_height), heatmaps.scale_factor) AS scroll_y + FROM heatmaps + WHERE and(equals(heatmaps.team_id, 2), equals(heatmaps.type, 'scrolldepth'), greaterOrEquals(toTimeZone(heatmaps.timestamp, 'UTC'), toDate('2023-03-06')))) + GROUP BY bucket) + ORDER BY bucket ASC + LIMIT 1000000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 + ''' +# --- diff --git a/posthog/heatmaps/test/test_heatmaps_api.py b/posthog/heatmaps/test/test_heatmaps_api.py new file mode 100644 index 0000000000000..18a2c2205d4e6 --- /dev/null +++ b/posthog/heatmaps/test/test_heatmaps_api.py @@ -0,0 +1,361 @@ +import freezegun +from django.http import HttpResponse +from parameterized import parameterized +from rest_framework import status + +from posthog.kafka_client.client import ClickhouseProducer +from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS +from posthog.models import Organization, Team +from posthog.models.event.util import format_clickhouse_timestamp +from posthog.test.base import 
APIBaseTest, ClickhouseTestMixin, QueryMatchingTest, snapshot_clickhouse_queries + + +INSERT_SINGLE_HEATMAP_EVENT = """ +INSERT INTO sharded_heatmaps ( + session_id, + team_id, + distinct_id, + timestamp, + x, + y, + scale_factor, + viewport_width, + viewport_height, + pointer_target_fixed, + current_url, + type +) +SELECT + %(session_id)s, + %(team_id)s, + %(distinct_id)s, + %(timestamp)s, + %(x)s, + %(y)s, + %(scale_factor)s, + %(viewport_width)s, + %(viewport_height)s, + %(pointer_target_fixed)s, + %(current_url)s, + %(type)s +""" + + +class TestSessionRecordings(APIBaseTest, ClickhouseTestMixin, QueryMatchingTest): + CLASS_DATA_LEVEL_SETUP = False + + def _assert_heatmap_no_result_count( + self, params: dict[str, str | int | None] | None, expected_status_code: int = status.HTTP_200_OK + ) -> None: + response = self._get_heatmap(params, expected_status_code) + if response.status_code == status.HTTP_200_OK: + assert len(response.json()["results"]) == 0 + + def _assert_heatmap_single_result_count( + self, params: dict[str, str | int | None] | None, expected_grouped_count: int + ) -> None: + response = self._get_heatmap(params) + assert len(response.json()["results"]) == 1 + assert response.json()["results"][0]["count"] == expected_grouped_count + + def _get_heatmap( + self, params: dict[str, str | int | None] | None, expected_status_code: int = status.HTTP_200_OK + ) -> HttpResponse: + if params is None: + params = {} + + query_params = "&".join([f"{key}={value}" for key, value in params.items()]) + response = self.client.get(f"/api/heatmap/?{query_params}") + assert response.status_code == expected_status_code, response.json() + + return response + + @snapshot_clickhouse_queries + def test_can_get_empty_response(self) -> None: + response = self.client.get("/api/heatmap/?date_from=2024-05-03") + assert response.status_code == 200 + self.assertEqual(response.json(), {"results": []}) + + @snapshot_clickhouse_queries + def test_can_get_all_data_response(self) -> None: + self._create_heatmap_event("session_1", "click") + self._create_heatmap_event("session_2", "click") + + self._assert_heatmap_single_result_count({"date_from": "2023-03-08"}, 2) + + def test_cannot_query_across_teams(self) -> None: + self._create_heatmap_event("session_1", "click") + self._create_heatmap_event("session_2", "click") + + org = Organization.objects.create(name="Separate Org") + other_team = Team.objects.create(organization=org, name="other orgs team") + self._create_heatmap_event("session_1", "click", team_id=other_team.pk) + + # second team's click is not counted + self._assert_heatmap_single_result_count({"date_from": "2023-03-08"}, 2) + + @snapshot_clickhouse_queries + def test_can_get_filter_by_date_from(self) -> None: + self._create_heatmap_event("session_1", "click", "2023-03-07T07:00:00") + self._create_heatmap_event("session_2", "click", "2023-03-08T08:00:00") + + self._assert_heatmap_single_result_count({"date_from": "2023-03-08"}, 1) + + @snapshot_clickhouse_queries + @freezegun.freeze_time("2023-03-15T09:00:00") + def test_can_get_filter_by_relative_date_from(self) -> None: + self._create_heatmap_event("session_1", "click", "2023-03-07T07:00:00") + self._create_heatmap_event("session_2", "click", "2023-03-08T08:00:00") + + self._assert_heatmap_single_result_count({"date_from": "-7d"}, 1) + + @snapshot_clickhouse_queries + def test_can_get_filter_by_click(self) -> None: + self._create_heatmap_event("session_1", "click", "2023-03-08T07:00:00") + self._create_heatmap_event("session_2", "rageclick", 
"2023-03-08T08:00:00") + self._create_heatmap_event("session_2", "rageclick", "2023-03-08T08:01:00") + + self._assert_heatmap_single_result_count({"date_from": "2023-03-08", "type": "click"}, 1) + + self._assert_heatmap_single_result_count({"date_from": "2023-03-08", "type": "rageclick"}, 2) + + @snapshot_clickhouse_queries + def test_can_filter_by_exact_url(self) -> None: + self._create_heatmap_event("session_1", "rageclick", "2023-03-08T08:00:00", current_url="http://example.com") + self._create_heatmap_event( + "session_2", "rageclick", "2023-03-08T08:01:00", current_url="http://example.com/about" + ) + self._create_heatmap_event( + "session_3", "rageclick", "2023-03-08T08:01:00", current_url="http://example.com/about" + ) + + self._assert_heatmap_single_result_count( + {"date_from": "2023-03-08", "url_exact": "http://example.com", "type": "rageclick"}, 1 + ) + + self._assert_heatmap_single_result_count( + {"date_from": "2023-03-08", "url_exact": "http://example.com/about", "type": "rageclick"}, 2 + ) + + self._assert_heatmap_single_result_count( + {"date_from": "2023-03-08", "url_pattern": "http://example.com*", "type": "rageclick"}, 3 + ) + + @snapshot_clickhouse_queries + def test_can_get_scrolldepth_counts(self) -> None: + # to calculate expected scroll depth bucket from y and viewport height + # ((round(y/16) + round(viewport_height/16)) * 16 // 100) * 100 + + # scroll depth bucket 1000 + self._create_heatmap_event("session_1", "scrolldepth", "2023-03-08T07:00:00", y=10, viewport_height=1000) + self._create_heatmap_event("session_2", "scrolldepth", "2023-03-08T08:00:00", y=100, viewport_height=1000) + # scroll depth bucket 1100 + self._create_heatmap_event("session_3", "scrolldepth", "2023-03-08T08:01:00", y=200, viewport_height=1000) + # scroll depth bucket 1200 + self._create_heatmap_event("session_4", "scrolldepth", "2023-03-08T08:01:00", y=300, viewport_height=1000) + # scroll depth bucket 1300 + self._create_heatmap_event("session_5", "scrolldepth", "2023-03-08T08:01:00", y=400, viewport_height=1000) + # scroll depth bucket 1400 + self._create_heatmap_event("session_6", "scrolldepth", "2023-03-08T08:01:00", y=500, viewport_height=1000) + # scroll depth bucket 1800 + self._create_heatmap_event("session_7", "scrolldepth", "2023-03-08T08:01:00", y=900, viewport_height=1000) + self._create_heatmap_event("session_8", "scrolldepth", "2023-03-08T08:01:00", y=900, viewport_height=1000) + + scroll_response = self._get_heatmap({"date_from": "2023-03-06", "type": "scrolldepth"}) + + assert scroll_response.json() == { + "results": [ + { + "bucket_count": 2, + "cumulative_count": 8, + "scroll_depth_bucket": 1000, + }, + { + "bucket_count": 1, + "cumulative_count": 6, + "scroll_depth_bucket": 1100, + }, + { + "bucket_count": 1, + "cumulative_count": 5, + "scroll_depth_bucket": 1200, + }, + { + "bucket_count": 1, + "cumulative_count": 4, + "scroll_depth_bucket": 1300, + }, + { + "bucket_count": 1, + "cumulative_count": 3, + "scroll_depth_bucket": 1400, + }, + { + "bucket_count": 2, + "cumulative_count": 2, + "scroll_depth_bucket": 1800, + }, + ], + } + + def test_can_get_scrolldepth_counts_by_visitor(self) -> None: + # scroll depth bucket 1000 + self._create_heatmap_event( + "session_1", "scrolldepth", "2023-03-08T07:00:00", y=100, viewport_height=1000, distinct_id="12345" + ) + + # one person only scrolls a little way + # scroll depth bucket 1000 + self._create_heatmap_event( + "session_2", "scrolldepth", "2023-03-08T08:00:00", y=100, viewport_height=1000, distinct_id="34567" + ) + + # the 
first person scrolls further + # scroll depth bucket 1100 + self._create_heatmap_event( + "session_3", "scrolldepth", "2023-03-08T08:01:00", y=200, viewport_height=1000, distinct_id="12345" + ) + + scroll_response = self._get_heatmap( + {"date_from": "2023-03-06", "type": "scrolldepth", "aggregation": "unique_visitors"} + ) + + assert scroll_response.json() == { + "results": [ + { + "bucket_count": 2, + "cumulative_count": 3, + "scroll_depth_bucket": 1000, + }, + { + "bucket_count": 1, + "cumulative_count": 1, + "scroll_depth_bucket": 1100, + }, + ], + } + + @staticmethod + def heatmap_result(relative_x: float, count: int) -> dict: + return { + "count": count, + "pointer_relative_x": relative_x, + "pointer_target_fixed": True, + "pointer_y": 16, + } + + @parameterized.expand( + [ + [ + "min_150", + {"date_from": "2023-03-08", "viewport_width_min": "150"}, + [heatmap_result(0.08, 1), heatmap_result(0.09, 1), heatmap_result(0.1, 1), heatmap_result(0.11, 2)], + ], + [ + "min_161", + {"date_from": "2023-03-08", "viewport_width_min": "161"}, + [ + heatmap_result(0.08, 1), + heatmap_result(0.09, 1), + heatmap_result(0.1, 1), + ], + ], + [ + "min_177", + {"date_from": "2023-03-08", "viewport_width_min": "177"}, + [ + heatmap_result(0.08, 1), + heatmap_result(0.09, 1), + ], + ], + ["min_201", {"date_from": "2023-03-08", "viewport_width_min": "201"}, []], + [ + "min_161_and_max_192", + {"date_from": "2023-03-08", "viewport_width_min": 161, "viewport_width_max": 192}, + [heatmap_result(0.08, 1), heatmap_result(0.09, 1), heatmap_result(0.1, 1)], + ], + ] + ) + @snapshot_clickhouse_queries + def test_can_filter_by_viewport(self, _name: str, query_params: dict, expected_results: list) -> None: + # all these xs = round(10/16) = 1 + + # viewport widths that scale to 9 + self._create_heatmap_event("session_1", "click", "2023-03-08T08:00:00", 150) + self._create_heatmap_event("session_2", "click", "2023-03-08T08:00:00", 151) + + # viewport widths that scale to 10 + self._create_heatmap_event("session_3", "click", "2023-03-08T08:01:00", 152) + self._create_heatmap_event("session_3", "click", "2023-03-08T08:01:00", 161) + + # viewport width that scales to 11 + self._create_heatmap_event("session_3", "click", "2023-03-08T08:01:00", 177) + # viewport width that scales to 12 + self._create_heatmap_event("session_3", "click", "2023-03-08T08:01:00", 193) + + response = self._get_heatmap(query_params) + assert sorted(response.json()["results"], key=lambda k: k["pointer_relative_x"]) == expected_results + + @snapshot_clickhouse_queries + def test_can_get_count_by_aggregation(self) -> None: + # 3 items but 2 visitors + self._create_heatmap_event("session_1", "click", distinct_id="12345") + self._create_heatmap_event("session_2", "click", distinct_id="12345") + self._create_heatmap_event("session_3", "click", distinct_id="54321") + + self._assert_heatmap_single_result_count({"date_from": "2023-03-08"}, 3) + self._assert_heatmap_single_result_count({"date_from": "2023-03-08", "aggregation": "unique_visitors"}, 2) + + @parameterized.expand( + [ + ["total_count", status.HTTP_200_OK], + ["unique_visitors", status.HTTP_200_OK], + ["direction", status.HTTP_400_BAD_REQUEST], + # equivalent to not providing it + ["", status.HTTP_200_OK], + [" ", status.HTTP_400_BAD_REQUEST], + [None, status.HTTP_400_BAD_REQUEST], + ] + ) + def test_only_allow_valid_values_for_aggregation(self, choice: str | None, expected_status_code: int) -> None: + self._assert_heatmap_no_result_count( + {"date_from": "2023-03-08", "aggregation": choice}, 
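+            # the status code is always asserted by the helper; the empty-results check only applies to 200 responses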
expected_status_code=expected_status_code + ) + + def _create_heatmap_event( + self, + session_id: str, + type: str, + date_from: str = "2023-03-08T09:00:00", + viewport_width: int = 100, + viewport_height: int = 100, + x: int = 10, + y: int = 20, + current_url: str | None = None, + distinct_id: str = "user_distinct_id", + team_id: int | None = None, + ) -> None: + if team_id is None: + team_id = self.team.pk + + p = ClickhouseProducer() + # because this is in a test it will write directly using SQL not really with Kafka + p.produce( + topic=KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, + sql=INSERT_SINGLE_HEATMAP_EVENT, + data={ + "session_id": session_id, + "team_id": team_id, + "distinct_id": distinct_id, + "timestamp": format_clickhouse_timestamp(date_from), + "x": round(x / 16), + "y": round(y / 16), + "scale_factor": 16, + # this adjustment is done at ingestion + "viewport_width": round(viewport_width / 16), + "viewport_height": round(viewport_height / 16), + "type": type, + "pointer_target_fixed": True, + "current_url": current_url if current_url else "http://posthog.com", + }, + ) diff --git a/posthog/helpers/dashboard_templates.py b/posthog/helpers/dashboard_templates.py index cfaa2bac5e1d1..0e3f8a81f9536 100644 --- a/posthog/helpers/dashboard_templates.py +++ b/posthog/helpers/dashboard_templates.py @@ -1,4 +1,5 @@ -from typing import Callable, Dict, List, Optional +from typing import Optional +from collections.abc import Callable import structlog @@ -28,7 +29,7 @@ from posthog.models.insight import Insight from posthog.models.tag import Tag -DASHBOARD_COLORS: List[str] = ["white", "blue", "green", "purple", "black"] +DASHBOARD_COLORS: list[str] = ["white", "blue", "green", "purple", "black"] logger = structlog.get_logger(__name__) @@ -444,7 +445,7 @@ def _create_default_app_items(dashboard: Dashboard) -> None: create_from_template(dashboard, template) -DASHBOARD_TEMPLATES: Dict[str, Callable] = { +DASHBOARD_TEMPLATES: dict[str, Callable] = { "DEFAULT_APP": _create_default_app_items, "WEBSITE_TRAFFIC": _create_website_dashboard, } @@ -491,7 +492,7 @@ def create_from_template(dashboard: Dashboard, template: DashboardTemplate) -> N logger.error("dashboard_templates.creation.unknown_type", template=template) -def _create_tile_for_text(dashboard: Dashboard, body: str, layouts: Dict, color: Optional[str]) -> None: +def _create_tile_for_text(dashboard: Dashboard, body: str, layouts: dict, color: Optional[str]) -> None: text = Text.objects.create( team=dashboard.team, body=body, @@ -507,11 +508,11 @@ def _create_tile_for_text(dashboard: Dashboard, body: str, layouts: Dict, color: def _create_tile_for_insight( dashboard: Dashboard, name: str, - filters: Dict, + filters: dict, description: str, - layouts: Dict, + layouts: dict, color: Optional[str], - query: Optional[Dict] = None, + query: Optional[dict] = None, ) -> None: filter_test_accounts = filters.get("filter_test_accounts", True) insight = Insight.objects.create( diff --git a/posthog/helpers/multi_property_breakdown.py b/posthog/helpers/multi_property_breakdown.py index edc5fe68f1bfb..94fc538b2957d 100644 --- a/posthog/helpers/multi_property_breakdown.py +++ b/posthog/helpers/multi_property_breakdown.py @@ -1,12 +1,12 @@ import copy -from typing import Any, Dict, List, Union +from typing import Any, Union -funnel_with_breakdown_type = List[List[Dict[str, Any]]] -possible_funnel_results_types = Union[funnel_with_breakdown_type, List[Dict[str, Any]], Dict[str, Any]] +funnel_with_breakdown_type = list[list[dict[str, Any]]] 
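+# funnel_with_breakdown_type holds one inner list of funnel step dicts per breakdown series,
+# e.g. [[{"breakdown": ["a1", "b1"], "breakdown_value": ["a1", "b1"]}]]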
+possible_funnel_results_types = Union[funnel_with_breakdown_type, list[dict[str, Any]], dict[str, Any]] def protect_old_clients_from_multi_property_default( - request_filter: Dict[str, Any], result: possible_funnel_results_types + request_filter: dict[str, Any], result: possible_funnel_results_types ) -> possible_funnel_results_types: """ Implementing multi property breakdown will default breakdown to a list even if it is received as a string. @@ -25,7 +25,7 @@ def protect_old_clients_from_multi_property_default( :return: """ - if isinstance(result, Dict) or (len(result) > 1) and isinstance(result[0], Dict): + if isinstance(result, dict) or (len(result) > 1) and isinstance(result[0], dict): return result is_breakdown_request = ( @@ -34,7 +34,7 @@ def protect_old_clients_from_multi_property_default( and "breakdown_type" in request_filter and request_filter["breakdown_type"] in ["person", "event"] ) - is_breakdown_result = isinstance(result, List) and len(result) > 0 and isinstance(result[0], List) + is_breakdown_result = isinstance(result, list) and len(result) > 0 and isinstance(result[0], list) is_single_property_breakdown = ( is_breakdown_request @@ -49,14 +49,14 @@ def protect_old_clients_from_multi_property_default( for series_index in range(len(result)): copied_series = copied_result[series_index] - if isinstance(copied_series, List): + if isinstance(copied_series, list): for data_index in range(len(copied_series)): copied_item = copied_series[data_index] if is_single_property_breakdown: - if copied_item.get("breakdown") and isinstance(copied_item["breakdown"], List): + if copied_item.get("breakdown") and isinstance(copied_item["breakdown"], list): copied_item["breakdown"] = copied_item["breakdown"][0] - if copied_item.get("breakdown_value") and isinstance(copied_item["breakdown_value"], List): + if copied_item.get("breakdown_value") and isinstance(copied_item["breakdown_value"], list): copied_item["breakdown_value"] = copied_item["breakdown_value"][0] if is_multi_property_breakdown: diff --git a/posthog/helpers/tests/test_multi_property_breakdown.py b/posthog/helpers/tests/test_multi_property_breakdown.py index d22675adf84e9..417583ae00965 100644 --- a/posthog/helpers/tests/test_multi_property_breakdown.py +++ b/posthog/helpers/tests/test_multi_property_breakdown.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any from unittest import TestCase from posthog.helpers.multi_property_breakdown import ( @@ -8,8 +8,8 @@ class TestMultiPropertyBreakdown(TestCase): def test_handles_empty_inputs(self): - data: Dict[str, Any] = {} - result: List = [] + data: dict[str, Any] = {} + result: list = [] try: protect_old_clients_from_multi_property_default(data, result) @@ -17,12 +17,12 @@ def test_handles_empty_inputs(self): raise AssertionError("should not raise any KeyError") def test_handles_empty_breakdowns_array(self): - data: Dict[str, Any] = { + data: dict[str, Any] = { "breakdowns": [], "insight": "FUNNELS", "breakdown_type": "event", } - result: List = [] + result: list = [] try: protect_old_clients_from_multi_property_default(data, result) @@ -30,37 +30,37 @@ def test_handles_empty_breakdowns_array(self): raise AssertionError("should not raise any KeyError") def test_keeps_multi_property_breakdown_for_multi_property_requests(self): - data: Dict[str, Any] = { + data: dict[str, Any] = { "breakdowns": ["a", "b"], "insight": "FUNNELS", "breakdown_type": "event", } - result: List[List[Dict[str, Any]]] = [[{"breakdown": ["a1", "b1"], "breakdown_value": ["a1", "b1"]}]] 
+ result: list[list[dict[str, Any]]] = [[{"breakdown": ["a1", "b1"], "breakdown_value": ["a1", "b1"]}]] actual = protect_old_clients_from_multi_property_default(data, result) # to satisfy mypy - assert isinstance(actual, List) + assert isinstance(actual, list) series = actual[0] - assert isinstance(series, List) + assert isinstance(series, list) data = series[0] assert data["breakdowns"] == ["a1", "b1"] assert "breakdown" not in data def test_flattens_multi_property_breakdown_for_single_property_requests(self): - data: Dict[str, Any] = { + data: dict[str, Any] = { "breakdown": "a", "insight": "FUNNELS", "breakdown_type": "event", } - result: List[List[Dict[str, Any]]] = [[{"breakdown": ["a1"], "breakdown_value": ["a1", "b1"]}]] + result: list[list[dict[str, Any]]] = [[{"breakdown": ["a1"], "breakdown_value": ["a1", "b1"]}]] actual = protect_old_clients_from_multi_property_default(data, result) # to satisfy mypy - assert isinstance(actual, List) + assert isinstance(actual, list) series = actual[0] - assert isinstance(series, List) + assert isinstance(series, list) data = series[0] assert data["breakdown"] == "a1" assert "breakdowns" not in data diff --git a/posthog/hogql/ai.py b/posthog/hogql/ai.py index 15b03e82e5030..71a565ec77773 100644 --- a/posthog/hogql/ai.py +++ b/posthog/hogql/ai.py @@ -63,7 +63,7 @@ def write_sql_from_prompt(prompt: str, *, current_query: Optional[str] = None, t schema_description = "\n\n".join( ( f"Table {table_name} with fields:\n" - + "\n".join((f'- {field["key"]} ({field["type"]})' for field in table_fields)) + + "\n".join(f'- {field["key"]} ({field["type"]})' for field in table_fields) for table_name, table_fields in serialized_database.items() ) ) diff --git a/posthog/hogql/ast.py b/posthog/hogql/ast.py index ccb3f9f34576d..e3fa80b3f3ee8 100644 --- a/posthog/hogql/ast.py +++ b/posthog/hogql/ast.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, Literal, Optional, Union from dataclasses import dataclass, field from posthog.hogql.base import Type, Expr, CTE, ConstantType, UnknownType, AST @@ -143,14 +143,14 @@ class SelectQueryType(Type): """Type and new enclosed scope for a select query. 
Contains information about all tables and columns in the query.""" # all aliases a select query has access to in its scope - aliases: Dict[str, FieldAliasType] = field(default_factory=dict) + aliases: dict[str, FieldAliasType] = field(default_factory=dict) # all types a select query exports - columns: Dict[str, Type] = field(default_factory=dict) + columns: dict[str, Type] = field(default_factory=dict) # all from and join, tables and subqueries with aliases - tables: Dict[str, TableOrSelectType] = field(default_factory=dict) - ctes: Dict[str, CTE] = field(default_factory=dict) + tables: dict[str, TableOrSelectType] = field(default_factory=dict) + ctes: dict[str, CTE] = field(default_factory=dict) # all from and join subqueries without aliases - anonymous_tables: List[Union["SelectQueryType", "SelectUnionQueryType"]] = field(default_factory=list) + anonymous_tables: list[Union["SelectQueryType", "SelectUnionQueryType"]] = field(default_factory=list) # the parent select query, if this is a lambda parent: Optional[Union["SelectQueryType", "SelectUnionQueryType"]] = None @@ -173,7 +173,7 @@ def has_child(self, name: str, context: HogQLContext) -> bool: @dataclass(kw_only=True) class SelectUnionQueryType(Type): - types: List[SelectQueryType] + types: list[SelectQueryType] def get_alias_for_table_type(self, table_type: TableOrSelectType) -> Optional[str]: return self.types[0].get_alias_for_table_type(table_type) @@ -313,7 +313,7 @@ def print_type(self) -> str: @dataclass(kw_only=True) class TupleType(ConstantType): data_type: ConstantDataType = field(default="tuple", init=False) - item_types: List[ConstantType] + item_types: list[ConstantType] def print_type(self) -> str: return "Tuple" @@ -322,8 +322,8 @@ def print_type(self) -> str: @dataclass(kw_only=True) class CallType(Type): name: str - arg_types: List[ConstantType] - param_types: Optional[List[ConstantType]] = None + arg_types: list[ConstantType] + param_types: Optional[list[ConstantType]] = None return_type: ConstantType def resolve_constant_type(self, context: HogQLContext) -> ConstantType: @@ -337,7 +337,7 @@ class AsteriskType(Type): @dataclass(kw_only=True) class FieldTraverserType(Type): - chain: List[str | int] + chain: list[str | int] table_type: TableOrSelectType @@ -400,7 +400,7 @@ def resolve_table_type(self, context: HogQLContext): @dataclass(kw_only=True) class PropertyType(Type): - chain: List[str | int] + chain: list[str | int] field_type: FieldType # The property has been moved into a field we query from a joined subquery @@ -449,12 +449,12 @@ class ArithmeticOperation(Expr): @dataclass(kw_only=True) class And(Expr): type: Optional[ConstantType] = None - exprs: List[Expr] + exprs: list[Expr] @dataclass(kw_only=True) class Or(Expr): - exprs: List[Expr] + exprs: list[Expr] type: Optional[ConstantType] = None @@ -509,7 +509,7 @@ class ArrayAccess(Expr): @dataclass(kw_only=True) class Array(Expr): - exprs: List[Expr] + exprs: list[Expr] @dataclass(kw_only=True) @@ -520,12 +520,12 @@ class TupleAccess(Expr): @dataclass(kw_only=True) class Tuple(Expr): - exprs: List[Expr] + exprs: list[Expr] @dataclass(kw_only=True) class Lambda(Expr): - args: List[str] + args: list[str] expr: Expr @@ -536,7 +536,7 @@ class Constant(Expr): @dataclass(kw_only=True) class Field(Expr): - chain: List[str | int] + chain: list[str | int] @dataclass(kw_only=True) @@ -548,8 +548,8 @@ class Placeholder(Expr): class Call(Expr): name: str """Function name""" - args: List[Expr] - params: Optional[List[Expr]] = None + args: list[Expr] + params: 
Optional[list[Expr]] = None """ Parameters apply to some aggregate functions, see ClickHouse docs: https://clickhouse.com/docs/en/sql-reference/aggregate-functions/parametric-functions @@ -569,7 +569,7 @@ class JoinExpr(Expr): join_type: Optional[str] = None table: Optional[Union["SelectQuery", "SelectUnionQuery", Field]] = None - table_args: Optional[List[Expr]] = None + table_args: Optional[list[Expr]] = None alias: Optional[str] = None table_final: Optional[bool] = None constraint: Optional["JoinConstraint"] = None @@ -585,8 +585,8 @@ class WindowFrameExpr(Expr): @dataclass(kw_only=True) class WindowExpr(Expr): - partition_by: Optional[List[Expr]] = None - order_by: Optional[List[OrderExpr]] = None + partition_by: Optional[list[Expr]] = None + order_by: Optional[list[OrderExpr]] = None frame_method: Optional[Literal["ROWS", "RANGE"]] = None frame_start: Optional[WindowFrameExpr] = None frame_end: Optional[WindowFrameExpr] = None @@ -595,7 +595,7 @@ class WindowExpr(Expr): @dataclass(kw_only=True) class WindowFunction(Expr): name: str - args: Optional[List[Expr]] = None + args: Optional[list[Expr]] = None over_expr: Optional[WindowExpr] = None over_identifier: Optional[str] = None @@ -604,20 +604,20 @@ class WindowFunction(Expr): class SelectQuery(Expr): # :TRICKY: When adding new fields, make sure they're handled in visitor.py and resolver.py type: Optional[SelectQueryType] = None - ctes: Optional[Dict[str, CTE]] = None - select: List[Expr] + ctes: Optional[dict[str, CTE]] = None + select: list[Expr] distinct: Optional[bool] = None select_from: Optional[JoinExpr] = None array_join_op: Optional[str] = None - array_join_list: Optional[List[Expr]] = None - window_exprs: Optional[Dict[str, WindowExpr]] = None + array_join_list: Optional[list[Expr]] = None + window_exprs: Optional[dict[str, WindowExpr]] = None where: Optional[Expr] = None prewhere: Optional[Expr] = None having: Optional[Expr] = None - group_by: Optional[List[Expr]] = None - order_by: Optional[List[OrderExpr]] = None + group_by: Optional[list[Expr]] = None + order_by: Optional[list[OrderExpr]] = None limit: Optional[Expr] = None - limit_by: Optional[List[Expr]] = None + limit_by: Optional[list[Expr]] = None limit_with_ties: Optional[bool] = None offset: Optional[Expr] = None settings: Optional[HogQLQuerySettings] = None @@ -627,7 +627,7 @@ class SelectQuery(Expr): @dataclass(kw_only=True) class SelectUnionQuery(Expr): type: Optional[SelectUnionQueryType] = None - select_queries: List[SelectQuery] + select_queries: list[SelectQuery] @dataclass(kw_only=True) @@ -652,7 +652,7 @@ class HogQLXAttribute(AST): @dataclass(kw_only=True) class HogQLXTag(AST): kind: str - attributes: List[HogQLXAttribute] + attributes: list[HogQLXAttribute] def to_dict(self): return { diff --git a/posthog/hogql/autocomplete.py b/posthog/hogql/autocomplete.py index b6d003c1ac88d..c0d4cd8b84f9d 100644 --- a/posthog/hogql/autocomplete.py +++ b/posthog/hogql/autocomplete.py @@ -1,5 +1,6 @@ from copy import copy, deepcopy -from typing import Callable, Dict, List, Optional, cast +from typing import Optional, cast +from collections.abc import Callable from posthog.hogql.context import HogQLContext from posthog.hogql.database.database import Database, create_hogql_database from posthog.hogql.database.models import ( @@ -38,7 +39,7 @@ class GetNodeAtPositionTraverser(TraversingVisitor): start: int end: int - selects: List[ast.SelectQuery] = [] + selects: list[ast.SelectQuery] = [] node: Optional[AST] = None parent_node: Optional[AST] = None last_node: 
Optional[AST] = None @@ -100,13 +101,13 @@ def convert_field_or_table_to_type_string(field_or_table: FieldOrTable) -> str | return "Object" if isinstance(field_or_table, ast.ExpressionField): return "Expression" - if isinstance(field_or_table, (ast.Table, ast.LazyJoin)): + if isinstance(field_or_table, ast.Table | ast.LazyJoin): return "Table" return None -def get_table(context: HogQLContext, join_expr: ast.JoinExpr, ctes: Optional[Dict[str, CTE]]) -> None | Table: +def get_table(context: HogQLContext, join_expr: ast.JoinExpr, ctes: Optional[dict[str, CTE]]) -> None | Table: assert context.database is not None def resolve_fields_on_table(table: Table | None, table_query: ast.SelectQuery) -> Table | None: @@ -120,7 +121,7 @@ def resolve_fields_on_table(table: Table | None, table_query: ast.SelectQuery) - return None selected_columns = node.type.columns - new_fields: Dict[str, FieldOrTable] = {} + new_fields: dict[str, FieldOrTable] = {} for name, field in selected_columns.items(): if isinstance(field, ast.FieldAliasType): underlying_field_name = field.alias @@ -145,7 +146,7 @@ def resolve_fields_on_table(table: Table | None, table_query: ast.SelectQuery) - # Return a new table with a reduced field set class AnonTable(Table): - fields: Dict[str, FieldOrTable] = new_fields + fields: dict[str, FieldOrTable] = new_fields def to_printed_hogql(self): # Use the base table name for resolving property definitions later @@ -184,8 +185,8 @@ def to_printed_hogql(self): return None -def get_tables_aliases(query: ast.SelectQuery, context: HogQLContext) -> Dict[str, ast.Table]: - tables: Dict[str, ast.Table] = {} +def get_tables_aliases(query: ast.SelectQuery, context: HogQLContext) -> dict[str, ast.Table]: + tables: dict[str, ast.Table] = {} if query.select_from is not None and query.select_from.alias is not None: table = get_table(context, query.select_from, query.ctes) @@ -207,7 +208,7 @@ def get_tables_aliases(query: ast.SelectQuery, context: HogQLContext) -> Dict[st # Replaces all ast.FieldTraverser with the underlying node def resolve_table_field_traversers(table: Table, context: HogQLContext) -> Table: new_table = deepcopy(table) - new_fields: Dict[str, FieldOrTable] = {} + new_fields: dict[str, FieldOrTable] = {} for key, field in list(new_table.fields.items()): if not isinstance(field, ast.FieldTraverser): new_fields[key] = field @@ -234,9 +235,9 @@ def resolve_table_field_traversers(table: Table, context: HogQLContext) -> Table return new_table -def append_table_field_to_response(table: Table, suggestions: List[AutocompleteCompletionItem]) -> None: - keys: List[str] = [] - details: List[str | None] = [] +def append_table_field_to_response(table: Table, suggestions: list[AutocompleteCompletionItem]) -> None: + keys: list[str] = [] + details: list[str | None] = [] table_fields = list(table.fields.items()) for field_name, field_or_table in table_fields: # Skip over hidden fields @@ -258,11 +259,11 @@ def append_table_field_to_response(table: Table, suggestions: List[AutocompleteC def extend_responses( - keys: List[str], - suggestions: List[AutocompleteCompletionItem], + keys: list[str], + suggestions: list[AutocompleteCompletionItem], kind: Kind = Kind.Variable, insert_text: Optional[Callable[[str], str]] = None, - details: Optional[List[str | None]] = None, + details: Optional[list[str | None]] = None, ) -> None: suggestions.extend( [ diff --git a/posthog/hogql/bytecode.py b/posthog/hogql/bytecode.py index 2be5c206cf327..f1abb9c4be0ed 100644 --- a/posthog/hogql/bytecode.py +++ 
b/posthog/hogql/bytecode.py @@ -1,4 +1,4 @@ -from typing import List, Any +from typing import Any from posthog.hogql import ast from posthog.hogql.errors import NotImplementedError @@ -39,13 +39,13 @@ } -def to_bytecode(expr: str) -> List[Any]: +def to_bytecode(expr: str) -> list[Any]: from posthog.hogql.parser import parse_expr return create_bytecode(parse_expr(expr)) -def create_bytecode(expr: ast.Expr) -> List[Any]: +def create_bytecode(expr: ast.Expr) -> list[Any]: bytecode = [HOGQL_BYTECODE_IDENTIFIER] bytecode.extend(BytecodeBuilder().visit(expr)) return bytecode diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 45e362c8f8e72..446efe23fbf0d 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -1,6 +1,6 @@ from datetime import date, datetime from enum import Enum -from typing import Optional, Literal, TypeAlias, Tuple, List +from typing import Optional, Literal, TypeAlias from uuid import UUID from pydantic import ConfigDict, BaseModel @@ -18,7 +18,7 @@ ] ConstantSupportedPrimitive: TypeAlias = int | float | str | bool | date | datetime | UUID | None ConstantSupportedData: TypeAlias = ( - ConstantSupportedPrimitive | List[ConstantSupportedPrimitive] | Tuple[ConstantSupportedPrimitive, ...] + ConstantSupportedPrimitive | list[ConstantSupportedPrimitive] | tuple[ConstantSupportedPrimitive, ...] ) # Keywords passed to ClickHouse without transformation @@ -31,6 +31,8 @@ DEFAULT_RETURNED_ROWS = 100 # Max limit for all SELECT queries, and the default for CSV exports. MAX_SELECT_RETURNED_ROWS = 10000 # sync with CSV_EXPORT_LIMIT +# Max limit for heatmaps which don't really need 1 billion so have their own max +MAX_SELECT_HEATMAPS_LIMIT = 1000000 # 1m datapoints # Max limit for all cohort calculations MAX_SELECT_COHORT_CALCULATION_LIMIT = 1000000000 # 1b persons @@ -47,6 +49,7 @@ class LimitContext(str, Enum): QUERY_ASYNC = "query_async" EXPORT = "export" COHORT_CALCULATION = "cohort_calculation" + HEATMAPS = "heatmaps" def get_max_limit_for_context(limit_context: LimitContext) -> int: @@ -54,6 +57,8 @@ def get_max_limit_for_context(limit_context: LimitContext) -> int: return MAX_SELECT_RETURNED_ROWS # 10k elif limit_context in (LimitContext.QUERY, LimitContext.QUERY_ASYNC): return MAX_SELECT_RETURNED_ROWS # 10k + elif limit_context == LimitContext.HEATMAPS: + return MAX_SELECT_HEATMAPS_LIMIT # 1M elif limit_context == LimitContext.COHORT_CALCULATION: return MAX_SELECT_COHORT_CALCULATION_LIMIT # 1b else: @@ -66,6 +71,8 @@ def get_default_limit_for_context(limit_context: LimitContext) -> int: return MAX_SELECT_RETURNED_ROWS # 10k elif limit_context in (LimitContext.QUERY, LimitContext.QUERY_ASYNC): return DEFAULT_RETURNED_ROWS # 100 + elif limit_context == LimitContext.HEATMAPS: + return MAX_SELECT_HEATMAPS_LIMIT # 1M elif limit_context == LimitContext.COHORT_CALCULATION: return MAX_SELECT_COHORT_CALCULATION_LIMIT # 1b else: @@ -92,3 +99,4 @@ class HogQLGlobalSettings(HogQLQuerySettings): readonly: Optional[int] = 2 max_execution_time: Optional[int] = 60 allow_experimental_object_type: Optional[bool] = True + format_csv_allow_double_quotes: Optional[bool] = False diff --git a/posthog/hogql/context.py b/posthog/hogql/context.py index 68692323e059d..9b5b6092a6911 100644 --- a/posthog/hogql/context.py +++ b/posthog/hogql/context.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Any +from typing import TYPE_CHECKING, Literal, Optional, Any from posthog.hogql.timings import 
HogQLTimings from posthog.schema import HogQLNotice, HogQLQueryModifiers @@ -11,7 +11,7 @@ @dataclass class HogQLFieldAccess: - input: List[str] + input: list[str] type: Optional[Literal["event", "event.properties", "person", "person.properties"]] field: Optional[str] sql: str @@ -28,7 +28,7 @@ class HogQLContext: # Virtual database we're querying, will be populated from team_id if not present database: Optional["Database"] = None # If set, will save string constants to this dict. Inlines strings into the query if None. - values: Dict = field(default_factory=dict) + values: dict = field(default_factory=dict) # Are we small part of a non-HogQL query? If so, use custom syntax for accessed person properties. within_non_hogql_query: bool = False # Enable full SELECT queries and subqueries in ClickHouse @@ -39,9 +39,9 @@ class HogQLContext: max_view_depth: int = 1 # Warnings returned with the metadata query - warnings: List["HogQLNotice"] = field(default_factory=list) + warnings: list["HogQLNotice"] = field(default_factory=list) # Notices returned with the metadata query - notices: List["HogQLNotice"] = field(default_factory=list) + notices: list["HogQLNotice"] = field(default_factory=list) # Timings in seconds for different parts of the HogQL query timings: HogQLTimings = field(default_factory=HogQLTimings) # Modifications requested by the HogQL client diff --git a/posthog/hogql/database/argmax.py b/posthog/hogql/database/argmax.py index 5872dc77d8b44..b6c8e3d853bf8 100644 --- a/posthog/hogql/database/argmax.py +++ b/posthog/hogql/database/argmax.py @@ -1,10 +1,11 @@ -from typing import Callable, List, Optional, Dict +from typing import Optional +from collections.abc import Callable def argmax_select( table_name: str, - select_fields: Dict[str, List[str | int]], - group_fields: List[str], + select_fields: dict[str, list[str | int]], + group_fields: list[str], argmax_field: str, deleted_field: Optional[str] = None, ): @@ -14,8 +15,8 @@ def argmax_select( name="argMax", args=[field, ast.Field(chain=[table_name, argmax_field])] ) - fields_to_group: List[ast.Expr] = [] - fields_to_select: List[ast.Expr] = [] + fields_to_group: list[ast.Expr] = [] + fields_to_select: list[ast.Expr] = [] for name, chain in select_fields.items(): if name not in group_fields: fields_to_select.append( diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index fc6cf735c42fd..fc1f665cf3978 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Literal, Optional, TypedDict +from typing import TYPE_CHECKING, Any, ClassVar, Literal, Optional, TypedDict from zoneinfo import ZoneInfo, ZoneInfoNotFoundError from pydantic import ConfigDict, BaseModel from sentry_sdk import capture_exception @@ -22,6 +22,7 @@ ExpressionField, ) from posthog.hogql.database.schema.channel_type import create_initial_channel_type, create_initial_domain_type +from posthog.hogql.database.schema.heatmaps import HeatmapsTable from posthog.hogql.database.schema.log_entries import ( LogEntriesTable, ReplayConsoleLogsLogEntriesTable, @@ -80,6 +81,7 @@ class Database(BaseModel): console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = ReplayConsoleLogsLogEntriesTable() batch_export_log_entries: BatchExportLogEntriesTable = BatchExportLogEntriesTable() sessions: SessionsTable = SessionsTable() + heatmaps: HeatmapsTable = HeatmapsTable() raw_session_replay_events: RawSessionReplayEventsTable = 
RawSessionReplayEventsTable() raw_person_distinct_ids: RawPersonDistinctIdsTable = RawPersonDistinctIdsTable() @@ -94,7 +96,7 @@ class Database(BaseModel): numbers: NumbersTable = NumbersTable() # clunky: keep table names in sync with above - _table_names: ClassVar[List[str]] = [ + _table_names: ClassVar[list[str]] = [ "events", "groups", "persons", @@ -107,7 +109,7 @@ class Database(BaseModel): "sessions", ] - _warehouse_table_names: List[str] = [] + _warehouse_table_names: list[str] = [] _timezone: Optional[str] _week_start_day: Optional[WeekStartDay] @@ -134,7 +136,7 @@ def get_table(self, table_name: str) -> Table: return getattr(self, table_name) raise QueryError(f'Unknown table "{table_name}".') - def get_all_tables(self) -> List[str]: + def get_all_tables(self) -> list[str]: return self._table_names + self._warehouse_table_names def add_warehouse_tables(self, **field_definitions: Any): @@ -224,7 +226,7 @@ def create_hogql_database( if database.events.fields.get(mapping.group_type) is None: database.events.fields[mapping.group_type] = FieldTraverser(chain=[f"group_{mapping.group_type_index}"]) - tables: Dict[str, Table] = {} + tables: dict[str, Table] = {} for table in DataWarehouseTable.objects.filter(team_id=team.pk).exclude(deleted=True): tables[table.name] = table.hogql_definition() @@ -274,6 +276,11 @@ def create_hogql_database( database.add_warehouse_tables(**tables) for join in DataWarehouseJoin.objects.filter(team_id=team.pk).exclude(deleted=True): + # Skip if either table is not present. This can happen if the table was deleted after the join was created. + # User will be prompted on UI to resolve missing tables underlying the JOIN + if not database.has_table(join.source_table_name) or not database.has_table(join.joining_table_name): + continue + try: source_table = database.get_table(join.source_table_name) joining_table = database.get_table(join.joining_table_name) @@ -355,35 +362,35 @@ class _SerializedFieldBase(TypedDict): class SerializedField(_SerializedFieldBase, total=False): - fields: List[str] + fields: list[str] table: str - chain: List[str | int] + chain: list[str | int] -def serialize_database(context: HogQLContext) -> Dict[str, List[SerializedField]]: - tables: Dict[str, List[SerializedField]] = {} +def serialize_database(context: HogQLContext) -> dict[str, list[SerializedField]]: + tables: dict[str, list[SerializedField]] = {} if context.database is None: raise ResolutionError("Must provide database to serialize_database") for table_key in context.database.model_fields.keys(): - field_input: Dict[str, Any] = {} + field_input: dict[str, Any] = {} table = getattr(context.database, table_key, None) if isinstance(table, FunctionCallTable): field_input = table.get_asterisk() elif isinstance(table, Table): field_input = table.fields - field_output: List[SerializedField] = serialize_fields(field_input, context) + field_output: list[SerializedField] = serialize_fields(field_input, context) tables[table_key] = field_output return tables -def serialize_fields(field_input, context: HogQLContext) -> List[SerializedField]: +def serialize_fields(field_input, context: HogQLContext) -> list[SerializedField]: from posthog.hogql.database.models import SavedQuery - field_output: List[SerializedField] = [] + field_output: list[SerializedField] = [] for field_key, field in field_input.items(): if field_key == "team_id": pass diff --git a/posthog/hogql/database/models.py b/posthog/hogql/database/models.py index f6e985d92b4d7..34bec54eca32b 100644 --- 
a/posthog/hogql/database/models.py +++ b/posthog/hogql/database/models.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING +from typing import Any, Optional, TYPE_CHECKING +from collections.abc import Callable from pydantic import ConfigDict, BaseModel from posthog.hogql.base import Expr @@ -65,11 +66,11 @@ class ExpressionField(DatabaseField): class FieldTraverser(FieldOrTable): model_config = ConfigDict(extra="forbid") - chain: List[str | int] + chain: list[str | int] class Table(FieldOrTable): - fields: Dict[str, FieldOrTable] + fields: dict[str, FieldOrTable] model_config = ConfigDict(extra="forbid") def has_field(self, name: str | int) -> bool: @@ -87,12 +88,12 @@ def to_printed_clickhouse(self, context: "HogQLContext") -> str: def to_printed_hogql(self) -> str: raise NotImplementedError("Table.to_printed_hogql not overridden") - def avoid_asterisk_fields(self) -> List[str]: + def avoid_asterisk_fields(self) -> list[str]: return [] def get_asterisk(self): fields_to_avoid = [*self.avoid_asterisk_fields(), "team_id"] - asterisk: Dict[str, FieldOrTable] = {} + asterisk: dict[str, FieldOrTable] = {} for key, field in self.fields.items(): if key in fields_to_avoid: continue @@ -109,10 +110,10 @@ def get_asterisk(self): class LazyJoin(FieldOrTable): model_config = ConfigDict(extra="forbid") - join_function: Callable[[str, str, Dict[str, Any], "HogQLContext", "SelectQuery"], Any] + join_function: Callable[[str, str, dict[str, Any], "HogQLContext", "SelectQuery"], Any] join_table: Table | str - from_field: List[str | int] - to_field: Optional[List[str | int]] = None + from_field: list[str | int] + to_field: Optional[list[str | int]] = None def resolve_table(self, context: "HogQLContext") -> Table: if isinstance(self.join_table, Table): @@ -132,7 +133,7 @@ class LazyTable(Table): model_config = ConfigDict(extra="forbid") def lazy_select( - self, requested_fields: Dict[str, List[str | int]], context: "HogQLContext", node: "SelectQuery" + self, requested_fields: dict[str, list[str | int]], context: "HogQLContext", node: "SelectQuery" ) -> Any: raise NotImplementedError("LazyTable.lazy_select not overridden") diff --git a/posthog/hogql/database/schema/channel_type.py b/posthog/hogql/database/schema/channel_type.py index 24e4d32bab05b..c45c71458d7d1 100644 --- a/posthog/hogql/database/schema/channel_type.py +++ b/posthog/hogql/database/schema/channel_type.py @@ -98,7 +98,7 @@ def wrap_with_null_if_empty(expr: ast.Expr) -> ast.Expr: match({campaign}, '^(.*video.*)$'), 'Paid Video', - 'Paid Other' + 'Paid Unknown' ) ), @@ -125,7 +125,7 @@ def wrap_with_null_if_empty(expr: ast.Expr) -> ast.Expr: match({medium}, 'push$'), 'Push', - 'Other' + 'Unknown' ) ) )""", @@ -139,3 +139,23 @@ def wrap_with_null_if_empty(expr: ast.Expr) -> ast.Expr: "gad_source": wrap_with_null_if_empty(gad_source), }, ) + + +POSSIBLE_CHANNEL_TYPES = [ + "Cross Network", + "Paid Search", + "Paid Video", + "Paid Shopping", + "Paid Other", + "Direct", + "Organic Search", + "Organic Video", + "Organic Shopping", + "Push", + "SMS", + "Audio", + "Email", + "Referral", + "Affiliate", + "Other", +] diff --git a/posthog/hogql/database/schema/cohort_people.py b/posthog/hogql/database/schema/cohort_people.py index c556903d40cdf..255779aef5902 100644 --- a/posthog/hogql/database/schema/cohort_people.py +++ b/posthog/hogql/database/schema/cohort_people.py @@ -1,5 +1,3 @@ -from typing import Dict, List - from posthog.hogql.database.models import ( StringDatabaseField, IntegerDatabaseField, @@ -22,7 +20,7 
@@ } -def select_from_cohort_people_table(requested_fields: Dict[str, List[str | int]], team_id: int): +def select_from_cohort_people_table(requested_fields: dict[str, list[str | int]], team_id: int): from posthog.hogql import ast from posthog.models import Cohort @@ -39,7 +37,7 @@ def select_from_cohort_people_table(requested_fields: Dict[str, List[str | int]] if "cohort_id" not in requested_fields: requested_fields = {**requested_fields, "cohort_id": ["cohort_id"]} - fields: List[ast.Expr] = [ + fields: list[ast.Expr] = [ ast.Alias(alias=name, expr=ast.Field(chain=[table_name, *chain])) for name, chain in requested_fields.items() ] @@ -60,7 +58,7 @@ def select_from_cohort_people_table(requested_fields: Dict[str, List[str | int]] class RawCohortPeople(Table): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { **COHORT_PEOPLE_FIELDS, "sign": IntegerDatabaseField(name="sign"), "version": IntegerDatabaseField(name="version"), @@ -74,9 +72,9 @@ def to_printed_hogql(self): class CohortPeople(LazyTable): - fields: Dict[str, FieldOrTable] = COHORT_PEOPLE_FIELDS + fields: dict[str, FieldOrTable] = COHORT_PEOPLE_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): return select_from_cohort_people_table(requested_fields, context.team_id) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/event_sessions.py b/posthog/hogql/database/schema/event_sessions.py index 31682981ea3ea..fc03357884a6d 100644 --- a/posthog/hogql/database/schema/event_sessions.py +++ b/posthog/hogql/database/schema/event_sessions.py @@ -1,5 +1,5 @@ from copy import deepcopy -from typing import Any, Dict, List, Optional +from typing import Any, Optional from posthog.hogql import ast from posthog.hogql.context import HogQLContext from posthog.hogql.database.models import ( @@ -14,7 +14,7 @@ class EventsSessionSubTable(VirtualTable): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { "id": StringDatabaseField(name="$session_id"), "duration": IntegerDatabaseField(name="session_duration"), } @@ -27,7 +27,7 @@ def to_printed_hogql(self): class GetFieldsTraverser(TraversingVisitor): - fields: List[ast.Field] + fields: list[ast.Field] def __init__(self, expr: ast.Expr): super().__init__() @@ -71,7 +71,7 @@ def visit_field_type(self, node: ast.FieldType): class WhereClauseExtractor: - compare_operators: List[ast.Expr] + compare_operators: list[ast.Expr] def __init__( self, @@ -123,10 +123,10 @@ def _is_field_on_table(self, field: ast.Field) -> bool: return True - def run(self, expr: ast.Expr) -> List[ast.Expr]: - exprs_to_apply: List[ast.Expr] = [] + def run(self, expr: ast.Expr) -> list[ast.Expr]: + exprs_to_apply: list[ast.Expr] = [] - def should_add(expression: ast.Expr, fields: List[ast.Field]) -> bool: + def should_add(expression: ast.Expr, fields: list[ast.Field]) -> bool: for field in fields: on_table = self._is_field_on_table(field) if not on_table: @@ -168,7 +168,7 @@ def should_add(expression: ast.Expr, fields: List[ast.Field]) -> bool: def join_with_events_table_session_duration( from_table: str, to_table: str, - requested_fields: Dict[str, Any], + requested_fields: dict[str, Any], context: HogQLContext, node: ast.SelectQuery, ): diff --git a/posthog/hogql/database/schema/events.py b/posthog/hogql/database/schema/events.py index 88f59a11fd7ef..34941de0ec92a 100644 --- a/posthog/hogql/database/schema/events.py +++ 
b/posthog/hogql/database/schema/events.py @@ -1,5 +1,3 @@ -from typing import Dict - from posthog.hogql.database.models import ( VirtualTable, StringDatabaseField, @@ -20,7 +18,7 @@ class EventsPersonSubTable(VirtualTable): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { "id": StringDatabaseField(name="person_id"), "created_at": DateTimeDatabaseField(name="person_created_at"), "properties": StringJSONDatabaseField(name="person_properties"), @@ -54,7 +52,7 @@ def to_printed_hogql(self): class EventsTable(Table): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { "uuid": StringDatabaseField(name="uuid"), "event": StringDatabaseField(name="event"), "properties": StringJSONDatabaseField(name="properties"), diff --git a/posthog/hogql/database/schema/groups.py b/posthog/hogql/database/schema/groups.py index ad97ff7eb0878..06fc40560b7db 100644 --- a/posthog/hogql/database/schema/groups.py +++ b/posthog/hogql/database/schema/groups.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any from posthog.hogql.ast import SelectQuery from posthog.hogql.context import HogQLContext @@ -24,7 +24,7 @@ } -def select_from_groups_table(requested_fields: Dict[str, List[str | int]]): +def select_from_groups_table(requested_fields: dict[str, list[str | int]]): return argmax_select( table_name="raw_groups", select_fields=requested_fields, @@ -37,7 +37,7 @@ def join_with_group_n_table(group_index: int): def join_with_group_table( from_table: str, to_table: str, - requested_fields: Dict[str, Any], + requested_fields: dict[str, Any], context: HogQLContext, node: SelectQuery, ): @@ -70,7 +70,7 @@ def join_with_group_table( class RawGroupsTable(Table): - fields: Dict[str, FieldOrTable] = GROUPS_TABLE_FIELDS + fields: dict[str, FieldOrTable] = GROUPS_TABLE_FIELDS def to_printed_clickhouse(self, context): return "groups" @@ -80,9 +80,9 @@ def to_printed_hogql(self): class GroupsTable(LazyTable): - fields: Dict[str, FieldOrTable] = GROUPS_TABLE_FIELDS + fields: dict[str, FieldOrTable] = GROUPS_TABLE_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): return select_from_groups_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/heatmaps.py b/posthog/hogql/database/schema/heatmaps.py new file mode 100644 index 0000000000000..959117baef874 --- /dev/null +++ b/posthog/hogql/database/schema/heatmaps.py @@ -0,0 +1,31 @@ +from posthog.hogql.database.models import ( + StringDatabaseField, + DateTimeDatabaseField, + IntegerDatabaseField, + Table, + FieldOrTable, + BooleanDatabaseField, +) + + +class HeatmapsTable(Table): + fields: dict[str, FieldOrTable] = { + "session_id": StringDatabaseField(name="session_id"), + "team_id": IntegerDatabaseField(name="team_id"), + "distinct_id": StringDatabaseField(name="distinct_id"), + "x": IntegerDatabaseField(name="x"), + "y": IntegerDatabaseField(name="y"), + "scale_factor": IntegerDatabaseField(name="scale_factor"), + "viewport_width": IntegerDatabaseField(name="viewport_width"), + "viewport_height": IntegerDatabaseField(name="viewport_height"), + "pointer_target_fixed": BooleanDatabaseField(name="pointer_target_fixed"), + "current_url": StringDatabaseField(name="current_url"), + "timestamp": DateTimeDatabaseField(name="timestamp"), + "type": StringDatabaseField(name="type"), + } + + def to_printed_clickhouse(self, context): + return 
"heatmaps" + + def to_printed_hogql(self): + return "heatmaps" diff --git a/posthog/hogql/database/schema/log_entries.py b/posthog/hogql/database/schema/log_entries.py index 14efaff09ce1f..edd2f761981c3 100644 --- a/posthog/hogql/database/schema/log_entries.py +++ b/posthog/hogql/database/schema/log_entries.py @@ -1,5 +1,3 @@ -from typing import Dict, List - from posthog.hogql import ast from posthog.hogql.database.models import ( Table, @@ -10,7 +8,7 @@ FieldOrTable, ) -LOG_ENTRIES_FIELDS: Dict[str, FieldOrTable] = { +LOG_ENTRIES_FIELDS: dict[str, FieldOrTable] = { "team_id": IntegerDatabaseField(name="team_id"), "log_source": StringDatabaseField(name="log_source"), "log_source_id": StringDatabaseField(name="log_source_id"), @@ -22,7 +20,7 @@ class LogEntriesTable(Table): - fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS + fields: dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS def to_printed_clickhouse(self, context): return "log_entries" @@ -32,10 +30,10 @@ def to_printed_hogql(self): class ReplayConsoleLogsLogEntriesTable(LazyTable): - fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS + fields: dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): - fields: List[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()] + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): + fields: list[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()] return ast.SelectQuery( select=fields, @@ -55,10 +53,10 @@ def to_printed_hogql(self): class BatchExportLogEntriesTable(LazyTable): - fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS + fields: dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): - fields: List[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()] + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): + fields: list[ast.Expr] = [ast.Field(chain=["log_entries", *chain]) for name, chain in requested_fields.items()] return ast.SelectQuery( select=fields, diff --git a/posthog/hogql/database/schema/numbers.py b/posthog/hogql/database/schema/numbers.py index 01c09ac66d797..7590e4041c1d5 100644 --- a/posthog/hogql/database/schema/numbers.py +++ b/posthog/hogql/database/schema/numbers.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional +from typing import Optional from posthog.hogql.database.models import ( IntegerDatabaseField, @@ -12,7 +12,7 @@ class NumbersTable(FunctionCallTable): - fields: Dict[str, FieldOrTable] = NUMBERS_TABLE_FIELDS + fields: dict[str, FieldOrTable] = NUMBERS_TABLE_FIELDS name: str = "numbers" min_args: Optional[int] = 1 diff --git a/posthog/hogql/database/schema/person_distinct_id_overrides.py b/posthog/hogql/database/schema/person_distinct_id_overrides.py index 6045e74ff7679..209c73c346e40 100644 --- a/posthog/hogql/database/schema/person_distinct_id_overrides.py +++ b/posthog/hogql/database/schema/person_distinct_id_overrides.py @@ -1,4 +1,3 @@ -from typing import Dict, List from posthog.hogql.ast import SelectQuery from posthog.hogql.context import HogQLContext @@ -27,7 +26,7 @@ } -def select_from_person_distinct_id_overrides_table(requested_fields: Dict[str, List[str | int]]): +def select_from_person_distinct_id_overrides_table(requested_fields: dict[str, list[str | int]]): # Always include "person_id", as 
it's the key we use to make further joins, and it'd be great if it's available if "person_id" not in requested_fields: requested_fields = {**requested_fields, "person_id": ["person_id"]} @@ -43,7 +42,7 @@ def select_from_person_distinct_id_overrides_table(requested_fields: Dict[str, L def join_with_person_distinct_id_overrides_table( from_table: str, to_table: str, - requested_fields: Dict[str, List[str]], + requested_fields: dict[str, list[str]], context: HogQLContext, node: SelectQuery, ): @@ -65,7 +64,7 @@ def join_with_person_distinct_id_overrides_table( class RawPersonDistinctIdOverridesTable(Table): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { **PERSON_DISTINCT_ID_OVERRIDES_FIELDS, "is_deleted": BooleanDatabaseField(name="is_deleted"), "version": IntegerDatabaseField(name="version"), @@ -79,9 +78,9 @@ def to_printed_hogql(self): class PersonDistinctIdOverridesTable(LazyTable): - fields: Dict[str, FieldOrTable] = PERSON_DISTINCT_ID_OVERRIDES_FIELDS + fields: dict[str, FieldOrTable] = PERSON_DISTINCT_ID_OVERRIDES_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context: HogQLContext, node: SelectQuery): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context: HogQLContext, node: SelectQuery): return select_from_person_distinct_id_overrides_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/person_distinct_ids.py b/posthog/hogql/database/schema/person_distinct_ids.py index dde1f97c27922..9fa00c59c2985 100644 --- a/posthog/hogql/database/schema/person_distinct_ids.py +++ b/posthog/hogql/database/schema/person_distinct_ids.py @@ -1,4 +1,3 @@ -from typing import Dict, List from posthog.hogql.ast import SelectQuery from posthog.hogql.context import HogQLContext @@ -27,7 +26,7 @@ } -def select_from_person_distinct_ids_table(requested_fields: Dict[str, List[str | int]]): +def select_from_person_distinct_ids_table(requested_fields: dict[str, list[str | int]]): # Always include "person_id", as it's the key we use to make further joins, and it'd be great if it's available if "person_id" not in requested_fields: requested_fields = {**requested_fields, "person_id": ["person_id"]} @@ -43,7 +42,7 @@ def select_from_person_distinct_ids_table(requested_fields: Dict[str, List[str | def join_with_person_distinct_ids_table( from_table: str, to_table: str, - requested_fields: Dict[str, List[str]], + requested_fields: dict[str, list[str]], context: HogQLContext, node: SelectQuery, ): @@ -65,7 +64,7 @@ def join_with_person_distinct_ids_table( class RawPersonDistinctIdsTable(Table): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { **PERSON_DISTINCT_IDS_FIELDS, "is_deleted": BooleanDatabaseField(name="is_deleted"), "version": IntegerDatabaseField(name="version"), @@ -79,9 +78,9 @@ def to_printed_hogql(self): class PersonDistinctIdsTable(LazyTable): - fields: Dict[str, FieldOrTable] = PERSON_DISTINCT_IDS_FIELDS + fields: dict[str, FieldOrTable] = PERSON_DISTINCT_IDS_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): return select_from_person_distinct_ids_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/person_overrides.py b/posthog/hogql/database/schema/person_overrides.py index 559ddd3a8013d..366321cf65e41 100644 --- 
a/posthog/hogql/database/schema/person_overrides.py +++ b/posthog/hogql/database/schema/person_overrides.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any from posthog.hogql.ast import SelectQuery from posthog.hogql.context import HogQLContext @@ -14,7 +14,7 @@ from posthog.hogql.errors import ResolutionError from posthog.schema import HogQLQueryModifiers -PERSON_OVERRIDES_FIELDS: Dict[str, FieldOrTable] = { +PERSON_OVERRIDES_FIELDS: dict[str, FieldOrTable] = { "team_id": IntegerDatabaseField(name="team_id"), "old_person_id": StringDatabaseField(name="old_person_id"), "override_person_id": StringDatabaseField(name="override_person_id"), @@ -24,7 +24,7 @@ } -def select_from_person_overrides_table(requested_fields: Dict[str, List[str | int]]): +def select_from_person_overrides_table(requested_fields: dict[str, list[str | int]]): return argmax_select( table_name="raw_person_overrides", select_fields=requested_fields, @@ -36,7 +36,7 @@ def select_from_person_overrides_table(requested_fields: Dict[str, List[str | in def join_with_person_overrides_table( from_table: str, to_table: str, - requested_fields: Dict[str, Any], + requested_fields: dict[str, Any], context: HogQLContext, node: SelectQuery, ): @@ -59,7 +59,7 @@ def join_with_person_overrides_table( class RawPersonOverridesTable(Table): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { **PERSON_OVERRIDES_FIELDS, "version": IntegerDatabaseField(name="version"), } @@ -72,9 +72,9 @@ def to_printed_hogql(self): class PersonOverridesTable(Table): - fields: Dict[str, FieldOrTable] = PERSON_OVERRIDES_FIELDS + fields: dict[str, FieldOrTable] = PERSON_OVERRIDES_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def lazy_select(self, requested_fields: dict[str, list[str | int]], modifiers: HogQLQueryModifiers): return select_from_person_overrides_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/persons.py b/posthog/hogql/database/schema/persons.py index 189da1faee068..14884a7008f60 100644 --- a/posthog/hogql/database/schema/persons.py +++ b/posthog/hogql/database/schema/persons.py @@ -1,4 +1,3 @@ -from typing import Dict, List from posthog.hogql.ast import SelectQuery from posthog.hogql.constants import HogQLQuerySettings @@ -19,7 +18,7 @@ from posthog.hogql.database.schema.persons_pdi import PersonsPDITable, persons_pdi_join from posthog.schema import HogQLQueryModifiers, PersonsArgMaxVersion -PERSONS_FIELDS: Dict[str, FieldOrTable] = { +PERSONS_FIELDS: dict[str, FieldOrTable] = { "id": StringDatabaseField(name="id"), "created_at": DateTimeDatabaseField(name="created_at"), "team_id": IntegerDatabaseField(name="team_id"), @@ -33,7 +32,7 @@ } -def select_from_persons_table(requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): +def select_from_persons_table(requested_fields: dict[str, list[str | int]], modifiers: HogQLQueryModifiers): version = modifiers.personsArgMaxVersion if version == PersonsArgMaxVersion.auto: version = PersonsArgMaxVersion.v1 @@ -85,7 +84,7 @@ def select_from_persons_table(requested_fields: Dict[str, List[str | int]], modi def join_with_persons_table( from_table: str, to_table: str, - requested_fields: Dict[str, List[str | int]], + requested_fields: dict[str, list[str | int]], context: HogQLContext, node: SelectQuery, ): @@ -107,7 +106,7 @@ def join_with_persons_table( class RawPersonsTable(Table): - fields: Dict[str, FieldOrTable] 
= { + fields: dict[str, FieldOrTable] = { **PERSONS_FIELDS, "is_deleted": BooleanDatabaseField(name="is_deleted"), "version": IntegerDatabaseField(name="version"), @@ -121,9 +120,9 @@ def to_printed_hogql(self): class PersonsTable(LazyTable): - fields: Dict[str, FieldOrTable] = PERSONS_FIELDS + fields: dict[str, FieldOrTable] = PERSONS_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): return select_from_persons_table(requested_fields, context.modifiers) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/persons_pdi.py b/posthog/hogql/database/schema/persons_pdi.py index 30fdadee67795..0e30b4e62d275 100644 --- a/posthog/hogql/database/schema/persons_pdi.py +++ b/posthog/hogql/database/schema/persons_pdi.py @@ -1,4 +1,3 @@ -from typing import Dict, List from posthog.hogql.ast import SelectQuery from posthog.hogql.context import HogQLContext @@ -14,7 +13,7 @@ # :NOTE: We already have person_distinct_ids.py, which most tables link to. This persons_pdi.py is a hack to # make "select persons.pdi.distinct_id from persons" work while avoiding circular imports. Don't use directly. -def persons_pdi_select(requested_fields: Dict[str, List[str | int]]): +def persons_pdi_select(requested_fields: dict[str, list[str | int]]): # Always include "person_id", as it's the key we use to make further joins, and it'd be great if it's available if "person_id" not in requested_fields: requested_fields = {**requested_fields, "person_id": ["person_id"]} @@ -32,7 +31,7 @@ def persons_pdi_select(requested_fields: Dict[str, List[str | int]]): def persons_pdi_join( from_table: str, to_table: str, - requested_fields: Dict[str, List[str | int]], + requested_fields: dict[str, list[str | int]], context: HogQLContext, node: SelectQuery, ): @@ -56,13 +55,13 @@ def persons_pdi_join( # :NOTE: We already have person_distinct_ids.py, which most tables link to. This persons_pdi.py is a hack to # make "select persons.pdi.distinct_id from persons" work while avoiding circular imports. Don't use directly. 
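# --- Editor's note: illustrative sketch, not part of the patch ----------------
# The change repeated throughout these hunks is the PEP 585 typing migration:
# typing.Dict / typing.List / typing.Tuple become the built-in generics dict /
# list / tuple, and Callable is now imported from collections.abc. The sketch
# below (hypothetical function name, standard library only) shows the new-style
# annotations applied to the requested_fields shape used by the lazy tables.
from collections.abc import Callable
from typing import Optional


def render_requested_fields(
    requested_fields: dict[str, list[str | int]],
    rename: Optional[Callable[[str], str]] = None,
) -> list[str]:
    # Mirror how the lazy_select implementations above walk requested_fields,
    # turning each alias -> chain pair into an expression-like string.
    rendered: list[str] = []
    for alias, chain in requested_fields.items():
        name = rename(alias) if rename else alias
        rendered.append(f"{name}: {'.'.join(str(part) for part in chain)}")
    return rendered


# Example: render_requested_fields({"person_id": ["person_id"], "version": ["version"]})
# returns ['person_id: person_id', 'version: version'].
# ------------------------------------------------------------------------------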
class PersonsPDITable(LazyTable): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { "team_id": IntegerDatabaseField(name="team_id"), "distinct_id": StringDatabaseField(name="distinct_id"), "person_id": StringDatabaseField(name="person_id"), } - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): return persons_pdi_select(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/session_replay_events.py b/posthog/hogql/database/schema/session_replay_events.py index a6f0fbed3bcf5..81f705af378d9 100644 --- a/posthog/hogql/database/schema/session_replay_events.py +++ b/posthog/hogql/database/schema/session_replay_events.py @@ -1,5 +1,3 @@ -from typing import Dict, List - from posthog.hogql.database.models import ( Table, StringDatabaseField, @@ -18,7 +16,7 @@ RAW_ONLY_FIELDS = ["min_first_timestamp", "max_last_timestamp"] -SESSION_REPLAY_EVENTS_COMMON_FIELDS: Dict[str, FieldOrTable] = { +SESSION_REPLAY_EVENTS_COMMON_FIELDS: dict[str, FieldOrTable] = { "session_id": StringDatabaseField(name="session_id"), "team_id": IntegerDatabaseField(name="team_id"), "distinct_id": StringDatabaseField(name="distinct_id"), @@ -46,14 +44,14 @@ class RawSessionReplayEventsTable(Table): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { **SESSION_REPLAY_EVENTS_COMMON_FIELDS, "min_first_timestamp": DateTimeDatabaseField(name="min_first_timestamp"), "max_last_timestamp": DateTimeDatabaseField(name="max_last_timestamp"), "first_url": DatabaseField(name="first_url"), } - def avoid_asterisk_fields(self) -> List[str]: + def avoid_asterisk_fields(self) -> list[str]: return ["first_url"] def to_printed_clickhouse(self, context): @@ -63,7 +61,7 @@ def to_printed_hogql(self): return "raw_session_replay_events" -def select_from_session_replay_events_table(requested_fields: Dict[str, List[str | int]]): +def select_from_session_replay_events_table(requested_fields: dict[str, list[str | int]]): from posthog.hogql import ast table_name = "raw_session_replay_events" @@ -85,8 +83,8 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str "message_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "message_count"])]), } - select_fields: List[ast.Expr] = [] - group_by_fields: List[ast.Expr] = [] + select_fields: list[ast.Expr] = [] + group_by_fields: list[ast.Expr] = [] for name, chain in requested_fields.items(): if name in RAW_ONLY_FIELDS: @@ -107,14 +105,14 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str class SessionReplayEventsTable(LazyTable): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { **{k: v for k, v in SESSION_REPLAY_EVENTS_COMMON_FIELDS.items() if k not in RAW_ONLY_FIELDS}, "start_time": DateTimeDatabaseField(name="start_time"), "end_time": DateTimeDatabaseField(name="end_time"), "first_url": StringDatabaseField(name="first_url"), } - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node): return select_from_session_replay_events_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/sessions.py b/posthog/hogql/database/schema/sessions.py index e1fcaf1a75f06..63f0e4f98e79f 100644 --- a/posthog/hogql/database/schema/sessions.py +++ 
b/posthog/hogql/database/schema/sessions.py @@ -1,4 +1,4 @@ -from typing import Dict, List, cast, Any, TYPE_CHECKING +from typing import cast, Any, Optional, TYPE_CHECKING from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -11,15 +11,23 @@ StringArrayDatabaseField, DatabaseField, LazyTable, + FloatDatabaseField, + BooleanDatabaseField, ) -from posthog.hogql.database.schema.channel_type import create_channel_type_expr +from posthog.hogql.database.schema.channel_type import create_channel_type_expr, POSSIBLE_CHANNEL_TYPES from posthog.hogql.database.schema.util.session_where_clause_extractor import SessionMinTimestampWhereClauseExtractor from posthog.hogql.errors import ResolutionError +from posthog.models.property_definition import PropertyType +from posthog.models.sessions.sql import ( + SELECT_SESSION_PROP_STRING_VALUES_SQL_WITH_FILTER, + SELECT_SESSION_PROP_STRING_VALUES_SQL, +) +from posthog.queries.insight import insight_sync_execute if TYPE_CHECKING: - pass + from posthog.models.team import Team -RAW_SESSIONS_FIELDS: Dict[str, FieldOrTable] = { +RAW_SESSIONS_FIELDS: dict[str, FieldOrTable] = { "id": StringDatabaseField(name="session_id"), # TODO remove this, it's a duplicate of the correct session_id field below to get some trends working on a deadline "session_id": StringDatabaseField(name="session_id"), @@ -44,7 +52,7 @@ "autocapture_count": IntegerDatabaseField(name="autocapture_count"), } -LAZY_SESSIONS_FIELDS: Dict[str, FieldOrTable] = { +LAZY_SESSIONS_FIELDS: dict[str, FieldOrTable] = { "id": StringDatabaseField(name="session_id"), # TODO remove this, it's a duplicate of the correct session_id field below to get some trends working on a deadline "session_id": StringDatabaseField(name="session_id"), @@ -75,7 +83,7 @@ class RawSessionsTable(Table): - fields: Dict[str, FieldOrTable] = RAW_SESSIONS_FIELDS + fields: dict[str, FieldOrTable] = RAW_SESSIONS_FIELDS def to_printed_clickhouse(self, context): return "sessions" @@ -83,7 +91,7 @@ def to_printed_clickhouse(self, context): def to_printed_hogql(self): return "raw_sessions" - def avoid_asterisk_fields(self) -> List[str]: + def avoid_asterisk_fields(self) -> list[str]: # our clickhouse driver can't return aggregate states return [ "entry_url", @@ -100,7 +108,7 @@ def avoid_asterisk_fields(self) -> List[str]: def select_from_sessions_table( - requested_fields: Dict[str, List[str | int]], node: ast.SelectQuery, context: HogQLContext + requested_fields: dict[str, list[str | int]], node: ast.SelectQuery, context: HogQLContext ): from posthog.hogql import ast @@ -166,8 +174,8 @@ def select_from_sessions_table( } aggregate_fields["duration"] = aggregate_fields["$session_duration"] - select_fields: List[ast.Expr] = [] - group_by_fields: List[ast.Expr] = [ast.Field(chain=[table_name, "session_id"])] + select_fields: list[ast.Expr] = [] + group_by_fields: list[ast.Expr] = [ast.Field(chain=[table_name, "session_id"])] for name, chain in requested_fields.items(): if name in aggregate_fields: @@ -189,9 +197,9 @@ def select_from_sessions_table( class SessionsTable(LazyTable): - fields: Dict[str, FieldOrTable] = LAZY_SESSIONS_FIELDS + fields: dict[str, FieldOrTable] = LAZY_SESSIONS_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node: ast.SelectQuery): + def lazy_select(self, requested_fields: dict[str, list[str | int]], context, node: ast.SelectQuery): return select_from_sessions_table(requested_fields, node, context) def to_printed_clickhouse(self, context): @@ -200,9 +208,14 
@@ def to_printed_clickhouse(self, context): def to_printed_hogql(self): return "sessions" + def avoid_asterisk_fields(self) -> list[str]: + return [ + "duration", # alias of $session_duration, deprecated but included for backwards compatibility + ] + def join_events_table_to_sessions_table( - from_table: str, to_table: str, requested_fields: Dict[str, Any], context: HogQLContext, node: ast.SelectQuery + from_table: str, to_table: str, requested_fields: dict[str, Any], context: HogQLContext, node: ast.SelectQuery ) -> ast.JoinExpr: from posthog.hogql import ast @@ -220,3 +233,97 @@ def join_events_table_to_sessions_table( ) ) return join_expr + + +def get_lazy_session_table_properties(search: Optional[str]): + # some fields shouldn't appear as properties + hidden_fields = {"team_id", "distinct_id", "session_id", "id", "$event_count_map", "$urls", "duration"} + + # some fields should have a specific property type which isn't derivable from the type of database field + property_type_overrides = { + "$session_duration": PropertyType.Duration, + } + + def get_property_type(field_name: str, field_definition: FieldOrTable): + if field_name in property_type_overrides: + return property_type_overrides[field_name] + if isinstance(field_definition, IntegerDatabaseField) or isinstance(field_definition, FloatDatabaseField): + return PropertyType.Numeric + if isinstance(field_definition, DateTimeDatabaseField): + return PropertyType.Datetime + if isinstance(field_definition, BooleanDatabaseField): + return PropertyType.Boolean + return PropertyType.String + + results = [ + { + "id": field_name, + "name": field_name, + "is_numerical": isinstance(field_definition, IntegerDatabaseField) + or isinstance(field_definition, FloatDatabaseField), + "property_type": get_property_type(field_name, field_definition), + "is_seen_on_filtered_events": None, + "tags": [], + } + for field_name, field_definition in LAZY_SESSIONS_FIELDS.items() + if (not search or search.lower() in field_name.lower()) and field_name not in hidden_fields + ] + return results + + +SESSION_PROPERTY_TO_RAW_SESSIONS_EXPR_MAP = { + "$initial_referring_domain": "finalizeAggregation(initial_referring_domain)", + "$initial_utm_source": "finalizeAggregation(initial_utm_source)", + "$initial_utm_campaign": "finalizeAggregation(initial_utm_campaign)", + "$initial_utm_medium": "finalizeAggregation(initial_utm_medium)", + "$initial_utm_term": "finalizeAggregation(initial_utm_term)", + "$initial_utm_content": "finalizeAggregation(initial_utm_content)", + "$initial_gclid": "finalizeAggregation(initial_gclid)", + "$initial_gad_source": "finalizeAggregation(initial_gad_source)", + "$initial_gclsrc": "finalizeAggregation(initial_gclsrc)", + "$initial_dclid": "finalizeAggregation(initial_dclid)", + "$initial_gbraid": "finalizeAggregation(initial_gbraid)", + "$initial_wbraid": "finalizeAggregation(initial_wbraid)", + "$initial_fbclid": "finalizeAggregation(initial_fbclid)", + "$initial_msclkid": "finalizeAggregation(initial_msclkid)", + "$initial_twclid": "finalizeAggregation(initial_twclid)", + "$initial_li_fat_id": "finalizeAggregation(initial_li_fat_id)", + "$initial_mc_cid": "finalizeAggregation(initial_mc_cid)", + "$initial_igshid": "finalizeAggregation(initial_igshid)", + "$initial_ttclid": "finalizeAggregation(initial_ttclid)", + "$entry_url": "finalizeAggregation(entry_url)", + "$exit_url": "finalizeAggregation(exit_url)", +} + + +def get_lazy_session_table_values(key: str, search_term: Optional[str], team: "Team"): + # the sessions table does not 
have a properties json object like the events and person tables + + if key == "$channel_type": + return [[name] for name in POSSIBLE_CHANNEL_TYPES if not search_term or search_term.lower() in name.lower()] + + expr = SESSION_PROPERTY_TO_RAW_SESSIONS_EXPR_MAP.get(key) + + if not expr: + return [] + + field_definition = LAZY_SESSIONS_FIELDS.get(key) + if not field_definition: + return [] + + if isinstance(field_definition, StringDatabaseField): + if search_term: + return insight_sync_execute( + SELECT_SESSION_PROP_STRING_VALUES_SQL_WITH_FILTER.format(property_expr=expr), + {"team_id": team.pk, "key": key, "value": "%{}%".format(search_term)}, + query_type="get_session_property_values_with_value", + team_id=team.pk, + ) + return insight_sync_execute( + SELECT_SESSION_PROP_STRING_VALUES_SQL.format(property_expr=expr), + {"team_id": team.pk, "key": key}, + query_type="get_session_property_values", + team_id=team.pk, + ) + + return [] diff --git a/posthog/hogql/database/schema/static_cohort_people.py b/posthog/hogql/database/schema/static_cohort_people.py index 97d90cbd6dcac..fafbe9459eb99 100644 --- a/posthog/hogql/database/schema/static_cohort_people.py +++ b/posthog/hogql/database/schema/static_cohort_people.py @@ -1,5 +1,3 @@ -from typing import Dict - from posthog.hogql.database.models import ( Table, StringDatabaseField, @@ -11,7 +9,7 @@ class StaticCohortPeople(Table): - fields: Dict[str, FieldOrTable] = { + fields: dict[str, FieldOrTable] = { "person_id": StringDatabaseField(name="person_id"), "cohort_id": IntegerDatabaseField(name="cohort_id"), "team_id": IntegerDatabaseField(name="team_id"), diff --git a/posthog/hogql/database/schema/test/test_channel_type.py b/posthog/hogql/database/schema/test/test_channel_type.py index 97dba3e13ba38..363e262944770 100644 --- a/posthog/hogql/database/schema/test/test_channel_type.py +++ b/posthog/hogql/database/schema/test/test_channel_type.py @@ -234,15 +234,15 @@ def test_organic_video(self): ), ) - def test_no_info_is_other(self): + def test_no_info_is_unknown(self): self.assertEqual( - "Other", + "Unknown", self._get_initial_channel_type({}), ) - def test_unknown_domain_is_other(self): + def test_unknown_domain_is_unknown(self): self.assertEqual( - "Other", + "Unknown", self._get_initial_channel_type( { "$initial_referring_domain": "some-unknown-domain.example.com", @@ -252,7 +252,7 @@ def test_unknown_domain_is_other(self): def test_doesnt_fail_on_numbers(self): self.assertEqual( - "Other", + "Unknown", self._get_initial_channel_type( { "$initial_referring_domain": "example.com", @@ -318,7 +318,7 @@ def test_firefox_google_search_for_shoes(self): def test_daily_mail_ad_click(self): # go to daily mail -> click ad self.assertEqual( - "Paid Other", + "Paid Unknown", self._get_initial_channel_type_from_wild_clicks( "https://www.vivaia.com/item/square-toe-v-cut-flats-p_10003645.html?gid=10011676&currency=GBP&shipping_country_code=GB&gclid=EAIaIQobChMIxvGy5rr_ggMVYi0GAB0KSAumEAEYASABEgLZ2PD_BwE", "https://2bb5cd7f10ba63d8b55ecfac1a3948db.safeframe.googlesyndication.com/", diff --git a/posthog/hogql/database/schema/test/test_event_sessions.py b/posthog/hogql/database/schema/test/test_event_sessions.py index 1a31bc3f4720d..914ac471236d6 100644 --- a/posthog/hogql/database/schema/test/test_event_sessions.py +++ b/posthog/hogql/database/schema/test/test_event_sessions.py @@ -1,4 +1,4 @@ -from typing import List, cast +from typing import cast from posthog.hogql import ast from posthog.hogql.context import HogQLContext from posthog.hogql.database.database import 
create_hogql_database @@ -21,7 +21,7 @@ def _select(self, query: str) -> ast.SelectQuery: select_query = cast(ast.SelectQuery, clone_expr(parse_select(query), clear_locations=True)) return cast(ast.SelectQuery, resolve_types(select_query, self.context, dialect="clickhouse")) - def _compare_operators(self, query: ast.SelectQuery, table_name: str) -> List[ast.Expr]: + def _compare_operators(self, query: ast.SelectQuery, table_name: str) -> list[ast.Expr]: assert query.where is not None and query.type is not None return WhereClauseExtractor(query.where, table_name, query.type, self.context).compare_operators diff --git a/posthog/hogql/database/schema/test/test_sessions.py b/posthog/hogql/database/schema/test/test_sessions.py index 2f4728fb9b558..230ffc1bc1897 100644 --- a/posthog/hogql/database/schema/test/test_sessions.py +++ b/posthog/hogql/database/schema/test/test_sessions.py @@ -5,6 +5,7 @@ APIBaseTest, ClickhouseTestMixin, _create_event, + _create_person, ) @@ -103,3 +104,35 @@ def test_events_session_dot_channel_type(self): result[0], "Paid Search", ) + + def test_persons_and_sessions_on_events(self): + p1 = _create_person(distinct_ids=["d1"], team=self.team) + p2 = _create_person(distinct_ids=["d2"], team=self.team) + + s1 = "session_test_persons_and_sessions_on_events_1" + s2 = "session_test_persons_and_sessions_on_events_2" + + _create_event( + event="$pageview", + team=self.team, + distinct_id="d1", + properties={"$session_id": s1, "utm_source": "source1"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="d2", + properties={"$session_id": s2, "utm_source": "source2"}, + ) + + response = execute_hogql_query( + parse_select( + "select events.person_id, session.$initial_utm_source from events where $session_id = {session_id} or $session_id = {session_id2} order by 2 asc", + placeholders={"session_id": ast.Constant(value=s1), "session_id2": ast.Constant(value=s2)}, + ), + self.team, + ) + + [row1, row2] = response.results or [] + self.assertEqual(row1, (p1.uuid, "source1")) + self.assertEqual(row2, (p2.uuid, "source2")) diff --git a/posthog/hogql/database/schema/util/session_where_clause_extractor.py b/posthog/hogql/database/schema/util/session_where_clause_extractor.py index d1552ffa75f2f..3d94a4a0f691f 100644 --- a/posthog/hogql/database/schema/util/session_where_clause_extractor.py +++ b/posthog/hogql/database/schema/util/session_where_clause_extractor.py @@ -379,6 +379,8 @@ def visit_alias(self, node: ast.Alias) -> bool: table_type = node.type.resolve_table_type(self.context) if not table_type: return False + if isinstance(table_type, ast.TableAliasType): + table_type = table_type.table_type return ( isinstance(table_type, ast.TableType) and isinstance(table_type.table, EventsTable) @@ -409,7 +411,10 @@ def visit_field(self, node: ast.Field) -> ast.Field: if node.type and isinstance(node.type, ast.FieldType): resolved_field = node.type.resolve_database_field(self.context) - table = node.type.resolve_table_type(self.context).table + table_type = node.type.resolve_table_type(self.context) + if isinstance(table_type, ast.TableAliasType): + table_type = table_type.table_type + table = table_type.table if resolved_field and isinstance(resolved_field, DatabaseField): if (isinstance(table, EventsTable) and resolved_field.name == "timestamp") or ( isinstance(table, SessionsTable) and resolved_field.name == "$start_timestamp" diff --git a/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py 
b/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py index 3fa9df4e8a815..ea8c55d054cad 100644 --- a/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py +++ b/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py @@ -1,4 +1,4 @@ -from typing import Union, Optional, Dict +from typing import Union, Optional from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -22,10 +22,9 @@ def f(s: Union[str, ast.Expr, None], placeholders: Optional[dict[str, ast.Expr]] def parse( s: str, - placeholders: Optional[Dict[str, ast.Expr]] = None, -) -> ast.SelectQuery: + placeholders: Optional[dict[str, ast.Expr]] = None, +) -> ast.SelectQuery | ast.SelectUnionQuery: parsed = parse_select(s, placeholders=placeholders) - assert isinstance(parsed, ast.SelectQuery) return parsed @@ -245,6 +244,36 @@ def test_select_query(self): ) assert actual is None + def test_breakdown_subquery(self): + actual = f( + self.inliner.get_inner_where( + parse( + f""" +SELECT + count(DISTINCT e.$session_id) AS total, + toStartOfDay(timestamp) AS day_start, + multiIf(and(greaterOrEquals(session.$session_duration, 2.0), less(session.$session_duration, 4.5)), '[2.0,4.5]', and(greaterOrEquals(session.$session_duration, 4.5), less(session.$session_duration, 27.0)), '[4.5,27.0]', and(greaterOrEquals(session.$session_duration, 27.0), less(session.$session_duration, 44.0)), '[27.0,44.0]', and(greaterOrEquals(session.$session_duration, 44.0), less(session.$session_duration, 48.0)), '[44.0,48.0]', and(greaterOrEquals(session.$session_duration, 48.0), less(session.$session_duration, 57.5)), '[48.0,57.5]', and(greaterOrEquals(session.$session_duration, 57.5), less(session.$session_duration, 61.0)), '[57.5,61.0]', and(greaterOrEquals(session.$session_duration, 61.0), less(session.$session_duration, 74.0)), '[61.0,74.0]', and(greaterOrEquals(session.$session_duration, 74.0), less(session.$session_duration, 90.0)), '[74.0,90.0]', and(greaterOrEquals(session.$session_duration, 90.0), less(session.$session_duration, 98.5)), '[90.0,98.5]', and(greaterOrEquals(session.$session_duration, 98.5), less(session.$session_duration, 167.01)), '[98.5,167.01]', '["",""]') AS breakdown_value + FROM + events AS e SAMPLE 1 + WHERE + and(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(toDateTime('2024-04-13 00:00:00')))), lessOrEquals(timestamp, assumeNotNull(toDateTime('2024-04-20 23:59:59'))), equals(event, '$pageview'), in(person_id, (SELECT + person_id + FROM + raw_cohort_people + WHERE + and(equals(cohort_id, 2), equals(version, 0))))) + GROUP BY + day_start, + breakdown_value + """ + ) + ) + ) + expected = f( + "((raw_sessions.min_timestamp + toIntervalDay(3)) >= toStartOfDay(assumeNotNull(toDateTime('2024-04-13 00:00:00'))) AND (raw_sessions.min_timestamp - toIntervalDay(3)) <= assumeNotNull(toDateTime('2024-04-20 23:59:59')))" + ) + assert expected == actual + class TestSessionsQueriesHogQLToClickhouse(ClickhouseTestMixin, APIBaseTest): def print_query(self, query: str) -> str: @@ -311,5 +340,120 @@ def test_join_with_events(self): and(equals(events.team_id, {self.team.id}), greater(toTimeZone(events.timestamp, %(hogql_val_2)s), %(hogql_val_3)s)) GROUP BY sessions.session_id +LIMIT 10000""" + assert expected == actual + + def test_union(self): + actual = self.print_query( + """ +SELECT 0 as duration +UNION ALL +SELECT events.session.$session_duration as duration +FROM events +WHERE events.timestamp < today() + """ + ) + expected = f"""SELECT + 0 AS 
duration +LIMIT 10000 +UNION ALL +SELECT + events__session.`$session_duration` AS duration +FROM + events + LEFT JOIN (SELECT + dateDiff(%(hogql_val_0)s, min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, + sessions.session_id AS session_id + FROM + sessions + WHERE + and(equals(sessions.team_id, {self.team.id}), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, %(hogql_val_1)s), toIntervalDay(3)), today()), 0)) + GROUP BY + sessions.session_id, + sessions.session_id) AS events__session ON equals(events.`$session_id`, events__session.session_id) +WHERE + and(equals(events.team_id, {self.team.id}), less(toTimeZone(events.timestamp, %(hogql_val_2)s), today())) +LIMIT 10000""" + assert expected == actual + + def test_session_breakdown(self): + actual = self.print_query( + """SELECT count(DISTINCT e."$session_id") AS total, + toStartOfDay(timestamp) AS day_start, + multiIf(and(greaterOrEquals(session."$session_duration", 2.0), + less(session."$session_duration", 4.5)), + '[2.0,4.5]', + and(greaterOrEquals(session."$session_duration", 4.5), + less(session."$session_duration", 27.0)), + '[4.5,27.0]', + and(greaterOrEquals(session."$session_duration", 27.0), + less(session."$session_duration", 44.0)), + '[27.0,44.0]', + and(greaterOrEquals(session."$session_duration", 44.0), + less(session."$session_duration", 48.0)), + '[44.0,48.0]', + and(greaterOrEquals(session."$session_duration", 48.0), + less(session."$session_duration", 57.5)), + '[48.0,57.5]', + and(greaterOrEquals(session."$session_duration", 57.5), + less(session."$session_duration", 61.0)), + '[57.5,61.0]', + and(greaterOrEquals(session."$session_duration", 61.0), + less(session."$session_duration", 74.0)), + '[61.0,74.0]', + and(greaterOrEquals(session."$session_duration", 74.0), + less(session."$session_duration", 90.0)), + '[74.0,90.0]', + and(greaterOrEquals(session."$session_duration", 90.0), + less(session."$session_duration", 98.5)), + '[90.0,98.5]', and(greaterOrEquals(session."$session_duration", 98.5), + less(session."$session_duration", 167.01)), '[98.5,167.01]', + '["",""]') AS breakdown_value +FROM events AS e SAMPLE 1 +WHERE and(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(toDateTime('2024-04-13 00:00:00')))), + lessOrEquals(timestamp, assumeNotNull(toDateTime('2024-04-20 23:59:59'))), + equals(event, '$pageview'), in(person_id, (SELECT person_id + FROM raw_cohort_people + WHERE and(equals(cohort_id, 2), equals(version, 0))))) +GROUP BY day_start, + breakdown_value""" + ) + expected = f"""SELECT + count(DISTINCT e.`$session_id`) AS total, + toStartOfDay(toTimeZone(e.timestamp, %(hogql_val_7)s)) AS day_start, + multiIf(and(ifNull(greaterOrEquals(e__session.`$session_duration`, 2.0), 0), ifNull(less(e__session.`$session_duration`, 4.5), 0)), %(hogql_val_8)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 4.5), 0), ifNull(less(e__session.`$session_duration`, 27.0), 0)), %(hogql_val_9)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 27.0), 0), ifNull(less(e__session.`$session_duration`, 44.0), 0)), %(hogql_val_10)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 44.0), 0), ifNull(less(e__session.`$session_duration`, 48.0), 0)), %(hogql_val_11)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 48.0), 0), ifNull(less(e__session.`$session_duration`, 57.5), 0)), %(hogql_val_12)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 57.5), 0), ifNull(less(e__session.`$session_duration`, 61.0), 0)), %(hogql_val_13)s, 
and(ifNull(greaterOrEquals(e__session.`$session_duration`, 61.0), 0), ifNull(less(e__session.`$session_duration`, 74.0), 0)), %(hogql_val_14)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 74.0), 0), ifNull(less(e__session.`$session_duration`, 90.0), 0)), %(hogql_val_15)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 90.0), 0), ifNull(less(e__session.`$session_duration`, 98.5), 0)), %(hogql_val_16)s, and(ifNull(greaterOrEquals(e__session.`$session_duration`, 98.5), 0), ifNull(less(e__session.`$session_duration`, 167.01), 0)), %(hogql_val_17)s, %(hogql_val_18)s) AS breakdown_value +FROM + events AS e SAMPLE 1 + INNER JOIN (SELECT + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM + person_distinct_id2 + WHERE + equals(person_distinct_id2.team_id, {self.team.id}) + GROUP BY + person_distinct_id2.distinct_id + HAVING + ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN (SELECT + dateDiff(%(hogql_val_0)s, min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, + sessions.session_id AS session_id + FROM + sessions + WHERE + and(equals(sessions.team_id, {self.team.id}), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, %(hogql_val_1)s), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_2)s, 6, %(hogql_val_3)s)))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, %(hogql_val_4)s), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_5)s, 6, %(hogql_val_6)s))), 0)) + GROUP BY + sessions.session_id, + sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) +WHERE + and(equals(e.team_id, {self.team.id}), and(greaterOrEquals(toTimeZone(e.timestamp, %(hogql_val_19)s), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_20)s, 6, %(hogql_val_21)s)))), lessOrEquals(toTimeZone(e.timestamp, %(hogql_val_22)s), assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_23)s, 6, %(hogql_val_24)s))), equals(e.event, %(hogql_val_25)s), ifNull(in(e__pdi.person_id, (SELECT + cohortpeople.person_id AS person_id + FROM + cohortpeople + WHERE + and(equals(cohortpeople.team_id, {self.team.id}), and(equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0))))), 0))) +GROUP BY + day_start, + breakdown_value LIMIT 10000""" assert expected == actual diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index abca3ce8745ca..217cb33a3c8ea 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -675,6 +675,52 @@ "type": "integer" } ], + "heatmaps": [ + { + "key": "session_id", + "type": "string" + }, + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "x", + "type": "integer" + }, + { + "key": "y", + "type": "integer" + }, + { + "key": "scale_factor", + "type": "integer" + }, + { + "key": "viewport_width", + "type": "integer" + }, + { + "key": "viewport_height", + "type": "integer" + }, + { + "key": "pointer_target_fixed", + "type": "boolean" + }, + { + "key": "current_url", + "type": "string" + }, + { + "key": "timestamp", + "type": "datetime" + }, + { + "key": "type", + "type": "string" + } + ], "raw_session_replay_events": [ { "key": "session_id", @@ 
-1657,6 +1703,52 @@ "type": "integer" } ], + "heatmaps": [ + { + "key": "session_id", + "type": "string" + }, + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "x", + "type": "integer" + }, + { + "key": "y", + "type": "integer" + }, + { + "key": "scale_factor", + "type": "integer" + }, + { + "key": "viewport_width", + "type": "integer" + }, + { + "key": "viewport_height", + "type": "integer" + }, + { + "key": "pointer_target_fixed", + "type": "boolean" + }, + { + "key": "current_url", + "type": "string" + }, + { + "key": "timestamp", + "type": "datetime" + }, + { + "key": "type", + "type": "string" + } + ], "raw_session_replay_events": [ { "key": "session_id", diff --git a/posthog/hogql/database/test/test_database.py b/posthog/hogql/database/test/test_database.py index 955104aefb09e..56e3f707b8b9f 100644 --- a/posthog/hogql/database/test/test_database.py +++ b/posthog/hogql/database/test/test_database.py @@ -89,7 +89,7 @@ def test_database_with_warehouse_tables(self, patch_execute): self.assertEqual( response.clickhouse, - f"SELECT whatever.id AS id FROM s3(%(hogql_val_0_sensitive)s, %(hogql_val_3_sensitive)s, %(hogql_val_4_sensitive)s, %(hogql_val_1)s, %(hogql_val_2)s) AS whatever LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1", + f"SELECT whatever.id AS id FROM s3(%(hogql_val_0_sensitive)s, %(hogql_val_3_sensitive)s, %(hogql_val_4_sensitive)s, %(hogql_val_1)s, %(hogql_val_2)s) AS whatever LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) def test_database_group_type_mappings(self): diff --git a/posthog/hogql/escape_sql.py b/posthog/hogql/escape_sql.py index 0d0f12d0d2dea..10f4a413fa60d 100644 --- a/posthog/hogql/escape_sql.py +++ b/posthog/hogql/escape_sql.py @@ -1,6 +1,6 @@ import re from datetime import datetime, date -from typing import Optional, Any, Literal, List, Tuple +from typing import Optional, Any, Literal from uuid import UUID from zoneinfo import ZoneInfo @@ -123,11 +123,14 @@ def visit_datetime(self, value: datetime): return f"toDateTime({self.visit(datetime_string)})" # no timezone for hogql return f"toDateTime64({self.visit(datetime_string)}, 6, {self.visit(self._timezone)})" + def visit_fakedate(self, value: date): + return self.visit_date(value) + def visit_date(self, value: date): return f"toDate({self.visit(value.strftime('%Y-%m-%d'))})" - def visit_list(self, value: List): + def visit_list(self, value: list): return f"[{', '.join(str(self.visit(x)) for x in value)}]" - def visit_tuple(self, value: Tuple): + def visit_tuple(self, value: tuple): return f"({', '.join(str(self.visit(x)) for x in value)})" diff --git a/posthog/hogql/filters.py b/posthog/hogql/filters.py index dcec66efad6e0..06ea36c1cdd10 100644 --- a/posthog/hogql/filters.py +++ b/posthog/hogql/filters.py @@ -1,10 +1,9 @@ -from typing import List, Optional, TypeVar +from typing import Optional, TypeVar from dateutil.parser import isoparse from posthog.hogql import ast from posthog.hogql.errors import QueryError -from posthog.hogql.parser import parse_expr from posthog.hogql.property import property_to_expr from posthog.hogql.visitor import CloningVisitor from posthog.models import Team @@ -24,7 +23,7 @@ def __init__(self, filters: Optional[HogQLFilters], team: Team = None): super().__init__() self.filters = filters self.team = team - self.selects: List[ast.SelectQuery] = [] + self.selects: list[ast.SelectQuery] = [] def visit_select_query(self, node): self.selects.append(node) 
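# --- Editor's note: illustrative sketch, not part of the patch ----------------
# The filters.py hunk that continues below swaps parse_expr for directly built
# ast.CompareOperation nodes and pins the project timezone onto dates parsed
# with isoparse (which returns a naive datetime for inputs like "2024-04-13").
# A minimal standalone sketch of that timezone handling, using only dateutil
# and the standard library (hypothetical function name):
from datetime import datetime
from zoneinfo import ZoneInfo

from dateutil.parser import isoparse


def parse_in_project_timezone(value: str, project_timezone: str) -> datetime:
    # Matches the patched visit_placeholder: replace() unconditionally attaches
    # the project's timezone to whatever isoparse returned.
    return isoparse(value).replace(tzinfo=ZoneInfo(project_timezone))


# Example: parse_in_project_timezone("2024-04-13", "Europe/London")
# -> datetime.datetime(2024, 4, 13, 0, 0, tzinfo=zoneinfo.ZoneInfo(key='Europe/London'))
# ------------------------------------------------------------------------------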
@@ -52,21 +51,21 @@ def visit_placeholder(self, node): "Cannot use 'filters' placeholder in a SELECT clause that does not select from the events table." ) - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] if self.filters.properties is not None: exprs.append(property_to_expr(self.filters.properties, self.team)) dateTo = self.filters.dateRange.date_to if self.filters.dateRange else None if dateTo is not None: try: - parsed_date = isoparse(dateTo) + parsed_date = isoparse(dateTo).replace(tzinfo=self.team.timezone_info) except ValueError: parsed_date = relative_date_parse(dateTo, self.team.timezone_info) exprs.append( - parse_expr( - "timestamp < {timestamp}", - {"timestamp": ast.Constant(value=parsed_date)}, - start=None, # do not add location information for "timestamp" to the metadata + ast.CompareOperation( + op=ast.CompareOperationOp.Lt, + left=ast.Field(chain=["timestamp"]), + right=ast.Constant(value=parsed_date), ) ) @@ -74,14 +73,14 @@ def visit_placeholder(self, node): dateFrom = self.filters.dateRange.date_from if self.filters.dateRange else None if dateFrom is not None and dateFrom != "all": try: - parsed_date = isoparse(dateFrom) + parsed_date = isoparse(dateFrom).replace(tzinfo=self.team.timezone_info) except ValueError: parsed_date = relative_date_parse(dateFrom, self.team.timezone_info) exprs.append( - parse_expr( - "timestamp >= {timestamp}", - {"timestamp": ast.Constant(value=parsed_date)}, - start=None, # do not add location information for "timestamp" to the metadata + ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.Field(chain=["timestamp"]), + right=ast.Constant(value=parsed_date), ) ) diff --git a/posthog/hogql/functions/action.py b/posthog/hogql/functions/action.py index 02888081632f3..5ed8a156e393b 100644 --- a/posthog/hogql/functions/action.py +++ b/posthog/hogql/functions/action.py @@ -1,12 +1,10 @@ -from typing import List - from posthog.hogql import ast from posthog.hogql.context import HogQLContext from posthog.hogql.errors import QueryError from posthog.hogql.escape_sql import escape_clickhouse_string -def matches_action(node: ast.Expr, args: List[ast.Expr], context: HogQLContext) -> ast.Expr: +def matches_action(node: ast.Expr, args: list[ast.Expr], context: HogQLContext) -> ast.Expr: arg = args[0] if not isinstance(arg, ast.Constant): raise QueryError("action() takes only constant arguments", node=arg) diff --git a/posthog/hogql/functions/cohort.py b/posthog/hogql/functions/cohort.py index fc5077f610a4f..2b0992c6e7ef9 100644 --- a/posthog/hogql/functions/cohort.py +++ b/posthog/hogql/functions/cohort.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -23,7 +23,7 @@ def cohort_query_node(node: ast.Expr, context: HogQLContext) -> ast.Expr: return cohort(node, [node], context) -def cohort(node: ast.Expr, args: List[ast.Expr], context: HogQLContext) -> ast.Expr: +def cohort(node: ast.Expr, args: list[ast.Expr], context: HogQLContext) -> ast.Expr: arg = args[0] if not isinstance(arg, ast.Constant): raise QueryError("cohort() takes only constant arguments", node=arg) diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index 652e1711ff0bb..c4087013c85c4 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -1,13 +1,13 @@ from dataclasses import dataclass from itertools import chain -from typing import List, Optional, Dict, Tuple, Type +from typing import 
Optional from posthog.hogql import ast from posthog.hogql.base import ConstantType from posthog.hogql.errors import QueryError def validate_function_args( - args: List[ast.Expr], + args: list[ast.Expr], min_args: int, max_args: Optional[int], function_name: str, @@ -31,7 +31,7 @@ def validate_function_args( ) -Overload = Tuple[Tuple[Type[ConstantType], ...] | Type[ConstantType], str] +Overload = tuple[tuple[type[ConstantType], ...] | type[ConstantType], str] @dataclass() @@ -42,7 +42,7 @@ class HogQLFunctionMeta: min_params: int = 0 max_params: Optional[int] = 0 aggregate: bool = False - overloads: Optional[List[Overload]] = None + overloads: Optional[list[Overload]] = None """Overloads allow for using a different ClickHouse function depending on the type of the first arg.""" tz_aware: bool = False """Whether the function is timezone-aware. This means the project timezone will be appended as the last arg.""" @@ -50,7 +50,7 @@ class HogQLFunctionMeta: """Not all ClickHouse functions are case-insensitive. See https://clickhouse.com/docs/en/sql-reference/syntax#keywords.""" -HOGQL_COMPARISON_MAPPING: Dict[str, ast.CompareOperationOp] = { +HOGQL_COMPARISON_MAPPING: dict[str, ast.CompareOperationOp] = { "equals": ast.CompareOperationOp.Eq, "notEquals": ast.CompareOperationOp.NotEq, "less": ast.CompareOperationOp.Lt, @@ -65,7 +65,7 @@ class HogQLFunctionMeta: "notIn": ast.CompareOperationOp.NotIn, } -HOGQL_CLICKHOUSE_FUNCTIONS: Dict[str, HogQLFunctionMeta] = { +HOGQL_CLICKHOUSE_FUNCTIONS: dict[str, HogQLFunctionMeta] = { # arithmetic "plus": HogQLFunctionMeta("plus", 2, 2), "minus": HogQLFunctionMeta("minus", 2, 2), @@ -575,7 +575,7 @@ class HogQLFunctionMeta: "leadInFrame": HogQLFunctionMeta("leadInFrame", 1, 1), } # Permitted HogQL aggregations -HOGQL_AGGREGATIONS: Dict[str, HogQLFunctionMeta] = { +HOGQL_AGGREGATIONS: dict[str, HogQLFunctionMeta] = { # Standard aggregate functions "count": HogQLFunctionMeta("count", 0, 1, aggregate=True, case_sensitive=False), "countIf": HogQLFunctionMeta("countIf", 1, 2, aggregate=True), @@ -598,10 +598,10 @@ class HogQLFunctionMeta: "varPopIf": HogQLFunctionMeta("varPopIf", 2, 2, aggregate=True), "varSamp": HogQLFunctionMeta("varSamp", 1, 1, aggregate=True), "varSampIf": HogQLFunctionMeta("varSampIf", 2, 2, aggregate=True), - "covarPop": HogQLFunctionMeta("covarPop", 1, 1, aggregate=True), - "covarPopIf": HogQLFunctionMeta("covarPopIf", 2, 2, aggregate=True), - "covarSamp": HogQLFunctionMeta("covarSamp", 1, 1, aggregate=True), - "covarSampIf": HogQLFunctionMeta("covarSampIf", 2, 2, aggregate=True), + "covarPop": HogQLFunctionMeta("covarPop", 2, 2, aggregate=True), + "covarPopIf": HogQLFunctionMeta("covarPopIf", 3, 3, aggregate=True), + "covarSamp": HogQLFunctionMeta("covarSamp", 2, 2, aggregate=True), + "covarSampIf": HogQLFunctionMeta("covarSampIf", 3, 3, aggregate=True), # ClickHouse-specific aggregate functions "anyHeavy": HogQLFunctionMeta("anyHeavy", 1, 1, aggregate=True), "anyHeavyIf": HogQLFunctionMeta("anyHeavyIf", 2, 2, aggregate=True), @@ -747,7 +747,7 @@ class HogQLFunctionMeta: "maxIntersectionsPosition": HogQLFunctionMeta("maxIntersectionsPosition", 2, 2, aggregate=True), "maxIntersectionsPositionIf": HogQLFunctionMeta("maxIntersectionsPositionIf", 3, 3, aggregate=True), } -HOGQL_POSTHOG_FUNCTIONS: Dict[str, HogQLFunctionMeta] = { +HOGQL_POSTHOG_FUNCTIONS: dict[str, HogQLFunctionMeta] = { "matchesAction": HogQLFunctionMeta("matchesAction", 1, 1), "sparkline": HogQLFunctionMeta("sparkline", 1, 1), "hogql_lookupDomainType": 
HogQLFunctionMeta("hogql_lookupDomainType", 1, 1), @@ -781,7 +781,7 @@ class HogQLFunctionMeta: ) -def _find_function(name: str, functions: Dict[str, HogQLFunctionMeta]) -> Optional[HogQLFunctionMeta]: +def _find_function(name: str, functions: dict[str, HogQLFunctionMeta]) -> Optional[HogQLFunctionMeta]: func = functions.get(name) if func is not None: return func diff --git a/posthog/hogql/functions/sparkline.py b/posthog/hogql/functions/sparkline.py index ddd6c02a7b20e..5bbf9004f4425 100644 --- a/posthog/hogql/functions/sparkline.py +++ b/posthog/hogql/functions/sparkline.py @@ -1,9 +1,7 @@ -from typing import List - from posthog.hogql import ast -def sparkline(node: ast.Expr, args: List[ast.Expr]) -> ast.Expr: +def sparkline(node: ast.Expr, args: list[ast.Expr]) -> ast.Expr: return ast.Tuple( exprs=[ ast.Constant(value="__hogql_chart_type"), diff --git a/posthog/hogql/functions/test/__snapshots__/test_action.ambr b/posthog/hogql/functions/test/__snapshots__/test_action.ambr deleted file mode 100644 index 97cd09fe4c9de..0000000000000 --- a/posthog/hogql/functions/test/__snapshots__/test_action.ambr +++ /dev/null @@ -1,37 +0,0 @@ -# serializer version: 1 -# name: TestAction.test_matches_action_id - ''' - -- ClickHouse - - SELECT events.event AS event - FROM events - WHERE and(equals(events.team_id, 420), equals(events.event, %(hogql_val_0)s)) - LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 - - -- HogQL - - SELECT event - FROM events - WHERE equals(event, 'RANDOM_TEST_ID::UUID') - LIMIT 100 - ''' -# --- -# name: TestAction.test_matches_action_name - ''' - -- ClickHouse - - SELECT events.event AS event - FROM events - WHERE and(equals(events.team_id, 420), equals(events.event, %(hogql_val_0)s)) - LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 - - -- HogQL - - SELECT event - FROM events - WHERE equals(event, 'RANDOM_TEST_ID::UUID') - LIMIT 100 - ''' -# --- diff --git a/posthog/hogql/functions/test/__snapshots__/test_cohort.ambr b/posthog/hogql/functions/test/__snapshots__/test_cohort.ambr index 88e7251d665f1..e64affb57df69 100644 --- a/posthog/hogql/functions/test/__snapshots__/test_cohort.ambr +++ b/posthog/hogql/functions/test/__snapshots__/test_cohort.ambr @@ -12,7 +12,7 @@ GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), equals(events.event, %(hogql_val_0)s)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -38,7 +38,7 @@ FROM person_static_cohort WHERE and(equals(person_static_cohort.team_id, 420), equals(person_static_cohort.cohort_id, XX))))) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -62,7 +62,7 @@ FROM person_static_cohort WHERE and(equals(person_static_cohort.team_id, 420), equals(person_static_cohort.cohort_id, XX))))) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL diff --git a/posthog/hogql/functions/test/test_sparkline.py b/posthog/hogql/functions/test/test_sparkline.py index c0bf34a1c8d46..cd861b4800826 100644 --- 
a/posthog/hogql/functions/test/test_sparkline.py +++ b/posthog/hogql/functions/test/test_sparkline.py @@ -8,7 +8,7 @@ def test_sparkline(self): response = execute_hogql_query("select sparkline([1,2,3])", self.team, pretty=False) self.assertEqual( response.clickhouse, - f"SELECT tuple(%(hogql_val_0)s, %(hogql_val_1)s, %(hogql_val_2)s, [1, 2, 3]) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1", + f"SELECT tuple(%(hogql_val_0)s, %(hogql_val_1)s, %(hogql_val_2)s, [1, 2, 3]) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) self.assertEqual( response.hogql, diff --git a/posthog/hogql/hogql.py b/posthog/hogql/hogql.py index d3052f58b01a1..2a537bfd7a8d6 100644 --- a/posthog/hogql/hogql.py +++ b/posthog/hogql/hogql.py @@ -1,4 +1,4 @@ -from typing import Dict, Literal, cast, Optional +from typing import Literal, cast, Optional from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -18,7 +18,7 @@ def translate_hogql( metadata_source: Optional[ast.SelectQuery] = None, *, events_table_alias: Optional[str] = None, - placeholders: Optional[Dict[str, ast.Expr]] = None, + placeholders: Optional[dict[str, ast.Expr]] = None, ) -> str: """Translate a HogQL expression into a ClickHouse expression.""" if query == "": diff --git a/posthog/hogql/parser.py b/posthog/hogql/parser.py index 0ec619f338909..68637a30a208c 100644 --- a/posthog/hogql/parser.py +++ b/posthog/hogql/parser.py @@ -1,4 +1,5 @@ -from typing import Dict, List, Literal, Optional, cast, Callable +from typing import Literal, Optional, cast +from collections.abc import Callable from antlr4 import CommonTokenStream, InputStream, ParseTreeVisitor, ParserRuleContext from antlr4.error.ErrorListener import ErrorListener @@ -19,7 +20,7 @@ parse_select as _parse_select_cpp, ) -RULE_TO_PARSE_FUNCTION: Dict[Literal["python", "cpp"], Dict[Literal["expr", "order_expr", "select"], Callable]] = { +RULE_TO_PARSE_FUNCTION: dict[Literal["python", "cpp"], dict[Literal["expr", "order_expr", "select"], Callable]] = { "python": { "expr": lambda string, start: HogQLParseTreeConverter(start=start).visit(get_parser(string).expr()), "order_expr": lambda string: HogQLParseTreeConverter().visit(get_parser(string).orderExpr()), @@ -32,7 +33,7 @@ }, } -RULE_TO_HISTOGRAM: Dict[Literal["expr", "order_expr", "select"], Histogram] = { +RULE_TO_HISTOGRAM: dict[Literal["expr", "order_expr", "select"], Histogram] = { rule: Histogram( f"parse_{rule}_seconds", f"Time to parse {rule} expression", @@ -44,7 +45,7 @@ def parse_expr( expr: str, - placeholders: Optional[Dict[str, ast.Expr]] = None, + placeholders: Optional[dict[str, ast.Expr]] = None, start: Optional[int] = 0, timings: Optional[HogQLTimings] = None, *, @@ -65,7 +66,7 @@ def parse_expr( def parse_order_expr( order_expr: str, - placeholders: Optional[Dict[str, ast.Expr]] = None, + placeholders: Optional[dict[str, ast.Expr]] = None, timings: Optional[HogQLTimings] = None, *, backend: Optional[Literal["python", "cpp"]] = None, @@ -85,7 +86,7 @@ def parse_order_expr( def parse_select( statement: str, - placeholders: Optional[Dict[str, ast.Expr]] = None, + placeholders: Optional[dict[str, ast.Expr]] = None, timings: Optional[HogQLTimings] = None, *, backend: Optional[Literal["python", "cpp"]] = None, @@ -159,10 +160,10 @@ def visitSelect(self, ctx: HogQLParser.SelectContext): return self.visit(ctx.selectUnionStmt() or ctx.selectStmt() or ctx.hogqlxTagElement()) def visitSelectUnionStmt(self, 
ctx: HogQLParser.SelectUnionStmtContext): - select_queries: List[ast.SelectQuery | ast.SelectUnionQuery] = [ + select_queries: list[ast.SelectQuery | ast.SelectUnionQuery] = [ self.visit(select) for select in ctx.selectStmtWithParens() ] - flattened_queries: List[ast.SelectQuery] = [] + flattened_queries: list[ast.SelectQuery] = [] for query in select_queries: if isinstance(query, ast.SelectQuery): flattened_queries.append(query) @@ -771,7 +772,7 @@ def visitColumnLambdaExpr(self, ctx: HogQLParser.ColumnLambdaExprContext): ) def visitWithExprList(self, ctx: HogQLParser.WithExprListContext): - ctes: Dict[str, ast.CTE] = {} + ctes: dict[str, ast.CTE] = {} for expr in ctx.withExpr(): cte = self.visit(expr) ctes[cte.name] = cte diff --git a/posthog/hogql/placeholders.py b/posthog/hogql/placeholders.py index a09e39fd65680..d0e835fb0d853 100644 --- a/posthog/hogql/placeholders.py +++ b/posthog/hogql/placeholders.py @@ -1,15 +1,15 @@ -from typing import Dict, Optional, List +from typing import Optional from posthog.hogql import ast from posthog.hogql.errors import QueryError from posthog.hogql.visitor import CloningVisitor, TraversingVisitor -def replace_placeholders(node: ast.Expr, placeholders: Optional[Dict[str, ast.Expr]]) -> ast.Expr: +def replace_placeholders(node: ast.Expr, placeholders: Optional[dict[str, ast.Expr]]) -> ast.Expr: return ReplacePlaceholders(placeholders).visit(node) -def find_placeholders(node: ast.Expr) -> List[str]: +def find_placeholders(node: ast.Expr) -> list[str]: finder = FindPlaceholders() finder.visit(node) return list(finder.found) @@ -28,7 +28,7 @@ def visit_placeholder(self, node: ast.Placeholder): class ReplacePlaceholders(CloningVisitor): - def __init__(self, placeholders: Optional[Dict[str, ast.Expr]]): + def __init__(self, placeholders: Optional[dict[str, ast.Expr]]): super().__init__() self.placeholders = placeholders @@ -42,5 +42,5 @@ def visit_placeholder(self, node): return new_node raise QueryError( f"Placeholder {{{node.field}}} is not available in this context. 
You can use the following: " - + ", ".join((f"{placeholder}" for placeholder in self.placeholders)) + + ", ".join(f"{placeholder}" for placeholder in self.placeholders) ) diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index ff4766f86074a..a829697e9007a 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from datetime import datetime, date from difflib import get_close_matches -from typing import List, Literal, Optional, Union, cast +from typing import Literal, Optional, Union, cast from uuid import UUID from posthog.hogql import ast @@ -73,7 +73,7 @@ def print_ast( node: ast.Expr, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, settings: Optional[HogQLGlobalSettings] = None, pretty: bool = False, ) -> str: @@ -92,7 +92,7 @@ def prepare_ast_for_printing( node: ast.Expr, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, settings: Optional[HogQLGlobalSettings] = None, ) -> ast.Expr: with context.timings.measure("create_hogql_database"): @@ -130,7 +130,7 @@ def print_prepared_ast( node: ast.Expr, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, settings: Optional[HogQLGlobalSettings] = None, pretty: bool = False, ) -> str: @@ -158,13 +158,13 @@ def __init__( self, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[AST]] = None, + stack: Optional[list[AST]] = None, settings: Optional[HogQLGlobalSettings] = None, pretty: bool = False, ): self.context = context self.dialect = dialect - self.stack: List[AST] = stack or [] # Keep track of all traversed nodes. + self.stack: list[AST] = stack or [] # Keep track of all traversed nodes. 
self.settings = settings self.pretty = pretty self._indent = -1 @@ -773,7 +773,7 @@ def visit_call(self, node: ast.Call): if self.dialect == "clickhouse": if node.name in FIRST_ARG_DATETIME_FUNCTIONS: - args: List[str] = [] + args: list[str] = [] for idx, arg in enumerate(node.args): if idx == 0: if isinstance(arg, ast.Call) and arg.name in ADD_OR_NULL_DATETIME_FUNCTIONS: @@ -783,7 +783,7 @@ def visit_call(self, node: ast.Call): else: args.append(self.visit(arg)) elif node.name == "concat": - args: List[str] = [] + args: list[str] = [] for arg in node.args: if isinstance(arg, ast.Constant): if arg.value is None: @@ -1002,7 +1002,7 @@ def visit_property_type(self, type: ast.PropertyType): while isinstance(table, ast.TableAliasType): table = table.table_type - args: List[str] = [] + args: list[str] = [] if self.context.modifiers.materializationMode != "disabled": # find a materialized property for the first part of the chain @@ -1094,7 +1094,7 @@ def visit_unknown(self, node: AST): raise ImpossibleASTError(f"Unknown AST node {type(node).__name__}") def visit_window_expr(self, node: ast.WindowExpr): - strings: List[str] = [] + strings: list[str] = [] if node.partition_by is not None: if len(node.partition_by) == 0: raise ImpossibleASTError("PARTITION BY must have at least one argument") @@ -1168,7 +1168,7 @@ def _print_escaped_string(self, name: float | int | str | list | tuple | datetim return escape_clickhouse_string(name, timezone=self._get_timezone()) return escape_hogql_string(name, timezone=self._get_timezone()) - def _unsafe_json_extract_trim_quotes(self, unsafe_field: str, unsafe_args: List[str]) -> str: + def _unsafe_json_extract_trim_quotes(self, unsafe_field: str, unsafe_args: list[str]) -> str: return f"replaceRegexpAll(nullIf(nullIf(JSONExtractRaw({', '.join([unsafe_field, *unsafe_args])}), ''), 'null'), '^\"|\"$', '')" def _get_materialized_column( @@ -1209,7 +1209,7 @@ def _print_settings(self, settings): for key, value in settings: if value is None: continue - if not isinstance(value, (int, float, str)): + if not isinstance(value, int | float | str): raise QueryError(f"Setting {key} must be a string, int, or float") if not re.match(r"^[a-zA-Z0-9_]+$", key): raise QueryError(f"Setting {key} is not supported") diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index 501bc613bd539..824a11bdae94d 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -1,5 +1,5 @@ import re -from typing import List, Optional, Union, cast, Literal +from typing import Optional, Union, cast, Literal from pydantic import BaseModel @@ -34,6 +34,7 @@ PropertyGroupFilterValue, FilterLogicalOperator, RetentionEntity, + EmptyPropertyFilter, ) @@ -118,12 +119,13 @@ def property_to_expr( return ast.And(exprs=[property_to_expr(p, team, scope) for p in property.values]) else: return ast.Or(exprs=[property_to_expr(p, team, scope) for p in property.values]) + elif isinstance(property, EmptyPropertyFilter): + return ast.Constant(value=True) elif isinstance(property, BaseModel): try: property = Property(**property.dict()) except ValueError: # The property was saved as an incomplete object. Instead of crashing the entire query, pretend it's not there. - # TODO: revert this when removing legacy insights? 
return ast.Constant(value=True) else: raise NotImplementedError(f"property_to_expr with property of type {type(property).__name__} not implemented") @@ -237,7 +239,10 @@ def property_to_expr( elif operator == PropertyOperator.regex: return ast.Call( name="ifNull", - args=[ast.Call(name="match", args=[field, ast.Constant(value=value)]), ast.Constant(value=False)], + args=[ + ast.Call(name="match", args=[ast.Call(name="toString", args=[field]), ast.Constant(value=value)]), + ast.Constant(value=False), + ], ) elif operator == PropertyOperator.not_regex: return ast.Call( @@ -245,7 +250,11 @@ def property_to_expr( args=[ ast.Call( name="not", - args=[ast.Call(name="match", args=[field, ast.Constant(value=value)])], + args=[ + ast.Call( + name="match", args=[ast.Call(name="toString", args=[field]), ast.Constant(value=value)] + ) + ], ), ast.Constant(value=True), ], @@ -267,19 +276,32 @@ def property_to_expr( # For Boolean and untyped properties, treat "true" and "false" as boolean values if ( - op == ast.CompareOperationOp.Eq - or op == ast.CompareOperationOp.NotEq + (op == ast.CompareOperationOp.Eq or op == ast.CompareOperationOp.NotEq) and team is not None and (value == "true" or value == "false") ): - property_types = PropertyDefinition.objects.filter( - team=team, - name=property.key, - type=PropertyDefinition.Type.PERSON if property.type == "person" else PropertyDefinition.Type.EVENT, - )[0:1].values_list("property_type", flat=True) - property_type = property_types[0] if property_types else None - - if not property_type or property_type == PropertyType.Boolean: + if property.type == "person": + property_types = PropertyDefinition.objects.filter( + team=team, + name=property.key, + type=PropertyDefinition.Type.PERSON, + ) + elif property.type == "group": + property_types = PropertyDefinition.objects.filter( + team=team, + name=property.key, + type=PropertyDefinition.Type.GROUP, + group_type_index=property.group_type_index, + ) + else: + property_types = PropertyDefinition.objects.filter( + team=team, + name=property.key, + type=PropertyDefinition.Type.EVENT, + ) + property_type = property_types[0].property_type if len(property_types) > 0 else None + + if property_type == PropertyType.Boolean: if value == "true": value = True if value == "false": @@ -360,7 +382,7 @@ def action_to_expr(action: Action) -> ast.Expr: or_queries = [] for step in steps: - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] if step.event: exprs.append(parse_expr("event = {event}", {"event": ast.Constant(value=step.event)})) diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py index 65c0c9d71356f..b42a61b785541 100644 --- a/posthog/hogql/query.py +++ b/posthog/hogql/query.py @@ -1,5 +1,5 @@ import dataclasses -from typing import Dict, Optional, Union, cast +from typing import Optional, Union, cast from posthog.clickhouse.client.connection import Workload from posthog.errors import ExposedCHQueryError @@ -32,7 +32,7 @@ def execute_hogql_query( *, query_type: str = "hogql_query", filters: Optional[HogQLFilters] = None, - placeholders: Optional[Dict[str, ast.Expr]] = None, + placeholders: Optional[dict[str, ast.Expr]] = None, workload: Workload = Workload.ONLINE, settings: Optional[HogQLGlobalSettings] = None, modifiers: Optional[HogQLQueryModifiers] = None, @@ -175,7 +175,7 @@ def execute_hogql_query( except Exception as e: if explain: results, types = None, None - if isinstance(e, (ExposedCHQueryError, ExposedHogQLError)): + if isinstance(e, ExposedCHQueryError | ExposedHogQLError): error = str(e) else: 
error = "Unknown error" diff --git a/posthog/hogql/resolver.py b/posthog/hogql/resolver.py index fce251dc8a08d..5921e5a6f2d94 100644 --- a/posthog/hogql/resolver.py +++ b/posthog/hogql/resolver.py @@ -1,5 +1,5 @@ from datetime import date, datetime -from typing import List, Optional, Any, cast, Literal +from typing import Optional, Any, cast, Literal from uuid import UUID from posthog.hogql import ast @@ -58,7 +58,7 @@ def resolve_types( node: ast.Expr, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], - scopes: Optional[List[ast.SelectQueryType]] = None, + scopes: Optional[list[ast.SelectQueryType]] = None, ) -> ast.Expr: return Resolver(scopes=scopes, context=context, dialect=dialect).visit(node) @@ -66,7 +66,7 @@ def resolve_types( class AliasCollector(TraversingVisitor): def __init__(self): super().__init__() - self.aliases: List[str] = [] + self.aliases: list[str] = [] def visit_alias(self, node: ast.Alias): self.aliases.append(node.alias) @@ -80,11 +80,11 @@ def __init__( self, context: HogQLContext, dialect: Literal["hogql", "clickhouse"] = "clickhouse", - scopes: Optional[List[ast.SelectQueryType]] = None, + scopes: Optional[list[ast.SelectQueryType]] = None, ): super().__init__() # Each SELECT query creates a new scope (type). Store all of them in a list as we traverse the tree. - self.scopes: List[ast.SelectQueryType] = scopes or [] + self.scopes: list[ast.SelectQueryType] = scopes or [] self.current_view_depth: int = 0 self.context = context self.dialect = dialect @@ -214,7 +214,7 @@ def visit_select_query(self, node: ast.SelectQuery): return new_node - def _asterisk_columns(self, asterisk: ast.AsteriskType) -> List[ast.Expr]: + def _asterisk_columns(self, asterisk: ast.AsteriskType) -> list[ast.Expr]: """Expand an asterisk. 
Mutates `select_query.select` and `select_query.type.columns` with the new fields""" if isinstance(asterisk.table_type, ast.BaseTableType): table = asterisk.table_type.resolve_database_table(self.context) @@ -393,13 +393,13 @@ def visit_call(self, node: ast.Call): return self.visit(matches_action(node=node, args=node.args, context=self.context)) node = super().visit_call(node) - arg_types: List[ast.ConstantType] = [] + arg_types: list[ast.ConstantType] = [] for arg in node.args: if arg.type: arg_types.append(arg.type.resolve_constant_type(self.context) or ast.UnknownType()) else: arg_types.append(ast.UnknownType()) - param_types: Optional[List[ast.ConstantType]] = None + param_types: Optional[list[ast.ConstantType]] = None if node.params is not None: param_types = [] for param in node.params: diff --git a/posthog/hogql/resolver_utils.py b/posthog/hogql/resolver_utils.py index 7910a17fdb92e..bfede9538ab64 100644 --- a/posthog/hogql/resolver_utils.py +++ b/posthog/hogql/resolver_utils.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog import schema from posthog.hogql import ast @@ -27,7 +27,7 @@ def lookup_field_by_name(scope: ast.SelectQueryType, name: str, context: HogQLCo return None -def lookup_cte_by_name(scopes: List[ast.SelectQueryType], name: str) -> Optional[ast.CTE]: +def lookup_cte_by_name(scopes: list[ast.SelectQueryType], name: str) -> Optional[ast.CTE]: for scope in reversed(scopes): if scope and scope.ctes and name in scope.ctes: return scope.ctes[name] diff --git a/posthog/hogql/test/__snapshots__/test_query.ambr b/posthog/hogql/test/__snapshots__/test_query.ambr index 6ee77080d738f..cad875820ead1 100644 --- a/posthog/hogql/test/__snapshots__/test_query.ambr +++ b/posthog/hogql/test/__snapshots__/test_query.ambr @@ -5,7 +5,7 @@ SELECT [1, 2, 3], [10, 11, 12][1] LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -19,7 +19,7 @@ SELECT arrayMap(x -> multiply(x, 2), [1, 2, 3]), 1 LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -35,7 +35,7 @@ FROM events WHERE and(equals(events.team_id, 420), equals(events.distinct_id, %(hogql_val_0)s), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, %(hogql_val_1)s), ''), 'null'), '^"|"$', ''), %(hogql_val_2)s), 0)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -62,7 +62,7 @@ FROM events WHERE and(equals(events.team_id, 420), equals(events.distinct_id, %(hogql_val_0)s), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, %(hogql_val_1)s), ''), 'null'), '^"|"$', ''), %(hogql_val_2)s), 0), less(toTimeZone(events.timestamp, %(hogql_val_3)s), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_4)s), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')))) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -89,7 +89,7 @@ FROM events AS e 
WHERE and(equals(e.team_id, 420), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, %(hogql_val_0)s), ''), 'null'), '^"|"$', ''), %(hogql_val_1)s), 0)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -111,7 +111,7 @@ FROM events WHERE equals(events.team_id, 420) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -135,7 +135,7 @@ GROUP BY session_replay_events.session_id) AS s ON equals(s.session_id, nullIf(nullIf(e.`$session_id`, ''), 'null')) WHERE and(equals(e.team_id, 420), isNotNull(nullIf(nullIf(e.`$session_id`, ''), 'null'))) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -166,7 +166,7 @@ GROUP BY session_replay_events.session_id) AS s LEFT JOIN events AS e ON equals(nullIf(nullIf(e.`$session_id`, ''), 'null'), s.session_id) WHERE and(equals(e.team_id, 420), isNotNull(nullIf(nullIf(e.`$session_id`, ''), 'null'))) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -197,7 +197,7 @@ GROUP BY session_replay_events.session_id) AS s ON equals(s.session_id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, %(hogql_val_0)s), ''), 'null'), '^"|"$', '')) WHERE and(equals(e.team_id, 420), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, %(hogql_val_1)s), ''), 'null'), '^"|"$', ''))) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -228,7 +228,7 @@ GROUP BY session_replay_events.session_id) AS s LEFT JOIN events AS e ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, %(hogql_val_0)s), ''), 'null'), '^"|"$', ''), s.session_id) WHERE and(equals(e.team_id, 420), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, %(hogql_val_1)s), ''), 'null'), '^"|"$', ''))) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -266,7 +266,7 @@ HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)) GROUP BY events.event LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -296,7 +296,7 @@ HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0)))) GROUP BY events.event LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -329,7 +329,7 @@ WHERE and(equals(person_static_cohort.team_id, 420), equals(person_static_cohort.cohort_id, XX)))), 0)) GROUP BY events.event LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, 
allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -355,7 +355,7 @@ WHERE and(equals(person_static_cohort.team_id, 420), equals(person_static_cohort.cohort_id, XX))))) GROUP BY events.event LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -378,7 +378,7 @@ WHERE and(equals(events.team_id, 420), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, %(hogql_val_0)s), ''), 'null'), '^"|"$', ''), %(hogql_val_1)s), 0)) GROUP BY events.event LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -406,7 +406,7 @@ SETTINGS optimize_aggregation_in_order=1) AS persons WHERE ifNull(equals(persons.properties___random_uuid, %(hogql_val_2)s), 0) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -429,7 +429,7 @@ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) WHERE equals(e.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -451,7 +451,7 @@ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS events__pdi ON equals(events.distinct_id, events__pdi.distinct_id) WHERE equals(events.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -482,7 +482,7 @@ SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) WHERE equals(e.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -510,7 +510,7 @@ SETTINGS optimize_aggregation_in_order=1) AS events__pdi__person ON equals(events__pdi.events__pdi___person_id, events__pdi__person.id) WHERE equals(events.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -541,7 +541,7 @@ SETTINGS optimize_aggregation_in_order=1) AS events__pdi__person ON equals(events__pdi.events__pdi___person_id, events__pdi__person.id) WHERE equals(events.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -572,7 +572,7 @@ SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) WHERE equals(e.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, 
allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -604,7 +604,7 @@ WHERE equals(s.team_id, 420) GROUP BY s__pdi__person.properties___sneaky_mail LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -627,7 +627,7 @@ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS pdi ON equals(e.distinct_id, pdi.distinct_id) WHERE equals(e.team_id, 420) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -661,7 +661,7 @@ HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS pdi__person ON equals(pdi.pdi___person_id, pdi__person.id) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -688,7 +688,7 @@ HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS pdi__person ON equals(pdi.pdi___person_id, pdi__person.id) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -719,7 +719,7 @@ SETTINGS optimize_aggregation_in_order=1) AS p ON equals(p.id, pdi.person_id) WHERE equals(e.team_id, 420) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -740,7 +740,7 @@ GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS person_distinct_ids LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -771,7 +771,7 @@ SETTINGS optimize_aggregation_in_order=1) AS events__pdi__person ON equals(events__pdi.events__pdi___person_id, events__pdi__person.id) WHERE equals(events.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -788,7 +788,7 @@ FROM events WHERE equals(events.team_id, 420) LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -806,7 +806,7 @@ WHERE equals(s.team_id, 420) GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(s.person_properties, %(hogql_val_1)s), ''), 'null'), '^"|"$', '') LIMIT 10 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -828,7 +828,7 @@ GROUP BY events.event) GROUP BY count, event LIMIT 100 - 
SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -854,7 +854,7 @@ GROUP BY events.event) AS c GROUP BY c.count, c.event LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -883,7 +883,7 @@ GROUP BY col_a) GROUP BY col_a ORDER BY col_a ASC LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -916,7 +916,7 @@ GROUP BY PIVOT_TABLE_COL_ABC.col_a) AS PIVOT_FUNCTION_1 GROUP BY PIVOT_FUNCTION_1.col_a) AS PIVOT_FUNCTION_2 ORDER BY PIVOT_FUNCTION_2.col_a ASC LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -953,7 +953,7 @@ GROUP BY PIVOT_TABLE_COL_ABC.col_a) AS PIVOT_FUNCTION_1 GROUP BY PIVOT_FUNCTION_1.col_a) AS PIVOT_FUNCTION_2) AS final ORDER BY final.col_a ASC LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL diff --git a/posthog/hogql/test/_test_parser.py b/posthog/hogql/test/_test_parser.py index 478958746d601..514914906d015 100644 --- a/posthog/hogql/test/_test_parser.py +++ b/posthog/hogql/test/_test_parser.py @@ -1,4 +1,4 @@ -from typing import Literal, cast, Optional, Dict +from typing import Literal, cast, Optional import math @@ -20,10 +20,10 @@ class TestParser(*base_classes): maxDiff = None - def _expr(self, expr: str, placeholders: Optional[Dict[str, ast.Expr]] = None) -> ast.Expr: + def _expr(self, expr: str, placeholders: Optional[dict[str, ast.Expr]] = None) -> ast.Expr: return clear_locations(parse_expr(expr, placeholders=placeholders, backend=backend)) - def _select(self, query: str, placeholders: Optional[Dict[str, ast.Expr]] = None) -> ast.Expr: + def _select(self, query: str, placeholders: Optional[dict[str, ast.Expr]] = None) -> ast.Expr: return clear_locations(parse_select(query, placeholders=placeholders, backend=backend)) def test_numbers(self): diff --git a/posthog/hogql/test/test_filters.py b/posthog/hogql/test/test_filters.py index 4377f9e12b942..05ac11667ae63 100644 --- a/posthog/hogql/test/test_filters.py +++ b/posthog/hogql/test/test_filters.py @@ -1,4 +1,4 @@ -from typing import Dict, Any, Optional +from typing import Any, Optional from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -18,10 +18,10 @@ class TestFilters(BaseTest): maxDiff = None - def _parse_expr(self, expr: str, placeholders: Optional[Dict[str, Any]] = None): + def _parse_expr(self, expr: str, placeholders: Optional[dict[str, Any]] = None): return clear_locations(parse_expr(expr, placeholders=placeholders)) - def _parse_select(self, select: str, placeholders: Optional[Dict[str, Any]] = None): + def _parse_select(self, select: str, placeholders: Optional[dict[str, Any]] = None): return clear_locations(parse_select(select, placeholders=placeholders)) def _print_ast(self, node: ast.Expr): @@ -63,6 +63,34 @@ def test_replace_filters_date_range(self): "SELECT event FROM events WHERE 
less(timestamp, toDateTime('2020-02-02 00:00:00.000000')) LIMIT 10000", ) + select = replace_filters( + self._parse_select("SELECT event FROM events where {filters}"), + HogQLFilters(dateRange=DateRange(date_from="2020-02-02", date_to="2020-02-03 23:59:59")), + self.team, + ) + self.assertEqual( + self._print_ast(select), + "SELECT event FROM events WHERE " + "and(less(timestamp, toDateTime('2020-02-03 23:59:59.000000')), " + "greaterOrEquals(timestamp, toDateTime('2020-02-02 00:00:00.000000'))) LIMIT 10000", + ) + + # now with different team timezone + self.team.timezone = "America/New_York" + self.team.save() + + select = replace_filters( + self._parse_select("SELECT event FROM events where {filters}"), + HogQLFilters(dateRange=DateRange(date_from="2020-02-02", date_to="2020-02-03 23:59:59")), + self.team, + ) + self.assertEqual( + self._print_ast(select), + "SELECT event FROM events WHERE " + "and(less(timestamp, toDateTime('2020-02-03 23:59:59.000000')), " + "greaterOrEquals(timestamp, toDateTime('2020-02-02 00:00:00.000000'))) LIMIT 10000", + ) + def test_replace_filters_event_property(self): select = replace_filters( self._parse_select("SELECT event FROM events where {filters}"), diff --git a/posthog/hogql/test/test_mapping.py b/posthog/hogql/test/test_mapping.py index b13b2d1c744b2..9af0d9a60e44e 100644 --- a/posthog/hogql/test/test_mapping.py +++ b/posthog/hogql/test/test_mapping.py @@ -23,22 +23,22 @@ def _get_hogql_posthog_function(self, name: str) -> HogQLFunctionMeta: return self._return_present_function(find_hogql_posthog_function(name)) def test_find_case_sensitive_function(self): - self.assertEquals(self._get_hogql_function("toString").clickhouse_name, "toString") - self.assertEquals(find_hogql_function("TOString"), None) - self.assertEquals(find_hogql_function("PlUs"), None) + self.assertEqual(self._get_hogql_function("toString").clickhouse_name, "toString") + self.assertEqual(find_hogql_function("TOString"), None) + self.assertEqual(find_hogql_function("PlUs"), None) - self.assertEquals(self._get_hogql_aggregation("countIf").clickhouse_name, "countIf") - self.assertEquals(find_hogql_aggregation("COUNTIF"), None) + self.assertEqual(self._get_hogql_aggregation("countIf").clickhouse_name, "countIf") + self.assertEqual(find_hogql_aggregation("COUNTIF"), None) - self.assertEquals(self._get_hogql_posthog_function("sparkline").clickhouse_name, "sparkline") - self.assertEquals(find_hogql_posthog_function("SPARKLINE"), None) + self.assertEqual(self._get_hogql_posthog_function("sparkline").clickhouse_name, "sparkline") + self.assertEqual(find_hogql_posthog_function("SPARKLINE"), None) def test_find_case_insensitive_function(self): - self.assertEquals(self._get_hogql_function("CoAlesce").clickhouse_name, "coalesce") + self.assertEqual(self._get_hogql_function("CoAlesce").clickhouse_name, "coalesce") - self.assertEquals(self._get_hogql_aggregation("SuM").clickhouse_name, "sum") + self.assertEqual(self._get_hogql_aggregation("SuM").clickhouse_name, "sum") def test_find_non_existent_function(self): - self.assertEquals(find_hogql_function("functionThatDoesntExist"), None) - self.assertEquals(find_hogql_aggregation("functionThatDoesntExist"), None) - self.assertEquals(find_hogql_posthog_function("functionThatDoesntExist"), None) + self.assertEqual(find_hogql_function("functionThatDoesntExist"), None) + self.assertEqual(find_hogql_aggregation("functionThatDoesntExist"), None) + self.assertEqual(find_hogql_posthog_function("functionThatDoesntExist"), None) diff --git 
a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py index 1a8a2130c5245..f465db20684f3 100644 --- a/posthog/hogql/test/test_printer.py +++ b/posthog/hogql/test/test_printer.py @@ -1,4 +1,4 @@ -from typing import Literal, Optional, Dict +from typing import Literal, Optional import pytest from django.test import override_settings @@ -35,7 +35,7 @@ def _select( self, query: str, context: Optional[HogQLContext] = None, - placeholders: Optional[Dict[str, ast.Expr]] = None, + placeholders: Optional[dict[str, ast.Expr]] = None, ) -> str: return print_ast( parse_select(query, placeholders=placeholders), @@ -1205,7 +1205,7 @@ def test_print_global_settings(self): ) self.assertEqual( printed, - f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1", + f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) def test_print_query_level_settings(self): @@ -1232,7 +1232,7 @@ def test_print_both_settings(self): ) self.assertEqual( printed, - f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS optimize_aggregation_in_order=1, readonly=2, max_execution_time=10, allow_experimental_object_type=1", + f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS optimize_aggregation_in_order=1, readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) def test_pretty_print(self): @@ -1335,7 +1335,7 @@ def test_print_hidden_aliases_timestamp(self): printed, f"SELECT timestamp AS timestamp FROM (SELECT toTimeZone(events.timestamp, %(hogql_val_0)s), " f"toTimeZone(events.timestamp, %(hogql_val_1)s) AS timestamp FROM events WHERE equals(events.team_id, {self.team.pk})) " - f"LIMIT 10000 SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1", + f"LIMIT 10000 SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) def test_print_hidden_aliases_column_override(self): @@ -1350,7 +1350,7 @@ def test_print_hidden_aliases_column_override(self): printed, f"SELECT event AS event FROM (SELECT toTimeZone(events.timestamp, %(hogql_val_0)s) AS event, " f"event FROM events WHERE equals(events.team_id, {self.team.pk})) " - f"LIMIT 10000 SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1", + f"LIMIT 10000 SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) def test_print_hidden_aliases_properties(self): @@ -1373,7 +1373,7 @@ def test_print_hidden_aliases_properties(self): printed, f"SELECT `$browser` AS `$browser` FROM (SELECT nullIf(nullIf(events.`mat_$browser`, ''), 'null') AS `$browser` " f"FROM events WHERE equals(events.team_id, {self.team.pk})) LIMIT 10000 " - f"SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1", + f"SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) def test_print_hidden_aliases_double_property(self): @@ -1397,7 +1397,7 @@ def test_print_hidden_aliases_double_property(self): f"SELECT `$browser` AS `$browser` FROM (SELECT nullIf(nullIf(events.`mat_$browser`, ''), 'null'), " f"nullIf(nullIf(events.`mat_$browser`, ''), 'null') AS `$browser` " # only the second one gets the alias 
f"FROM events WHERE equals(events.team_id, {self.team.pk})) LIMIT 10000 " - f"SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1", + f"SETTINGS readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) def test_lookup_domain_type(self): @@ -1413,7 +1413,7 @@ def test_lookup_domain_type(self): "SELECT dictGetOrNull('channel_definition_dict', 'domain_type', " "(cutToFirstSignificantSubdomain(coalesce(%(hogql_val_0)s, '')), 'source')) " f"FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ), printed, ) @@ -1431,7 +1431,7 @@ def test_lookup_paid_domain_type(self): "SELECT dictGetOrNull('channel_definition_dict', 'type_if_paid', " "(cutToFirstSignificantSubdomain(coalesce(%(hogql_val_0)s, '')), 'source')) " f"FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ), printed, ) @@ -1449,7 +1449,7 @@ def test_lookup_paid_source_type(self): "SELECT dictGetOrNull('channel_definition_dict', 'type_if_paid', " "(coalesce(%(hogql_val_0)s, ''), 'source')) " f"FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ), printed, ) @@ -1467,7 +1467,7 @@ def test_lookup_paid_medium_type(self): "SELECT dictGetOrNull('channel_definition_dict', 'type_if_paid', " "(coalesce(%(hogql_val_0)s, ''), 'medium')) " f"FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ), printed, ) @@ -1485,7 +1485,7 @@ def test_lookup_organic_domain_type(self): "SELECT dictGetOrNull('channel_definition_dict', 'type_if_organic', " "(cutToFirstSignificantSubdomain(coalesce(%(hogql_val_0)s, '')), 'source')) " f"FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ), printed, ) @@ -1503,7 +1503,7 @@ def test_lookup_organic_source_type(self): "SELECT dictGetOrNull('channel_definition_dict', 'type_if_organic', " "(coalesce(%(hogql_val_0)s, ''), 'source')) " f"FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ), printed, ) @@ -1521,7 +1521,7 @@ def test_lookup_organic_medium_type(self): "SELECT dictGetOrNull('channel_definition_dict', 'type_if_organic', " "(coalesce(%(hogql_val_0)s, ''), 'medium')) " f"FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ), 
printed, ) @@ -1572,7 +1572,7 @@ def test_trim_leading_trailing_both(self): ) assert printed == ( "SELECT trim(LEADING %(hogql_val_1)s FROM %(hogql_val_0)s), trim(TRAILING %(hogql_val_3)s FROM %(hogql_val_2)s), trim(BOTH %(hogql_val_5)s FROM %(hogql_val_4)s) LIMIT 10000 SETTINGS " - "readonly=2, max_execution_time=10, allow_experimental_object_type=1" + "readonly=2, max_execution_time=10, allow_experimental_object_type=1, format_csv_allow_double_quotes=0" ) query2 = parse_select("select trimLeft('media', 'xy'), trimRight('media', 'xy'), trim('media', 'xy')") printed2 = print_ast( diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py index 44b740552d8f0..4f6ed2e115066 100644 --- a/posthog/hogql/test/test_property.py +++ b/posthog/hogql/test/test_property.py @@ -1,4 +1,4 @@ -from typing import List, Union, cast, Optional, Dict, Any, Literal +from typing import Union, cast, Optional, Any, Literal from unittest.mock import MagicMock, patch from posthog.constants import PropertyOperatorType, TREND_FILTER_TYPE_ACTIONS, TREND_FILTER_TYPE_EVENTS @@ -24,7 +24,7 @@ ) from posthog.models.property import PropertyGroup from posthog.models.property_definition import PropertyType -from posthog.schema import HogQLPropertyFilter, PropertyOperator, RetentionEntity +from posthog.schema import HogQLPropertyFilter, PropertyOperator, RetentionEntity, EmptyPropertyFilter from posthog.test.base import BaseTest elements_chain_match = lambda x: parse_expr("elements_chain =~ {regex}", {"regex": ast.Constant(value=str(x))}) @@ -46,7 +46,7 @@ def _property_to_expr( def _selector_to_expr(self, selector: str): return clear_locations(selector_to_expr(selector)) - def _parse_expr(self, expr: str, placeholders: Optional[Dict[str, Any]] = None): + def _parse_expr(self, expr: str, placeholders: Optional[dict[str, Any]] = None): return clear_locations(parse_expr(expr, placeholders=placeholders)) def test_has_aggregation(self): @@ -89,6 +89,19 @@ def test_property_to_expr_group(self): self.assertEqual(self._property_to_expr({"type": "group", "key": "a", "value": "b"}), self._parse_expr("1")) + def test_property_to_expr_group_booleans(self): + PropertyDefinition.objects.create( + team=self.team, + name="boolean_prop", + type=PropertyDefinition.Type.GROUP, + group_type_index=0, + property_type=PropertyType.Boolean, + ) + self.assertEqual( + self._property_to_expr({"type": "group", "group_type_index": 0, "key": "boolean_prop", "value": ["true"]}), + self._parse_expr("group_0.properties.boolean_prop = true"), + ) + def test_property_to_expr_event(self): self.assertEqual( self._property_to_expr({"key": "a", "value": "b"}), @@ -140,11 +153,11 @@ def test_property_to_expr_event(self): ) self.assertEqual( self._property_to_expr({"type": "event", "key": "a", "value": ".*", "operator": "regex"}), - self._parse_expr("ifNull(match(properties.a, '.*'), false)"), + self._parse_expr("ifNull(match(toString(properties.a), '.*'), false)"), ) self.assertEqual( self._property_to_expr({"type": "event", "key": "a", "value": ".*", "operator": "not_regex"}), - self._parse_expr("ifNull(not(match(properties.a, '.*')), true)"), + self._parse_expr("ifNull(not(match(toString(properties.a), '.*')), true)"), ) self.assertEqual( self._property_to_expr({"type": "event", "key": "a", "value": [], "operator": "exact"}), @@ -158,6 +171,10 @@ def test_property_to_expr_event(self): self._parse_expr("1"), self._property_to_expr({}), # incomplete event ) + self.assertEqual( + self._parse_expr("1"), + 
self._property_to_expr(EmptyPropertyFilter()), # type: ignore + ) def test_property_to_expr_boolean(self): PropertyDefinition.objects.create( @@ -185,17 +202,19 @@ def test_property_to_expr_boolean(self): ) self.assertEqual( self._property_to_expr( - {"type": "event", "key": "unknown_prop", "value": "true"}, + {"type": "event", "key": "boolean_prop", "value": "false"}, team=self.team, ), - self._parse_expr("properties.unknown_prop = true"), + self._parse_expr("properties.boolean_prop = false"), ) self.assertEqual( self._property_to_expr( - {"type": "event", "key": "boolean_prop", "value": "false"}, + {"type": "event", "key": "unknown_prop", "value": "true"}, team=self.team, ), - self._parse_expr("properties.boolean_prop = false"), + self._parse_expr( + "properties.unknown_prop = 'true'" # We don't have a type for unknown_prop, so string comparison it is + ), ) def test_property_to_expr_event_list(self): @@ -217,7 +236,9 @@ def test_property_to_expr_event_list(self): ) self.assertEqual( self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "regex"}), - self._parse_expr("ifNull(match(properties.a, 'b'), false) or ifNull(match(properties.a, 'c'), false)"), + self._parse_expr( + "ifNull(match(toString(properties.a), 'b'), false) or ifNull(match(toString(properties.a), 'c'), false)" + ), ) # negative self.assertEqual( @@ -245,7 +266,7 @@ def test_property_to_expr_event_list(self): } ), self._parse_expr( - "ifNull(not(match(properties.a, 'b')), true) and ifNull(not(match(properties.a, 'c')), true)" + "ifNull(not(match(toString(properties.a), 'b')), true) and ifNull(not(match(toString(properties.a), 'c')), true)" ), ) @@ -395,7 +416,7 @@ def test_property_groups_combined(self): PropertyGroup( type=PropertyOperatorType.AND, values=cast( - Union[List[Property], List[PropertyGroup]], + Union[list[Property], list[PropertyGroup]], [ Property(type="person", key="a", value="b", operator="exact"), PropertyGroup( diff --git a/posthog/hogql/test/test_query.py b/posthog/hogql/test/test_query.py index 7dc2954380700..c58736727a788 100644 --- a/posthog/hogql/test/test_query.py +++ b/posthog/hogql/test/test_query.py @@ -1012,9 +1012,9 @@ def test_property_access_with_arrays(self): f"FROM events " f"WHERE and(equals(events.team_id, {self.team.pk}), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, %(hogql_val_46)s), ''), 'null'), '^\"|\"$', ''), %(hogql_val_47)s), 0)) " f"LIMIT 100 " - f"SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1", + f"SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0", ) - self.assertEqual(response.results[0], tuple((random_uuid for x in alternatives))) + self.assertEqual(response.results[0], tuple(random_uuid for x in alternatives)) def test_property_access_with_arrays_zero_index_error(self): query = f"SELECT properties.something[0] FROM events" diff --git a/posthog/hogql/test/test_resolver.py b/posthog/hogql/test/test_resolver.py index a5f3b838c39be..7cbd5a60a3245 100644 --- a/posthog/hogql/test/test_resolver.py +++ b/posthog/hogql/test/test_resolver.py @@ -1,5 +1,5 @@ from datetime import timezone, datetime, date -from typing import Optional, Dict, cast +from typing import Optional, cast import pytest from django.test import override_settings from uuid import UUID @@ -28,7 +28,7 @@ class TestResolver(BaseTest): maxDiff = None - def _select(self, query: str, placeholders: Optional[Dict[str, ast.Expr]] = None) -> ast.SelectQuery: + def 
_select(self, query: str, placeholders: Optional[dict[str, ast.Expr]] = None) -> ast.SelectQuery: return cast( ast.SelectQuery, clone_expr(parse_select(query, placeholders=placeholders), clear_locations=True), diff --git a/posthog/hogql/test/test_timings.py b/posthog/hogql/test/test_timings.py index 02f8392da09ca..cfb2259157afa 100644 --- a/posthog/hogql/test/test_timings.py +++ b/posthog/hogql/test/test_timings.py @@ -26,8 +26,8 @@ def test_basic_timing(self): pass results = timings.to_dict() - self.assertAlmostEquals(results["./test"], 0.05) - self.assertAlmostEquals(results["."], 0.15) + self.assertAlmostEqual(results["./test"], 0.05) + self.assertAlmostEqual(results["."], 0.15) def test_no_timing(self): with patch("posthog.hogql.timings.perf_counter", fake_perf_counter): @@ -45,9 +45,9 @@ def test_nested_timing(self): pass results = timings.to_dict() - self.assertAlmostEquals(results["./outer/inner"], 0.05) - self.assertAlmostEquals(results["./outer"], 0.15) - self.assertAlmostEquals(results["."], 0.25) + self.assertAlmostEqual(results["./outer/inner"], 0.05) + self.assertAlmostEqual(results["./outer"], 0.15) + self.assertAlmostEqual(results["."], 0.25) def test_multiple_top_level_timings(self): with patch("posthog.hogql.timings.perf_counter", fake_perf_counter): @@ -59,9 +59,9 @@ def test_multiple_top_level_timings(self): pass results = timings.to_dict() - self.assertAlmostEquals(results["./first"], 0.05) - self.assertAlmostEquals(results["./second"], 0.05) - self.assertAlmostEquals(results["."], 0.25) + self.assertAlmostEqual(results["./first"], 0.05) + self.assertAlmostEqual(results["./second"], 0.05) + self.assertAlmostEqual(results["."], 0.25) def test_deeply_nested_timing(self): with patch("posthog.hogql.timings.perf_counter", fake_perf_counter): @@ -73,10 +73,10 @@ def test_deeply_nested_timing(self): pass results = timings.to_dict() - self.assertAlmostEquals(results["./a/b/c"], 0.05) - self.assertAlmostEquals(results["./a/b"], 0.15) - self.assertAlmostEquals(results["./a"], 0.25) - self.assertAlmostEquals(results["."], 0.35) + self.assertAlmostEqual(results["./a/b/c"], 0.05) + self.assertAlmostEqual(results["./a/b"], 0.15) + self.assertAlmostEqual(results["./a"], 0.25) + self.assertAlmostEqual(results["."], 0.35) def test_overlapping_keys(self): with patch("posthog.hogql.timings.perf_counter", fake_perf_counter): @@ -88,5 +88,5 @@ def test_overlapping_keys(self): pass results = timings.to_dict() - self.assertAlmostEquals(results["./a"], 0.1) - self.assertAlmostEquals(results["."], 0.25) + self.assertAlmostEqual(results["./a"], 0.1) + self.assertAlmostEqual(results["."], 0.25) diff --git a/posthog/hogql/timings.py b/posthog/hogql/timings.py index fca643d640b32..950d0f5bf23ae 100644 --- a/posthog/hogql/timings.py +++ b/posthog/hogql/timings.py @@ -1,6 +1,5 @@ from dataclasses import dataclass, field from time import perf_counter -from typing import Dict, List from contextlib import contextmanager from sentry_sdk import start_span @@ -11,10 +10,10 @@ @dataclass class HogQLTimings: # Completed time in seconds for different parts of the HogQL query - timings: Dict[str, float] = field(default_factory=dict) + timings: dict[str, float] = field(default_factory=dict) # Used for housekeeping - _timing_starts: Dict[str, float] = field(default_factory=dict) + _timing_starts: dict[str, float] = field(default_factory=dict) _timing_pointer: str = "." 
def __post_init__(self): @@ -37,11 +36,11 @@ def measure(self, key: str): if span: span.set_tag("duration_seconds", duration) - def to_dict(self) -> Dict[str, float]: + def to_dict(self) -> dict[str, float]: timings = {**self.timings} for key, start in reversed(self._timing_starts.items()): timings[key] = timings.get(key, 0.0) + (perf_counter() - start) return timings - def to_list(self) -> List[QueryTiming]: + def to_list(self) -> list[QueryTiming]: return [QueryTiming(k=key, t=time) for key, time in self.to_dict().items()] diff --git a/posthog/hogql/transforms/in_cohort.py b/posthog/hogql/transforms/in_cohort.py index d10e393f539e3..67fdd57a7df15 100644 --- a/posthog/hogql/transforms/in_cohort.py +++ b/posthog/hogql/transforms/in_cohort.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Tuple, cast, Literal +from typing import Optional, cast, Literal from posthog.hogql import ast @@ -13,7 +13,7 @@ def resolve_in_cohorts( node: ast.Expr, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, context: HogQLContext = None, ): InCohortResolver(stack=stack, dialect=dialect, context=context).visit(node) @@ -23,13 +23,13 @@ def resolve_in_cohorts_conjoined( node: ast.Expr, dialect: Literal["hogql", "clickhouse"], context: HogQLContext, - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, ): MultipleInCohortResolver(stack=stack, dialect=dialect, context=context).visit(node) class CohortCompareOperationTraverser(TraversingVisitor): - ops: List[ast.CompareOperation] = [] + ops: list[ast.CompareOperation] = [] def __init__(self, expr: ast.Expr): self.ops = [] @@ -50,10 +50,10 @@ def __init__( self, dialect: Literal["hogql", "clickhouse"], context: HogQLContext, - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, ): super().__init__() - self.stack: List[ast.SelectQuery] = stack or [] + self.stack: list[ast.SelectQuery] = stack or [] self.context = context self.dialect = dialect @@ -68,7 +68,7 @@ def visit_select_query(self, node: ast.SelectQuery): self.stack.pop() - def _execute(self, node: ast.SelectQuery, compare_operations: List[ast.CompareOperation]): + def _execute(self, node: ast.SelectQuery, compare_operations: list[ast.CompareOperation]): if len(compare_operations) == 0: return @@ -81,11 +81,11 @@ def _execute(self, node: ast.SelectQuery, compare_operations: List[ast.CompareOp compare_node.right = ast.Constant(value=1) def _resolve_cohorts( - self, compare_operations: List[ast.CompareOperation] - ) -> List[Tuple[int, StaticOrDynamic, int]]: + self, compare_operations: list[ast.CompareOperation] + ) -> list[tuple[int, StaticOrDynamic, int]]: from posthog.models import Cohort - cohorts: List[Tuple[int, StaticOrDynamic, int]] = [] + cohorts: list[tuple[int, StaticOrDynamic, int]] = [] for node in compare_operations: arg = node.right @@ -132,9 +132,9 @@ def _resolve_cohorts( def _add_join( self, - cohorts: List[Tuple[int, StaticOrDynamic, int]], + cohorts: list[tuple[int, StaticOrDynamic, int]], select: ast.SelectQuery, - compare_operations: List[ast.CompareOperation], + compare_operations: list[ast.CompareOperation], ): must_add_join = True last_join = select.select_from @@ -264,11 +264,11 @@ class InCohortResolver(TraversingVisitor): def __init__( self, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, context: HogQLContext = 
None, ): super().__init__() - self.stack: List[ast.SelectQuery] = stack or [] + self.stack: list[ast.SelectQuery] = stack or [] self.context = context self.dialect = dialect diff --git a/posthog/hogql/transforms/lazy_tables.py b/posthog/hogql/transforms/lazy_tables.py index bd3a3550034cd..c010fc13ce408 100644 --- a/posthog/hogql/transforms/lazy_tables.py +++ b/posthog/hogql/transforms/lazy_tables.py @@ -1,5 +1,5 @@ import dataclasses -from typing import Dict, List, Optional, cast, Literal +from typing import Optional, cast, Literal from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -13,7 +13,7 @@ def resolve_lazy_tables( node: ast.Expr, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, context: HogQLContext = None, ): LazyTableResolver(stack=stack, context=context, dialect=dialect).visit(node) @@ -21,7 +21,7 @@ def resolve_lazy_tables( @dataclasses.dataclass class JoinToAdd: - fields_accessed: Dict[str, List[str | int]] + fields_accessed: dict[str, list[str | int]] lazy_join: LazyJoin from_table: str to_table: str @@ -29,7 +29,7 @@ class JoinToAdd: @dataclasses.dataclass class TableToAdd: - fields_accessed: Dict[str, List[str | int]] + fields_accessed: dict[str, list[str | int]] lazy_table: LazyTable @@ -37,13 +37,13 @@ class TableToAdd: class ConstraintOverride: alias: str table_name: str - chain_to_replace: List[str | int] + chain_to_replace: list[str | int] class FieldChainReplacer(TraversingVisitor): - overrides: List[ConstraintOverride] = {} + overrides: list[ConstraintOverride] = {} - def __init__(self, overrides: List[ConstraintOverride]) -> None: + def __init__(self, overrides: list[ConstraintOverride]) -> None: super().__init__() self.overrides = overrides @@ -58,7 +58,7 @@ class LazyFinder(TraversingVisitor): max_type_visits: int = 3 def __init__(self) -> None: - self.visited_field_type_counts: Dict[int, int] = {} + self.visited_field_type_counts: dict[int, int] = {} def visit_lazy_join_type(self, node: ast.LazyJoinType): self.found_lazy = True @@ -80,11 +80,11 @@ class LazyTableResolver(TraversingVisitor): def __init__( self, dialect: Literal["hogql", "clickhouse"], - stack: Optional[List[ast.SelectQuery]] = None, + stack: Optional[list[ast.SelectQuery]] = None, context: HogQLContext = None, ): super().__init__() - self.stack_of_fields: List[List[ast.FieldType | ast.PropertyType]] = [[]] if stack else [] + self.stack_of_fields: list[list[ast.FieldType | ast.PropertyType]] = [[]] if stack else [] self.context = context self.dialect: Literal["hogql", "clickhouse"] = dialect @@ -129,30 +129,30 @@ def visit_select_query(self, node: ast.SelectQuery): assert select_type is not None # Collect each `ast.Field` with `ast.LazyJoinType` - field_collector: List[ast.FieldType | ast.PropertyType] = [] + field_collector: list[ast.FieldType | ast.PropertyType] = [] self.stack_of_fields.append(field_collector) # Collect all visited fields on lazy tables into field_collector super().visit_select_query(node) # Collect all the joins we need to add to the select query - joins_to_add: Dict[str, JoinToAdd] = {} - tables_to_add: Dict[str, TableToAdd] = {} + joins_to_add: dict[str, JoinToAdd] = {} + tables_to_add: dict[str, TableToAdd] = {} # First properties, then fields. This way we always get the smallest units to query first. 
- matched_properties: List[ast.PropertyType | ast.FieldType] = [ + matched_properties: list[ast.PropertyType | ast.FieldType] = [ property for property in field_collector if isinstance(property, ast.PropertyType) ] - matched_fields: List[ast.PropertyType | ast.FieldType] = [ + matched_fields: list[ast.PropertyType | ast.FieldType] = [ field for field in field_collector if isinstance(field, ast.FieldType) ] - sorted_properties: List[ast.PropertyType | ast.FieldType] = matched_properties + matched_fields + sorted_properties: list[ast.PropertyType | ast.FieldType] = matched_properties + matched_fields # Look for tables without requested fields to support cases like `select count() from table` join = node.select_from while join: if join.table is not None and isinstance(join.table.type, ast.LazyTableType): - fields: List[ast.FieldType | ast.PropertyType] = [] + fields: list[ast.FieldType | ast.PropertyType] = [] for field_or_property in field_collector: if isinstance(field_or_property, ast.FieldType): if isinstance(field_or_property.table_type, ast.TableAliasType): @@ -186,7 +186,7 @@ def visit_select_query(self, node: ast.SelectQuery): # Traverse the lazy tables until we reach a real table, collecting them in a list. # Usually there's just one or two. - table_types: List[ast.LazyJoinType | ast.LazyTableType | ast.TableAliasType] = [] + table_types: list[ast.LazyJoinType | ast.LazyTableType | ast.TableAliasType] = [] while ( isinstance(table_type, ast.TableAliasType) or isinstance(table_type, ast.LazyJoinType) @@ -217,12 +217,12 @@ def visit_select_query(self, node: ast.SelectQuery): ) new_join = joins_to_add[to_table] if table_type == field.table_type: - chain: List[str | int] = [] + chain: list[str | int] = [] chain.append(field.name) if property is not None: chain.extend(property.chain) property.joined_subquery_field_name = ( - f"{field.name}___{'___'.join((str(x) for x in property.chain))}" + f"{field.name}___{'___'.join(str(x) for x in property.chain)}" ) new_join.fields_accessed[property.joined_subquery_field_name] = chain else: @@ -241,7 +241,7 @@ def visit_select_query(self, node: ast.SelectQuery): if property is not None: chain.extend(property.chain) property.joined_subquery_field_name = ( - f"{field.name}___{'___'.join((str(x) for x in property.chain))}" + f"{field.name}___{'___'.join(str(x) for x in property.chain)}" ) new_table.fields_accessed[property.joined_subquery_field_name] = chain else: @@ -259,12 +259,12 @@ def visit_select_query(self, node: ast.SelectQuery): ) new_join = joins_to_add[to_table] if table_type == field.table_type: - chain: List[str | int] = [] + chain: list[str | int] = [] chain.append(field.name) if property is not None: chain.extend(property.chain) property.joined_subquery_field_name = ( - f"{field.name}___{'___'.join((str(x) for x in property.chain))}" + f"{field.name}___{'___'.join(str(x) for x in property.chain)}" ) new_join.fields_accessed[property.joined_subquery_field_name] = chain else: @@ -283,7 +283,7 @@ def visit_select_query(self, node: ast.SelectQuery): if property is not None: chain.extend(property.chain) property.joined_subquery_field_name = ( - f"{field.name}___{'___'.join((str(x) for x in property.chain))}" + f"{field.name}___{'___'.join(str(x) for x in property.chain)}" ) new_table.fields_accessed[property.joined_subquery_field_name] = chain else: @@ -291,10 +291,10 @@ def visit_select_query(self, node: ast.SelectQuery): # Make sure we also add fields we will use for the join's "ON" condition into the list of fields accessed. 
# Without this "pdi.person.id" won't work if you did not ALSO select "pdi.person_id" explicitly for the join. - join_constraint_overrides: Dict[str, List[ConstraintOverride]] = {} + join_constraint_overrides: dict[str, list[ConstraintOverride]] = {} - def create_override(table_name: str, field_chain: List[str | int]) -> None: - alias = f"{table_name}___{'___'.join((str(x) for x in field_chain))}" + def create_override(table_name: str, field_chain: list[str | int]) -> None: + alias = f"{table_name}___{'___'.join(str(x) for x in field_chain)}" if table_name in tables_to_add: tables_to_add[table_name].fields_accessed[alias] = field_chain @@ -387,7 +387,7 @@ def create_override(table_name: str, field_chain: List[str | int]) -> None: node.select_from = join_to_add # Collect any fields or properties that may have been added from the join_function with the LazyJoinType - join_field_collector: List[ast.FieldType | ast.PropertyType] = [] + join_field_collector: list[ast.FieldType | ast.PropertyType] = [] self.stack_of_fields.append(join_field_collector) super().visit(join_to_add) self.stack_of_fields.pop() diff --git a/posthog/hogql/transforms/property_types.py b/posthog/hogql/transforms/property_types.py index cc5451bf6bc3a..5627980fa0dfc 100644 --- a/posthog/hogql/transforms/property_types.py +++ b/posthog/hogql/transforms/property_types.py @@ -1,4 +1,4 @@ -from typing import Dict, Set, Literal, Optional, cast +from typing import Literal, Optional, cast from posthog.hogql import ast from posthog.hogql.context import HogQLContext @@ -81,9 +81,9 @@ class PropertyFinder(TraversingVisitor): def __init__(self, context: HogQLContext): super().__init__() - self.person_properties: Set[str] = set() - self.event_properties: Set[str] = set() - self.group_properties: Dict[int, Set[str]] = {} + self.person_properties: set[str] = set() + self.event_properties: set[str] = set() + self.group_properties: dict[int, set[str]] = {} self.found_timestamps = False self.context = context @@ -123,9 +123,9 @@ class PropertySwapper(CloningVisitor): def __init__( self, timezone: str, - event_properties: Dict[str, str], - person_properties: Dict[str, str], - group_properties: Dict[str, str], + event_properties: dict[str, str], + person_properties: dict[str, str], + group_properties: dict[str, str], context: HogQLContext, ): super().__init__(clear_types=False) diff --git a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr index 66816083348f0..5e4695929160d 100644 --- a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr +++ b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr @@ -10,7 +10,7 @@ WHERE and(equals(cohortpeople.team_id, 420), equals(cohortpeople.cohort_id, XX), equals(cohortpeople.version, 0))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) WHERE and(equals(events.team_id, 420), and(1, equals(events.event, %(hogql_val_0)s)), ifNull(equals(__in_cohort.matched, 1), 0)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -34,7 +34,7 @@ WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [13]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0)) LIMIT 100 - SETTINGS readonly=2, 
max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -58,7 +58,7 @@ WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [14]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -84,7 +84,7 @@ HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0)) AS in_cohort__XX ON equals(in_cohort__XX.person_id, events.person_id) WHERE and(equals(events.team_id, 420), ifNull(equals(in_cohort__XX.matched, 1), 0), equals(events.event, %(hogql_val_0)s)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -110,7 +110,7 @@ WHERE and(equals(person_static_cohort.team_id, 420), equals(person_static_cohort.cohort_id, XX))) AS in_cohort__XX ON equals(in_cohort__XX.person_id, events.person_id) WHERE and(equals(events.team_id, 420), ifNull(equals(in_cohort__XX.matched, 1), 0)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL @@ -134,7 +134,7 @@ WHERE and(equals(person_static_cohort.team_id, 420), equals(person_static_cohort.cohort_id, XX))) AS in_cohort__XX ON equals(in_cohort__XX.person_id, events.person_id) WHERE and(equals(events.team_id, 420), ifNull(equals(in_cohort__XX.matched, 1), 0)) LIMIT 100 - SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 + SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0 -- HogQL diff --git a/posthog/hogql_queries/actor_strategies.py b/posthog/hogql_queries/actor_strategies.py index d05661d4eddb6..41cd8d5a1bf3a 100644 --- a/posthog/hogql_queries/actor_strategies.py +++ b/posthog/hogql_queries/actor_strategies.py @@ -1,4 +1,4 @@ -from typing import Dict, List, cast, Literal, Optional +from typing import cast, Literal, Optional from django.db.models import Prefetch @@ -21,19 +21,19 @@ def __init__(self, team: Team, query: ActorsQuery, paginator: HogQLHasMorePagina self.paginator = paginator self.query = query - def get_actors(self, actor_ids) -> Dict[str, Dict]: + def get_actors(self, actor_ids) -> dict[str, dict]: raise NotImplementedError() def get_recordings(self, matching_events) -> dict[str, list[dict]]: return {} - def input_columns(self) -> List[str]: + def input_columns(self) -> list[str]: raise NotImplementedError() - def filter_conditions(self) -> List[ast.Expr]: + def filter_conditions(self) -> list[ast.Expr]: return [] - def order_by(self) -> Optional[List[ast.OrderExpr]]: + def order_by(self) -> Optional[list[ast.OrderExpr]]: return None @@ -42,7 +42,7 @@ class PersonStrategy(ActorStrategy): origin = "persons" origin_id = "id" - def get_actors(self, actor_ids) -> Dict[str, Dict]: + def get_actors(self, actor_ids) -> dict[str, dict]: return { str(p.uuid): { "id": p.uuid, @@ -58,11 +58,11 @@ def get_actors(self, actor_ids) -> Dict[str, Dict]: def get_recordings(self, matching_events) -> 
dict[str, list[dict]]: return RecordingsHelper(self.team).get_recordings(matching_events) - def input_columns(self) -> List[str]: + def input_columns(self) -> list[str]: return ["person", "id", "created_at", "person.$delete"] - def filter_conditions(self) -> List[ast.Expr]: - where_exprs: List[ast.Expr] = [] + def filter_conditions(self) -> list[ast.Expr]: + where_exprs: list[ast.Expr] = [] if self.query.properties: where_exprs.append(property_to_expr(self.query.properties, self.team, scope="person")) @@ -98,7 +98,7 @@ def filter_conditions(self) -> List[ast.Expr]: ) return where_exprs - def order_by(self) -> Optional[List[ast.OrderExpr]]: + def order_by(self) -> Optional[list[ast.OrderExpr]]: if self.query.orderBy not in [["person"], ["person DESC"], ["person ASC"]]: return None @@ -125,7 +125,7 @@ def __init__(self, group_type_index: int, **kwargs): self.group_type_index = group_type_index super().__init__(**kwargs) - def get_actors(self, actor_ids) -> Dict[str, Dict]: + def get_actors(self, actor_ids) -> dict[str, dict]: return { str(p["group_key"]): { "id": p["group_key"], @@ -140,11 +140,11 @@ def get_actors(self, actor_ids) -> Dict[str, Dict]: .iterator(chunk_size=self.paginator.limit) } - def input_columns(self) -> List[str]: + def input_columns(self) -> list[str]: return ["group"] - def filter_conditions(self) -> List[ast.Expr]: - where_exprs: List[ast.Expr] = [] + def filter_conditions(self) -> list[ast.Expr]: + where_exprs: list[ast.Expr] = [] if self.query.search is not None and self.query.search != "": where_exprs.append( @@ -166,7 +166,7 @@ def filter_conditions(self) -> List[ast.Expr]: return where_exprs - def order_by(self) -> Optional[List[ast.OrderExpr]]: + def order_by(self) -> Optional[list[ast.OrderExpr]]: if self.query.orderBy not in [["group"], ["group DESC"], ["group ASC"]]: return None diff --git a/posthog/hogql_queries/actors_query_runner.py b/posthog/hogql_queries/actors_query_runner.py index da2e142bf6636..8224067c24d36 100644 --- a/posthog/hogql_queries/actors_query_runner.py +++ b/posthog/hogql_queries/actors_query_runner.py @@ -1,6 +1,7 @@ import itertools from datetime import timedelta -from typing import List, Generator, Sequence, Iterator, Optional +from typing import Optional +from collections.abc import Generator, Sequence, Iterator from posthog.hogql import ast from posthog.hogql.parser import parse_expr, parse_order_expr from posthog.hogql.property import has_aggregation @@ -53,7 +54,7 @@ def enrich_with_actors( actors_lookup, recordings_column_index: Optional[int], recordings_lookup: Optional[dict[str, list[dict]]], - ) -> Generator[List, None, None]: + ) -> Generator[list, None, None]: for result in results: new_row = list(result) actor_id = str(result[actor_column_index]) @@ -70,9 +71,7 @@ def prepare_recordings(self, column_name, input_columns): return None, None column_index_events = input_columns.index("matched_recordings") - matching_events_list = itertools.chain.from_iterable( - (row[column_index_events] for row in self.paginator.results) - ) + matching_events_list = itertools.chain.from_iterable(row[column_index_events] for row in self.paginator.results) return column_index_events, self.strategy.get_recordings(matching_events_list) def calculate(self) -> ActorsQueryResponse: @@ -85,7 +84,7 @@ def calculate(self) -> ActorsQueryResponse: ) input_columns = self.input_columns() missing_actors_count = None - results: Sequence[List] | Iterator[List] = self.paginator.results + results: Sequence[list] | Iterator[list] = self.paginator.results 
enrich_columns = filter(lambda column: column in ("person", "group", "actor"), input_columns) for column_name in enrich_columns: @@ -110,14 +109,14 @@ def calculate(self) -> ActorsQueryResponse: **self.paginator.response_params(), ) - def input_columns(self) -> List[str]: + def input_columns(self) -> list[str]: if self.query.select: return self.query.select return self.strategy.input_columns() # TODO: Figure out a more sure way of getting the actor id than using the alias or chain name - def source_id_column(self, source_query: ast.SelectQuery | ast.SelectUnionQuery) -> List[str]: + def source_id_column(self, source_query: ast.SelectQuery | ast.SelectUnionQuery) -> list[str]: # Figure out the id column of the source query, first column that has id in the name if isinstance(source_query, ast.SelectQuery): select = source_query.select diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py index 191abd080d2e2..9dc329e9e464d 100644 --- a/posthog/hogql_queries/events_query_runner.py +++ b/posthog/hogql_queries/events_query_runner.py @@ -1,6 +1,6 @@ import json from datetime import timedelta -from typing import Dict, List, Optional +from typing import Optional from dateutil.parser import isoparse from django.db.models import Prefetch @@ -19,7 +19,7 @@ from posthog.models.element import chain_to_elements from posthog.models.person.person import get_distinct_ids_for_subquery from posthog.models.person.util import get_persons_by_distinct_ids -from posthog.schema import EventsQuery, EventsQueryResponse +from posthog.schema import DashboardFilter, EventsQuery, EventsQueryResponse from posthog.utils import relative_date_parse # Allow-listed fields returned when you select "*" from events. Person and group fields will be nested later. @@ -53,8 +53,8 @@ def to_query(self) -> ast.SelectQuery: with self.timings.measure("build_ast"): # columns & group_by with self.timings.measure("columns"): - select_input: List[str] = [] - person_indices: List[int] = [] + select_input: list[str] = [] + person_indices: list[int] = [] for index, col in enumerate(self.select_input_raw()): # Selecting a "*" expands the list of columns, resulting in a table that's not what we asked for. # Instead, ask for a tuple with all the columns we want. Later transform this back into a dict. 
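Note: the comment in the hunk above describes rewriting a selected "*" into a tuple of allow-listed columns and later turning each returned tuple back into a dict. A minimal sketch of that idea, with illustrative names (STAR_COLUMNS, expand_star, star_tuple_to_dict) that are not part of the PostHog codebase:

    STAR_COLUMNS = ["uuid", "event", "timestamp", "properties"]

    def expand_star(select: list[str]) -> list[str]:
        # Replace a bare "*" with a tuple of the allow-listed columns,
        # so each selected expression still yields exactly one result column.
        return [
            f"tuple({', '.join(STAR_COLUMNS)})" if col == "*" else col
            for col in select
        ]

    def star_tuple_to_dict(value: tuple) -> dict:
        # Map the returned tuple back onto the column names for the response payload.
        return dict(zip(STAR_COLUMNS, value))

    # expand_star(["*", "count()"]) == ["tuple(uuid, event, timestamp, properties)", "count()"]
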
@@ -66,11 +66,11 @@ def to_query(self) -> ast.SelectQuery: person_indices.append(index) else: select_input.append(col) - select: List[ast.Expr] = [parse_expr(column, timings=self.timings) for column in select_input] + select: list[ast.Expr] = [parse_expr(column, timings=self.timings) for column in select_input] with self.timings.measure("aggregations"): - group_by: List[ast.Expr] = [column for column in select if not has_aggregation(column)] - aggregations: List[ast.Expr] = [column for column in select if has_aggregation(column)] + group_by: list[ast.Expr] = [column for column in select if not has_aggregation(column)] + aggregations: list[ast.Expr] = [column for column in select if has_aggregation(column)] has_any_aggregation = len(aggregations) > 0 # filters @@ -210,7 +210,7 @@ def calculate(self) -> EventsQueryResponse: ).data self.paginator.results[index][star_idx] = new_result - person_indices: List[int] = [] + person_indices: list[int] = [] for index, col in enumerate(self.select_input_raw()): if col.split("--")[0].strip() == "person": person_indices.append(index) @@ -222,7 +222,7 @@ def calculate(self) -> EventsQueryResponse: distinct_ids = list({event[person_idx] for event in self.paginator.results}) persons = get_persons_by_distinct_ids(self.team.pk, distinct_ids) persons = persons.prefetch_related(Prefetch("persondistinctid_set", to_attr="distinct_ids_cache")) - distinct_to_person: Dict[str, Person] = {} + distinct_to_person: dict[str, Person] = {} for person in persons: if person: for person_distinct_id in person.distinct_ids: @@ -256,7 +256,19 @@ def calculate(self) -> EventsQueryResponse: **self.paginator.response_params(), ) - def select_input_raw(self) -> List[str]: + def apply_dashboard_filters(self, dashboard_filter: DashboardFilter): + new_query = self.query.model_copy() # Shallow copy! 
+ + if dashboard_filter.date_to or dashboard_filter.date_from: + new_query.before = dashboard_filter.date_to + new_query.after = dashboard_filter.date_from + + if dashboard_filter.properties: + new_query.properties = (new_query.properties or []) + dashboard_filter.properties + + return new_query + + def select_input_raw(self) -> list[str]: return ["*"] if len(self.query.select) == 0 else self.query.select def _is_stale(self, cached_result_package): diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py index 46b4c105a4336..3a9a0b62efd98 100644 --- a/posthog/hogql_queries/hogql_query_runner.py +++ b/posthog/hogql_queries/hogql_query_runner.py @@ -1,5 +1,6 @@ from datetime import timedelta -from typing import Callable, Dict, Optional, cast +from typing import Optional, cast +from collections.abc import Callable from posthog.clickhouse.client.connection import Workload from posthog.hogql import ast @@ -26,7 +27,7 @@ class HogQLQueryRunner(QueryRunner): def to_query(self) -> ast.SelectQuery: if self.timings is None: self.timings = HogQLTimings() - values: Optional[Dict[str, ast.Expr]] = ( + values: Optional[dict[str, ast.Expr]] = ( {key: ast.Constant(value=value) for key, value in self.query.values.items()} if self.query.values else None ) with self.timings.measure("parse_select"): diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index 1dade0de4b052..40614464f1361 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -1,6 +1,6 @@ from abc import ABC from functools import cached_property -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Optional, Union, cast import uuid from posthog.clickhouse.materialized_columns.column import ColumnName from posthog.hogql import ast @@ -37,14 +37,14 @@ class FunnelBase(ABC): context: FunnelQueryContext - _extra_event_fields: List[ColumnName] - _extra_event_properties: List[PropertyName] + _extra_event_fields: list[ColumnName] + _extra_event_properties: list[PropertyName] def __init__(self, context: FunnelQueryContext): self.context = context - self._extra_event_fields: List[ColumnName] = [] - self._extra_event_properties: List[PropertyName] = [] + self._extra_event_fields: list[ColumnName] = [] + self._extra_event_properties: list[PropertyName] = [] if ( hasattr(self.context, "actorsQuery") @@ -86,7 +86,7 @@ def get_step_counts_without_aggregation_query(self) -> ast.SelectQuery: raise NotImplementedError() @cached_property - def breakdown_cohorts(self) -> List[Cohort]: + def breakdown_cohorts(self) -> list[Cohort]: team, breakdown = self.context.team, self.context.breakdown if isinstance(breakdown, list): @@ -97,7 +97,7 @@ def breakdown_cohorts(self) -> List[Cohort]: return list(cohorts) @cached_property - def breakdown_cohorts_ids(self) -> List[int]: + def breakdown_cohorts_ids(self) -> list[int]: breakdown = self.context.breakdown ids = [int(cohort.pk) for cohort in self.breakdown_cohorts] @@ -108,7 +108,7 @@ def breakdown_cohorts_ids(self) -> List[int]: return ids @cached_property - def breakdown_values(self) -> List[int] | List[str] | List[List[str]]: + def breakdown_values(self) -> list[int] | list[str] | list[list[str]]: # """ # Returns the top N breakdown prop values for event/person breakdown @@ -169,7 +169,7 @@ def breakdown_values(self) -> List[int] | List[str] | List[List[str]]: else: prop_exprs = [] - where_exprs: List[ast.Expr | None] = [ 
+ where_exprs: list[ast.Expr | None] = [ # entity filter entity_expr, # prop filter @@ -209,7 +209,7 @@ def breakdown_values(self) -> List[int] | List[str] | List[List[str]]: raise ValidationError("Apologies, there has been an error computing breakdown values.") return [row[0] for row in results[0:breakdown_limit_or_default]] - def _get_breakdown_select_prop(self) -> List[ast.Expr]: + def _get_breakdown_select_prop(self) -> list[ast.Expr]: breakdown, breakdownAttributionType, funnelsFilter = ( self.context.breakdown, self.context.breakdownAttributionType, @@ -296,7 +296,7 @@ def _get_breakdown_expr(self) -> ast.Expr: def _format_results( self, results - ) -> Union[FunnelTimeToConvertResults, List[Dict[str, Any]], List[List[Dict[str, Any]]]]: + ) -> Union[FunnelTimeToConvertResults, list[dict[str, Any]], list[list[dict[str, Any]]]]: breakdown = self.context.breakdown if not results or len(results) == 0: @@ -387,9 +387,9 @@ def _serialize_step( step: ActionsNode | EventsNode | DataWarehouseNode, count: int, index: int, - people: Optional[List[uuid.UUID]] = None, + people: Optional[list[uuid.UUID]] = None, sampling_factor: Optional[float] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: action_id: Optional[str | int] if isinstance(step, EventsNode): name = step.event @@ -419,7 +419,7 @@ def extra_event_fields_and_properties(self): def _get_inner_event_query( self, - entities: List[EntityNode] | None = None, + entities: list[EntityNode] | None = None, entity_name="events", skip_entity_filter=False, skip_step_filter=False, @@ -433,7 +433,7 @@ def _get_inner_event_query( ) entities_to_use = entities or query.series - extra_fields: List[str] = [] + extra_fields: list[str] = [] for prop in self.context.includeProperties: extra_fields.append(prop) @@ -450,7 +450,7 @@ def _get_inner_event_query( # extra_event_properties=self._extra_event_properties, # ).get_query(entities_to_use, entity_name, skip_entity_filter=skip_entity_filter) - all_step_cols: List[ast.Expr] = [] + all_step_cols: list[ast.Expr] = [] for index, entity in enumerate(entities_to_use): step_cols = self._get_step_col(entity, index, entity_name) all_step_cols.extend(step_cols) @@ -489,7 +489,7 @@ def _get_inner_event_query( def _get_cohort_breakdown_join(self) -> ast.JoinExpr: breakdown = self.context.breakdown - cohort_queries: List[ast.SelectQuery] = [] + cohort_queries: list[ast.SelectQuery] = [] for cohort in self.breakdown_cohorts: query = parse_select( @@ -564,7 +564,7 @@ def _add_breakdown_attribution_subquery(self, inner_query: ast.SelectQuery) -> a return query def _get_steps_conditions(self, length: int) -> ast.Expr: - step_conditions: List[ast.Expr] = [] + step_conditions: list[ast.Expr] = [] for index in range(length): step_conditions.append(parse_expr(f"step_{index} = 1")) @@ -580,10 +580,10 @@ def _get_step_col( index: int, entity_name: str, step_prefix: str = "", - ) -> List[ast.Expr]: + ) -> list[ast.Expr]: # step prefix is used to distinguish actual steps, and exclusion steps # without the prefix, we get the same parameter binding for both, which borks things up - step_cols: List[ast.Expr] = [] + step_cols: list[ast.Expr] = [] condition = self._build_step_query(entity, index, entity_name, step_prefix) step_cols.append( parse_expr(f"if({{condition}}, 1, 0) as {step_prefix}step_{index}", placeholders={"condition": condition}) @@ -626,7 +626,7 @@ def _build_step_query( else: return event_expr - def _get_timestamp_outer_select(self) -> List[ast.Expr]: + def _get_timestamp_outer_select(self) -> list[ast.Expr]: if 
self.context.includePrecedingTimestamp: return [ast.Field(chain=["max_timestamp"]), ast.Field(chain=["min_timestamp"])] elif self.context.includeTimestamp: @@ -646,7 +646,7 @@ def _get_funnel_person_step_condition(self) -> ast.Expr: funnelCustomSteps = actorsQuery.funnelCustomSteps funnelStepBreakdown = actorsQuery.funnelStepBreakdown - conditions: List[ast.Expr] = [] + conditions: list[ast.Expr] = [] if funnelCustomSteps: conditions.append(parse_expr(f"steps IN {funnelCustomSteps}")) @@ -673,7 +673,7 @@ def _get_funnel_person_step_condition(self) -> ast.Expr: return ast.And(exprs=conditions) - def _get_funnel_person_step_events(self) -> List[ast.Expr]: + def _get_funnel_person_step_events(self) -> list[ast.Expr]: if ( hasattr(self.context, "actorsQuery") and self.context.actorsQuery is not None @@ -694,23 +694,23 @@ def _get_funnel_person_step_events(self) -> List[ast.Expr]: return [parse_expr(f"step_{matching_events_step_num}_matching_events as matching_events")] return [] - def _get_count_columns(self, max_steps: int) -> List[ast.Expr]: - exprs: List[ast.Expr] = [] + def _get_count_columns(self, max_steps: int) -> list[ast.Expr]: + exprs: list[ast.Expr] = [] for i in range(max_steps): exprs.append(parse_expr(f"countIf(steps = {i + 1}) step_{i + 1}")) return exprs - def _get_step_time_names(self, max_steps: int) -> List[ast.Expr]: - exprs: List[ast.Expr] = [] + def _get_step_time_names(self, max_steps: int) -> list[ast.Expr]: + exprs: list[ast.Expr] = [] for i in range(1, max_steps): exprs.append(parse_expr(f"step_{i}_conversion_time")) return exprs - def _get_final_matching_event(self, max_steps: int) -> List[ast.Expr]: + def _get_final_matching_event(self, max_steps: int) -> list[ast.Expr]: statement = None for i in range(max_steps - 1, -1, -1): if i == max_steps - 1: @@ -721,7 +721,7 @@ def _get_final_matching_event(self, max_steps: int) -> List[ast.Expr]: statement = f"if(isNull(latest_{i}),step_{i-1}_matching_event,{statement})" return [parse_expr(f"{statement} as final_matching_event")] if statement else [] - def _get_matching_events(self, max_steps: int) -> List[ast.Expr]: + def _get_matching_events(self, max_steps: int) -> list[ast.Expr]: if ( hasattr(self.context, "actorsQuery") and self.context.actorsQuery is not None @@ -737,8 +737,8 @@ def _get_matching_events(self, max_steps: int) -> List[ast.Expr]: return [*events, *self._get_final_matching_event(max_steps)] return [] - def _get_matching_event_arrays(self, max_steps: int) -> List[ast.Expr]: - exprs: List[ast.Expr] = [] + def _get_matching_event_arrays(self, max_steps: int) -> list[ast.Expr]: + exprs: list[ast.Expr] = [] if ( hasattr(self.context, "actorsQuery") and self.context.actorsQuery is not None @@ -749,8 +749,8 @@ def _get_matching_event_arrays(self, max_steps: int) -> List[ast.Expr]: exprs.append(parse_expr(f"groupArray(10)(final_matching_event) as final_matching_events")) return exprs - def _get_step_time_avgs(self, max_steps: int, inner_query: bool = False) -> List[ast.Expr]: - exprs: List[ast.Expr] = [] + def _get_step_time_avgs(self, max_steps: int, inner_query: bool = False) -> list[ast.Expr]: + exprs: list[ast.Expr] = [] for i in range(1, max_steps): exprs.append( @@ -761,8 +761,8 @@ def _get_step_time_avgs(self, max_steps: int, inner_query: bool = False) -> List return exprs - def _get_step_time_median(self, max_steps: int, inner_query: bool = False) -> List[ast.Expr]: - exprs: List[ast.Expr] = [] + def _get_step_time_median(self, max_steps: int, inner_query: bool = False) -> list[ast.Expr]: + exprs: 
list[ast.Expr] = [] for i in range(1, max_steps): exprs.append( @@ -773,7 +773,7 @@ def _get_step_time_median(self, max_steps: int, inner_query: bool = False) -> Li return exprs - def _get_timestamp_selects(self) -> Tuple[List[ast.Expr], List[ast.Expr]]: + def _get_timestamp_selects(self) -> tuple[list[ast.Expr], list[ast.Expr]]: """ Returns timestamp selectors for the target step and optionally the preceding step. In the former case, always returns the timestamp for the first and last step as well. @@ -829,11 +829,11 @@ def _get_timestamp_selects(self) -> Tuple[List[ast.Expr], List[ast.Expr]]: else: return [], [] - def _get_step_times(self, max_steps: int) -> List[ast.Expr]: + def _get_step_times(self, max_steps: int) -> list[ast.Expr]: windowInterval = self.context.funnelWindowInterval windowIntervalUnit = funnel_window_interval_unit_to_sql(self.context.funnelWindowIntervalUnit) - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] for i in range(1, max_steps): exprs.append( @@ -844,12 +844,12 @@ def _get_step_times(self, max_steps: int) -> List[ast.Expr]: return exprs - def _get_partition_cols(self, level_index: int, max_steps: int) -> List[ast.Expr]: + def _get_partition_cols(self, level_index: int, max_steps: int) -> list[ast.Expr]: query, funnelsFilter = self.context.query, self.context.funnelsFilter exclusions = funnelsFilter.exclusions series = query.series - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] for i in range(0, max_steps): exprs.append(ast.Field(chain=[f"step_{i}"])) @@ -894,7 +894,7 @@ def _get_partition_cols(self, level_index: int, max_steps: int) -> List[ast.Expr return exprs - def _get_breakdown_prop_expr(self, group_remaining=False) -> List[ast.Expr]: + def _get_breakdown_prop_expr(self, group_remaining=False) -> list[ast.Expr]: # SEE BELOW for a string implementation of the following breakdown, breakdownType = self.context.breakdown, self.context.breakdownType @@ -938,7 +938,7 @@ def _get_breakdown_prop(self, group_remaining=False) -> str: else: return "" - def _get_breakdown_conditions(self) -> Optional[List[int] | List[str] | List[List[str]]]: + def _get_breakdown_conditions(self) -> Optional[list[int] | list[str] | list[list[str]]]: """ For people, pagination sets the offset param, which is common across filters and gives us the wrong breakdown values here, so we override it. 
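Note: the docstring above states that the pagination offset set for the people listing must not be reused when computing breakdown values. A minimal sketch of that override, using hypothetical names (filter_params, offset) rather than PostHog's actual filter objects:

    def breakdown_filter_params(filter_params: dict) -> dict:
        # Copy the shared filter params and reset the offset: the offset paginates
        # the people results, and reusing it here would skip breakdown values.
        params = dict(filter_params)
        params["offset"] = 0
        return params

    # breakdown_filter_params({"breakdown": "$browser", "offset": 100})
    # == {"breakdown": "$browser", "offset": 0}
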
@@ -957,7 +957,7 @@ def _query_has_array_breakdown(self) -> bool: breakdown, breakdownType = self.context.breakdown, self.context.breakdownType return not isinstance(breakdown, str) and breakdownType != "cohort" - def _get_exclusion_condition(self) -> List[ast.Expr]: + def _get_exclusion_condition(self) -> list[ast.Expr]: funnelsFilter = self.context.funnelsFilter windowInterval = self.context.funnelWindowInterval windowIntervalUnit = funnel_window_interval_unit_to_sql(self.context.funnelWindowIntervalUnit) @@ -965,7 +965,7 @@ def _get_exclusion_condition(self) -> List[ast.Expr]: if not funnelsFilter.exclusions: return [] - conditions: List[ast.Expr] = [] + conditions: list[ast.Expr] = [] for exclusion_id, exclusion in enumerate(funnelsFilter.exclusions): from_time = f"latest_{exclusion.funnelFromStep}" @@ -995,7 +995,7 @@ def _get_sorting_condition(self, curr_index: int, max_steps: int) -> ast.Expr: if curr_index == 1: return ast.Constant(value=1) - conditions: List[ast.Expr] = [] + conditions: list[ast.Expr] = [] for i in range(1, curr_index): duplicate_event = is_equal(series[i], series[i - 1]) or is_superset(series[i], series[i - 1]) @@ -1016,8 +1016,8 @@ def _get_sorting_condition(self, curr_index: int, max_steps: int) -> ast.Expr: ], ) - def _get_person_and_group_properties(self, aggregate: bool = False) -> List[ast.Expr]: - exprs: List[ast.Expr] = [] + def _get_person_and_group_properties(self, aggregate: bool = False) -> list[ast.Expr]: + exprs: list[ast.Expr] = [] for prop in self.context.includeProperties: exprs.append(parse_expr(f"any({prop}) as {prop}") if aggregate else parse_expr(prop)) diff --git a/posthog/hogql_queries/insights/funnels/funnel.py b/posthog/hogql_queries/insights/funnels/funnel.py index b5ce2bb7faf53..1975645d753e1 100644 --- a/posthog/hogql_queries/insights/funnels/funnel.py +++ b/posthog/hogql_queries/insights/funnels/funnel.py @@ -1,5 +1,3 @@ -from typing import List - from posthog.hogql import ast from posthog.hogql.parser import parse_expr from posthog.hogql_queries.insights.funnels.base import FunnelBase @@ -35,7 +33,7 @@ def get_query(self): breakdown_exprs = self._get_breakdown_prop_expr() - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ *self._get_count_columns(max_steps), *self._get_step_time_avgs(max_steps), *self._get_step_time_median(max_steps), @@ -54,13 +52,13 @@ def get_step_counts_query(self): inner_timestamps, outer_timestamps = self._get_timestamp_selects() person_and_group_properties = self._get_person_and_group_properties(aggregate=True) - group_by_columns: List[ast.Expr] = [ + group_by_columns: list[ast.Expr] = [ ast.Field(chain=["aggregation_target"]), ast.Field(chain=["steps"]), *breakdown_exprs, ] - outer_select: List[ast.Expr] = [ + outer_select: list[ast.Expr] = [ *group_by_columns, *self._get_step_time_avgs(max_steps, inner_query=True), *self._get_step_time_median(max_steps, inner_query=True), @@ -74,7 +72,7 @@ def get_step_counts_query(self): f"max(steps) over (PARTITION BY aggregation_target {self._get_breakdown_prop()}) as max_steps" ) - inner_select: List[ast.Expr] = [ + inner_select: list[ast.Expr] = [ *group_by_columns, max_steps_expr, *self._get_step_time_names(max_steps), @@ -106,7 +104,7 @@ def get_step_counts_without_aggregation_query(self): formatted_query = self._build_step_subquery(2, max_steps) breakdown_exprs = self._get_breakdown_prop_expr() - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Field(chain=["*"]), ast.Alias(alias="steps", expr=self._get_sorting_condition(max_steps, max_steps)), 
*self._get_exclusion_condition(), @@ -135,7 +133,7 @@ def get_step_counts_without_aggregation_query(self): def _build_step_subquery( self, level_index: int, max_steps: int, event_names_alias: str = "events" ) -> ast.SelectQuery: - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Field(chain=["aggregation_target"]), ast.Field(chain=["timestamp"]), ] @@ -175,12 +173,12 @@ def _build_step_subquery( ), ) - def _get_comparison_cols(self, level_index: int, max_steps: int) -> List[ast.Expr]: + def _get_comparison_cols(self, level_index: int, max_steps: int) -> list[ast.Expr]: """ level_index: The current smallest comparison step. Everything before level index is already at the minimum ordered timestamps. """ - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] funnelsFilter = self.context.funnelsFilter exclusions = funnelsFilter.exclusions @@ -225,7 +223,7 @@ def _get_comparison_cols(self, level_index: int, max_steps: int) -> List[ast.Exp return exprs def _get_comparison_at_step(self, index: int, level_index: int) -> ast.Or: - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] for i in range(level_index, index + 1): exprs.append(parse_expr(f"latest_{i} < latest_{level_index - 1}")) diff --git a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py index 04b1115fd38d2..035339c8e02ad 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py @@ -1,6 +1,6 @@ import dataclasses from datetime import timedelta -from typing import List, Literal, Optional, Any, Dict, Set, TypedDict, cast +from typing import Literal, Optional, Any, TypedDict, cast from posthog.constants import AUTOCAPTURE_EVENT from posthog.hogql.parser import parse_select @@ -95,7 +95,7 @@ class FunnelCorrelationQueryRunner(QueryRunner): def __init__( self, - query: FunnelCorrelationQuery | Dict[str, Any], + query: FunnelCorrelationQuery | dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, @@ -132,7 +132,7 @@ def __init__( # Used for generating the funnel persons cte funnel_order_actor_class = get_funnel_actor_class(self.context.funnelsFilter)(context=self.context) assert isinstance( - funnel_order_actor_class, (FunnelActors, FunnelStrictActors, FunnelUnorderedActors) + funnel_order_actor_class, FunnelActors | FunnelStrictActors | FunnelUnorderedActors ) # for typings self._funnel_actors_generator = funnel_order_actor_class @@ -228,7 +228,7 @@ def calculate(self) -> FunnelCorrelationResponse: modifiers=self.modifiers, ) - def _calculate(self) -> tuple[List[EventOddsRatio], bool, str, HogQLQueryResponse]: + def _calculate(self) -> tuple[list[EventOddsRatio], bool, str, HogQLQueryResponse]: query = self.to_query() hogql = to_printed_hogql(query, self.team) @@ -823,8 +823,8 @@ def _get_properties_prop_clause(self): props_str = ", ".join(props) return f"arrayJoin(arrayZip({self.query.funnelCorrelationNames}, [{props_str}])) as prop" - def _get_funnel_step_names(self) -> List[str]: - events: Set[str] = set() + def _get_funnel_step_names(self) -> list[str]: + events: set[str] = set() for entity in self.funnels_query.series: if isinstance(entity, ActionsNode): action = Action.objects.get(pk=int(entity.id), team=self.context.team) @@ -838,8 +838,8 @@ def _get_funnel_step_names(self) -> List[str]: return sorted(events) @property - def 
properties_to_include(self) -> List[str]: - props_to_include: List[str] = [] + def properties_to_include(self) -> list[str]: + props_to_include: list[str] = [] # TODO: implement or remove # if self.query.funnelCorrelationType == FunnelCorrelationResultsType.properties: # assert self.query.funnelCorrelationNames is not None diff --git a/posthog/hogql_queries/insights/funnels/funnel_event_query.py b/posthog/hogql_queries/insights/funnels/funnel_event_query.py index b2fd19083ed75..8acb0f7dea87b 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_event_query.py +++ b/posthog/hogql_queries/insights/funnels/funnel_event_query.py @@ -1,4 +1,4 @@ -from typing import List, Set, Union, Optional +from typing import Union, Optional from posthog.clickhouse.materialized_columns.column import ColumnName from posthog.hogql import ast from posthog.hogql.parser import parse_expr @@ -13,16 +13,16 @@ class FunnelEventQuery: context: FunnelQueryContext - _extra_fields: List[ColumnName] - _extra_event_properties: List[PropertyName] + _extra_fields: list[ColumnName] + _extra_event_properties: list[PropertyName] EVENT_TABLE_ALIAS = "e" def __init__( self, context: FunnelQueryContext, - extra_fields: Optional[List[ColumnName]] = None, - extra_event_properties: Optional[List[PropertyName]] = None, + extra_fields: Optional[list[ColumnName]] = None, + extra_event_properties: Optional[list[PropertyName]] = None, ): if extra_event_properties is None: extra_event_properties = [] @@ -38,12 +38,12 @@ def to_query( # entities=None, # TODO: implement passed in entities when needed skip_entity_filter=False, ) -> ast.SelectQuery: - _extra_fields: List[ast.Expr] = [ + _extra_fields: list[ast.Expr] = [ ast.Alias(alias=field, expr=ast.Field(chain=[self.EVENT_TABLE_ALIAS, field])) for field in self._extra_fields ] - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Alias(alias="timestamp", expr=ast.Field(chain=[self.EVENT_TABLE_ALIAS, "timestamp"])), ast.Alias(alias="aggregation_target", expr=self._aggregation_target_expr()), *_extra_fields, @@ -132,7 +132,7 @@ def _entity_expr(self, skip_entity_filter: bool) -> ast.Expr | None: if skip_entity_filter is True: return None - events: Set[Union[int, str, None]] = set() + events: set[Union[int, str, None]] = set() for node in [*query.series, *exclusions]: if isinstance(node, EventsNode) or isinstance(node, FunnelExclusionEventsNode): @@ -157,5 +157,5 @@ def _entity_expr(self, skip_entity_filter: bool) -> ast.Expr | None: op=ast.CompareOperationOp.In, ) - def _properties_expr(self) -> List[ast.Expr]: + def _properties_expr(self) -> list[ast.Expr]: return Properties(context=self.context).to_exprs() diff --git a/posthog/hogql_queries/insights/funnels/funnel_persons.py b/posthog/hogql_queries/insights/funnels/funnel_persons.py index 68781c6bbd0c8..5fc06a07a7d4d 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_persons.py +++ b/posthog/hogql_queries/insights/funnels/funnel_persons.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.hogql import ast from posthog.hogql_queries.insights.funnels.funnel import Funnel @@ -7,9 +7,9 @@ class FunnelActors(Funnel): def actor_query( self, - extra_fields: Optional[List[str]] = None, + extra_fields: Optional[list[str]] = None, ) -> ast.SelectQuery: - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Alias(alias="actor_id", expr=ast.Field(chain=["aggregation_target"])), *self._get_funnel_person_step_events(), *self._get_timestamp_outer_select(), diff --git 
a/posthog/hogql_queries/insights/funnels/funnel_query_context.py b/posthog/hogql_queries/insights/funnels/funnel_query_context.py index 3b777e3ff8026..499dc3eb9ed4c 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_query_context.py +++ b/posthog/hogql_queries/insights/funnels/funnel_query_context.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Union +from typing import Optional, Union from posthog.hogql.constants import LimitContext from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.query_context import QueryContext @@ -25,7 +25,7 @@ class FunnelQueryContext(QueryContext): interval: IntervalType - breakdown: List[Union[str, int]] | str | int | None + breakdown: list[Union[str, int]] | str | int | None breakdownType: BreakdownType breakdownAttributionType: BreakdownAttributionType @@ -36,7 +36,7 @@ class FunnelQueryContext(QueryContext): includeTimestamp: Optional[bool] includePrecedingTimestamp: Optional[bool] - includeProperties: List[str] + includeProperties: list[str] includeFinalMatchingEvents: Optional[bool] def __init__( @@ -48,7 +48,7 @@ def __init__( limit_context: Optional[LimitContext] = None, include_timestamp: Optional[bool] = None, include_preceding_timestamp: Optional[bool] = None, - include_properties: Optional[List[str]] = None, + include_properties: Optional[list[str]] = None, include_final_matching_events: Optional[bool] = None, ): super().__init__(query=query, team=team, timings=timings, modifiers=modifiers, limit_context=limit_context) @@ -98,7 +98,7 @@ def __init__( "hogql", None, ]: - boxed_breakdown: List[Union[str, int]] = box_value(self.breakdownFilter.breakdown) + boxed_breakdown: list[Union[str, int]] = box_value(self.breakdownFilter.breakdown) self.breakdown = boxed_breakdown else: self.breakdown = self.breakdownFilter.breakdown # type: ignore diff --git a/posthog/hogql_queries/insights/funnels/funnel_strict.py b/posthog/hogql_queries/insights/funnels/funnel_strict.py index 1bea66772a6f5..1b5bf73ad5033 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_strict.py +++ b/posthog/hogql_queries/insights/funnels/funnel_strict.py @@ -1,5 +1,3 @@ -from typing import List - from posthog.hogql import ast from posthog.hogql.parser import parse_expr from posthog.hogql_queries.insights.funnels.base import FunnelBase @@ -11,7 +9,7 @@ def get_query(self): breakdown_exprs = self._get_breakdown_prop_expr() - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ *self._get_count_columns(max_steps), *self._get_step_time_avgs(max_steps), *self._get_step_time_median(max_steps), @@ -30,13 +28,13 @@ def get_step_counts_query(self): inner_timestamps, outer_timestamps = self._get_timestamp_selects() person_and_group_properties = self._get_person_and_group_properties(aggregate=True) - group_by_columns: List[ast.Expr] = [ + group_by_columns: list[ast.Expr] = [ ast.Field(chain=["aggregation_target"]), ast.Field(chain=["steps"]), *breakdown_exprs, ] - outer_select: List[ast.Expr] = [ + outer_select: list[ast.Expr] = [ *group_by_columns, *self._get_step_time_avgs(max_steps, inner_query=True), *self._get_step_time_median(max_steps, inner_query=True), @@ -50,7 +48,7 @@ def get_step_counts_query(self): f"max(steps) over (PARTITION BY aggregation_target {self._get_breakdown_prop()}) as max_steps" ) - inner_select: List[ast.Expr] = [ + inner_select: list[ast.Expr] = [ *group_by_columns, max_steps_expr, *self._get_step_time_names(max_steps), @@ -77,7 +75,7 @@ def get_step_counts_query(self): def 
get_step_counts_without_aggregation_query(self): max_steps = self.context.max_steps - select_inner: List[ast.Expr] = [ + select_inner: list[ast.Expr] = [ ast.Field(chain=["aggregation_target"]), ast.Field(chain=["timestamp"]), *self._get_partition_cols(1, max_steps), @@ -87,7 +85,7 @@ def get_step_counts_without_aggregation_query(self): select_from_inner = self._get_inner_event_query(skip_entity_filter=True, skip_step_filter=True) inner_query = ast.SelectQuery(select=select_inner, select_from=ast.JoinExpr(table=select_from_inner)) - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Field(chain=["*"]), ast.Alias(alias="steps", expr=self._get_sorting_condition(max_steps, max_steps)), *self._get_step_times(max_steps), @@ -101,7 +99,7 @@ def get_step_counts_without_aggregation_query(self): return ast.SelectQuery(select=select, select_from=select_from, where=where) def _get_partition_cols(self, level_index: int, max_steps: int): - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] for i in range(0, max_steps): exprs.append(ast.Field(chain=[f"step_{i}"])) diff --git a/posthog/hogql_queries/insights/funnels/funnel_strict_persons.py b/posthog/hogql_queries/insights/funnels/funnel_strict_persons.py index f55afbd218266..299bd982b972b 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_strict_persons.py +++ b/posthog/hogql_queries/insights/funnels/funnel_strict_persons.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.hogql import ast from posthog.hogql_queries.insights.funnels.funnel_strict import FunnelStrict @@ -7,9 +7,9 @@ class FunnelStrictActors(FunnelStrict): def actor_query( self, - extra_fields: Optional[List[str]] = None, + extra_fields: Optional[list[str]] = None, ) -> ast.SelectQuery: - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Alias(alias="actor_id", expr=ast.Field(chain=["aggregation_target"])), *self._get_funnel_person_step_events(), *self._get_timestamp_outer_select(), diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends.py b/posthog/hogql_queries/insights/funnels/funnel_trends.py index 9d486f1b06196..964f5d05cc6d0 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends.py @@ -1,6 +1,6 @@ from datetime import datetime from itertools import groupby -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from posthog.hogql import ast from posthog.hogql.parser import parse_expr from posthog.hogql_queries.insights.funnels.base import FunnelBase @@ -58,7 +58,7 @@ def __init__(self, context: FunnelQueryContext, just_summarize=False): self.just_summarize = just_summarize self.funnel_order = get_funnel_order_class(self.context.funnelsFilter)(context=self.context) - def _format_results(self, results) -> List[Dict[str, Any]]: + def _format_results(self, results) -> list[dict[str, Any]]: query = self.context.query breakdown_clause = self._get_breakdown_prop() @@ -75,7 +75,7 @@ def _format_results(self, results) -> List[Dict[str, Any]]: if breakdown_clause: if isinstance(period_row[-1], str) or ( - isinstance(period_row[-1], List) and all(isinstance(item, str) for item in period_row[-1]) + isinstance(period_row[-1], list) and all(isinstance(item, str) for item in period_row[-1]) ): serialized_result.update({"breakdown_value": (period_row[-1])}) else: @@ -145,7 +145,7 @@ def get_query(self) -> ast.SelectQuery: breakdown_clause = self._get_breakdown_prop_expr() - data_select: List[ast.Expr] = [ + 
data_select: list[ast.Expr] = [ ast.Field(chain=["entrance_period_start"]), parse_expr(f"countIf({reached_from_step_count_condition}) AS reached_from_step_count"), parse_expr(f"countIf({reached_to_step_count_condition}) AS reached_to_step_count"), @@ -163,10 +163,10 @@ def get_query(self) -> ast.SelectQuery: args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_to))])], ) data_select_from = ast.JoinExpr(table=step_counts) - data_group_by: List[ast.Expr] = [ast.Field(chain=["entrance_period_start"]), *breakdown_clause] + data_group_by: list[ast.Expr] = [ast.Field(chain=["entrance_period_start"]), *breakdown_clause] data_query = ast.SelectQuery(select=data_select, select_from=data_select_from, group_by=data_group_by) - fill_select: List[ast.Expr] = [ + fill_select: list[ast.Expr] = [ ast.Alias( alias="entrance_period_start", expr=ast.ArithmeticOperation( @@ -249,7 +249,7 @@ def get_query(self) -> ast.SelectQuery: ), ) - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Field(chain=["fill", "entrance_period_start"]), ast.Field(chain=["reached_from_step_count"]), ast.Field(chain=["reached_to_step_count"]), @@ -263,7 +263,7 @@ def get_query(self) -> ast.SelectQuery: alias="data", next_join=fill_join, ) - order_by: List[ast.OrderExpr] = [ + order_by: list[ast.OrderExpr] = [ ast.OrderExpr(expr=ast.Field(chain=["fill", "entrance_period_start"]), order="ASC") ] @@ -281,7 +281,7 @@ def get_step_counts_without_aggregation_query( steps_per_person_query = self.funnel_order.get_step_counts_without_aggregation_query() - event_select_clause: List[ast.Expr] = [] + event_select_clause: list[ast.Expr] = [] if ( hasattr(self.context, "actorsQuery") and self.context.actorsQuery is not None @@ -291,7 +291,7 @@ def get_step_counts_without_aggregation_query( breakdown_clause = self._get_breakdown_prop_expr() - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Field(chain=["aggregation_target"]), ast.Alias(alias="entrance_period_start", expr=get_start_of_interval_hogql(interval.value, team=team)), parse_expr("max(steps) AS steps_completed"), @@ -309,7 +309,7 @@ def get_step_counts_without_aggregation_query( if specific_entrance_period_start else None ) - group_by: List[ast.Expr] = [ + group_by: list[ast.Expr] = [ ast.Field(chain=["aggregation_target"]), ast.Field(chain=["entrance_period_start"]), *breakdown_clause, @@ -317,7 +317,7 @@ def get_step_counts_without_aggregation_query( return ast.SelectQuery(select=select, select_from=select_from, where=where, group_by=group_by) - def get_steps_reached_conditions(self) -> Tuple[str, str, str]: + def get_steps_reached_conditions(self) -> tuple[str, str, str]: funnelsFilter, max_steps = self.context.funnelsFilter, self.context.max_steps # How many steps must have been done to count for the denominator of a funnel trends data point diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_persons.py b/posthog/hogql_queries/insights/funnels/funnel_trends_persons.py index c90a9ed576270..c124265ba653e 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends_persons.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends_persons.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import List from rest_framework.exceptions import ValidationError @@ -39,7 +38,7 @@ def __init__(self, context: FunnelQueryContext, just_summarize=False): self.dropOff = actorsQuery.funnelTrendsDropOff self.entrancePeriodStart = entrancePeriodStart - def _get_funnel_person_step_events(self) -> List[ast.Expr]: + def 
_get_funnel_person_step_events(self) -> list[ast.Expr]: if ( hasattr(self.context, "actorsQuery") and self.context.actorsQuery is not None @@ -71,7 +70,7 @@ def actor_query(self) -> ast.SelectQuery: did_not_reach_to_step_count_condition, ) = self.get_steps_reached_conditions() - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Alias(alias="actor_id", expr=ast.Field(chain=["aggregation_target"])), *self._get_funnel_person_step_events(), ] diff --git a/posthog/hogql_queries/insights/funnels/funnel_unordered.py b/posthog/hogql_queries/insights/funnels/funnel_unordered.py index af3ed18d4f82e..4ac87866d7fcc 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_unordered.py +++ b/posthog/hogql_queries/insights/funnels/funnel_unordered.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Optional +from typing import Any, Optional import uuid from rest_framework.exceptions import ValidationError @@ -45,7 +45,7 @@ def get_query(self): breakdown_exprs = self._get_breakdown_prop_expr() - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ *self._get_count_columns(max_steps), *self._get_step_time_avgs(max_steps), *self._get_step_time_median(max_steps), @@ -64,13 +64,13 @@ def get_step_counts_query(self): inner_timestamps, outer_timestamps = self._get_timestamp_selects() person_and_group_properties = self._get_person_and_group_properties(aggregate=True) - group_by_columns: List[ast.Expr] = [ + group_by_columns: list[ast.Expr] = [ ast.Field(chain=["aggregation_target"]), ast.Field(chain=["steps"]), *breakdown_exprs, ] - outer_select: List[ast.Expr] = [ + outer_select: list[ast.Expr] = [ *group_by_columns, *self._get_step_time_avgs(max_steps, inner_query=True), *self._get_step_time_median(max_steps, inner_query=True), @@ -82,7 +82,7 @@ def get_step_counts_query(self): f"max(steps) over (PARTITION BY aggregation_target {self._get_breakdown_prop()}) as max_steps" ) - inner_select: List[ast.Expr] = [ + inner_select: list[ast.Expr] = [ *group_by_columns, max_steps_expr, *self._get_step_time_names(max_steps), @@ -106,7 +106,7 @@ def get_step_counts_query(self): def get_step_counts_without_aggregation_query(self): max_steps = self.context.max_steps - union_queries: List[ast.SelectQuery] = [] + union_queries: list[ast.SelectQuery] = [] entities_to_use = list(self.context.query.series) for i in range(max_steps): @@ -153,11 +153,11 @@ def get_step_counts_without_aggregation_query(self): return ast.SelectUnionQuery(select_queries=union_queries) - def _get_step_times(self, max_steps: int) -> List[ast.Expr]: + def _get_step_times(self, max_steps: int) -> list[ast.Expr]: windowInterval = self.context.funnelWindowInterval windowIntervalUnit = funnel_window_interval_unit_to_sql(self.context.funnelWindowIntervalUnit) - exprs: List[ast.Expr] = [] + exprs: list[ast.Expr] = [] conversion_times_elements = [] for i in range(max_steps): @@ -175,7 +175,7 @@ def _get_step_times(self, max_steps: int) -> List[ast.Expr]: return exprs - def get_sorting_condition(self, max_steps: int) -> List[ast.Expr]: + def get_sorting_condition(self, max_steps: int) -> list[ast.Expr]: windowInterval = self.context.funnelWindowInterval windowIntervalUnit = funnel_window_interval_unit_to_sql(self.context.funnelWindowIntervalUnit) @@ -187,7 +187,7 @@ def get_sorting_condition(self, max_steps: int) -> List[ast.Expr]: conditions.append(parse_expr(f"arraySort([{','.join(event_times_elements)}]) as event_times")) # replacement of latest_i for whatever query part requires it, just like conversion_times - basic_conditions: List[str] 
= [] + basic_conditions: list[str] = [] for i in range(1, max_steps): basic_conditions.append( f"if(latest_0 < latest_{i} AND latest_{i} <= toTimeZone(latest_0, 'UTC') + INTERVAL {windowInterval} {windowIntervalUnit}, 1, 0)" @@ -199,7 +199,7 @@ def get_sorting_condition(self, max_steps: int) -> List[ast.Expr]: else: return [ast.Alias(alias="steps", expr=ast.Constant(value=1))] - def _get_exclusion_condition(self) -> List[ast.Expr]: + def _get_exclusion_condition(self) -> list[ast.Expr]: funnelsFilter = self.context.funnelsFilter windowInterval = self.context.funnelWindowInterval windowIntervalUnit = funnel_window_interval_unit_to_sql(self.context.funnelWindowIntervalUnit) @@ -207,7 +207,7 @@ def _get_exclusion_condition(self) -> List[ast.Expr]: if not funnelsFilter.exclusions: return [] - conditions: List[ast.Expr] = [] + conditions: list[ast.Expr] = [] for exclusion_id, exclusion in enumerate(funnelsFilter.exclusions): from_time = f"latest_{exclusion.funnelFromStep}" @@ -233,9 +233,9 @@ def _serialize_step( step: ActionsNode | EventsNode | DataWarehouseNode, count: int, index: int, - people: Optional[List[uuid.UUID]] = None, + people: Optional[list[uuid.UUID]] = None, sampling_factor: Optional[float] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: if isinstance(step, DataWarehouseNode): raise NotImplementedError("Data Warehouse queries are not supported in funnels") diff --git a/posthog/hogql_queries/insights/funnels/funnel_unordered_persons.py b/posthog/hogql_queries/insights/funnels/funnel_unordered_persons.py index a378f044b5d56..ad1086bdc3324 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_unordered_persons.py +++ b/posthog/hogql_queries/insights/funnels/funnel_unordered_persons.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.hogql import ast from posthog.hogql.parser import parse_expr @@ -6,7 +6,7 @@ class FunnelUnorderedActors(FunnelUnordered): - def _get_funnel_person_step_events(self) -> List[ast.Expr]: + def _get_funnel_person_step_events(self) -> list[ast.Expr]: # Unordered funnels does not support matching events (and thereby recordings), # but it simplifies the logic if we return an empty array for matching events if ( @@ -19,9 +19,9 @@ def _get_funnel_person_step_events(self) -> List[ast.Expr]: def actor_query( self, - extra_fields: Optional[List[str]] = None, + extra_fields: Optional[list[str]] = None, ) -> ast.SelectQuery: - select: List[ast.Expr] = [ + select: list[ast.Expr] = [ ast.Alias(alias="actor_id", expr=ast.Field(chain=["aggregation_target"])), *self._get_funnel_person_step_events(), *self._get_timestamp_outer_select(), diff --git a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py index d2ec04e3e8489..3e1173b276091 100644 --- a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py @@ -1,6 +1,6 @@ from datetime import timedelta from math import ceil -from typing import Optional, Any, Dict +from typing import Optional, Any from django.utils.timezone import datetime from posthog.caching.insights_api import ( @@ -37,7 +37,7 @@ class FunnelsQueryRunner(QueryRunner): def __init__( self, - query: FunnelsQuery | Dict[str, Any], + query: FunnelsQuery | dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr 
b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr index 60adaa8480c7c..98cc4b42e0cab 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr @@ -88,7 +88,8 @@ and isNull(max_steps))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_conversion_window_seconds.1 @@ -190,7 +191,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_events_with_person_on_events_v2 @@ -295,7 +297,8 @@ and isNull(max_steps))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_with_precalculated_cohort_step_filter @@ -377,7 +380,8 @@ and isNull(max_steps))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups @@ -481,7 +485,8 @@ and isNull(max_steps))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups.1 @@ -595,7 +600,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups.2 @@ -709,7 +715,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups.3 @@ -823,7 +830,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_funnel_with_static_cohort_step_filter @@ -894,7 +902,8 @@ and isNull(max_steps))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFOSSFunnel.test_timezones @@ -953,7 +962,8 @@ and isNull(max_steps))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen @@ -975,7 +985,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1 @@ -1061,7 +1072,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step @@ -1083,7 +1095,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1 @@ -1176,7 +1189,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot @@ -1197,7 +1211,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot.1 @@ -1282,7 +1297,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events @@ -1304,7 +1320,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1 @@ -1429,7 +1446,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 @@ -1457,7 +1475,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1 @@ -1582,7 +1601,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_breakdown_group @@ -1611,7 +1631,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.1 @@ -1743,7 +1764,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.2 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr index a50033fda2f7f..a117a7dee57fa 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr @@ -17,7 +17,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + 
allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdownsByCurrentURL.test_breakdown_by_current_url.1 @@ -102,7 +103,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdownsByCurrentURL.test_breakdown_by_pathname @@ -123,7 +125,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelBreakdownsByCurrentURL.test_breakdown_by_pathname.1 @@ -208,6 +211,7 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr index 41b0c76b35d61..1342a4a8849de 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr @@ -145,7 +145,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties @@ -303,7 +304,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.1 @@ -432,7 +434,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.2 @@ -446,7 +449,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.3 @@ -575,7 +579,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.4 @@ -589,7 +594,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.5 @@ -718,7 +724,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.6 @@ -732,7 +739,8 @@ WHERE 
ifNull(in(session_replay_events.session_id, ['']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.7 @@ -861,7 +869,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.8 @@ -875,7 +884,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_event_properties_and_groups @@ -1009,7 +1019,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_event_properties_and_groups_materialized @@ -1143,7 +1154,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups @@ -1271,7 +1283,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.1 @@ -1382,7 +1395,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.2 @@ -1493,7 +1507,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.3 @@ -1604,7 +1619,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.4 @@ -1715,7 +1731,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.5 @@ -1859,7 +1876,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.6 @@ -1970,7 +1988,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - 
allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.7 @@ -2081,7 +2100,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2 @@ -2209,7 +2229,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.1 @@ -2320,7 +2341,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.2 @@ -2431,7 +2453,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.3 @@ -2542,7 +2565,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.4 @@ -2653,7 +2677,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.5 @@ -2797,7 +2822,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.6 @@ -2908,7 +2934,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.7 @@ -3019,7 +3046,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups @@ -3162,7 +3190,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.1 @@ -3280,7 +3309,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.2 @@ -3398,7 +3428,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.3 @@ -3516,7 +3547,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.4 @@ -3634,7 +3666,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.5 @@ -3777,7 +3810,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized @@ -3920,7 +3954,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.1 @@ -4038,7 +4073,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.2 @@ -4156,7 +4192,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.3 @@ -4274,7 +4311,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.4 @@ -4392,7 +4430,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.5 @@ -4535,7 +4574,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events @@ -4678,7 +4718,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.1 @@ -4796,7 +4837,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.2 @@ -4914,7 +4956,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.3 @@ -5032,7 +5075,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.4 @@ -5150,7 +5194,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.5 @@ -5293,7 +5338,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized @@ -5436,7 +5482,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.1 @@ -5554,7 +5601,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.2 @@ -5672,7 +5720,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.3 @@ -5790,7 +5839,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.4 @@ -5908,7 +5958,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.5 @@ -6051,7 +6102,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + 
allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2 @@ -6194,7 +6246,8 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.1 @@ -6312,7 +6365,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.2 @@ -6430,7 +6484,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.3 @@ -6548,7 +6603,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.4 @@ -6666,7 +6722,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.5 @@ -6809,6 +6866,7 @@ ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr index cdfd93b09cb4e..0f5341515f9bd 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr @@ -122,7 +122,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_funnel_correlation_on_event_with_recordings.1 @@ -136,7 +137,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_funnel_correlation_on_event_with_recordings.2 @@ -328,7 +330,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_funnel_correlation_on_event_with_recordings.3 @@ -342,7 +345,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, 
max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_funnel_correlation_on_properties_with_recordings @@ -471,7 +475,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_funnel_correlation_on_properties_with_recordings.1 @@ -485,7 +490,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings @@ -614,7 +620,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings.1 @@ -628,7 +635,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings.2 @@ -757,7 +765,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings.3 @@ -771,6 +780,7 @@ WHERE ifNull(in(session_replay_events.session_id, ['s3']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr index 0218c64c21632..57f8c9bd743a7 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr @@ -164,7 +164,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelPersons.test_funnel_person_recordings.1 @@ -178,7 +179,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s1']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelPersons.test_funnel_person_recordings.2 @@ -346,7 +348,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelPersons.test_funnel_person_recordings.3 @@ -360,7 +363,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelPersons.test_funnel_person_recordings.4 
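Every snapshot hunk in these .ambr files records the same one-line change: the generated ClickHouse queries now close their SETTINGS clause with format_csv_allow_double_quotes=0 alongside the existing readonly=2, max_execution_time=60, and allow_experimental_object_type=1. A minimal sketch of the clause shape being asserted is below; the names HOGQL_QUERY_SETTINGS and settings_clause are illustrative only and are not part of this PR, which merely re-records the snapshots (the real clause is emitted by the HogQL-to-ClickHouse printer).

    # Illustrative sketch only; these names are hypothetical and not from this PR.
    HOGQL_QUERY_SETTINGS: dict[str, int] = {
        "readonly": 2,
        "max_execution_time": 60,
        "allow_experimental_object_type": 1,
        "format_csv_allow_double_quotes": 0,  # the newly snapshotted setting
    }

    def settings_clause(settings: dict[str, int]) -> str:
        # Renders e.g. "SETTINGS readonly=2, max_execution_time=60, ..."
        return "SETTINGS " + ", ".join(f"{name}={value}" for name, value in settings.items())
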
@@ -528,7 +532,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelPersons.test_funnel_person_recordings.5 @@ -542,6 +547,7 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr index 927a924d8a884..d5456643eb4de 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr @@ -17,7 +17,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1 @@ -101,7 +102,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step @@ -122,7 +124,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1 @@ -213,7 +216,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot @@ -234,7 +238,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot.1 @@ -318,7 +323,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events @@ -340,7 +346,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1 @@ -465,7 +472,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 @@ -493,7 +501,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1 @@ -618,7 +627,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group @@ -647,7 +657,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 @@ -779,7 +790,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr index 43eee21ac83ce..4231f0adefb53 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr @@ -124,7 +124,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.1 @@ -138,7 +139,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s1']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.2 @@ -266,7 +268,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.3 @@ -280,7 +283,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.4 @@ -408,7 +412,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.5 @@ -422,6 +427,7 @@ WHERE ifNull(in(session_replay_events.session_id, ['s2']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr index 2463fd084116f..f51e48e85045b 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr @@ -411,7 +411,8 @@ ORDER BY 
fill.bin_from_seconds ASC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTimeToConvert.test_basic_strict @@ -753,7 +754,8 @@ ORDER BY fill.bin_from_seconds ASC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTimeToConvert.test_basic_unordered @@ -1495,6 +1497,7 @@ ORDER BY fill.bin_from_seconds ASC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr index c86902d5df182..900924d3624c7 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr @@ -83,7 +83,8 @@ ORDER BY fill.entrance_period_start ASC LIMIT 1000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTrends.test_timezones_trends.1 @@ -170,7 +171,8 @@ ORDER BY fill.entrance_period_start ASC LIMIT 1000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTrends.test_week_interval @@ -257,7 +259,8 @@ ORDER BY fill.entrance_period_start ASC LIMIT 1000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTrends.test_week_interval.1 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr index 2c6daed30808b..b60cee65595a0 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr @@ -150,7 +150,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTrendsPersons.test_funnel_trend_persons_returns_recordings.1 @@ -164,7 +165,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s1b']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_drop_off @@ -318,7 +320,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_drop_off.1 @@ -332,7 +335,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s1a']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestFunnelTrendsPersons.test_funnel_trend_persons_with_no_to_step @@ -486,7 +490,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_no_to_step.1 @@ -500,6 +505,7 @@ WHERE ifNull(in(session_replay_events.session_id, ['s1c']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr index 52269964088e1..07bbd70658a90 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr @@ -17,7 +17,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1 @@ -153,7 +154,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step @@ -174,7 +176,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1 @@ -324,7 +327,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelUnorderedStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot @@ -345,7 +349,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelUnorderedStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot.1 @@ -481,7 +486,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events @@ -503,7 +509,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1 @@ -628,7 +635,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 @@ -656,7 +664,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + 
allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1 @@ -781,7 +790,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group @@ -810,7 +820,8 @@ LIMIT 26 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.1 @@ -942,7 +953,8 @@ GROUP BY prop LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.10 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr index 7f607b21570bc..2871145ab59e6 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr @@ -268,7 +268,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestFunnelUnorderedStepsPersons.test_unordered_funnel_does_not_return_recordings.1 @@ -282,6 +283,7 @@ WHERE ifNull(in(session_replay_events.session_id, []), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py b/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py index db5e882963e9f..2b1b08f444553 100644 --- a/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py +++ b/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py @@ -2,7 +2,8 @@ from datetime import datetime from string import ascii_lowercase -from typing import Any, Callable, Dict, List, Literal, Optional, Union, cast +from typing import Any, Literal, Optional, Union, cast +from collections.abc import Callable from posthog.constants import INSIGHT_FUNNELS, FunnelOrderType from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner @@ -30,7 +31,7 @@ class FunnelStepResult: name: str count: int - breakdown: Union[List[str], str] + breakdown: Union[list[str], str] average_conversion_time: Optional[float] = None median_conversion_time: Optional[float] = None type: Literal["events", "actions"] = "events" @@ -51,8 +52,8 @@ def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None): return [val["id"] for val in serialized_result] - def _assert_funnel_breakdown_result_is_correct(self, result, steps: List[FunnelStepResult]): - def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: + def _assert_funnel_breakdown_result_is_correct(self, result, steps: list[FunnelStepResult]): + def funnel_result(step: FunnelStepResult, order: int) -> dict[str, Any]: return { "action_id": step.name if step.type == "events" else 
step.action_id, "name": step.name, @@ -2695,8 +2696,8 @@ def _create_groups(self): properties={"industry": "random"}, ) - def _assert_funnel_breakdown_result_is_correct(self, result, steps: List[FunnelStepResult]): - def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: + def _assert_funnel_breakdown_result_is_correct(self, result, steps: list[FunnelStepResult]): + def funnel_result(step: FunnelStepResult, order: int) -> dict[str, Any]: return { "action_id": step.name if step.type == "events" else step.action_id, "name": step.name, @@ -3067,11 +3068,11 @@ def test_funnel_aggregate_by_groups_breakdown_group_person_on_events(self): return TestFunnelBreakdownGroup -def sort_breakdown_funnel_results(results: List[Dict[int, Any]]): +def sort_breakdown_funnel_results(results: list[dict[int, Any]]): return sorted(results, key=lambda r: r[0]["breakdown_value"]) -def assert_funnel_results_equal(left: List[Dict[str, Any]], right: List[Dict[str, Any]]): +def assert_funnel_results_equal(left: list[dict[str, Any]], right: list[dict[str, Any]]): """ Helper to be able to compare two funnel results, but exclude people urls from the comparison, as these include: @@ -3081,7 +3082,7 @@ def assert_funnel_results_equal(left: List[Dict[str, Any]], right: List[Dict[str 2. contain timestamps which are not stable across runs """ - def _filter(steps: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def _filter(steps: list[dict[str, Any]]) -> list[dict[str, Any]]: return [{**step, "converted_people_url": None, "dropped_people_url": None} for step in steps] assert len(left) == len(right) diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py index 859f3e627aab7..aef262ba22edb 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_breakdowns_by_current_url.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, cast, Optional +from typing import cast, Optional from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query @@ -116,7 +116,7 @@ def setUp(self): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None): + def _run(self, extra: Optional[dict] = None, events_extra: Optional[dict] = None): if events_extra is None: events_extra = {} if extra is None: diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py index f69eb3c6977b6..4db744a6d9280 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, cast +from typing import Any, cast import unittest from rest_framework.exceptions import ValidationError @@ -77,7 +77,7 @@ def _get_events_for_filters( result, skewed_totals, _, _ = FunnelCorrelationQueryRunner(query=correlation_query, team=self.team)._calculate() return result, skewed_totals - def _get_actors_for_event(self, filters: Dict[str, Any], event_name: str, properties=None, success=True): + def _get_actors_for_event(self, filters: dict[str, Any], event_name: str, properties=None, success=True): 
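Editor's note on the typing changes running through this part of the diff: the imports of Callable, Dict and List from typing are being replaced by the built-in list/dict generics and by collections.abc.Callable. This matches PEP 585 (built-in collection generics, available since Python 3.9), under which the typing aliases are deprecated. A minimal before/after sketch, purely illustrative and not part of the PR:

# Before: generics imported from typing.
#   from typing import Callable, Dict, List, Optional
#   def summarize(counts: Dict[str, int], keys: Optional[List[str]] = None) -> List[str]: ...

# After: built-in generics (PEP 585) and Callable from collections.abc.
from collections.abc import Callable
from typing import Optional

def apply_all(fn: Callable[[int], int], xs: list[int]) -> list[int]:
    # Same runtime behaviour; only the annotations changed.
    return [fn(x) for x in xs]

def summarize(counts: dict[str, int], keys: Optional[list[str]] = None) -> list[str]:
    return [f"{k}={counts[k]}" for k in (keys or sorted(counts))]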
serialized_actors = get_actors( filters, self.team, @@ -87,7 +87,7 @@ def _get_actors_for_event(self, filters: Dict[str, Any], event_name: str, proper return [str(row[0]) for row in serialized_actors] def _get_actors_for_property( - self, filters: Dict[str, Any], property_values: list, success=True, funnelCorrelationNames=None + self, filters: dict[str, Any], property_values: list, success=True, funnelCorrelationNames=None ): funnelCorrelationPropertyValues = [ ( diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlations_persons.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlations_persons.py index f324dcfcf7c3a..223b24a949b3e 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlations_persons.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlations_persons.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast from datetime import datetime, timedelta from uuid import UUID @@ -37,7 +37,7 @@ def get_actors( - filters: Dict[str, Any], + filters: dict[str, Any], team: Team, funnelCorrelationType: Optional[FunnelCorrelationResultsType] = FunnelCorrelationResultsType.events, funnelCorrelationNames=None, diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py index dec7bdd933b3e..37d9b853404b7 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py @@ -1,5 +1,5 @@ from datetime import datetime, timedelta -from typing import Dict, List, Optional, cast, Any +from typing import Optional, cast, Any from uuid import UUID from django.utils import timezone @@ -32,11 +32,11 @@ def get_actors( - filters: Dict[str, Any], + filters: dict[str, Any], team: Team, funnelStep: Optional[int] = None, - funnelCustomSteps: Optional[List[int]] = None, - funnelStepBreakdown: Optional[str | float | List[str | float]] = None, + funnelCustomSteps: Optional[list[int]] = None, + funnelStepBreakdown: Optional[str | float | list[str | float]] = None, funnelTrendsDropOff: Optional[bool] = None, funnelTrendsEntrancePeriodStart: Optional[str] = None, offset: Optional[int] = None, diff --git a/posthog/hogql_queries/insights/funnels/utils.py b/posthog/hogql_queries/insights/funnels/utils.py index 95374f179e1af..7aea066883eda 100644 --- a/posthog/hogql_queries/insights/funnels/utils.py +++ b/posthog/hogql_queries/insights/funnels/utils.py @@ -1,4 +1,3 @@ -from typing import List from posthog.constants import FUNNEL_WINDOW_INTERVAL_TYPES from posthog.hogql import ast from posthog.hogql.parser import parse_expr @@ -61,7 +60,7 @@ def funnel_window_interval_unit_to_sql( def get_breakdown_expr( - breakdowns: List[str | int] | str | int, properties_column: str, normalize_url: bool | None = False + breakdowns: list[str | int] | str | int, properties_column: str, normalize_url: bool | None = False ) -> ast.Expr: if isinstance(breakdowns, str) or isinstance(breakdowns, int) or breakdowns is None: return ast.Call( diff --git a/posthog/hogql_queries/insights/lifecycle_query_runner.py b/posthog/hogql_queries/insights/lifecycle_query_runner.py index 42b35d6b4df51..5e11dcdcae0ec 100644 --- a/posthog/hogql_queries/insights/lifecycle_query_runner.py +++ b/posthog/hogql_queries/insights/lifecycle_query_runner.py @@ -1,6 +1,6 @@ from datetime import timedelta from math import ceil -from typing import Optional, List +from typing 
import Optional from django.utils.timezone import datetime from posthog.caching.insights_api import ( @@ -225,7 +225,7 @@ def query_date_range(self): @cached_property def event_filter(self) -> ast.Expr: - event_filters: List[ast.Expr] = [] + event_filters: list[ast.Expr] = [] with self.timings.measure("date_range"): event_filters.append( parse_expr( diff --git a/posthog/hogql_queries/insights/paths_query_runner.py b/posthog/hogql_queries/insights/paths_query_runner.py index ca7890735f814..8c2bc84d821ad 100644 --- a/posthog/hogql_queries/insights/paths_query_runner.py +++ b/posthog/hogql_queries/insights/paths_query_runner.py @@ -3,7 +3,7 @@ from datetime import datetime, timedelta from math import ceil from re import escape -from typing import Any, Dict, Literal, cast +from typing import Any, Literal, cast from typing import Optional from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL @@ -47,7 +47,7 @@ class PathsQueryRunner(QueryRunner): def __init__( self, - query: PathsQuery | Dict[str, Any], + query: PathsQuery | dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, diff --git a/posthog/hogql_queries/insights/retention_query_runner.py b/posthog/hogql_queries/insights/retention_query_runner.py index ac15ded6728b1..f79af288ca665 100644 --- a/posthog/hogql_queries/insights/retention_query_runner.py +++ b/posthog/hogql_queries/insights/retention_query_runner.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta from math import ceil -from typing import Any, Dict +from typing import Any from typing import Optional from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL @@ -39,7 +39,7 @@ class RetentionQueryRunner(QueryRunner): def __init__( self, - query: RetentionQuery | Dict[str, Any], + query: RetentionQuery | dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, diff --git a/posthog/hogql_queries/insights/stickiness_query_runner.py b/posthog/hogql_queries/insights/stickiness_query_runner.py index d9096f05853b6..24bb2504de6f2 100644 --- a/posthog/hogql_queries/insights/stickiness_query_runner.py +++ b/posthog/hogql_queries/insights/stickiness_query_runner.py @@ -1,6 +1,6 @@ from datetime import timedelta from math import ceil -from typing import List, Optional, Any, Dict, cast +from typing import Optional, Any, cast from django.utils.timezone import datetime from posthog.caching.insights_api import ( @@ -47,11 +47,11 @@ def __init__( class StickinessQueryRunner(QueryRunner): query: StickinessQuery query_type = StickinessQuery - series: List[SeriesWithExtras] + series: list[SeriesWithExtras] def __init__( self, - query: StickinessQuery | Dict[str, Any], + query: StickinessQuery | dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, @@ -134,7 +134,7 @@ def _events_query(self, series_with_extra: SeriesWithExtras) -> ast.SelectQuery: def to_query(self) -> ast.SelectUnionQuery: return ast.SelectUnionQuery(select_queries=self.to_queries()) - def to_queries(self) -> List[ast.SelectQuery]: + def to_queries(self) -> list[ast.SelectQuery]: queries = [] for series in self.series: @@ -174,7 +174,7 @@ def to_queries(self) -> List[ast.SelectQuery]: return queries def to_actors_query(self, interval_num: Optional[int] = None) -> ast.SelectQuery | ast.SelectUnionQuery: - queries: 
List[ast.SelectQuery] = [] + queries: list[ast.SelectQuery] = [] for series in self.series: events_query = self._events_query(series) @@ -253,7 +253,7 @@ def calculate(self): def where_clause(self, series_with_extra: SeriesWithExtras) -> ast.Expr: date_range = self.date_range(series_with_extra) series = series_with_extra.series - filters: List[ast.Expr] = [] + filters: list[ast.Expr] = [] # Dates filters.extend( @@ -344,7 +344,7 @@ def intervals_num(self): else: return delta.days - def setup_series(self) -> List[SeriesWithExtras]: + def setup_series(self) -> list[SeriesWithExtras]: series_with_extras = [ SeriesWithExtras( series, diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr index 52d5bd0fa78a0..039e6cc532388 100644 --- a/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr +++ b/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr @@ -91,7 +91,8 @@ GROUP BY status LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestLifecycleQueryRunner.test_sampling @@ -163,7 +164,8 @@ GROUP BY status LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestLifecycleQueryRunner.test_timezones @@ -235,7 +237,8 @@ GROUP BY status LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestLifecycleQueryRunner.test_timezones.1 @@ -307,6 +310,7 @@ GROUP BY status LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr index 194b925d5e63d..df39dbb3222fa 100644 --- a/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr +++ b/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr @@ -75,7 +75,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_end.1 @@ -154,7 +155,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_end_materialized @@ -233,7 +235,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_end_materialized.1 @@ -312,7 +315,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_event_exclusion_filters_with_wildcard_groups @@ -390,7 +394,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - 
allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_event_exclusion_filters_with_wildcard_groups.1 @@ -468,7 +473,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_event_inclusion_exclusion_filters @@ -546,7 +552,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_event_inclusion_exclusion_filters.1 @@ -624,7 +631,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_event_inclusion_exclusion_filters.2 @@ -702,7 +710,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_event_inclusion_exclusion_filters.3 @@ -780,7 +789,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_event_ordering @@ -858,7 +868,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_groups_filtering_person_on_events @@ -944,7 +955,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_groups_filtering_person_on_events.1 @@ -1023,7 +1035,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_groups_filtering_person_on_events.2 @@ -1102,7 +1115,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs @@ -1218,7 +1232,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.1 @@ -1333,7 +1348,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.2 @@ -1448,7 +1464,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.3 @@ -1564,7 +1581,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + 
format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.4 @@ -1679,7 +1697,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.5 @@ -1794,7 +1813,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.6 @@ -1910,7 +1930,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.7 @@ -2025,7 +2046,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_dropoffs.8 @@ -2140,7 +2162,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_person_on_events_v2 @@ -2231,7 +2254,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording @@ -2350,7 +2374,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording.1 @@ -2364,7 +2389,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s3', 's1', 's5']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording_for_dropoff @@ -2484,7 +2510,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording_for_dropoff.1 @@ -2498,7 +2525,8 @@ WHERE ifNull(in(session_replay_events.session_id, []), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording_for_dropoff.2 @@ -2618,7 +2646,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording_for_dropoff.3 @@ -2632,7 +2661,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s1']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording_with_no_window_or_session_id @@ -2751,7 +2781,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestClickhousePaths.test_recording_with_no_window_or_session_id.1 @@ -2765,7 +2796,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording_with_start_and_end @@ -2893,7 +2925,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_recording_with_start_and_end.1 @@ -2907,7 +2940,8 @@ WHERE ifNull(in(session_replay_events.session_id, ['s1']), 0) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_respect_session_limits @@ -2985,7 +3019,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end @@ -3068,7 +3103,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end.1 @@ -3192,7 +3228,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end.2 @@ -3275,7 +3312,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end.3 @@ -3399,7 +3437,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end_materialized @@ -3482,7 +3521,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end_materialized.1 @@ -3606,7 +3646,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end_materialized.2 @@ -3689,7 +3730,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_and_end_materialized.3 @@ -3813,7 +3855,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_start_dropping_orphaned_edges @@ -3892,7 +3935,8 @@ target_event ASC LIMIT 6 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestClickhousePaths.test_step_conversion_times @@ -3970,7 +4014,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit @@ -4048,7 +4093,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.1 @@ -4163,7 +4209,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.2 @@ -4278,7 +4325,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.3 @@ -4356,7 +4404,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.4 @@ -4471,7 +4520,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.5 @@ -4549,7 +4599,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.6 @@ -4664,7 +4715,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.7 @@ -4779,7 +4831,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_step_limit.8 @@ -4894,7 +4947,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_wildcard_groups_across_people @@ -4972,7 +5026,8 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhousePaths.test_wildcard_groups_evil_input @@ -5055,6 +5110,7 @@ target_event ASC LIMIT 50 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr index f961e10d59a03..b343a2bf5e619 100644 --- a/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr +++ b/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr @@ -42,7 +42,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - 
allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseRetentionGroupAggregation.test_groups_aggregating.1 @@ -104,7 +105,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseRetentionGroupAggregation.test_groups_aggregating.2 @@ -150,7 +152,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseRetentionGroupAggregation.test_groups_aggregating_person_on_events @@ -196,7 +199,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseRetentionGroupAggregation.test_groups_aggregating_person_on_events.1 @@ -258,7 +262,8 @@ LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestClickhouseRetentionGroupAggregation.test_groups_aggregating_person_on_events.2 @@ -304,7 +309,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_day_interval_sampled @@ -371,7 +377,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_month_interval_with_person_on_events_v2 @@ -449,7 +456,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_retention_event_action @@ -516,7 +524,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_retention_with_user_properties_via_action @@ -605,7 +614,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_timezones @@ -672,7 +682,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_timezones.1 @@ -739,7 +750,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_week_interval @@ -806,7 +818,8 @@ intervals_from_base ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestRetention.test_week_interval.1 @@ -873,6 +886,7 @@ intervals_from_base ASC LIMIT 10000 SETTINGS 
readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py b/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py index bb963cf1f8b62..830ecc3982b6c 100644 --- a/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_insight_actors_query_runner.py @@ -1,4 +1,4 @@ -from typing import Dict, Any, Optional +from typing import Any, Optional from freezegun import freeze_time @@ -69,7 +69,7 @@ def _create_test_events(self): ] ) - def select(self, query: str, placeholders: Optional[Dict[str, Any]] = None): + def select(self, query: str, placeholders: Optional[dict[str, Any]] = None): if placeholders is None: placeholders = {} return execute_hogql_query( diff --git a/posthog/hogql_queries/insights/test/test_paths_query_runner.py b/posthog/hogql_queries/insights/test/test_paths_query_runner.py index b74102ba70510..0b82f33ca7e52 100644 --- a/posthog/hogql_queries/insights/test/test_paths_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_paths_query_runner.py @@ -1,5 +1,4 @@ import dataclasses -from typing import Dict from dateutil.relativedelta import relativedelta from django.utils.timezone import now @@ -25,7 +24,7 @@ class MockEvent: distinct_id: str team: Team timestamp: str - properties: Dict + properties: dict class TestPaths(ClickhouseTestMixin, APIBaseTest): diff --git a/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py b/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py index 6e25827e6ecba..e61f4160276ab 100644 --- a/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Dict, List, Optional, Union +from typing import Optional, Union from unittest.mock import MagicMock, patch from django.test import override_settings @@ -41,18 +41,18 @@ @dataclass class Series: event: str - timestamps: List[str] + timestamps: list[str] @dataclass class SeriesTestData: distinct_id: str - events: List[Series] - properties: Dict[str, str | int] + events: list[Series] + properties: dict[str, str | int] StickinessProperties = Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -74,9 +74,9 @@ class TestStickinessQueryRunner(APIBaseTest): default_date_from = "2020-01-11" default_date_to = "2020-01-20" - def _create_events(self, data: List[SeriesTestData]): + def _create_events(self, data: list[SeriesTestData]): person_result = [] - properties_to_create: Dict[str, str] = {} + properties_to_create: dict[str, str] = {} for person in data: first_timestamp = person.events[0].timestamps[0] @@ -194,7 +194,7 @@ def _create_test_events(self): def _run_query( self, - series: Optional[List[EventsNode | ActionsNode]] = None, + series: Optional[list[EventsNode | ActionsNode]] = None, date_from: Optional[str] = None, date_to: Optional[str] = None, interval: Optional[IntervalType] = None, @@ -203,7 +203,7 @@ def _run_query( filter_test_accounts: Optional[bool] = False, limit_context: Optional[LimitContext] = None, ): - query_series: List[EventsNode | ActionsNode] = [EventsNode(event="$pageview")] if series is None else series + query_series: list[EventsNode | ActionsNode] = [EventsNode(event="$pageview")] if series is None else series 
query_date_from = date_from or self.default_date_from query_date_to = None if date_to == "now" else date_to or self.default_date_to query_interval = interval or IntervalType.day @@ -223,8 +223,8 @@ def test_stickiness_runs(self): response = self._run_query() assert isinstance(response, StickinessQueryResponse) - assert isinstance(response.results, List) - assert isinstance(response.results[0], Dict) + assert isinstance(response.results, list) + assert isinstance(response.results[0], dict) @override_settings(PERSON_ON_EVENTS_V2_OVERRIDE=True) def test_stickiness_runs_with_poe(self): @@ -232,8 +232,8 @@ def test_stickiness_runs_with_poe(self): response = self._run_query() assert isinstance(response, StickinessQueryResponse) - assert isinstance(response.results, List) - assert isinstance(response.results[0], Dict) + assert isinstance(response.results, list) + assert isinstance(response.results[0], dict) def test_days(self): self._create_test_events() @@ -423,7 +423,7 @@ def test_property_filtering_hogql(self): def test_event_filtering(self): self._create_test_events() - series: List[EventsNode | ActionsNode] = [ + series: list[EventsNode | ActionsNode] = [ EventsNode( event="$pageview", properties=[EventPropertyFilter(key="$browser", operator=PropertyOperator.exact, value="Chrome")], @@ -450,7 +450,7 @@ def test_event_filtering(self): def test_any_event(self): self._create_test_events() - series: List[EventsNode | ActionsNode] = [ + series: list[EventsNode | ActionsNode] = [ EventsNode( event=None, ) @@ -484,7 +484,7 @@ def test_actions(self): properties=[{"key": "$browser", "type": "event", "value": "Chrome", "operator": "exact"}], ) - series: List[EventsNode | ActionsNode] = [ActionsNode(id=action.pk)] + series: list[EventsNode | ActionsNode] = [ActionsNode(id=action.pk)] response = self._run_query(series=series) @@ -541,7 +541,7 @@ def test_group_aggregations(self): self._create_test_groups() self._create_test_events() - series: List[EventsNode | ActionsNode] = [ + series: list[EventsNode | ActionsNode] = [ EventsNode(event="$pageview", math="unique_group", math_group_type_index=MathGroupTypeIndex.number_0) ] @@ -565,7 +565,7 @@ def test_group_aggregations(self): def test_hogql_aggregations(self): self._create_test_events() - series: List[EventsNode | ActionsNode] = [ + series: list[EventsNode | ActionsNode] = [ EventsNode(event="$pageview", math="hogql", math_hogql="e.properties.prop") ] diff --git a/posthog/hogql_queries/insights/trends/aggregation_operations.py b/posthog/hogql_queries/insights/trends/aggregation_operations.py index 1c356277548d0..2e716b2b1caea 100644 --- a/posthog/hogql_queries/insights/trends/aggregation_operations.py +++ b/posthog/hogql_queries/insights/trends/aggregation_operations.py @@ -1,4 +1,4 @@ -from typing import List, Optional, cast, Union +from typing import Optional, cast, Union from posthog.constants import NON_TIME_SERIES_DISPLAY_TYPES from posthog.hogql import ast from posthog.hogql.parser import parse_expr, parse_select @@ -13,8 +13,8 @@ class QueryAlternator: """Allows query_builder to modify the query without having to expost the whole AST interface""" _query: ast.SelectQuery - _selects: List[ast.Expr] - _group_bys: List[ast.Expr] + _selects: list[ast.Expr] + _group_bys: list[ast.Expr] _select_from: ast.JoinExpr | None def __init__(self, query: ast.SelectQuery | ast.SelectUnionQuery): @@ -143,7 +143,7 @@ def is_count_per_actor_variant(self): "p99_count_per_actor", ] - def _math_func(self, method: str, override_chain: Optional[List[str | int]]) -> 
ast.Call: + def _math_func(self, method: str, override_chain: Optional[list[str | int]]) -> ast.Call: if override_chain is not None: return ast.Call(name=method, args=[ast.Field(chain=override_chain)]) @@ -167,7 +167,7 @@ def _math_func(self, method: str, override_chain: Optional[List[str | int]]) -> return ast.Call(name=method, args=[ast.Field(chain=chain)]) - def _math_quantile(self, percentile: float, override_chain: Optional[List[str | int]]) -> ast.Call: + def _math_quantile(self, percentile: float, override_chain: Optional[list[str | int]]) -> ast.Call: if self.series.math_property == "$session_duration": chain = ["session_duration"] else: diff --git a/posthog/hogql_queries/insights/trends/breakdown.py b/posthog/hogql_queries/insights/trends/breakdown.py index d7d1ad9518a39..025d181bf81ef 100644 --- a/posthog/hogql_queries/insights/trends/breakdown.py +++ b/posthog/hogql_queries/insights/trends/breakdown.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Optional, Union, cast from posthog.hogql import ast from posthog.hogql.constants import LimitContext from posthog.hogql.parser import parse_expr @@ -30,7 +30,7 @@ class Breakdown: timings: HogQLTimings modifiers: HogQLQueryModifiers events_filter: ast.Expr - breakdown_values_override: Optional[List[str]] + breakdown_values_override: Optional[list[str]] limit_context: LimitContext def __init__( @@ -42,7 +42,7 @@ def __init__( timings: HogQLTimings, modifiers: HogQLQueryModifiers, events_filter: ast.Expr, - breakdown_values_override: Optional[List[str]] = None, + breakdown_values_override: Optional[list[str]] = None, limit_context: LimitContext = LimitContext.QUERY, ): self.team = team @@ -71,7 +71,7 @@ def is_session_type(self) -> bool: def is_histogram_breakdown(self) -> bool: return self.enabled and self.query.breakdownFilter.breakdown_histogram_bin_count is not None - def placeholders(self) -> Dict[str, ast.Expr]: + def placeholders(self) -> dict[str, ast.Expr]: values = self._breakdown_buckets_ast if self.is_histogram_breakdown else self._breakdown_values_ast return {"cross_join_breakdown_values": ast.Alias(alias="breakdown_value", expr=values)} @@ -106,7 +106,7 @@ def events_where_filter(self) -> ast.Expr | None: if self.query.breakdownFilter.breakdown == "all": return None - if isinstance(self.query.breakdownFilter.breakdown, List): + if isinstance(self.query.breakdownFilter.breakdown, list): or_clause = ast.Or( exprs=[ ast.CompareOperation( @@ -131,7 +131,12 @@ def events_where_filter(self) -> ast.Expr | None: ) # No need to filter if we're showing the "other" bucket, as we need to look at all events anyway. 
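Editor's note on the breakdown.py change that follows: previously this branch returned a constant-true filter whenever the "other" bucket was visible, since rendering "other" requires scanning all events. The new condition also requires that no breakdown values were explicitly overridden, so a drill-down that passes breakdown_values_override (used by the actors query, per the surrounding code) still filters events. A rough standalone restatement of the added condition, illustrative only, with names mirroring the hunk below:

def skips_event_filter(breakdown_filter, breakdown_values_override) -> bool:
    # Skip filtering only when the "other" bucket is shown AND no explicit
    # breakdown values were requested; otherwise the where-filter still applies.
    return (
        breakdown_filter is not None
        and not breakdown_filter.breakdown_hide_other_aggregation
        and len(breakdown_values_override or []) == 0
    )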
- if self.query.breakdownFilter is not None and not self.query.breakdownFilter.breakdown_hide_other_aggregation: + # Except when explicitly filtering + if ( + self.query.breakdownFilter is not None + and not self.query.breakdownFilter.breakdown_hide_other_aggregation + and len(self.breakdown_values_override or []) == 0 + ): return ast.Constant(value=True) if ( @@ -221,10 +226,10 @@ def _breakdown_values_ast(self) -> ast.Array: return ast.Array(exprs=exprs) @cached_property - def _all_breakdown_values(self) -> List[str | int | None]: + def _all_breakdown_values(self) -> list[str | int | None]: # Used in the actors query if self.breakdown_values_override is not None: - return cast(List[str | int | None], self.breakdown_values_override) + return cast(list[str | int | None], self.breakdown_values_override) if self.query.breakdownFilter is None: return [] @@ -240,18 +245,18 @@ def _all_breakdown_values(self) -> List[str | int | None]: modifiers=self.modifiers, limit_context=self.limit_context, ) - return cast(List[str | int | None], breakdown.get_breakdown_values()) + return cast(list[str | int | None], breakdown.get_breakdown_values()) @cached_property - def _breakdown_values(self) -> List[str | int]: + def _breakdown_values(self) -> list[str | int]: values = [BREAKDOWN_NULL_STRING_LABEL if v is None else v for v in self._all_breakdown_values] - return cast(List[str | int], values) + return cast(list[str | int], values) @cached_property def has_breakdown_values(self) -> bool: return len(self._breakdown_values) > 0 - def _get_breakdown_histogram_buckets(self) -> List[Tuple[float, float]]: + def _get_breakdown_histogram_buckets(self) -> list[tuple[float, float]]: buckets = [] values = self._breakdown_values @@ -270,7 +275,7 @@ def _get_breakdown_histogram_buckets(self) -> List[Tuple[float, float]]: return buckets def _get_breakdown_histogram_multi_if(self) -> ast.Expr: - multi_if_exprs: List[ast.Expr] = [] + multi_if_exprs: list[ast.Expr] = [] buckets = self._get_breakdown_histogram_buckets() diff --git a/posthog/hogql_queries/insights/trends/breakdown_values.py b/posthog/hogql_queries/insights/trends/breakdown_values.py index 6a9b9a24a22f0..b15897b360fde 100644 --- a/posthog/hogql_queries/insights/trends/breakdown_values.py +++ b/posthog/hogql_queries/insights/trends/breakdown_values.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Union, Any +from typing import Optional, Union, Any from posthog.hogql import ast from posthog.hogql.constants import LimitContext, get_breakdown_limit_for_context, BREAKDOWN_VALUES_LIMIT_FOR_COUNTRIES from posthog.hogql.parser import parse_expr, parse_select @@ -30,7 +30,7 @@ class BreakdownValues: team: Team series: Union[EventsNode, ActionsNode, DataWarehouseNode] - breakdown_field: Union[str, float, List[Union[str, float]]] + breakdown_field: Union[str, float, list[Union[str, float]]] breakdown_type: BreakdownType events_filter: ast.Expr chart_display_type: ChartDisplayType @@ -76,12 +76,12 @@ def __init__( self.query_date_range = query_date_range self.modifiers = modifiers - def get_breakdown_values(self) -> List[str | int]: + def get_breakdown_values(self) -> list[str | int]: if self.breakdown_type == "cohort": if self.breakdown_field == "all": return [0] - if isinstance(self.breakdown_field, List): + if isinstance(self.breakdown_field, list): return [value if isinstance(value, str) else int(value) for value in self.breakdown_field] return [self.breakdown_field if isinstance(self.breakdown_field, str) else int(self.breakdown_field)] @@ -186,7 +186,7 @@ 
def get_breakdown_values(self) -> List[str | int]: ): inner_events_query.order_by[0].order = "ASC" - values: List[Any] + values: list[Any] if self.histogram_bin_count is not None: query = parse_select( """ diff --git a/posthog/hogql_queries/insights/trends/display.py b/posthog/hogql_queries/insights/trends/display.py index 8747544f7241c..75567250e4cf5 100644 --- a/posthog/hogql_queries/insights/trends/display.py +++ b/posthog/hogql_queries/insights/trends/display.py @@ -12,6 +12,7 @@ def __init__(self, display_type: ChartDisplayType | None) -> None: else: self.display_type = ChartDisplayType.ActionsAreaGraph + # No time range def should_aggregate_values(self) -> bool: return ( self.display_type == ChartDisplayType.BoldNumber diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr index 5885c57710928..0e8375a79b058 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr @@ -93,7 +93,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_action_filtering_with_cohort_poe_v2 @@ -178,7 +179,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_by_group_props_person_on_events @@ -199,7 +201,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_by_group_props_person_on_events.1 @@ -248,7 +251,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_by_group_props_person_on_events.2 @@ -300,7 +304,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_by_group_props_with_person_filter_person_on_events.1 @@ -349,7 +354,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format @@ -362,7 +368,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.1 @@ -403,7 +410,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.2 @@ -416,7 +424,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, 
max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.3 @@ -457,7 +466,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_aggregated @@ -477,7 +487,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_aggregated.1 @@ -515,7 +526,8 @@ GROUP BY breakdown_value LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_aggregated_materialized @@ -535,7 +547,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_aggregated_materialized.1 @@ -573,7 +586,8 @@ GROUP BY breakdown_value LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_daily_based_on_action @@ -627,7 +641,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_daily_based_on_action.3 @@ -705,7 +720,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_with_filter_groups_person_on_events @@ -726,7 +742,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_with_filter_groups_person_on_events.1 @@ -775,7 +792,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_with_filter_groups_person_on_events_v2 @@ -810,7 +828,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_breakdown_with_filter_groups_person_on_events_v2.2 @@ -865,7 +884,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling @@ -878,7 +898,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, 
max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 @@ -926,7 +947,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 @@ -939,7 +961,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 @@ -987,7 +1010,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_filter_events_by_precalculated_cohort @@ -1071,7 +1095,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_filter_events_by_precalculated_cohort_poe_v2 @@ -1137,7 +1162,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_filtering_by_multiple_groups_person_on_events @@ -1179,7 +1205,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_filtering_by_multiple_groups_person_on_events.1 @@ -1252,7 +1279,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter @@ -1284,7 +1312,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter.1 @@ -1360,7 +1389,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter_poe_v2 @@ -1379,7 +1409,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter_poe_v2.1 @@ -1442,7 +1473,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_non_deterministic_timezones @@ -1468,7 +1500,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - 
allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_filtering_in_cohort_in_action @@ -1511,7 +1544,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_filtering_in_cohort_in_action.3 @@ -1562,7 +1596,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 @@ -1604,7 +1639,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.3 @@ -1654,7 +1690,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_property_filtering @@ -1698,7 +1735,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property @@ -1742,7 +1780,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property.1 @@ -1768,7 +1807,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property_materialized @@ -1812,7 +1852,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property_materialized.1 @@ -1838,7 +1879,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_person_property_filtering_materialized @@ -1882,7 +1924,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2 @@ -1922,7 +1965,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2.2 @@ -1954,7 +1998,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + 
allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2_latest_override @@ -2000,7 +2045,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2_latest_override.2 @@ -2046,7 +2092,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2_latest_override.4 @@ -2092,7 +2139,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily @@ -2118,7 +2166,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily.1 @@ -2151,7 +2200,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily.2 @@ -2197,7 +2247,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily.3 @@ -2223,7 +2274,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily.4 @@ -2236,7 +2288,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily.5 @@ -2284,7 +2337,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc @@ -2310,7 +2364,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.1 @@ -2343,7 +2398,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.2 @@ -2389,7 +2445,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.3 @@ -2415,7 +2472,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + 
allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.4 @@ -2428,7 +2486,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.5 @@ -2476,7 +2535,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc @@ -2502,7 +2562,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.1 @@ -2535,7 +2596,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.2 @@ -2581,7 +2643,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.3 @@ -2607,7 +2670,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.4 @@ -2620,7 +2684,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.5 @@ -2668,7 +2733,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from @@ -2701,7 +2767,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from.1 @@ -2727,7 +2794,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_minus_utc @@ -2760,7 +2828,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_minus_utc.1 @@ -2786,7 +2855,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_plus_utc @@ -2819,7 +2889,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, 
max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_plus_utc.1 @@ -2845,7 +2916,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_weekly @@ -2871,7 +2943,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_weekly.1 @@ -2897,7 +2970,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_weekly_minus_utc @@ -2923,7 +2997,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_weekly_minus_utc.1 @@ -2949,7 +3024,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_weekly_plus_utc @@ -2975,7 +3051,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_timezones_weekly_plus_utc.1 @@ -3001,7 +3078,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns @@ -3034,7 +3112,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns.1 @@ -3095,7 +3174,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns.2 @@ -3128,7 +3208,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns.3 @@ -3189,7 +3270,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id @@ -3215,7 +3297,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + 
format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.1 @@ -3259,7 +3342,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.2 @@ -3290,7 +3374,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.3 @@ -3349,7 +3434,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.4 @@ -3388,7 +3474,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.5 @@ -3427,7 +3514,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.6 @@ -3440,7 +3528,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.7 @@ -3481,7 +3570,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_any_event_total_count @@ -3507,7 +3597,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_any_event_total_count.1 @@ -3533,7 +3624,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative @@ -3546,7 +3638,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative.1 @@ -3600,7 +3693,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2 @@ -3613,7 +3707,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1 @@ -3666,7 +3761,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, 
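Every snapshot hunk above (and the remaining ones below) changes only the trailing SETTINGS clause of the generated ClickHouse SQL, appending format_csv_allow_double_quotes=0 after allow_experimental_object_type=1. A minimal sketch of rendering such a clause from a settings mapping, assuming a hypothetical helper rather than PostHog's actual HogQL printer:

```python
# Hypothetical helper, not the real HogQL printer: render a ClickHouse
# SETTINGS clause from a mapping, preserving insertion order.
def render_settings(settings: dict[str, int]) -> str:
    return "SETTINGS " + ", ".join(f"{name}={value}" for name, value in settings.items())

print(render_settings({
    "readonly": 2,
    "max_execution_time": 60,
    "allow_experimental_object_type": 1,
    "format_csv_allow_double_quotes": 0,  # the setting newly added in these snapshots
}))
# SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0
```

Because the setting is emitted on every printed query, each affected snapshot has to be regenerated, which is why the same two-line change repeats throughout this file.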
max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_normalize_url @@ -3681,7 +3777,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_normalize_url.1 @@ -3737,7 +3834,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_normalize_url_poe_v2 @@ -3752,7 +3850,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_normalize_url_poe_v2.1 @@ -3807,7 +3906,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown @@ -3819,7 +3919,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))) @@ -3827,7 +3927,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown.1 @@ -3842,7 +3943,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS 
`$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true) @@ -3851,7 +3952,8 @@ GROUP BY breakdown_value LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown.2 @@ -3863,7 +3965,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))) @@ -3871,7 +3973,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown.3 @@ -3886,7 +3989,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE 
and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true) @@ -3895,7 +3998,8 @@ GROUP BY breakdown_value LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range @@ -3921,7 +4025,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range.1 @@ -3947,7 +4052,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range.2 @@ -3973,7 +4079,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_count_per_user_average_aggregated @@ -3995,7 +4102,8 @@ GROUP BY e__pdi.person_id)) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_count_per_user_average_aggregated_poe_v2 @@ -4016,7 +4124,8 @@ GROUP BY ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id))) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_count_per_user_average_aggregated_with_event_property_breakdown_with_sampling @@ -4029,7 +4138,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_count_per_user_average_aggregated_with_event_property_breakdown_with_sampling.1 @@ -4056,7 +4166,8 @@ GROUP BY breakdown_value) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_count_per_user_average_daily @@ -4097,7 +4208,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: 
TestTrends.test_trends_count_per_user_average_daily_poe_v2 @@ -4137,7 +4249,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_groups_per_day @@ -4163,7 +4276,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_groups_per_day_cumulative @@ -4194,7 +4308,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_per_day_cumulative @@ -4225,7 +4340,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_per_day_dau_cumulative @@ -4263,7 +4379,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_person_breakdown_with_session_property_single_aggregate_math_and_breakdown @@ -4275,7 +4392,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) INNER JOIN @@ -4301,7 +4418,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_person_breakdown_with_session_property_single_aggregate_math_and_breakdown.1 @@ -4316,7 +4434,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY 
sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) INNER JOIN @@ -4343,7 +4461,8 @@ GROUP BY breakdown_value LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_hogql_math @@ -4369,7 +4488,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_single_aggregate_math @@ -4382,14 +4502,15 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id`) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_single_aggregate_math.1 @@ -4402,14 +4523,15 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id`) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math @@ -4435,7 +4557,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE 
equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) @@ -4448,7 +4570,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math.1 @@ -4474,7 +4597,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) @@ -4487,7 +4610,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns @@ -4499,7 +4623,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY 
sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))) @@ -4507,7 +4631,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.1 @@ -4545,7 +4670,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true) @@ -4563,7 +4688,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.2 @@ -4575,7 +4701,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE 
and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))) @@ -4583,7 +4709,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.3 @@ -4621,7 +4748,7 @@ (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`, sessions.session_id AS session_id FROM sessions - WHERE equals(sessions.team_id, 2) + WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) GROUP BY sessions.session_id, sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true) @@ -4639,7 +4766,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_aggregated_range_narrower_than_week @@ -4671,7 +4799,8 @@ WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), 0)) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_aggregated_range_wider_than_week @@ -4703,7 +4832,8 @@ WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-18 23:59:59', 6, 'UTC'))), 0)) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_aggregated_range_wider_than_week_with_sampling @@ -4735,7 +4865,8 @@ WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), 0), 
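Beyond the SETTINGS churn, the substantive SQL change in the session-property hunks above is that the sessions subquery is no longer filtered on team_id alone: sessions.min_timestamp must now fall inside the insight's date range, padded by three days on each side, presumably so the session join does not scan the entire sessions table. A rough sketch of that predicate, assuming illustrative names and plain string formatting (the real query is assembled from HogQL AST nodes):

```python
# Illustrative only: build the padded min_timestamp bounds seen in the
# updated WHERE clauses above. The real code constructs HogQL AST
# expressions; this just shows the shape of the condition.
from datetime import datetime

def session_bounds_sql(date_from: datetime, date_to: datetime, buffer_days: int = 3) -> str:
    fmt = "%Y-%m-%d %H:%M:%S"
    return (
        f"ifNull(greaterOrEquals(plus(sessions.min_timestamp, toIntervalDay({buffer_days})), "
        f"toDateTime('{date_from.strftime(fmt)}')), 0) AND "
        f"ifNull(lessOrEquals(minus(sessions.min_timestamp, toIntervalDay({buffer_days})), "
        f"toDateTime('{date_to.strftime(fmt)}')), 0)"
    )

print(session_bounds_sql(datetime(2019, 12, 28), datetime(2020, 1, 4, 23, 59, 59)))
```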
ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-18 23:59:59', 6, 'UTC'))), 0)) LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_daily @@ -4781,7 +4912,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_daily_minus_utc @@ -4827,7 +4959,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_daily_plus_utc @@ -4873,7 +5006,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_filtering @@ -4930,7 +5064,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_filtering_materialized @@ -4987,7 +5122,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_hourly @@ -5033,7 +5169,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_weekly @@ -5079,7 +5216,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_weekly_minus_utc @@ -5125,7 +5263,8 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrends.test_weekly_active_users_weekly_plus_utc @@ -5171,6 +5310,7 @@ ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr index 1e05cbd1da44a..6371c71e79e97 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr @@ -9,7 +9,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrendsDataWarehouseQuery.test_trends_breakdown.1 @@ -50,7 +51,8 @@ sum(count) DESC, breakdown_value ASC 
LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrendsDataWarehouseQuery.test_trends_breakdown_with_property @@ -63,7 +65,8 @@ ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrendsDataWarehouseQuery.test_trends_breakdown_with_property.1 @@ -104,7 +107,8 @@ sum(count) DESC, breakdown_value ASC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrendsDataWarehouseQuery.test_trends_data_warehouse @@ -130,7 +134,8 @@ ORDER BY sum(count) DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrendsDataWarehouseQuery.test_trends_entity_property @@ -156,7 +161,8 @@ ORDER BY sum(count) DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestTrendsDataWarehouseQuery.test_trends_property @@ -182,6 +188,7 @@ ORDER BY sum(count) DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/insights/trends/test/test_trends.py b/posthog/hogql_queries/insights/trends/test/test_trends.py index 8ba4aea1b3459..f34229e99ded7 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends.py @@ -1,7 +1,7 @@ import json import uuid from datetime import datetime -from typing import Dict, List, Optional, Tuple, Union +from typing import Optional, Union from unittest.mock import patch from zoneinfo import ZoneInfo @@ -68,8 +68,8 @@ from posthog.test.test_journeys import journeys_for -def breakdown_label(entity: Entity, value: Union[str, int]) -> Dict[str, Optional[Union[str, int]]]: - ret_dict: Dict[str, Optional[Union[str, int]]] = {} +def breakdown_label(entity: Entity, value: Union[str, int]) -> dict[str, Optional[Union[str, int]]]: + ret_dict: dict[str, Optional[Union[str, int]]] = {} if not value or not isinstance(value, str) or "cohort_" not in value: label = value if (value or isinstance(value, bool)) and value != "None" and value != "nan" else "Other" ret_dict["label"] = f"{entity.name} - {label}" @@ -103,7 +103,7 @@ def _create_cohort(**kwargs): return cohort -def _props(dict: Dict): +def _props(dict: dict): props = dict.get("properties", None) if not props: return None @@ -125,11 +125,11 @@ def _props(dict: Dict): def convert_filter_to_trends_query(filter: Filter) -> TrendsQuery: filter_as_dict = filter.to_dict() - events: List[EventsNode] = [] - actions: List[ActionsNode] = [] + events: list[EventsNode] = [] + actions: list[ActionsNode] = [] for event in filter.events: - if isinstance(event._data.get("properties", None), List): + if isinstance(event._data.get("properties", None), list): properties = clean_entity_properties(event._data.get("properties", None)) elif event._data.get("properties", None) is not None: values = event._data.get("properties", None).get("values", None) @@ -151,7 +151,7 @@ def 
convert_filter_to_trends_query(filter: Filter) -> TrendsQuery: ) for action in filter.actions: - if isinstance(action._data.get("properties", None), List): + if isinstance(action._data.get("properties", None), list): properties = clean_entity_properties(action._data.get("properties", None)) elif action._data.get("properties", None) is not None: values = action._data.get("properties", None).get("values", None) @@ -172,7 +172,7 @@ def convert_filter_to_trends_query(filter: Filter) -> TrendsQuery: ) ) - series: List[Union[EventsNode, ActionsNode, DataWarehouseNode]] = [*events, *actions] + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = [*events, *actions] tq = TrendsQuery( series=series, @@ -304,7 +304,7 @@ def _create_group(self, **kwargs): type=PropertyDefinition.Type.GROUP, ) - def _create_events(self, use_time=False) -> Tuple[Action, Person]: + def _create_events(self, use_time=False) -> tuple[Action, Person]: person = self._create_person( team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], @@ -2080,7 +2080,7 @@ def test_trends_compare_hour_interval_relative_range(self): ], ) - def _test_events_with_dates(self, dates: List[str], result, query_time=None, **filter_params): + def _test_events_with_dates(self, dates: list[str], result, query_time=None, **filter_params): self._create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"name": "John"}) for time in dates: with freeze_time(time): diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py index dbb996b1672b6..772d71922727b 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py @@ -1,7 +1,7 @@ import zoneinfo from dataclasses import dataclass from datetime import datetime -from typing import Dict, List, Optional +from typing import Optional from unittest.mock import MagicMock, patch from django.test import override_settings from freezegun import freeze_time @@ -10,6 +10,7 @@ from posthog.hogql.constants import MAX_SELECT_RETURNED_ROWS, LimitContext from posthog.hogql.modifiers import create_default_modifiers_for_team from posthog.hogql.query import INCREASED_MAX_EXECUTION_TIME +from posthog.hogql_queries.insights.trends.breakdown_values import BREAKDOWN_OTHER_DISPLAY from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner from posthog.models.cohort.cohort import Cohort from posthog.models.property_definition import PropertyDefinition @@ -48,14 +49,14 @@ @dataclass class Series: event: str - timestamps: List[str] + timestamps: list[str] @dataclass class SeriesTestData: distinct_id: str - events: List[Series] - properties: Dict[str, str | int] + events: list[Series] + properties: dict[str, str | int] @override_settings(IN_UNIT_TESTING=True) @@ -63,9 +64,9 @@ class TestTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest): default_date_from = "2020-01-09" default_date_to = "2020-01-19" - def _create_events(self, data: List[SeriesTestData]): + def _create_events(self, data: list[SeriesTestData]): person_result = [] - properties_to_create: Dict[str, str] = {} + properties_to_create: dict[str, str] = {} for person in data: first_timestamp = person.events[0].timestamps[0] @@ -173,7 +174,7 @@ def _create_query_runner( date_from: str, date_to: Optional[str], interval: IntervalType, - series: Optional[List[EventsNode | ActionsNode]], + series: 
Optional[list[EventsNode | ActionsNode]], trends_filters: Optional[TrendsFilter] = None, breakdown: Optional[BreakdownFilter] = None, filter_test_accounts: Optional[bool] = None, @@ -181,7 +182,7 @@ def _create_query_runner( limit_context: Optional[LimitContext] = None, explicit_date: Optional[bool] = None, ) -> TrendsQueryRunner: - query_series: List[EventsNode | ActionsNode] = [EventsNode(event="$pageview")] if series is None else series + query_series: list[EventsNode | ActionsNode] = [EventsNode(event="$pageview")] if series is None else series query = TrendsQuery( dateRange=DateRange(date_from=date_from, date_to=date_to, explicitDate=explicit_date), interval=interval, @@ -197,7 +198,7 @@ def _run_trends_query( date_from: str, date_to: Optional[str], interval: IntervalType, - series: Optional[List[EventsNode | ActionsNode]], + series: Optional[list[EventsNode | ActionsNode]], trends_filters: Optional[TrendsFilter] = None, breakdown: Optional[BreakdownFilter] = None, *, @@ -1784,19 +1785,18 @@ def test_to_actors_query_options_breakdowns(self): IntervalType.day, [EventsNode(event="$pageview")], None, - BreakdownFilter(breakdown_type=BreakdownType.event, breakdown="$browser"), + BreakdownFilter(breakdown_type=BreakdownType.event, breakdown="$browser", breakdown_limit=3), ) response = runner.to_actors_query_options() + assert response.day is not None assert response.series == [InsightActorsQuerySeries(label="$pageview", value=0)] - assert response.breakdown == [ - # BreakdownItem(label="Other", value="$$_posthog_breakdown_other_$$"), # TODO: uncomment when "other" shows correct results + BreakdownItem(label=BREAKDOWN_OTHER_DISPLAY, value="$$_posthog_breakdown_other_$$"), BreakdownItem(label="Chrome", value="Chrome"), BreakdownItem(label="Firefox", value="Firefox"), BreakdownItem(label="Safari", value="Safari"), - BreakdownItem(label="Edge", value="Edge"), ] def test_to_actors_query_options_breakdowns_boolean(self): @@ -1904,7 +1904,30 @@ def test_to_actors_query_options_breakdowns_hogql(self): assert response.series == [InsightActorsQuerySeries(label="$pageview", value=0)] assert response.breakdown == [ - # BreakdownItem(label="Other", value="$$_posthog_breakdown_other_$$"), # TODO: uncomment when "other" shows correct results + BreakdownItem(label="Chrome", value="Chrome"), + BreakdownItem(label="Firefox", value="Firefox"), + BreakdownItem(label="Safari", value="Safari"), + BreakdownItem(label="Edge", value="Edge"), + ] + + def test_to_actors_query_options_bar_value(self): + self._create_test_events() + flush_persons_and_events() + + runner = self._create_query_runner( + "2020-01-09", + "2020-01-20", + IntervalType.day, + [EventsNode(event="$pageview")], + TrendsFilter(display=ChartDisplayType.ActionsBarValue), + BreakdownFilter(breakdown_type=BreakdownType.event, breakdown="$browser"), + ) + + response = runner.to_actors_query_options() + + assert response.day is None + assert response.series == [InsightActorsQuerySeries(label="$pageview", value=0)] + assert response.breakdown == [ BreakdownItem(label="Chrome", value="Chrome"), BreakdownItem(label="Firefox", value="Firefox"), BreakdownItem(label="Safari", value="Safari"), diff --git a/posthog/hogql_queries/insights/trends/trends_query_builder.py b/posthog/hogql_queries/insights/trends/trends_query_builder.py index 82fbb849ef5d9..072f371e4c058 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_builder.py +++ b/posthog/hogql_queries/insights/trends/trends_query_builder.py @@ -1,4 +1,4 @@ -from typing import List, Optional, 
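The Python changes running through test_trends.py, test_trends_query_runner.py and trends_query_builder.py above are a mechanical migration from the typing-module aliases (List, Dict, Tuple) to the built-in generics standardised by PEP 585 and available since Python 3.9; runtime checks such as isinstance(value, List) become isinstance(value, list) in the same pass. A generic before/after sketch with simplified names, not the actual PostHog functions:

```python
# Before (typing aliases):
#   from typing import Dict, List, Optional
#   def labels_for(values: List[str]) -> Dict[str, Optional[str]]: ...

# After (PEP 585 built-in generics, Python 3.9+):
from typing import Optional

def labels_for(values: list[str]) -> dict[str, Optional[str]]:
    return {value: (value or None) for value in values}

# Runtime checks switch to the builtin as well:
assert isinstance(["Chrome", "Firefox"], list)  # was: isinstance(..., List)
print(labels_for(["Chrome", ""]))
```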
cast +from typing import Optional, cast from posthog.hogql import ast from posthog.hogql.constants import LimitContext from posthog.hogql.parser import parse_expr, parse_select @@ -98,7 +98,7 @@ def build_actors_query( }, ) - def _get_date_subqueries(self, breakdown: Breakdown, ignore_breakdowns: bool = False) -> List[ast.SelectQuery]: + def _get_date_subqueries(self, breakdown: Breakdown, ignore_breakdowns: bool = False) -> list[ast.SelectQuery]: if not breakdown.enabled or ignore_breakdowns: return [ cast( @@ -473,7 +473,7 @@ def _events_filter( actors_query_time_frame: Optional[str] = None, ) -> ast.Expr: series = self.series - filters: List[ast.Expr] = [] + filters: list[ast.Expr] = [] # Dates if is_actors_query and actors_query_time_frame is not None: diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index d3cf45e5055be..6ceb2dd185739 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -7,7 +7,7 @@ from math import ceil from operator import itemgetter import threading -from typing import List, Optional, Any, Dict +from typing import Optional, Any from dateutil import parser from dateutil.relativedelta import relativedelta from django.conf import settings @@ -33,7 +33,7 @@ from posthog.hogql_queries.insights.trends.display import TrendsDisplay from posthog.hogql_queries.insights.trends.trends_query_builder import TrendsQueryBuilder from posthog.hogql_queries.insights.trends.series_with_extras import SeriesWithExtras -from posthog.hogql_queries.query_runner import QueryRunner +from posthog.hogql_queries.query_runner import QueryRunner, RunnableQueryNode from posthog.hogql_queries.utils.formula_ast import FormulaAST from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.hogql_queries.utils.query_previous_period_date_range import ( @@ -70,11 +70,11 @@ class TrendsQueryRunner(QueryRunner): query: TrendsQuery query_type = TrendsQuery - series: List[SeriesWithExtras] + series: list[SeriesWithExtras] def __init__( self, - query: TrendsQuery | Dict[str, Any], + query: TrendsQuery | dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, @@ -115,7 +115,7 @@ def to_query(self) -> ast.SelectUnionQuery: queries.extend(query.select_queries) return ast.SelectUnionQuery(select_queries=queries) - def to_queries(self) -> List[ast.SelectQuery | ast.SelectUnionQuery]: + def to_queries(self) -> list[ast.SelectQuery | ast.SelectUnionQuery]: queries = [] with self.timings.measure("trends_to_query"): for series in self.series: @@ -184,18 +184,22 @@ def to_actors_query( return query def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse: - res_breakdown: List[BreakdownItem] | None = None - res_series: List[Series] = [] - res_compare: List[CompareItem] | None = None + res_breakdown: list[BreakdownItem] | None = None + res_series: list[Series] = [] + res_compare: list[CompareItem] | None = None # Days - res_days: list[DayItem] = [ - DayItem( - label=format_label_date(value, self.query_date_range.interval_name), - value=value, - ) - for value in self.query_date_range.all_values() - ] + res_days: Optional[list[DayItem]] = ( + None + if self._trends_display.should_aggregate_values() + else [ + DayItem( + label=format_label_date(value, self.query_date_range.interval_name), + value=value, + ) + for value in 
self.query_date_range.all_values() + ] + ) # Series for index, series in enumerate(self.query.series): @@ -235,7 +239,7 @@ def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse: is_boolean_breakdown = self._is_breakdown_field_boolean() is_histogram_breakdown = breakdown.is_histogram_breakdown - breakdown_values: List[str | int] + breakdown_values: list[str | int] res_breakdown = [] if is_histogram_breakdown: @@ -285,9 +289,9 @@ def calculate(self): with self.timings.measure("printing_hogql_for_response"): response_hogql = to_printed_hogql(response_hogql_query, self.team, self.modifiers) - res_matrix: List[List[Any] | Any | None] = [None] * len(queries) - timings_matrix: List[List[QueryTiming] | None] = [None] * len(queries) - errors: List[Exception] = [] + res_matrix: list[list[Any] | Any | None] = [None] * len(queries) + timings_matrix: list[list[QueryTiming] | None] = [None] * len(queries) + errors: list[Exception] = [] def run(index: int, query: ast.SelectQuery | ast.SelectUnionQuery, is_parallel: bool): try: @@ -338,14 +342,14 @@ def run(index: int, query: ast.SelectQuery | ast.SelectUnionQuery, is_parallel: # Flatten res and timings res = [] for result in res_matrix: - if isinstance(result, List): + if isinstance(result, list): res.extend(result) else: res.append(result) timings = [] for result in timings_matrix: - if isinstance(result, List): + if isinstance(result, list): timings.extend(result) else: timings.append(result) @@ -551,7 +555,7 @@ def update_hogql_modifiers(self) -> None: self.modifiers.inCohortVia == InCohortVia.auto and self.query.breakdownFilter is not None and self.query.breakdownFilter.breakdown_type == "cohort" - and isinstance(self.query.breakdownFilter.breakdown, List) + and isinstance(self.query.breakdownFilter.breakdown, list) and len(self.query.breakdownFilter.breakdown) > 1 and not any(value == "all" for value in self.query.breakdownFilter.breakdown) ): @@ -571,7 +575,7 @@ def update_hogql_modifiers(self) -> None: self.modifiers.dataWarehouseEventsModifiers = datawarehouse_modifiers - def setup_series(self) -> List[SeriesWithExtras]: + def setup_series(self) -> list[SeriesWithExtras]: series_with_extras = [ SeriesWithExtras( series=series, @@ -589,7 +593,7 @@ def setup_series(self) -> List[SeriesWithExtras]: and self.query.breakdownFilter.breakdown_type == "cohort" ): updated_series = [] - if isinstance(self.query.breakdownFilter.breakdown, List): + if isinstance(self.query.breakdownFilter.breakdown, list): cohort_ids = self.query.breakdownFilter.breakdown elif self.query.breakdownFilter.breakdown is not None: cohort_ids = [self.query.breakdownFilter.breakdown] @@ -638,7 +642,7 @@ def setup_series(self) -> List[SeriesWithExtras]: return series_with_extras - def apply_formula(self, formula: str, results: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def apply_formula(self, formula: str, results: list[dict[str, Any]]) -> list[dict[str, Any]]: has_compare = bool(self.query.trendsFilter and self.query.trendsFilter.compare) has_breakdown = bool(self.query.breakdownFilter and self.query.breakdownFilter.breakdown) is_total_value = self._trends_display.should_aggregate_values() @@ -690,8 +694,8 @@ def apply_formula(self, formula: str, results: List[Dict[str, Any]]) -> List[Dic @staticmethod def apply_formula_to_results_group( - results_group: List[Dict[str, Any]], formula: str, aggregate_values: Optional[bool] = False - ) -> Dict[str, Any]: + results_group: list[dict[str, Any]], formula: str, aggregate_values: Optional[bool] = False + ) -> 
dict[str, Any]: """ Applies the formula to a list of results, resulting in a single, computed result. """ @@ -783,7 +787,7 @@ def _event_property( return "String" # TODO: Move this to posthog/hogql_queries/legacy_compatibility/query_to_filter.py - def _query_to_filter(self) -> Dict[str, Any]: + def _query_to_filter(self) -> dict[str, Any]: filter_dict = { "insight": "TRENDS", "properties": self.query.properties, @@ -812,3 +816,10 @@ def _trends_display(self) -> TrendsDisplay: display = self.query.trendsFilter.display return TrendsDisplay(display) + + def apply_dashboard_filters(self, *args, **kwargs) -> RunnableQueryNode: + updated_query = super().apply_dashboard_filters(*args, **kwargs) + # Remove any set breakdown limit for display on the dashboard + if updated_query.breakdownFilter: + updated_query.breakdownFilter.breakdown_limit = None + return updated_query diff --git a/posthog/hogql_queries/insights/trends/utils.py b/posthog/hogql_queries/insights/trends/utils.py index 61a4252d499f2..b8f6c3989f1fd 100644 --- a/posthog/hogql_queries/insights/trends/utils.py +++ b/posthog/hogql_queries/insights/trends/utils.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Union +from typing import Optional, Union from posthog.schema import ActionsNode, DataWarehouseNode, EventsNode, BreakdownType @@ -12,7 +12,7 @@ def get_properties_chain( breakdown_type: BreakdownType | None, breakdown_field: str, group_type_index: Optional[float | int], -) -> List[str | int]: +) -> list[str | int]: if breakdown_type == "person": return ["person", "properties", breakdown_field] diff --git a/posthog/hogql_queries/insights/utils/properties.py b/posthog/hogql_queries/insights/utils/properties.py index ea4770037b78d..41826b28535d8 100644 --- a/posthog/hogql_queries/insights/utils/properties.py +++ b/posthog/hogql_queries/insights/utils/properties.py @@ -1,11 +1,11 @@ -from typing import List, TypeAlias +from typing import TypeAlias from posthog.hogql import ast from posthog.hogql.property import property_to_expr from posthog.hogql_queries.insights.query_context import QueryContext from posthog.schema import PropertyGroupFilter from posthog.types import AnyPropertyFilter -PropertiesType: TypeAlias = List[AnyPropertyFilter] | PropertyGroupFilter | None +PropertiesType: TypeAlias = list[AnyPropertyFilter] | PropertyGroupFilter | None class Properties: @@ -17,8 +17,8 @@ def __init__( ) -> None: self.context = context - def to_exprs(self) -> List[ast.Expr]: - exprs: List[ast.Expr] = [] + def to_exprs(self) -> list[ast.Expr]: + exprs: list[ast.Expr] = [] team, query = self.context.team, self.context.query diff --git a/posthog/hogql_queries/insights/utils/utils.py b/posthog/hogql_queries/insights/utils/utils.py index c3b99c6a3b625..747d7e2b6ca5a 100644 --- a/posthog/hogql_queries/insights/utils/utils.py +++ b/posthog/hogql_queries/insights/utils/utils.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.hogql import ast from posthog.models.team.team import Team, WeekStartDay from posthog.queries.util import get_trunc_func_ch @@ -6,7 +6,7 @@ def get_start_of_interval_hogql(interval: str, *, team: Team, source: Optional[ast.Expr] = None) -> ast.Expr: trunc_func = get_trunc_func_ch(interval) - trunc_func_args: List[ast.Expr] = [source] if source else [ast.Field(chain=["timestamp"])] + trunc_func_args: list[ast.Expr] = [source] if source else [ast.Field(chain=["timestamp"])] if trunc_func == "toStartOfWeek": 
trunc_func_args.append(ast.Constant(value=int((WeekStartDay(team.week_start_day or 0)).clickhouse_mode))) return ast.Call(name=trunc_func, args=trunc_func_args) diff --git a/posthog/hogql_queries/legacy_compatibility/feature_flag.py b/posthog/hogql_queries/legacy_compatibility/feature_flag.py index e6cf742166610..69e08ea5aa988 100644 --- a/posthog/hogql_queries/legacy_compatibility/feature_flag.py +++ b/posthog/hogql_queries/legacy_compatibility/feature_flag.py @@ -2,36 +2,13 @@ from django.conf import settings from posthog.models.user import User -from posthog.schema import InsightType - -GLOBAL_FLAG = "hogql-insights-preview" -INSIGHT_TYPE_TO_FLAG: dict[InsightType, str] = { - InsightType.TRENDS: "hogql-insights-trends", - InsightType.FUNNELS: "hogql-insights-funnels", - InsightType.RETENTION: "hogql-insights-retention", - InsightType.PATHS: "hogql-insights-paths", - InsightType.LIFECYCLE: "hogql-insights-lifecycle", - InsightType.STICKINESS: "hogql-insights-stickiness", -} - - -def hogql_insights_enabled(user: User, insight_type: InsightType) -> bool: +def should_use_hogql_backend_in_insight_serialization(user: User) -> bool: if settings.HOGQL_INSIGHTS_OVERRIDE is not None: return settings.HOGQL_INSIGHTS_OVERRIDE - if posthoganalytics.feature_enabled( - GLOBAL_FLAG, - user.distinct_id, - person_properties={"email": user.email}, - only_evaluate_locally=True, - send_feature_flag_events=False, - ): - # HogQL insights enabled all the way - return True - return posthoganalytics.feature_enabled( - INSIGHT_TYPE_TO_FLAG[insight_type], + "hogql-in-insight-serialization", user.distinct_id, person_properties={"email": user.email}, only_evaluate_locally=True, diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index 382b37fa56db0..fdeb74fc9076f 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -1,7 +1,7 @@ import copy from enum import Enum import json -from typing import Any, List, Dict, Literal +from typing import Any, Literal from posthog.hogql_queries.legacy_compatibility.clean_properties import clean_entity_properties, clean_global_properties from posthog.models.entity.entity import Entity as LegacyEntity from posthog.schema import ( @@ -118,7 +118,7 @@ def exlusion_entity_to_node(entity) -> FunnelExclusionEventsNode | FunnelExclusi # TODO: remove this method that returns legacy entities -def to_base_entity_dict(entity: Dict): +def to_base_entity_dict(entity: dict): return { "type": entity.get("type"), "id": entity.get("id"), @@ -140,7 +140,7 @@ def to_base_entity_dict(entity: Dict): INSIGHT_TYPE = Literal["TRENDS", "FUNNELS", "RETENTION", "PATHS", "LIFECYCLE", "STICKINESS"] -def _date_range(filter: Dict): +def _date_range(filter: dict): date_range = DateRange( date_from=filter.get("date_from"), date_to=filter.get("date_to"), @@ -153,7 +153,7 @@ def _date_range(filter: Dict): return {"dateRange": date_range} -def _interval(filter: Dict): +def _interval(filter: dict): if _insight_type(filter) == "RETENTION" or _insight_type(filter) == "PATHS": return {} @@ -163,7 +163,7 @@ def _interval(filter: Dict): return {"interval": filter.get("interval")} -def _series(filter: Dict): +def _series(filter: dict): if _insight_type(filter) == "RETENTION" or _insight_type(filter) == "PATHS": return {} @@ -188,8 +188,8 @@ def _series(filter: Dict): } -def _entities(filter: Dict): - processed_entities: List[LegacyEntity] = [] +def 
_entities(filter: dict): + processed_entities: list[LegacyEntity] = [] # add actions actions = filter.get("actions", []) @@ -213,7 +213,7 @@ def _entities(filter: Dict): return processed_entities -def _sampling_factor(filter: Dict): +def _sampling_factor(filter: dict): if isinstance(filter.get("sampling_factor"), str): try: return float(filter.get("sampling_factor")) @@ -223,16 +223,16 @@ def _sampling_factor(filter: Dict): return {"samplingFactor": filter.get("sampling_factor")} -def _properties(filter: Dict): +def _properties(filter: dict): raw_properties = filter.get("properties", None) return {"properties": clean_global_properties(raw_properties)} -def _filter_test_accounts(filter: Dict): +def _filter_test_accounts(filter: dict): return {"filterTestAccounts": filter.get("filter_test_accounts")} -def _breakdown_filter(_filter: Dict): +def _breakdown_filter(_filter: dict): if _insight_type(_filter) != "TRENDS" and _insight_type(_filter) != "FUNNELS": return {} @@ -275,13 +275,13 @@ def _breakdown_filter(_filter: Dict): return {"breakdownFilter": BreakdownFilter(**breakdownFilter)} -def _group_aggregation_filter(filter: Dict): +def _group_aggregation_filter(filter: dict): if _insight_type(filter) == "STICKINESS" or _insight_type(filter) == "LIFECYCLE": return {} return {"aggregation_group_type_index": filter.get("aggregation_group_type_index")} -def _insight_filter(filter: Dict): +def _insight_filter(filter: dict): if _insight_type(filter) == "TRENDS": insight_filter = { "trendsFilter": TrendsFilter( @@ -387,7 +387,7 @@ def _insight_filter(filter: Dict): return insight_filter -def filters_to_funnel_paths_query(filter: Dict[str, Any]) -> FunnelPathsFilter | None: +def filters_to_funnel_paths_query(filter: dict[str, Any]) -> FunnelPathsFilter | None: funnel_paths = filter.get("funnel_paths") funnel_filter = filter.get("funnel_filter") @@ -404,13 +404,13 @@ def filters_to_funnel_paths_query(filter: Dict[str, Any]) -> FunnelPathsFilter | ) -def _insight_type(filter: Dict) -> INSIGHT_TYPE: +def _insight_type(filter: dict) -> INSIGHT_TYPE: if filter.get("insight") == "SESSIONS": return "TRENDS" return filter.get("insight", "TRENDS") -def filter_to_query(filter: Dict) -> InsightQueryNode: +def filter_to_query(filter: dict) -> InsightQueryNode: filter = copy.deepcopy(filter) # duplicate to prevent accidental filter alterations Query = insight_to_query_type[_insight_type(filter)] diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 50c6c1ddb1191..c9209794aaa7d 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from datetime import datetime from enum import IntEnum -from typing import Any, Generic, List, Optional, Type, Dict, TypeVar, Union, Tuple, cast, TypeGuard +from typing import Any, Generic, Optional, TypeVar, Union, cast, TypeGuard from django.conf import settings from django.core.cache import cache @@ -25,6 +25,7 @@ FunnelCorrelationQuery, FunnelsActorsQuery, PropertyGroupFilter, + PropertyGroupFilterValue, TrendsQuery, FunnelsQuery, RetentionQuery, @@ -63,11 +64,11 @@ class ExecutionMode(IntEnum): - CALCULATION_REQUESTED = 2 + CALCULATION_ALWAYS = 2 """Always recalculate.""" - CALCULATION_ONLY_IF_STALE = 1 + RECENT_CACHE_CALCULATE_IF_STALE = 1 """Use cache, unless the results are missing or stale.""" - CACHE_ONLY = 0 + CACHE_ONLY_NEVER_CALCULATE = 0 """Do not initiate calculation.""" @@ -76,9 +77,9 @@ class QueryResponse(BaseModel, Generic[DataT]): 
extra="forbid", ) results: DataT - timings: Optional[List[QueryTiming]] = None - types: Optional[List[Union[Tuple[str, str], str]]] = None - columns: Optional[List[str]] = None + timings: Optional[list[QueryTiming]] = None + types: Optional[list[Union[tuple[str, str], str]]] = None + columns: Optional[list[str]] = None hogql: Optional[str] = None hasMore: Optional[bool] = None limit: Optional[int] = None @@ -102,7 +103,7 @@ class CacheMissResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - cache_key: str + cache_key: Optional[str] RunnableQueryNode = Union[ @@ -128,7 +129,7 @@ class CacheMissResponse(BaseModel): def get_query_runner( - query: Dict[str, Any] | RunnableQueryNode | BaseModel, + query: dict[str, Any] | RunnableQueryNode | BaseModel, team: Team, timings: Optional[HogQLTimings] = None, limit_context: Optional[LimitContext] = None, @@ -146,7 +147,7 @@ def get_query_runner( from .insights.trends.trends_query_runner import TrendsQueryRunner return TrendsQueryRunner( - query=cast(TrendsQuery | Dict[str, Any], query), + query=cast(TrendsQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -156,7 +157,7 @@ def get_query_runner( from .insights.funnels.funnels_query_runner import FunnelsQueryRunner return FunnelsQueryRunner( - query=cast(FunnelsQuery | Dict[str, Any], query), + query=cast(FunnelsQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -166,7 +167,7 @@ def get_query_runner( from .insights.retention_query_runner import RetentionQueryRunner return RetentionQueryRunner( - query=cast(RetentionQuery | Dict[str, Any], query), + query=cast(RetentionQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -176,7 +177,7 @@ def get_query_runner( from .insights.paths_query_runner import PathsQueryRunner return PathsQueryRunner( - query=cast(PathsQuery | Dict[str, Any], query), + query=cast(PathsQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -186,7 +187,7 @@ def get_query_runner( from .insights.stickiness_query_runner import StickinessQueryRunner return StickinessQueryRunner( - query=cast(StickinessQuery | Dict[str, Any], query), + query=cast(StickinessQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -196,7 +197,7 @@ def get_query_runner( from .insights.lifecycle_query_runner import LifecycleQueryRunner return LifecycleQueryRunner( - query=cast(LifecycleQuery | Dict[str, Any], query), + query=cast(LifecycleQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -206,7 +207,7 @@ def get_query_runner( from .events_query_runner import EventsQueryRunner return EventsQueryRunner( - query=cast(EventsQuery | Dict[str, Any], query), + query=cast(EventsQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -216,7 +217,7 @@ def get_query_runner( from .actors_query_runner import ActorsQueryRunner return ActorsQueryRunner( - query=cast(ActorsQuery | Dict[str, Any], query), + query=cast(ActorsQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -226,7 +227,7 @@ def get_query_runner( from .insights.insight_actors_query_runner import InsightActorsQueryRunner return InsightActorsQueryRunner( - query=cast(InsightActorsQuery | Dict[str, Any], query), + query=cast(InsightActorsQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -236,7 +237,7 
@@ def get_query_runner( from .insights.insight_actors_query_options_runner import InsightActorsQueryOptionsRunner return InsightActorsQueryOptionsRunner( - query=cast(InsightActorsQueryOptions | Dict[str, Any], query), + query=cast(InsightActorsQueryOptions | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -246,7 +247,7 @@ def get_query_runner( from .insights.funnels.funnel_correlation_query_runner import FunnelCorrelationQueryRunner return FunnelCorrelationQueryRunner( - query=cast(FunnelCorrelationQuery | Dict[str, Any], query), + query=cast(FunnelCorrelationQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -256,7 +257,7 @@ def get_query_runner( from .hogql_query_runner import HogQLQueryRunner return HogQLQueryRunner( - query=cast(HogQLQuery | Dict[str, Any], query), + query=cast(HogQLQuery | dict[str, Any], query), team=team, timings=timings, limit_context=limit_context, @@ -266,7 +267,7 @@ def get_query_runner( from .sessions_timeline_query_runner import SessionsTimelineQueryRunner return SessionsTimelineQueryRunner( - query=cast(SessionsTimelineQuery | Dict[str, Any], query), + query=cast(SessionsTimelineQuery | dict[str, Any], query), team=team, timings=timings, modifiers=modifiers, @@ -287,9 +288,12 @@ def get_query_runner( raise ValueError(f"Can't get a runner for an unknown query kind: {kind}") -class QueryRunner(ABC): - query: RunnableQueryNode - query_type: Type[RunnableQueryNode] +Q = TypeVar("Q", bound=RunnableQueryNode) + + +class QueryRunner(ABC, Generic[Q]): + query: Q + query_type: type[Q] team: Team timings: HogQLTimings modifiers: HogQLQueryModifiers @@ -297,7 +301,7 @@ class QueryRunner(ABC): def __init__( self, - query: RunnableQueryNode | BaseModel | Dict[str, Any], + query: Q | BaseModel | dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, @@ -314,7 +318,7 @@ def __init__( assert isinstance(query, self.query_type) self.query = query - def is_query_node(self, data) -> TypeGuard[RunnableQueryNode]: + def is_query_node(self, data) -> TypeGuard[Q]: return isinstance(data, self.query_type) @abstractmethod @@ -324,12 +328,13 @@ def calculate(self) -> BaseModel: raise NotImplementedError() def run( - self, execution_mode: ExecutionMode = ExecutionMode.CALCULATION_ONLY_IF_STALE + self, execution_mode: ExecutionMode = ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE ) -> CachedQueryResponse | CacheMissResponse: - cache_key = f"{self._cache_key()}_{self.limit_context or LimitContext.QUERY}" + # TODO: `self.limit_context` should probably just be in get_cache_key() + cache_key = cache_key = f"{self.get_cache_key()}_{self.limit_context or LimitContext.QUERY}" tag_queries(cache_key=cache_key) - if execution_mode != ExecutionMode.CALCULATION_REQUESTED: + if execution_mode != ExecutionMode.CALCULATION_ALWAYS: # Let's look in the cache first cached_response: CachedQueryResponse | CacheMissResponse cached_response_candidate = get_safe_cache(cache_key) @@ -357,13 +362,13 @@ def run( QUERY_CACHE_HIT_COUNTER.labels(team_id=self.team.pk, cache_hit="stale").inc() # We have a stale result. If we aren't allowed to calculate, let's still return it # – otherwise let's proceed to calculation - if execution_mode == ExecutionMode.CACHE_ONLY: + if execution_mode == ExecutionMode.CACHE_ONLY_NEVER_CALCULATE: return cached_response else: QUERY_CACHE_HIT_COUNTER.labels(team_id=self.team.pk, cache_hit="miss").inc() # We have no cached result. 
If we aren't allowed to calculate, let's return the cache miss # – otherwise let's proceed to calculation - if execution_mode == ExecutionMode.CACHE_ONLY: + if execution_mode == ExecutionMode.CACHE_ONLY_NEVER_CALCULATE: return cached_response fresh_response_dict = cast(QueryResponse, self.calculate()).model_dump() @@ -403,7 +408,7 @@ def to_hogql(self) -> str: def toJSON(self) -> str: return self.query.model_dump_json(exclude_defaults=True, exclude_none=True) - def _cache_key(self) -> str: + def get_cache_key(self) -> str: modifiers = self.modifiers.model_dump_json(exclude_defaults=True, exclude_none=True) return generate_cache_key( f"query_{self.toJSON()}_{self.__class__.__name__}_{self.team.pk}_{self.team.timezone}_{modifiers}" @@ -417,13 +422,21 @@ def _is_stale(self, cached_result_package): def _refresh_frequency(self): raise NotImplementedError() - def apply_dashboard_filters(self, dashboard_filter: DashboardFilter) -> RunnableQueryNode: + def apply_dashboard_filters(self, dashboard_filter: DashboardFilter) -> Q: + # The default logic below applies to all insights and a lot of other queries + # Notable exception: `HogQLQuery`, which has `properties` and `dateRange` within `HogQLFilters` if hasattr(self.query, "properties") and hasattr(self.query, "dateRange"): - query_update: Dict[str, Any] = {} + query_update: dict[str, Any] = {} if dashboard_filter.properties: if self.query.properties: query_update["properties"] = PropertyGroupFilter( - type=FilterLogicalOperator.AND, values=[self.query.properties, dashboard_filter.properties] + type=FilterLogicalOperator.AND, + values=[ + PropertyGroupFilterValue(type=FilterLogicalOperator.AND, values=self.query.properties), + PropertyGroupFilterValue( + type=FilterLogicalOperator.AND, values=dashboard_filter.properties + ), + ], ) else: query_update["properties"] = dashboard_filter.properties @@ -437,6 +450,6 @@ def apply_dashboard_filters(self, dashboard_filter: DashboardFilter) -> Runnable query_update["dateRange"] = self.query.dateRange.model_copy(update=date_range_update) else: query_update["dateRange"] = DateRange(**date_range_update) - return self.query.model_copy(update=query_update) # Shallow copy! + return cast(Q, self.query.model_copy(update=query_update)) # Shallow copy! 
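As an aside on the `apply_dashboard_filters` hunk above: instead of appending the dashboard's properties to the query's flat property list, the new default wraps each side in its own AND group and then ANDs the two groups together, presumably so the merged value matches `PropertyGroupFilter`, whose `values` hold property groups rather than bare filter lists. A minimal sketch of the resulting shape, reusing the schema models referenced in the hunk (the two event property filters are invented for illustration):

```python
from posthog.schema import (
    DashboardFilter,
    EventPropertyFilter,
    FilterLogicalOperator,
    PropertyGroupFilter,
    PropertyGroupFilterValue,
    PropertyOperator,
)

# Properties already set on the insight query itself (illustrative values)
query_properties = [
    EventPropertyFilter(key="$browser", value="Chrome", operator=PropertyOperator.exact),
]

# Properties coming from the dashboard the insight is displayed on (illustrative values)
dashboard_filter = DashboardFilter(
    properties=[
        EventPropertyFilter(key="$current_url", value="/pricing", operator=PropertyOperator.icontains),
    ],
)

# Each side becomes its own AND group, and the two groups are AND-ed together,
# mirroring the query_update["properties"] assignment in the hunk above.
merged_properties = PropertyGroupFilter(
    type=FilterLogicalOperator.AND,
    values=[
        PropertyGroupFilterValue(type=FilterLogicalOperator.AND, values=query_properties),
        PropertyGroupFilterValue(type=FilterLogicalOperator.AND, values=dashboard_filter.properties),
    ],
)
```

`TrendsQueryRunner` builds on this default and additionally clears `breakdownFilter.breakdown_limit` when dashboard filters are applied, as shown in its own `apply_dashboard_filters` override earlier in the patch.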
raise NotImplementedError(f"{self.query.__class__.__name__} does not support dashboard filters out of the box") diff --git a/posthog/hogql_queries/sessions_timeline_query_runner.py b/posthog/hogql_queries/sessions_timeline_query_runner.py index cda9433d63efa..306ec02c93448 100644 --- a/posthog/hogql_queries/sessions_timeline_query_runner.py +++ b/posthog/hogql_queries/sessions_timeline_query_runner.py @@ -1,6 +1,6 @@ from datetime import timedelta import json -from typing import Dict, cast +from typing import cast from posthog.api.element import ElementSerializer @@ -138,7 +138,7 @@ def calculate(self) -> SessionsTimelineQueryResponse: limit_context=self.limit_context, ) assert query_result.results is not None - timeline_entries_map: Dict[str, TimelineEntry] = {} + timeline_entries_map: dict[str, TimelineEntry] = {} for ( uuid, timestamp_parsed, diff --git a/posthog/hogql_queries/test/__snapshots__/test_sessions_timeline_query_runner.ambr b/posthog/hogql_queries/test/__snapshots__/test_sessions_timeline_query_runner.ambr index e0a0d7862edb8..a8168795892f3 100644 --- a/posthog/hogql_queries/test/__snapshots__/test_sessions_timeline_query_runner.ambr +++ b/posthog/hogql_queries/test/__snapshots__/test_sessions_timeline_query_runner.ambr @@ -60,7 +60,8 @@ ORDER BY e.timestamp DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestSessionsTimelineQueryRunner.test_before_and_after_defaults @@ -124,7 +125,8 @@ ORDER BY e.timestamp DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestSessionsTimelineQueryRunner.test_event_limit_and_has_more @@ -188,7 +190,8 @@ ORDER BY e.timestamp DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestSessionsTimelineQueryRunner.test_formal_and_informal_sessions_global @@ -252,7 +255,8 @@ ORDER BY e.timestamp DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestSessionsTimelineQueryRunner.test_formal_session_with_recording @@ -316,7 +320,8 @@ ORDER BY e.timestamp DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestSessionsTimelineQueryRunner.test_formal_sessions_for_person @@ -380,7 +385,8 @@ ORDER BY e.timestamp DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- # name: TestSessionsTimelineQueryRunner.test_formal_sessions_global @@ -444,6 +450,7 @@ ORDER BY e.timestamp DESC LIMIT 100 SETTINGS readonly=2, max_execution_time=60, - allow_experimental_object_type=1 + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0 ''' # --- diff --git a/posthog/hogql_queries/test/test_events_query_runner.py b/posthog/hogql_queries/test/test_events_query_runner.py index 7c8c62c5fb0fc..1617919f984ff 100644 --- a/posthog/hogql_queries/test/test_events_query_runner.py +++ b/posthog/hogql_queries/test/test_events_query_runner.py @@ -1,4 +1,4 @@ -from typing import Tuple, Any, cast +from 
typing import Any, cast from freezegun import freeze_time @@ -25,7 +25,7 @@ class TestEventsQueryRunner(ClickhouseTestMixin, APIBaseTest): maxDiff = None - def _create_events(self, data: list[Tuple[str, str, Any]], event="$pageview"): + def _create_events(self, data: list[tuple[str, str, Any]], event="$pageview"): person_result = [] for distinct_id, timestamp, event_properties in data: with freeze_time(timestamp): diff --git a/posthog/hogql_queries/test/test_query_runner.py b/posthog/hogql_queries/test/test_query_runner.py index 49f2a457c2c26..88d6128b00544 100644 --- a/posthog/hogql_queries/test/test_query_runner.py +++ b/posthog/hogql_queries/test/test_query_runner.py @@ -1,5 +1,5 @@ from datetime import datetime, timedelta -from typing import Any, List, Literal, Optional +from typing import Any, Literal, Optional from zoneinfo import ZoneInfo from dateutil.parser import isoparse @@ -21,7 +21,7 @@ class TestQuery(BaseModel): kind: Literal["TestQuery"] = "TestQuery" some_attr: str - other_attr: Optional[List[Any]] = [] + other_attr: Optional[list[Any]] = [] class TestQueryRunner(BaseTest): @@ -94,7 +94,7 @@ def test_cache_key(self): runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) - cache_key = runner._cache_key() + cache_key = runner.get_cache_key() self.assertEqual(cache_key, "cache_b6f14c97c218e0b9c9a8258f7460fd5b") def test_cache_key_runner_subclass(self): @@ -108,7 +108,7 @@ class TestSubclassQueryRunner(TestQueryRunner): runner = TestSubclassQueryRunner(query={"some_attr": "bla"}, team=team) - cache_key = runner._cache_key() + cache_key = runner.get_cache_key() self.assertEqual(cache_key, "cache_ec1c2f9715cf9c424b1284b94b1205e6") def test_cache_key_different_timezone(self): @@ -119,7 +119,7 @@ def test_cache_key_different_timezone(self): runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) - cache_key = runner._cache_key() + cache_key = runner.get_cache_key() self.assertEqual(cache_key, "cache_a6614c0fb564f9c98b1d7b830928c7a1") def test_cache_response(self): @@ -129,29 +129,29 @@ def test_cache_response(self): with freeze_time(datetime(2023, 2, 4, 13, 37, 42)): # in cache-only mode, returns cache miss response if uncached - response = runner.run(execution_mode=ExecutionMode.CACHE_ONLY) + response = runner.run(execution_mode=ExecutionMode.CACHE_ONLY_NEVER_CALCULATE) self.assertIsInstance(response, CacheMissResponse) # returns fresh response if uncached - response = runner.run(execution_mode=ExecutionMode.CALCULATION_ONLY_IF_STALE) + response = runner.run(execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE) self.assertIsInstance(response, CachedQueryResponse) self.assertEqual(response.is_cached, False) self.assertEqual(response.last_refresh, "2023-02-04T13:37:42Z") self.assertEqual(response.next_allowed_client_refresh, "2023-02-04T13:41:42Z") # returns cached response afterwards - response = runner.run(execution_mode=ExecutionMode.CALCULATION_ONLY_IF_STALE) + response = runner.run(execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE) self.assertIsInstance(response, CachedQueryResponse) self.assertEqual(response.is_cached, True) # return fresh response if refresh requested - response = runner.run(execution_mode=ExecutionMode.CALCULATION_REQUESTED) + response = runner.run(execution_mode=ExecutionMode.CALCULATION_ALWAYS) self.assertIsInstance(response, CachedQueryResponse) self.assertEqual(response.is_cached, False) with freeze_time(datetime(2023, 2, 4, 13, 37 + 11, 42)): # returns fresh response if stale - response = 
runner.run(execution_mode=ExecutionMode.CALCULATION_ONLY_IF_STALE) + response = runner.run(execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_IF_STALE) self.assertIsInstance(response, CachedQueryResponse) self.assertEqual(response.is_cached, False) diff --git a/posthog/hogql_queries/utils/formula_ast.py b/posthog/hogql_queries/utils/formula_ast.py index 28e705827b7f8..922e283362a49 100644 --- a/posthog/hogql_queries/utils/formula_ast.py +++ b/posthog/hogql_queries/utils/formula_ast.py @@ -1,6 +1,6 @@ import ast import operator -from typing import Any, Dict, List +from typing import Any class FormulaAST: @@ -12,9 +12,9 @@ class FormulaAST: ast.Mod: operator.mod, ast.Pow: operator.pow, } - zipped_data: List[tuple[float]] + zipped_data: list[tuple[float]] - def __init__(self, data: List[List[float]]): + def __init__(self, data: list[list[float]]): self.zipped_data = list(zip(*data)) def call(self, node: str): @@ -27,8 +27,8 @@ def call(self, node: str): res.append(result) return res - def _evaluate(self, node, const_map: Dict[str, Any]): - if isinstance(node, (list, tuple)): + def _evaluate(self, node, const_map: dict[str, Any]): + if isinstance(node, list | tuple): return [self._evaluate(sub_node, const_map) for sub_node in node] elif isinstance(node, str): diff --git a/posthog/hogql_queries/utils/query_date_range.py b/posthog/hogql_queries/utils/query_date_range.py index ab1f25fbb376c..ac9636c1e1ce5 100644 --- a/posthog/hogql_queries/utils/query_date_range.py +++ b/posthog/hogql_queries/utils/query_date_range.py @@ -1,7 +1,7 @@ import re from datetime import datetime, timedelta from functools import cached_property -from typing import Literal, Optional, Dict +from typing import Literal, Optional from zoneinfo import ZoneInfo from dateutil.parser import parse @@ -248,7 +248,7 @@ def date_to_with_extra_interval_hogql(self) -> ast.Call: args=[self.date_to_start_of_interval_hogql(self.date_to_as_hogql()), self.one_interval_period()], ) - def to_placeholders(self) -> Dict[str, ast.Expr]: + def to_placeholders(self) -> dict[str, ast.Expr]: return { "interval": self.interval_period_string_as_hogql_constant(), "one_interval_period": self.one_interval_period(), diff --git a/posthog/hogql_queries/utils/query_previous_period_date_range.py b/posthog/hogql_queries/utils/query_previous_period_date_range.py index 652a95c835eb7..c6dca63dc7d95 100644 --- a/posthog/hogql_queries/utils/query_previous_period_date_range.py +++ b/posthog/hogql_queries/utils/query_previous_period_date_range.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Optional, Dict, Tuple +from typing import Optional from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.team import Team @@ -28,7 +28,7 @@ def __init__( ) -> None: super().__init__(date_range, team, interval, now) - def date_from_delta_mappings(self) -> Dict[str, int] | None: + def date_from_delta_mappings(self) -> dict[str, int] | None: if self._date_range and isinstance(self._date_range.date_from, str) and self._date_range.date_from != "all": date_from = self._date_range.date_from else: @@ -41,7 +41,7 @@ def date_from_delta_mappings(self) -> Dict[str, int] | None: )[1] return delta_mapping - def date_to_delta_mappings(self) -> Dict[str, int] | None: + def date_to_delta_mappings(self) -> dict[str, int] | None: if self._date_range and self._date_range.date_to: delta_mapping = relative_date_parse_with_delta_mapping( self._date_range.date_to, @@ -52,7 +52,7 @@ def date_to_delta_mappings(self) -> Dict[str, int] | None: 
return delta_mapping return None - def dates(self) -> Tuple[datetime, datetime]: + def dates(self) -> tuple[datetime, datetime]: current_period_date_from = super().date_from() current_period_date_to = super().date_to() diff --git a/posthog/hogql_queries/web_analytics/test/test_web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/test/test_web_analytics_query_runner.py index 7ea8e864a3a65..3ea217606522c 100644 --- a/posthog/hogql_queries/web_analytics/test/test_web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/test/test_web_analytics_query_runner.py @@ -1,4 +1,4 @@ -from typing import Union, List +from typing import Union from freezegun import freeze_time @@ -62,7 +62,7 @@ def _create__web_overview_query(self, date_from, date_to, properties): return WebOverviewQueryRunner(team=self.team, query=query) def test_sample_rate_cache_key_is_same_across_subclasses(self): - properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] = [ + properties: list[Union[EventPropertyFilter, PersonPropertyFilter]] = [ EventPropertyFilter(key="$current_url", value="/a", operator=PropertyOperator.is_not), PersonPropertyFilter(key="$initial_utm_source", value="google", operator=PropertyOperator.is_not), ] @@ -75,10 +75,10 @@ def test_sample_rate_cache_key_is_same_across_subclasses(self): self.assertEqual(stats_key, overview_key) def test_sample_rate_cache_key_is_same_with_different_properties(self): - properties_a: List[Union[EventPropertyFilter, PersonPropertyFilter]] = [ + properties_a: list[Union[EventPropertyFilter, PersonPropertyFilter]] = [ EventPropertyFilter(key="$current_url", value="/a", operator=PropertyOperator.is_not), ] - properties_b: List[Union[EventPropertyFilter, PersonPropertyFilter]] = [ + properties_b: list[Union[EventPropertyFilter, PersonPropertyFilter]] = [ EventPropertyFilter(key="$current_url", value="/b", operator=PropertyOperator.is_not), ] date_from = "2023-12-08" @@ -90,7 +90,7 @@ def test_sample_rate_cache_key_is_same_with_different_properties(self): self.assertEqual(key_a, key_b) def test_sample_rate_cache_key_changes_with_date_range(self): - properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] = [ + properties: list[Union[EventPropertyFilter, PersonPropertyFilter]] = [ EventPropertyFilter(key="$current_url", value="/a", operator=PropertyOperator.is_not), ] date_from_a = "2023-12-08" @@ -100,7 +100,7 @@ def test_sample_rate_cache_key_changes_with_date_range(self): key_a = self._create_web_stats_table_query(date_from_a, date_to, properties)._sample_rate_cache_key() key_b = self._create_web_stats_table_query(date_from_b, date_to, properties)._sample_rate_cache_key() - self.assertNotEquals(key_a, key_b) + self.assertNotEqual(key_a, key_b) def test_sample_rate_from_count(self): self.assertEqual(SamplingRate(numerator=1), _sample_rate_from_count(0)) diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index ffb758858d151..cd6a218e7ea0f 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -2,7 +2,7 @@ from abc import ABC from datetime import timedelta from math import ceil -from typing import Optional, List, Union, Type +from typing import Optional, Union from django.conf import settings from django.core.cache import cache @@ -24,6 +24,7 @@ WebStatsTableQuery, PersonPropertyFilter, SamplingRate, + SessionPropertyFilter, ) from 
posthog.utils import generate_cache_key, get_safe_cache @@ -32,7 +33,7 @@ class WebAnalyticsQueryRunner(QueryRunner, ABC): query: WebQueryNode - query_type: Type[WebQueryNode] + query_type: type[WebQueryNode] @cached_property def query_date_range(self): @@ -51,7 +52,9 @@ def pathname_property_filter(self) -> Optional[EventPropertyFilter]: return None @cached_property - def property_filters_without_pathname(self) -> List[Union[EventPropertyFilter, PersonPropertyFilter]]: + def property_filters_without_pathname( + self, + ) -> list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]: return [p for p in self.query.properties if p.key != "$pathname"] def session_where(self, include_previous_period: Optional[bool] = None): @@ -244,8 +247,8 @@ def _unsample(self, n: Optional[int | float]): else n / self._sample_rate.numerator ) - def _cache_key(self) -> str: - original = super()._cache_key() + def get_cache_key(self) -> str: + original = super().get_cache_key() return f"{original}_{self.team.path_cleaning_filters}" diff --git a/posthog/jwt.py b/posthog/jwt.py index fa458ab2f5e3f..111a85d51df82 100644 --- a/posthog/jwt.py +++ b/posthog/jwt.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta, timezone from enum import Enum -from typing import Any, Dict +from typing import Any import jwt from django.conf import settings @@ -32,7 +32,7 @@ def encode_jwt(payload: dict, expiry_delta: timedelta, audience: PosthogJwtAudie return encoded_jwt -def decode_jwt(token: str, audience: PosthogJwtAudience) -> Dict[str, Any]: +def decode_jwt(token: str, audience: PosthogJwtAudience) -> dict[str, Any]: info = jwt.decode(token, settings.SECRET_KEY, audience=audience.value, algorithms=["HS256"]) return info diff --git a/posthog/kafka_client/client.py b/posthog/kafka_client/client.py index d29d9e9c0ae0d..3f58e572417b8 100644 --- a/posthog/kafka_client/client.py +++ b/posthog/kafka_client/client.py @@ -1,6 +1,7 @@ import json from enum import Enum -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Optional +from collections.abc import Callable from django.conf import settings from kafka import KafkaConsumer as KC @@ -32,7 +33,7 @@ def send( topic: str, value: Any, key: Any = None, - headers: Optional[List[Tuple[str, bytes]]] = None, + headers: Optional[list[tuple[str, bytes]]] = None, ): produce_future = FutureProduceResult(topic_partition=TopicPartition(topic, 1)) future = FutureRecordMetadata( @@ -158,7 +159,7 @@ def produce( data: Any, key: Any = None, value_serializer: Optional[Callable[[Any], Any]] = None, - headers: Optional[List[Tuple[str, str]]] = None, + headers: Optional[list[tuple[str, str]]] = None, ): if not value_serializer: value_serializer = self.json_serializer @@ -258,7 +259,7 @@ class ClickhouseProducer: def __init__(self): self.producer = KafkaProducer() if not settings.TEST else None - def produce(self, sql: str, topic: str, data: Dict[str, Any], sync: bool = True): + def produce(self, sql: str, topic: str, data: dict[str, Any], sync: bool = True): if self.producer is not None: # TODO: this should be not sync and self.producer.produce(topic=topic, data=data) else: diff --git a/posthog/kafka_client/helper.py b/posthog/kafka_client/helper.py index 6084e991a100a..39cb9f038560f 100644 --- a/posthog/kafka_client/helper.py +++ b/posthog/kafka_client/helper.py @@ -39,9 +39,11 @@ def get_kafka_ssl_context(): # SSLContext inside the with so when it goes out of scope the files are removed which has them # existing for the shortest amount of 
time. As extra caution password # protect/encrypt the client key - with NamedTemporaryFile(suffix=".crt") as cert_file, NamedTemporaryFile( - suffix=".key" - ) as key_file, NamedTemporaryFile(suffix=".crt") as trust_file: + with ( + NamedTemporaryFile(suffix=".crt") as cert_file, + NamedTemporaryFile(suffix=".key") as key_file, + NamedTemporaryFile(suffix=".crt") as trust_file, + ): cert_file.write(base64.b64decode(os.environ["KAFKA_CLIENT_CERT_B64"].encode("utf-8"))) cert_file.flush() diff --git a/posthog/kafka_client/topics.py b/posthog/kafka_client/topics.py index 27e1ce307deae..4637a010df19a 100644 --- a/posthog/kafka_client/topics.py +++ b/posthog/kafka_client/topics.py @@ -20,6 +20,8 @@ KAFKA_PERSON_OVERRIDE = f"{KAFKA_PREFIX}clickhouse_person_override{SUFFIX}" KAFKA_LOG_ENTRIES = f"{KAFKA_PREFIX}log_entries{SUFFIX}" +KAFKA_CLICKHOUSE_HEATMAP_EVENTS = f"{KAFKA_PREFIX}clickhouse_heatmap_events{SUFFIX}" + # from capture to recordings consumer KAFKA_SESSION_RECORDING_EVENTS = f"{KAFKA_PREFIX}session_recording_events{SUFFIX}" # from capture to recordings blob ingestion consumer diff --git a/posthog/management/commands/backfill_distinct_id_overrides.py b/posthog/management/commands/backfill_distinct_id_overrides.py index 507e744a93d0e..4472ec6291658 100644 --- a/posthog/management/commands/backfill_distinct_id_overrides.py +++ b/posthog/management/commands/backfill_distinct_id_overrides.py @@ -2,7 +2,7 @@ import logging from dataclasses import dataclass -from typing import Sequence +from collections.abc import Sequence import structlog from django.core.management.base import BaseCommand, CommandError diff --git a/posthog/management/commands/create_channel_definitions_file.py b/posthog/management/commands/create_channel_definitions_file.py index 859bbe3c631ce..cab70bf31d360 100644 --- a/posthog/management/commands/create_channel_definitions_file.py +++ b/posthog/management/commands/create_channel_definitions_file.py @@ -4,7 +4,7 @@ from collections import OrderedDict from dataclasses import dataclass from enum import Enum -from typing import Optional, Tuple +from typing import Optional from django.core.management.base import BaseCommand @@ -40,7 +40,7 @@ def handle(self, *args, **options): input_arg = options.get("ga_sources") if not input_arg: raise ValueError("No input file specified") - with open(input_arg, "r", encoding="utf-8-sig") as input_file: + with open(input_arg, encoding="utf-8-sig") as input_file: input_str = input_file.read() split_items = re.findall(r"\S+\s+SOURCE_CATEGORY_\S+", input_str) @@ -59,7 +59,7 @@ def handle_entry(entry): base_type, type_if_paid, type_if_organic = types[raw_type] return (domain, EntryKind.source), SourceEntry(base_type, type_if_paid, type_if_organic) - entries: OrderedDict[Tuple[str, str], SourceEntry] = OrderedDict(map(handle_entry, split_items)) + entries: OrderedDict[tuple[str, str], SourceEntry] = OrderedDict(map(handle_entry, split_items)) # add google domains to this, from https://www.google.com/supported_domains for google_domain in [ diff --git a/posthog/management/commands/fix_person_distinct_ids_after_delete.py b/posthog/management/commands/fix_person_distinct_ids_after_delete.py index 842a4e5353ec8..4f0853dd001ba 100644 --- a/posthog/management/commands/fix_person_distinct_ids_after_delete.py +++ b/posthog/management/commands/fix_person_distinct_ids_after_delete.py @@ -1,5 +1,5 @@ import logging -from typing import List, Optional +from typing import Optional import structlog from django.core.management.base import BaseCommand @@ -50,7 
+50,7 @@ def run(options, sync: bool = False): logger.info("Kafka producer queue flushed.") -def get_distinct_ids_tied_to_deleted_persons(team_id: int) -> List[str]: +def get_distinct_ids_tied_to_deleted_persons(team_id: int) -> list[str]: # find distinct_ids where the person is set to be deleted rows = sync_execute( """ diff --git a/posthog/management/commands/makemigrations.py b/posthog/management/commands/makemigrations.py index 8ff0a37bfaa34..a9e0ea4f98e5a 100644 --- a/posthog/management/commands/makemigrations.py +++ b/posthog/management/commands/makemigrations.py @@ -9,7 +9,7 @@ class Command(MakeMigrationsCommand): def handle(self, *app_labels, **options): # Generate a migrations manifest with latest migration on each app - super(Command, self).handle(*app_labels, **options) + super().handle(*app_labels, **options) loader = MigrationLoader(None, ignore_no_migrations=True) apps = sorted(loader.migrated_apps) diff --git a/posthog/management/commands/partition.py b/posthog/management/commands/partition.py index b17e958b0c1e1..4bb17e68b7851 100644 --- a/posthog/management/commands/partition.py +++ b/posthog/management/commands/partition.py @@ -6,7 +6,7 @@ def load_sql(filename): path = os.path.join(os.path.dirname(__file__), "../sql/", filename) - with open(path, "r", encoding="utf_8") as f: + with open(path, encoding="utf_8") as f: return f.read() diff --git a/posthog/management/commands/run_async_migrations.py b/posthog/management/commands/run_async_migrations.py index 611c6038fd43b..c8ee72ea352c6 100644 --- a/posthog/management/commands/run_async_migrations.py +++ b/posthog/management/commands/run_async_migrations.py @@ -1,5 +1,5 @@ import logging -from typing import List, Sequence +from collections.abc import Sequence import structlog from django.core.exceptions import ImproperlyConfigured @@ -31,7 +31,7 @@ def get_necessary_migrations() -> Sequence[AsyncMigration]: - necessary_migrations: List[AsyncMigration] = [] + necessary_migrations: list[AsyncMigration] = [] for migration_name, definition in sorted(ALL_ASYNC_MIGRATIONS.items()): if is_async_migration_complete(migration_name): continue @@ -144,10 +144,8 @@ def handle_plan(necessary_migrations: Sequence[AsyncMigration]): logger.info("Async migrations up to date!") else: logger.warning( - ( - f"Required async migration{' is' if len(necessary_migrations) == 1 else 's are'} not completed:\n" - "\n".join((f"- {migration.get_name_with_requirements()}" for migration in necessary_migrations)) - ) + f"Required async migration{' is' if len(necessary_migrations) == 1 else 's are'} not completed:\n" + "\n".join(f"- {migration.get_name_with_requirements()}" for migration in necessary_migrations) ) diff --git a/posthog/management/commands/sync_feature_flags.py b/posthog/management/commands/sync_feature_flags.py index df2e8d3257645..4e26061603691 100644 --- a/posthog/management/commands/sync_feature_flags.py +++ b/posthog/management/commands/sync_feature_flags.py @@ -1,4 +1,4 @@ -from typing import Dict, cast +from typing import cast from django.core.management.base import BaseCommand @@ -15,8 +15,8 @@ class Command(BaseCommand): help = "Add and enable all feature flags in frontend/src/lib/constants.tsx for all teams" def handle(self, *args, **options): - flags: Dict[str, str] = {} - with open("frontend/src/lib/constants.tsx", "r", encoding="utf_8") as f: + flags: dict[str, str] = {} + with open("frontend/src/lib/constants.tsx", encoding="utf_8") as f: lines = f.readlines() parsing_flags = False for line in lines: diff --git 
a/posthog/management/commands/sync_replicated_schema.py b/posthog/management/commands/sync_replicated_schema.py index e2c280bd41b39..642eae80d9bbf 100644 --- a/posthog/management/commands/sync_replicated_schema.py +++ b/posthog/management/commands/sync_replicated_schema.py @@ -1,7 +1,6 @@ import logging import re from collections import defaultdict -from typing import Dict, Set import structlog from django.conf import settings @@ -65,8 +64,8 @@ def analyze_cluster_tables(self): }, ) - host_tables: Dict[HostName, Set[TableName]] = defaultdict(set) - create_table_queries: Dict[TableName, Query] = {} + host_tables: dict[HostName, set[TableName]] = defaultdict(set) + create_table_queries: dict[TableName, Query] = {} for host, table_name, create_table_query in rows: host_tables[host].add(table_name) @@ -74,7 +73,7 @@ def analyze_cluster_tables(self): return host_tables, create_table_queries, self.get_out_of_sync_hosts(host_tables) - def get_out_of_sync_hosts(self, host_tables: Dict[HostName, Set[TableName]]) -> Dict[HostName, Set[TableName]]: + def get_out_of_sync_hosts(self, host_tables: dict[HostName, set[TableName]]) -> dict[HostName, set[TableName]]: table_names = list(map(get_table_name, CREATE_TABLE_QUERIES)) out_of_sync = {} @@ -87,8 +86,8 @@ def get_out_of_sync_hosts(self, host_tables: Dict[HostName, Set[TableName]]) -> def create_missing_tables( self, - out_of_sync_hosts: Dict[HostName, Set[TableName]], - create_table_queries: Dict[TableName, Query], + out_of_sync_hosts: dict[HostName, set[TableName]], + create_table_queries: dict[TableName, Query], ): missing_tables = {table for tables in out_of_sync_hosts.values() for table in tables} diff --git a/posthog/management/commands/test_migrations_are_safe.py b/posthog/management/commands/test_migrations_are_safe.py index 566533fd9fe69..a576b982b5089 100644 --- a/posthog/management/commands/test_migrations_are_safe.py +++ b/posthog/management/commands/test_migrations_are_safe.py @@ -1,6 +1,6 @@ import re import sys -from typing import List, Optional +from typing import Optional from django.core.management import call_command from django.core.management.base import BaseCommand, CommandError @@ -20,7 +20,7 @@ def _get_table(search_string: str, operation_sql: str) -> Optional[str]: def validate_migration_sql(sql) -> bool: new_tables = _get_new_tables(sql) operations = sql.split("\n") - tables_created_so_far: List[str] = [] + tables_created_so_far: list[str] = [] for operation_sql in operations: # Extract table name from queries of this format: ALTER TABLE TABLE "posthog_feature" table_being_altered: Optional[str] = ( diff --git a/posthog/middleware.py b/posthog/middleware.py index e43ef3a620f18..9b2bcb7dfd582 100644 --- a/posthog/middleware.py +++ b/posthog/middleware.py @@ -1,6 +1,7 @@ import time from ipaddress import ip_address, ip_network -from typing import Any, Callable, List, Optional, cast +from typing import Any, Optional, cast +from collections.abc import Callable from django.shortcuts import redirect import structlog @@ -66,7 +67,7 @@ class AllowIPMiddleware: - trusted_proxies: List[str] = [] + trusted_proxies: list[str] = [] def __init__(self, get_response): if not settings.ALLOWED_IP_BLOCKS: @@ -160,6 +161,13 @@ def __call__(self, request: HttpRequest): user = cast(User, request.user) if len(path_parts) >= 2 and path_parts[0] == "project" and path_parts[1].startswith("phc_"): + + def do_redirect(): + new_path = "/".join(path_parts) + search_params = request.GET.urlencode() + + return redirect(f"/{new_path}?{search_params}" if 
search_params else f"/{new_path}") + try: new_team = Team.objects.get(api_token=path_parts[1]) @@ -167,12 +175,12 @@ def __call__(self, request: HttpRequest): raise Team.DoesNotExist path_parts[1] = str(new_team.pk) - return redirect("/" + "/".join(path_parts)) + return do_redirect() except Team.DoesNotExist: if user.team: path_parts[1] = str(user.team.pk) - return redirect("/" + "/".join(path_parts)) + return do_redirect() if len(path_parts) >= 2 and path_parts[0] == "project" and path_parts[1].isdigit(): project_id_in_url = int(path_parts[1]) @@ -411,7 +419,7 @@ class CaptureMiddleware: def __init__(self, get_response): self.get_response = get_response - middlewares: List[Any] = [] + middlewares: list[Any] = [] # based on how we're using these middlewares, only middlewares that # have a process_request and process_response attribute can be valid here. # Or, middlewares that inherit from `middleware.util.deprecation.MiddlewareMixin` which diff --git a/posthog/migrations/0027_move_elements_to_group.py b/posthog/migrations/0027_move_elements_to_group.py index 51a65b1f5da39..1bc55cd985388 100644 --- a/posthog/migrations/0027_move_elements_to_group.py +++ b/posthog/migrations/0027_move_elements_to_group.py @@ -1,7 +1,6 @@ # Generated by Django 3.0.3 on 2020-02-27 18:13 import hashlib import json -from typing import List from django.db import migrations, models, transaction from django.forms.models import model_to_dict @@ -21,7 +20,7 @@ def forwards(apps, schema_editor): ElementGroup = apps.get_model("posthog", "ElementGroup") Element = apps.get_model("posthog", "Element") - hashes_seen: List[str] = [] + hashes_seen: list[str] = [] while Event.objects.filter(element__isnull=False, elements_hash__isnull=True, event="$autocapture").exists(): with transaction.atomic(): events = ( diff --git a/posthog/migrations/0132_team_test_account_filters.py b/posthog/migrations/0132_team_test_account_filters.py index 313de9f3355e4..a1aba896aa287 100644 --- a/posthog/migrations/0132_team_test_account_filters.py +++ b/posthog/migrations/0132_team_test_account_filters.py @@ -22,7 +22,7 @@ class GenericEmails: """ def __init__(self): - with open(get_absolute_path("../helpers/generic_emails.txt"), "r") as f: + with open(get_absolute_path("../helpers/generic_emails.txt")) as f: self.emails = {x.rstrip(): True for x in f} def is_generic(self, email: str) -> bool: diff --git a/posthog/migrations/0219_migrate_tags_v2.py b/posthog/migrations/0219_migrate_tags_v2.py index fef394a5cc0ea..dcd7375511e4f 100644 --- a/posthog/migrations/0219_migrate_tags_v2.py +++ b/posthog/migrations/0219_migrate_tags_v2.py @@ -1,5 +1,5 @@ # Generated by Django 3.2.5 on 2022-03-01 23:41 -from typing import Any, List, Tuple +from typing import Any from django.core.paginator import Paginator from django.db import migrations @@ -19,7 +19,7 @@ def forwards(apps, schema_editor): Insight = apps.get_model("posthog", "Insight") Dashboard = apps.get_model("posthog", "Dashboard") - createables: List[Tuple[Any, Any]] = [] + createables: list[tuple[Any, Any]] = [] batch_size = 1_000 # Collect insight tags and taggeditems diff --git a/posthog/migrations/0259_backfill_team_recording_domains.py b/posthog/migrations/0259_backfill_team_recording_domains.py index 1f0dcba4f08f8..12304cc70fd83 100644 --- a/posthog/migrations/0259_backfill_team_recording_domains.py +++ b/posthog/migrations/0259_backfill_team_recording_domains.py @@ -1,4 +1,3 @@ -from typing import Set from urllib.parse import urlparse import structlog @@ -20,7 +19,7 @@ def 
backfill_recording_domains(apps, _): teams_in_batch = all_teams[i : i + batch_size] for team in teams_in_batch: - recording_domains: Set[str] = set() + recording_domains: set[str] = set() for app_url in team.app_urls: # Extract just the domain from the URL parsed_url = urlparse(app_url) diff --git a/posthog/migrations/0404_remove_propertydefinition_property_type_is_valid_and_more.py b/posthog/migrations/0404_remove_propertydefinition_property_type_is_valid_and_more.py new file mode 100644 index 0000000000000..ac34ed62af7b9 --- /dev/null +++ b/posthog/migrations/0404_remove_propertydefinition_property_type_is_valid_and_more.py @@ -0,0 +1,47 @@ +# Generated by Django 4.2.11 on 2024-04-21 21:11 +from django.contrib.postgres.operations import AddConstraintNotValid +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0403_plugin_has_private_access"), + ] + + operations = [ + migrations.RemoveConstraint( + model_name="propertydefinition", + name="property_type_is_valid", + ), + migrations.AlterField( + model_name="propertydefinition", + name="property_type", + field=models.CharField( + blank=True, + choices=[ + ("DateTime", "DateTime"), + ("String", "String"), + ("Numeric", "Numeric"), + ("Boolean", "Boolean"), + ("Duration", "Duration"), + ], + max_length=50, + null=True, + ), + ), + migrations.AlterField( + model_name="propertydefinition", + name="type", + field=models.PositiveSmallIntegerField( + choices=[(1, "event"), (2, "person"), (3, "group"), (4, "session")], default=1 + ), + ), + # changed from migrations.AddConstraint. See migration 0405 for where we validate the constraint + AddConstraintNotValid( + model_name="propertydefinition", + constraint=models.CheckConstraint( + check=models.Q(("property_type__in", ["DateTime", "String", "Numeric", "Boolean", "Duration"])), + name="property_type_is_valid", + ), + ), + ] diff --git a/posthog/migrations/0405_team_heatmaps_opt_in.py b/posthog/migrations/0405_team_heatmaps_opt_in.py new file mode 100644 index 0000000000000..6c3d3e6e6f533 --- /dev/null +++ b/posthog/migrations/0405_team_heatmaps_opt_in.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.11 on 2024-04-24 12:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0404_remove_propertydefinition_property_type_is_valid_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="team", + name="heatmaps_opt_in", + field=models.BooleanField(blank=True, null=True), + ), + ] diff --git a/posthog/models/action/action.py b/posthog/models/action/action.py index bd016535a88e0..49aefe15440f8 100644 --- a/posthog/models/action/action.py +++ b/posthog/models/action/action.py @@ -1,5 +1,5 @@ import json -from typing import List, Any +from typing import Any from django.db import models from django.db.models import Q @@ -51,10 +51,10 @@ def get_analytics_metadata(self): "deleted": self.deleted, } - def get_step_events(self) -> List[str]: + def get_step_events(self) -> list[str]: return [action_step.event for action_step in self.steps.all()] - def generate_bytecode(self) -> List[Any]: + def generate_bytecode(self) -> list[Any]: from posthog.hogql.property import action_to_expr from posthog.hogql.bytecode import create_bytecode diff --git a/posthog/models/action/util.py b/posthog/models/action/util.py index 54fda6ef5b95f..95cdca9721ceb 100644 --- a/posthog/models/action/util.py +++ b/posthog/models/action/util.py @@ -1,6 +1,6 @@ from collections import Counter 
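Stepping back from the individual hunks: most of the remaining changes in this patch, including the `action/util.py` imports right here, are the same mechanical typing modernization seen earlier — `List`/`Dict`/`Tuple`/`Set` become built-in generics (PEP 585, Python 3.9+), unions can be written `X | Y` (PEP 604, usable in `isinstance()` on Python 3.10+), and `Callable`/`Sequence` move to `collections.abc`. A generic before/after sketch of the pattern (example code, not taken from the patch):

```python
# Before: typing aliases, as used throughout the old code
from typing import Callable, Dict, List, Optional, Tuple

def summarize(rows: List[Tuple[str, int]], fmt: Optional[Callable[[int], str]] = None) -> Dict[str, str]:
    fmt = fmt or str
    return {key: fmt(value) for key, value in rows}

# After: PEP 585 built-in generics, PEP 604 unions, collections.abc imports
from collections.abc import Callable as AbcCallable

def summarize_modern(rows: list[tuple[str, int]], fmt: AbcCallable[[int], str] | None = None) -> dict[str, str]:
    fmt = fmt or str
    return {key: fmt(value) for key, value in rows}

# On Python 3.10+, isinstance() also accepts the union form, which is why
# FormulaAST._evaluate can check `isinstance(node, list | tuple)` above.
assert isinstance(("a", 1), list | tuple)
```

The same sweep also covers smaller cleanups visible in these hunks, such as dropping the redundant `"r"` mode from `open()` calls and replacing `super(Command, self)` with the zero-argument `super()`.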
-from typing import Counter as TCounter, Literal, Optional -from typing import Dict, List, Tuple +from typing import Literal, Optional +from collections import Counter as TCounter from posthog.constants import AUTOCAPTURE_EVENT, TREND_FILTER_TYPE_ACTIONS from posthog.hogql.hogql import HogQLContext @@ -15,7 +15,7 @@ def format_action_filter_event_only( action: Action, prepend: str = "action", -) -> Tuple[str, Dict]: +) -> tuple[str, dict]: """Return SQL for prefiltering events by action, i.e. down to only the events and without any other filters.""" events = action.get_step_events() if not events: @@ -37,7 +37,7 @@ def format_action_filter( table_name: str = "", person_properties_mode: PersonPropertiesMode = PersonPropertiesMode.USING_SUBQUERY, person_id_joined_alias: str = "person_id", -) -> Tuple[str, Dict]: +) -> tuple[str, dict]: """Return SQL for filtering events by action.""" # get action steps params = {"team_id": action.team.pk} if filter_by_team else {} @@ -48,7 +48,7 @@ def format_action_filter( or_queries = [] for index, step in enumerate(steps): - conditions: List[str] = [] + conditions: list[str] = [] # filter element if step.event == AUTOCAPTURE_EVENT: from posthog.models.property.util import ( @@ -118,7 +118,7 @@ def format_action_filter( def filter_event( step: ActionStep, prepend: str = "event", index: int = 0, table_name: str = "" -) -> Tuple[List[str], Dict]: +) -> tuple[list[str], dict]: from posthog.models.property.util import get_property_string_expr params = {} @@ -156,7 +156,7 @@ def format_entity_filter( person_id_joined_alias: str, prepend: str = "action", filter_by_team=True, -) -> Tuple[str, Dict]: +) -> tuple[str, dict]: if entity.type == TREND_FILTER_TYPE_ACTIONS: action = entity.get_action() entity_filter, params = format_action_filter( diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py index 074b53b2dd55b..141130ea4f80e 100644 --- a/posthog/models/activity_logging/activity_log.py +++ b/posthog/models/activity_logging/activity_log.py @@ -2,7 +2,7 @@ import json from datetime import datetime from decimal import Decimal -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, Literal, Optional, Union import structlog from django.core.paginator import Paginator @@ -52,7 +52,7 @@ class Change: class Trigger: job_type: str job_id: str - payload: Dict + payload: dict @dataclasses.dataclass(frozen=True) @@ -62,13 +62,13 @@ class Detail: # The short_id if it has one short_id: Optional[str] = None type: Optional[str] = None - changes: Optional[List[Change]] = None + changes: Optional[list[Change]] = None trigger: Optional[Trigger] = None class ActivityDetailEncoder(json.JSONEncoder): def default(self, obj): - if isinstance(obj, (Detail, Change, Trigger)): + if isinstance(obj, Detail | Change | Trigger): return obj.__dict__ if isinstance(obj, datetime): return obj.isoformat() @@ -132,7 +132,7 @@ class Meta: ] -field_exclusions: Dict[ActivityScope, List[str]] = { +field_exclusions: dict[ActivityScope, list[str]] = { "Notebook": [ "text_content", ], @@ -199,7 +199,7 @@ class Meta: } -def describe_change(m: Any) -> Union[str, Dict]: +def describe_change(m: Any) -> Union[str, dict]: if isinstance(m, Dashboard): return {"id": m.id, "name": m.name} if isinstance(m, DashboardTile): @@ -213,7 +213,7 @@ def describe_change(m: Any) -> Union[str, Dict]: return str(m) -def _read_through_relation(relation: models.Manager) -> List[Union[Dict, str]]: +def _read_through_relation(relation: 
models.Manager) -> list[Union[dict, str]]: described_models = [describe_change(r) for r in relation.all()] if all(isinstance(elem, str) for elem in described_models): @@ -227,11 +227,11 @@ def changes_between( model_type: ActivityScope, previous: Optional[models.Model], current: Optional[models.Model], -) -> List[Change]: +) -> list[Change]: """ Identifies changes between two models by comparing fields """ - changes: List[Change] = [] + changes: list[Change] = [] if previous is None and current is None: # there are no changes between two things that don't exist @@ -282,14 +282,14 @@ def changes_between( def dict_changes_between( model_type: ActivityScope, - previous: Dict[Any, Any], - new: Dict[Any, Any], + previous: dict[Any, Any], + new: dict[Any, Any], use_field_exclusions: bool = False, -) -> List[Change]: +) -> list[Change]: """ Identifies changes between two dictionaries by comparing fields """ - changes: List[Change] = [] + changes: list[Change] = [] if previous == new: return changes @@ -395,7 +395,7 @@ class ActivityPage: limit: int has_next: bool has_previous: bool - results: List[ActivityLog] + results: list[ActivityLog] def get_activity_page(activity_query: models.QuerySet, limit: int = 10, page: int = 1) -> ActivityPage: @@ -430,7 +430,7 @@ def load_activity( return get_activity_page(activity_query, limit, page) -def load_all_activity(scope_list: List[ActivityScope], team_id: int, limit: int = 10, page: int = 1): +def load_all_activity(scope_list: list[ActivityScope], team_id: int, limit: int = 10, page: int = 1): activity_query = ( ActivityLog.objects.select_related("user").filter(team_id=team_id, scope__in=scope_list).order_by("-created_at") ) diff --git a/posthog/models/async_deletion/delete.py b/posthog/models/async_deletion/delete.py index 9846842b8e0d5..1ab75b353e898 100644 --- a/posthog/models/async_deletion/delete.py +++ b/posthog/models/async_deletion/delete.py @@ -1,6 +1,5 @@ from abc import ABC, abstractmethod from collections import defaultdict -from typing import Dict, List, Tuple import structlog from django.utils import timezone @@ -13,7 +12,7 @@ class AsyncDeletionProcess(ABC): CLICKHOUSE_MUTATION_CHUNK_SIZE = 1_000_000 CLICKHOUSE_VERIFY_CHUNK_SIZE = 1_000 - DELETION_TYPES: List[DeletionType] = [] + DELETION_TYPES: list[DeletionType] = [] def __init__(self) -> None: super().__init__() @@ -60,14 +59,14 @@ def _fetch_unverified_deletions_grouped(self): return result @abstractmethod - def process(self, deletions: List[AsyncDeletion]): + def process(self, deletions: list[AsyncDeletion]): raise NotImplementedError() @abstractmethod - def _verify_by_group(self, deletion_type: int, async_deletions: List[AsyncDeletion]) -> List[AsyncDeletion]: + def _verify_by_group(self, deletion_type: int, async_deletions: list[AsyncDeletion]) -> list[AsyncDeletion]: raise NotImplementedError() - def _conditions(self, async_deletions: List[AsyncDeletion]) -> Tuple[List[str], Dict]: + def _conditions(self, async_deletions: list[AsyncDeletion]) -> tuple[list[str], dict]: conditions, args = [], {} for i, row in enumerate(async_deletions): condition, arg = self._condition(row, str(i)) @@ -76,5 +75,5 @@ def _conditions(self, async_deletions: List[AsyncDeletion]) -> Tuple[List[str], return conditions, args @abstractmethod - def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> Tuple[str, Dict]: + def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> tuple[str, dict]: raise NotImplementedError() diff --git a/posthog/models/async_deletion/delete_cohorts.py 
b/posthog/models/async_deletion/delete_cohorts.py index c2d452628ceb2..00f10aac6b82e 100644 --- a/posthog/models/async_deletion/delete_cohorts.py +++ b/posthog/models/async_deletion/delete_cohorts.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Set, Tuple +from typing import Any from posthog.client import sync_execute from posthog.models.async_deletion import AsyncDeletion, DeletionType @@ -9,7 +9,7 @@ class AsyncCohortDeletion(AsyncDeletionProcess): DELETION_TYPES = [DeletionType.Cohort_full, DeletionType.Cohort_stale] - def process(self, deletions: List[AsyncDeletion]): + def process(self, deletions: list[AsyncDeletion]): if len(deletions) == 0: logger.warn("No AsyncDeletion for cohorts to perform") return @@ -33,7 +33,7 @@ def process(self, deletions: List[AsyncDeletion]): workload=Workload.OFFLINE, ) - def _verify_by_group(self, deletion_type: int, async_deletions: List[AsyncDeletion]) -> List[AsyncDeletion]: + def _verify_by_group(self, deletion_type: int, async_deletions: list[AsyncDeletion]) -> list[AsyncDeletion]: if deletion_type == DeletionType.Cohort_stale or deletion_type == DeletionType.Cohort_full: cohort_ids_with_data = self._verify_by_column("team_id, cohort_id", async_deletions) return [ @@ -42,7 +42,7 @@ def _verify_by_group(self, deletion_type: int, async_deletions: List[AsyncDeleti else: return [] - def _verify_by_column(self, distinct_columns: str, async_deletions: List[AsyncDeletion]) -> Set[Tuple[Any, ...]]: + def _verify_by_column(self, distinct_columns: str, async_deletions: list[AsyncDeletion]) -> set[tuple[Any, ...]]: conditions, args = self._conditions(async_deletions) clickhouse_result = sync_execute( f""" @@ -62,7 +62,7 @@ def _column_name(self, async_deletion: AsyncDeletion): ) return "cohort_id" - def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> Tuple[str, Dict]: + def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> tuple[str, dict]: team_id_param = f"team_id{suffix}" key_param = f"key{suffix}" version_param = f"version{suffix}" diff --git a/posthog/models/async_deletion/delete_events.py b/posthog/models/async_deletion/delete_events.py index 2486043a5b871..988161336cc56 100644 --- a/posthog/models/async_deletion/delete_events.py +++ b/posthog/models/async_deletion/delete_events.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Set, Tuple +from typing import Any from posthog.client import sync_execute from posthog.models.async_deletion import AsyncDeletion, DeletionType, CLICKHOUSE_ASYNC_DELETION_TABLE @@ -22,7 +22,7 @@ class AsyncEventDeletion(AsyncDeletionProcess): DELETION_TYPES = [DeletionType.Team, DeletionType.Person] - def process(self, deletions: List[AsyncDeletion]): + def process(self, deletions: list[AsyncDeletion]): if len(deletions) == 0: logger.debug("No AsyncDeletion to perform") return @@ -87,7 +87,7 @@ def process(self, deletions: List[AsyncDeletion]): workload=Workload.OFFLINE, ) - def _fill_table(self, deletions: List[AsyncDeletion], temp_table_name: str): + def _fill_table(self, deletions: list[AsyncDeletion], temp_table_name: str): sync_execute(f"DROP TABLE IF EXISTS {temp_table_name}", workload=Workload.OFFLINE) sync_execute( CLICKHOUSE_ASYNC_DELETION_TABLE.format(table_name=temp_table_name, cluster=CLICKHOUSE_CLUSTER), @@ -111,7 +111,7 @@ def _fill_table(self, deletions: List[AsyncDeletion], temp_table_name: str): workload=Workload.OFFLINE, ) - def _verify_by_group(self, deletion_type: int, async_deletions: List[AsyncDeletion]) -> List[AsyncDeletion]: + def _verify_by_group(self, 
deletion_type: int, async_deletions: list[AsyncDeletion]) -> list[AsyncDeletion]: if deletion_type == DeletionType.Team: team_ids_with_data = self._verify_by_column("team_id", async_deletions) return [row for row in async_deletions if (row.team_id,) not in team_ids_with_data] @@ -122,7 +122,7 @@ def _verify_by_group(self, deletion_type: int, async_deletions: List[AsyncDeleti else: return [] - def _verify_by_column(self, distinct_columns: str, async_deletions: List[AsyncDeletion]) -> Set[Tuple[Any, ...]]: + def _verify_by_column(self, distinct_columns: str, async_deletions: list[AsyncDeletion]) -> set[tuple[Any, ...]]: conditions, args = self._conditions(async_deletions) clickhouse_result = sync_execute( f""" @@ -142,7 +142,7 @@ def _column_name(self, async_deletion: AsyncDeletion): else: return f"$group_{async_deletion.group_type_index}" - def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> Tuple[str, Dict]: + def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> tuple[str, dict]: if async_deletion.deletion_type == DeletionType.Team: return f"team_id = %(team_id{suffix})s", {f"team_id{suffix}": async_deletion.team_id} else: diff --git a/posthog/models/async_migration.py b/posthog/models/async_migration.py index 885f7ce397931..92d61fb5e3f33 100644 --- a/posthog/models/async_migration.py +++ b/posthog/models/async_migration.py @@ -1,5 +1,3 @@ -from typing import List - from django.db import models @@ -63,7 +61,7 @@ def get_all_running_async_migrations(): return AsyncMigration.objects.filter(status=MigrationStatus.Running) -def get_async_migrations_by_status(target_statuses: List[int]): +def get_async_migrations_by_status(target_statuses: list[int]): return AsyncMigration.objects.filter(status__in=target_statuses) diff --git a/posthog/models/channel_type/sql.py b/posthog/models/channel_type/sql.py index 15470601c2dfb..d631c276e55dd 100644 --- a/posthog/models/channel_type/sql.py +++ b/posthog/models/channel_type/sql.py @@ -37,7 +37,7 @@ f"TRUNCATE TABLE IF EXISTS {CHANNEL_DEFINITION_TABLE_NAME} ON CLUSTER '{CLICKHOUSE_CLUSTER}'" ) -with open(os.path.join(os.path.dirname(__file__), "channel_definitions.json"), "r") as f: +with open(os.path.join(os.path.dirname(__file__), "channel_definitions.json")) as f: CHANNEL_DEFINITIONS = json.loads(f.read()) @@ -54,7 +54,7 @@ def format_value(value): INSERT INTO channel_definition (domain, kind, domain_type, type_if_paid, type_if_organic) VALUES { ''', -'''.join((f'({" ,".join(map(format_value, x))})' for x in CHANNEL_DEFINITIONS))}, +'''.join(f'({" ,".join(map(format_value, x))})' for x in CHANNEL_DEFINITIONS)}, ; """ diff --git a/posthog/models/cohort/cohort.py b/posthog/models/cohort/cohort.py index a10be159d5702..8f7867127a1fa 100644 --- a/posthog/models/cohort/cohort.py +++ b/posthog/models/cohort/cohort.py @@ -1,6 +1,6 @@ import time from datetime import datetime -from typing import Any, Dict, List, Literal, Optional, Union, cast +from typing import Any, Literal, Optional, Union, cast import structlog from django.conf import settings @@ -37,7 +37,7 @@ class Group: def __init__( self, - properties: Optional[Dict[str, Any]] = None, + properties: Optional[dict[str, Any]] = None, action_id: Optional[int] = None, event_id: Optional[str] = None, days: Optional[int] = None, @@ -59,7 +59,7 @@ def __init__( self.start_date = start_date self.end_date = end_date - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: dup = self.__dict__.copy() dup["start_date"] = self.start_date.isoformat() if 
self.start_date else self.start_date dup["end_date"] = self.end_date.isoformat() if self.end_date else self.end_date @@ -159,11 +159,11 @@ def properties(self) -> PropertyGroup: ) else: # invalid state - return PropertyGroup(PropertyOperatorType.AND, cast(List[Property], [])) + return PropertyGroup(PropertyOperatorType.AND, cast(list[Property], [])) return PropertyGroup(PropertyOperatorType.OR, property_groups) - return PropertyGroup(PropertyOperatorType.AND, cast(List[Property], [])) + return PropertyGroup(PropertyOperatorType.AND, cast(list[Property], [])) @property def has_complex_behavioral_filter(self) -> bool: @@ -241,7 +241,7 @@ def calculate_people_ch(self, pending_version: int, *, initiating_user_id: Optio clear_stale_cohort.delay(self.pk, before_version=pending_version) - def insert_users_by_list(self, items: List[str]) -> None: + def insert_users_by_list(self, items: list[str]) -> None: """ Items is a list of distinct_ids """ @@ -303,7 +303,7 @@ def insert_users_by_list(self, items: List[str]) -> None: self.save() capture_exception(err) - def insert_users_list_by_uuid(self, items: List[str], insert_in_clickhouse: bool = False, batchsize=1000) -> None: + def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool = False, batchsize=1000) -> None: from posthog.models.cohort.util import get_static_cohort_size, insert_static_cohort try: diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py index 059bfb3813b8d..2a22c58ba24cd 100644 --- a/posthog/models/cohort/util.py +++ b/posthog/models/cohort/util.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional, Set, Tuple, Union, cast +from typing import Any, Optional, Union, cast import structlog from dateutil import parser @@ -44,7 +44,7 @@ logger = structlog.get_logger(__name__) -def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext) -> Tuple[str, Dict[str, Any]]: +def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext) -> tuple[str, dict[str, Any]]: if cohort.is_static: return format_static_cohort_query(cohort, index, prepend="") @@ -72,7 +72,7 @@ def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext) def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str: from posthog.hogql_queries.query_runner import get_query_runner - persons_query = cast(Dict, cohort.query) + persons_query = cast(dict, cohort.query) persons_query["select"] = ["id as actor_id"] query = get_query_runner( persons_query, team=cast(Team, cohort.team), limit_context=LimitContext.COHORT_CALCULATION @@ -81,7 +81,7 @@ def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str return print_ast(query, context=hogql_context, dialect="clickhouse") -def format_static_cohort_query(cohort: Cohort, index: int, prepend: str) -> Tuple[str, Dict[str, Any]]: +def format_static_cohort_query(cohort: Cohort, index: int, prepend: str) -> tuple[str, dict[str, Any]]: cohort_id = cohort.pk return ( f"SELECT person_id as id FROM {PERSON_STATIC_COHORT_TABLE} WHERE cohort_id = %({prepend}_cohort_id_{index})s AND team_id = %(team_id)s", @@ -89,7 +89,7 @@ def format_static_cohort_query(cohort: Cohort, index: int, prepend: str) -> Tupl ) -def format_precalculated_cohort_query(cohort: Cohort, index: int, prepend: str = "") -> Tuple[str, Dict[str, Any]]: +def format_precalculated_cohort_query(cohort: Cohort, index: int, prepend: str = "") -> tuple[str, dict[str, Any]]: 
filter_query = GET_PERSON_ID_BY_PRECALCULATED_COHORT_ID.format(index=index, prepend=prepend) return ( filter_query, @@ -121,7 +121,7 @@ def get_entity_query( team_id: int, group_idx: Union[int, str], hogql_context: HogQLContext, -) -> Tuple[str, Dict[str, str]]: +) -> tuple[str, dict[str, str]]: if event_id: return f"event = %({f'event_{group_idx}'})s", {f"event_{group_idx}": event_id} elif action_id: @@ -139,9 +139,9 @@ def get_entity_query( def get_date_query( days: Optional[str], start_time: Optional[str], end_time: Optional[str] -) -> Tuple[str, Dict[str, str]]: +) -> tuple[str, dict[str, str]]: date_query: str = "" - date_params: Dict[str, str] = {} + date_params: dict[str, str] = {} if days: date_query, date_params = parse_entity_timestamps_in_days(int(days)) elif start_time or end_time: @@ -150,7 +150,7 @@ def get_date_query( return date_query, date_params -def parse_entity_timestamps_in_days(days: int) -> Tuple[str, Dict[str, str]]: +def parse_entity_timestamps_in_days(days: int) -> tuple[str, dict[str, str]]: curr_time = timezone.now() start_time = curr_time - timedelta(days=days) @@ -163,9 +163,9 @@ def parse_entity_timestamps_in_days(days: int) -> Tuple[str, Dict[str, str]]: ) -def parse_cohort_timestamps(start_time: Optional[str], end_time: Optional[str]) -> Tuple[str, Dict[str, str]]: +def parse_cohort_timestamps(start_time: Optional[str], end_time: Optional[str]) -> tuple[str, dict[str, str]]: clause = "AND " - params: Dict[str, str] = {} + params: dict[str, str] = {} if start_time: clause += "timestamp >= %(date_from)s" @@ -199,7 +199,7 @@ def format_filter_query( hogql_context: HogQLContext, id_column: str = "distinct_id", custom_match_field="person_id", -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: person_query, params = format_cohort_subquery(cohort, index, hogql_context, custom_match_field=custom_match_field) person_id_query = CALCULATE_COHORT_PEOPLE_SQL.format( @@ -215,7 +215,7 @@ def format_cohort_subquery( index: int, hogql_context: HogQLContext, custom_match_field="person_id", -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: is_precalculated = is_precalculated_query(cohort) if is_precalculated: query, params = format_precalculated_cohort_query(cohort, index) @@ -259,8 +259,8 @@ def get_person_ids_by_cohort_id( return [str(row[0]) for row in results] -def insert_static_cohort(person_uuids: List[Optional[uuid.UUID]], cohort_id: int, team: Team): - persons = ( +def insert_static_cohort(person_uuids: list[Optional[uuid.UUID]], cohort_id: int, team: Team): + persons = [ { "id": str(uuid.uuid4()), "person_id": str(person_uuid), @@ -269,7 +269,7 @@ def insert_static_cohort(person_uuids: List[Optional[uuid.UUID]], cohort_id: int "_timestamp": datetime.now(), } for person_uuid in person_uuids - ) + ] sync_execute(INSERT_PERSON_STATIC_COHORT, persons) @@ -442,17 +442,17 @@ def simplified_cohort_filter_properties(cohort: Cohort, team: Team, is_negated=F return cohort.properties -def _get_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> List[int]: +def _get_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> list[int]: res = sync_execute(GET_COHORTS_BY_PERSON_UUID, {"person_id": uuid, "team_id": team_id}) return [row[0] for row in res] -def _get_static_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> List[int]: +def _get_static_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> list[int]: res = sync_execute(GET_STATIC_COHORTPEOPLE_BY_PERSON_UUID, {"person_id": uuid, "team_id": team_id}) return [row[0] for row in res] -def 
get_all_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> List[int]: +def get_all_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> list[int]: cohort_ids = _get_cohort_ids_by_person_uuid(uuid, team_id) static_cohort_ids = _get_static_cohort_ids_by_person_uuid(uuid, team_id) return [*cohort_ids, *static_cohort_ids] @@ -461,8 +461,8 @@ def get_all_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> List[int]: def get_dependent_cohorts( cohort: Cohort, using_database: str = "default", - seen_cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, -) -> List[Cohort]: + seen_cohorts_cache: Optional[dict[int, CohortOrEmpty]] = None, +) -> list[Cohort]: if seen_cohorts_cache is None: seen_cohorts_cache = {} @@ -508,7 +508,7 @@ def get_dependent_cohorts( return cohorts -def sort_cohorts_topologically(cohort_ids: Set[int], seen_cohorts_cache: Dict[int, CohortOrEmpty]) -> List[int]: +def sort_cohorts_topologically(cohort_ids: set[int], seen_cohorts_cache: dict[int, CohortOrEmpty]) -> list[int]: """ Sorts the given cohorts in an order where cohorts with no dependencies are placed first, followed by cohorts that depend on the preceding ones. It ensures that each cohort in the sorted list @@ -518,7 +518,7 @@ def sort_cohorts_topologically(cohort_ids: Set[int], seen_cohorts_cache: Dict[in if not cohort_ids: return [] - dependency_graph: Dict[int, List[int]] = {} + dependency_graph: dict[int, list[int]] = {} seen = set() # build graph (adjacency list) @@ -553,7 +553,7 @@ def dfs(node, seen, sorted_arr): sorted_arr.append(int(node)) seen.add(node) - sorted_cohort_ids: List[int] = [] + sorted_cohort_ids: list[int] = [] seen = set() for cohort_id in cohort_ids: if cohort_id not in seen: diff --git a/posthog/models/dashboard.py b/posthog/models/dashboard.py index 86af344be038e..003201722a5c4 100644 --- a/posthog/models/dashboard.py +++ b/posthog/models/dashboard.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from django.contrib.postgres.fields import ArrayField from django.db import models @@ -81,7 +81,7 @@ class PrivilegeLevel(models.IntegerChoices): __repr__ = sane_repr("team_id", "id", "name") def __str__(self): - return self.name or self.id + return self.name or str(self.id) @property def is_sharing_enabled(self): @@ -93,7 +93,7 @@ def is_sharing_enabled(self): def url(self): return absolute_uri(f"/dashboard/{self.id}") - def get_analytics_metadata(self) -> Dict[str, Any]: + def get_analytics_metadata(self) -> dict[str, Any]: """ Returns serialized information about the object for analytics reporting. 
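sort_cohorts_topologically above builds an adjacency list of cohort dependencies and then emits cohorts in post-order from a depth-first search, so every cohort appears after the cohorts it depends on. A self-contained sketch of just that ordering step (the real function also resolves each cohort's dependencies from its filters via seen_cohorts_cache, which is omitted here):

def sort_topologically(cohort_ids: set[int], dependency_graph: dict[int, list[int]]) -> list[int]:
    sorted_ids: list[int] = []
    seen: set[int] = set()

    def dfs(node: int) -> None:
        if node in seen:
            return
        seen.add(node)
        for dependency in dependency_graph.get(node, []):
            dfs(dependency)
        # post-order append: everything `node` depends on is already in the list
        sorted_ids.append(node)

    for cohort_id in cohort_ids:
        dfs(cohort_id)
    return sorted_ids


# cohort 3 depends on 2, and 2 depends on 1, so 1 must be calculated first
assert sort_topologically({1, 2, 3}, {3: [2], 2: [1]}) == [1, 2, 3]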
""" diff --git a/posthog/models/dashboard_tile.py b/posthog/models/dashboard_tile.py index 50af2868abf5b..9d39028e49bf0 100644 --- a/posthog/models/dashboard_tile.py +++ b/posthog/models/dashboard_tile.py @@ -1,5 +1,3 @@ -from typing import List - from django.core.exceptions import ValidationError from django.db import models from django.db.models import Q, QuerySet, UniqueConstraint @@ -112,7 +110,7 @@ def save(self, *args, **kwargs) -> None: if "update_fields" in kwargs: kwargs["update_fields"].append("filters_hash") - super(DashboardTile, self).save(*args, **kwargs) + super().save(*args, **kwargs) def copy_to_dashboard(self, dashboard: Dashboard) -> None: DashboardTile.objects.create( @@ -139,7 +137,7 @@ def dashboard_queryset(queryset: QuerySet) -> QuerySet: ) -def get_tiles_ordered_by_position(dashboard: Dashboard, size: str = "xs") -> List[DashboardTile]: +def get_tiles_ordered_by_position(dashboard: Dashboard, size: str = "xs") -> list[DashboardTile]: tiles = list( dashboard.tiles.select_related("insight", "text") .exclude(insight__deleted=True) diff --git a/posthog/models/element/element.py b/posthog/models/element/element.py index c1091932cd4c8..4beeb5400851b 100644 --- a/posthog/models/element/element.py +++ b/posthog/models/element/element.py @@ -1,5 +1,4 @@ import re -from typing import List from django.contrib.postgres.fields import ArrayField from django.db import models @@ -34,7 +33,7 @@ def _escape(input: str) -> str: return input.replace('"', r"\"") -def elements_to_string(elements: List[Element]) -> str: +def elements_to_string(elements: list[Element]) -> str: ret = [] for element in elements: el_string = "" @@ -58,7 +57,7 @@ def elements_to_string(elements: List[Element]) -> str: return ";".join(ret) -def chain_to_elements(chain: str) -> List[Element]: +def chain_to_elements(chain: str) -> list[Element]: elements = [] for idx, el_string in enumerate(re.findall(split_chain_regex, chain)): el_string_split = re.findall(split_class_attributes, el_string)[0] diff --git a/posthog/models/element_group.py b/posthog/models/element_group.py index 3d399f2559844..0a6a2545da0e5 100644 --- a/posthog/models/element_group.py +++ b/posthog/models/element_group.py @@ -1,6 +1,6 @@ import hashlib import json -from typing import Any, Dict, List +from typing import Any from django.db import models, transaction from django.forms.models import model_to_dict @@ -9,8 +9,8 @@ from posthog.models.team import Team -def hash_elements(elements: List) -> str: - elements_list: List[Dict] = [] +def hash_elements(elements: list) -> str: + elements_list: list[dict] = [] for element in elements: el_dict = model_to_dict(element) [el_dict.pop(key) for key in ["event", "id", "group"]] diff --git a/posthog/models/entity/entity.py b/posthog/models/entity/entity.py index 91865f9fa50f9..255edb0db4f3a 100644 --- a/posthog/models/entity/entity.py +++ b/posthog/models/entity/entity.py @@ -1,6 +1,6 @@ import inspect from collections import Counter -from typing import Any, Dict, Literal, Optional +from typing import Any, Literal, Optional from django.conf import settings from rest_framework.exceptions import ValidationError @@ -67,7 +67,7 @@ class Entity(PropertyMixin): id_field: Optional[str] timestamp_field: Optional[str] - def __init__(self, data: Dict[str, Any]) -> None: + def __init__(self, data: dict[str, Any]) -> None: self.id = data.get("id") if data.get("type") not in [ TREND_FILTER_TYPE_ACTIONS, @@ -102,7 +102,7 @@ def __init__(self, data: Dict[str, Any]) -> None: if self.type == TREND_FILTER_TYPE_EVENTS and 
not self.name: self.name = "All events" if self.id is None else str(self.id) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: return { "id": self.id, "type": self.type, @@ -180,10 +180,10 @@ class ExclusionEntity(Entity, FunnelFromToStepsMixin): with extra parameters for exclusion semantics. """ - def __init__(self, data: Dict[str, Any]) -> None: + def __init__(self, data: dict[str, Any]) -> None: super().__init__(data) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: ret = super().to_dict() for _, func in inspect.getmembers(self, inspect.ismethod): diff --git a/posthog/models/entity/util.py b/posthog/models/entity/util.py index 06abcda5d0167..ffcd8cda671a7 100644 --- a/posthog/models/entity/util.py +++ b/posthog/models/entity/util.py @@ -1,4 +1,5 @@ -from typing import Any, Dict, List, Sequence, Set, Tuple +from typing import Any +from collections.abc import Sequence from posthog.constants import TREND_FILTER_TYPE_ACTIONS from posthog.hogql.hogql import HogQLContext @@ -16,17 +17,17 @@ def get_entity_filtering_params( person_properties_mode: PersonPropertiesMode = PersonPropertiesMode.USING_PERSON_PROPERTIES_COLUMN, person_id_joined_alias: str = "person_id", deep_filtering: bool = False, -) -> Tuple[Dict, Dict]: +) -> tuple[dict, dict]: """Return SQL condition for filtering events by allowed entities (events/actions). Events matching _at least one_ entity are included. If no entities are provided, _all_ events are included.""" if not allowed_entities: return {}, {} - params: Dict[str, Any] = {} - entity_clauses: List[str] = [] - action_ids_already_included: Set[int] = set() # Avoid duplicating action conditions - events_already_included: Set[str] = set() # Avoid duplicating event conditions + params: dict[str, Any] = {} + entity_clauses: list[str] = [] + action_ids_already_included: set[int] = set() # Avoid duplicating action conditions + events_already_included: set[str] = set() # Avoid duplicating event conditions for entity in allowed_entities: if entity.type == TREND_FILTER_TYPE_ACTIONS: if entity.id in action_ids_already_included or entity.id is None: diff --git a/posthog/models/event/event.py b/posthog/models/event/event.py index 59b2f3c0a032b..184fffb18afa6 100644 --- a/posthog/models/event/event.py +++ b/posthog/models/event/event.py @@ -2,7 +2,7 @@ import datetime import re from collections import defaultdict -from typing import Dict, List, Optional, Union +from typing import Optional, Union from dateutil.relativedelta import relativedelta from django.db import models @@ -13,10 +13,10 @@ SELECTOR_ATTRIBUTE_REGEX = r"([a-zA-Z]*)\[(.*)=[\'|\"](.*)[\'|\"]\]" -LAST_UPDATED_TEAM_ACTION: Dict[int, datetime.datetime] = {} -TEAM_EVENT_ACTION_QUERY_CACHE: Dict[int, Dict[str, tuple]] = defaultdict(dict) +LAST_UPDATED_TEAM_ACTION: dict[int, datetime.datetime] = {} +TEAM_EVENT_ACTION_QUERY_CACHE: dict[int, dict[str, tuple]] = defaultdict(dict) # TEAM_EVENT_ACTION_QUERY_CACHE looks like team_id -> event ex('$pageview') -> query -TEAM_ACTION_QUERY_CACHE: Dict[int, str] = {} +TEAM_ACTION_QUERY_CACHE: dict[int, str] = {} DEFAULT_EARLIEST_TIME_DELTA = relativedelta(weeks=1) @@ -26,8 +26,8 @@ class SelectorPart: def __init__(self, tag: str, direct_descendant: bool, escape_slashes: bool): self.direct_descendant = direct_descendant - self.data: Dict[str, Union[str, List]] = {} - self.ch_attributes: Dict[str, Union[str, List]] = {} # attributes for CH + self.data: dict[str, Union[str, list]] = {} + self.ch_attributes: dict[str, Union[str, 
list]] = {} # attributes for CH result = re.search(SELECTOR_ATTRIBUTE_REGEX, tag) if result and "[id=" in tag: @@ -58,9 +58,9 @@ def __init__(self, tag: str, direct_descendant: bool, escape_slashes: bool): self.data["tag_name"] = tag @property - def extra_query(self) -> Dict[str, List[Union[str, List[str]]]]: - where: List[Union[str, List[str]]] = [] - params: List[Union[str, List[str]]] = [] + def extra_query(self) -> dict[str, list[Union[str, list[str]]]]: + where: list[Union[str, list[str]]] = [] + params: list[Union[str, list[str]]] = [] for key, value in self.data.items(): if "attr__" in key: where.append(f"(attributes ->> 'attr__{key.split('attr__')[1]}') = %s") @@ -78,7 +78,7 @@ def _unescape_class(self, class_name): class Selector: - parts: List[SelectorPart] = [] + parts: list[SelectorPart] = [] def __init__(self, selector: str, escape_slashes=True): self.parts = [] @@ -98,7 +98,7 @@ def __init__(self, selector: str, escape_slashes=True): def _split(self, selector): in_attribute_selector = False in_quotes: Optional[str] = None - part: List[str] = [] + part: list[str] = [] for char in selector: if char == "[" and in_quotes is None: in_attribute_selector = True diff --git a/posthog/models/event/query_event_list.py b/posthog/models/event/query_event_list.py index ded739c9a81c0..1ecdbee021a7d 100644 --- a/posthog/models/event/query_event_list.py +++ b/posthog/models/event/query_event_list.py @@ -1,5 +1,5 @@ from datetime import timedelta, datetime, time -from typing import Dict, List, Optional, Tuple, Union +from typing import Optional, Union from zoneinfo import ZoneInfo from dateutil.parser import isoparse @@ -29,10 +29,10 @@ def parse_timestamp(timestamp: str, tzinfo: ZoneInfo) -> datetime: def parse_request_params( - conditions: Dict[str, Union[None, str, List[str]]], team: Team, tzinfo: ZoneInfo -) -> Tuple[str, Dict]: + conditions: dict[str, Union[None, str, list[str]]], team: Team, tzinfo: ZoneInfo +) -> tuple[str, dict]: result = "" - params: Dict[str, Union[str, List[str]]] = {} + params: dict[str, Union[str, list[str]]] = {} for k, v in conditions.items(): if not isinstance(v, str): continue @@ -58,13 +58,13 @@ def parse_request_params( def query_events_list( filter: Filter, team: Team, - request_get_query_dict: Dict, - order_by: List[str], + request_get_query_dict: dict, + order_by: list[str], action_id: Optional[str], unbounded_date_from: bool = False, limit: int = DEFAULT_RETURNED_ROWS, offset: int = 0, -) -> List: +) -> list: # Note: This code is inefficient and problematic, see https://github.com/PostHog/posthog/issues/13485 for details. # To isolate its impact from rest of the queries its queries are run on different nodes as part of "offline" workloads. 
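For reference, the SELECTOR_ATTRIBUTE_REGEX used by SelectorPart above splits an attribute selector into its tag, attribute name and attribute value; a small usage sketch (the example selector is illustrative, not taken from the PR):

import re

SELECTOR_ATTRIBUTE_REGEX = r"([a-zA-Z]*)\[(.*)=[\'|\"](.*)[\'|\"]\]"

match = re.search(SELECTOR_ATTRIBUTE_REGEX, "a[href='https://posthog.com']")
assert match is not None
tag, attribute, value = match.groups()
assert (tag, attribute, value) == ("a", "href", "https://posthog.com")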
hogql_context = HogQLContext(within_non_hogql_query=True, team_id=team.pk, enable_select_queries=True) diff --git a/posthog/models/event/sql.py b/posthog/models/event/sql.py index f8d2e543da4c7..50c8abdefa9d1 100644 --- a/posthog/models/event/sql.py +++ b/posthog/models/event/sql.py @@ -49,7 +49,7 @@ group2_created_at DateTime64, group3_created_at DateTime64, group4_created_at DateTime64, - person_mode Enum8('full' = 0, 'propertyless' = 1) + person_mode Enum8('full' = 0, 'propertyless' = 1, 'force_upgrade' = 2) {materialized_columns} {extra_fields} {indexes} diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py index c55094898016d..065d47da33161 100644 --- a/posthog/models/event/util.py +++ b/posthog/models/event/util.py @@ -1,7 +1,7 @@ import datetime as dt import json import uuid -from typing import Any, Dict, List, Literal, Optional, Set, Union +from typing import Any, Literal, Optional, Union from zoneinfo import ZoneInfo from dateutil.parser import isoparse @@ -31,16 +31,16 @@ def create_event( team: Team, distinct_id: str, timestamp: Optional[Union[timezone.datetime, str]] = None, - properties: Optional[Dict] = None, - elements: Optional[List[Element]] = None, + properties: Optional[dict] = None, + elements: Optional[list[Element]] = None, person_id: Optional[uuid.UUID] = None, - person_properties: Optional[Dict] = None, + person_properties: Optional[dict] = None, person_created_at: Optional[Union[timezone.datetime, str]] = None, - group0_properties: Optional[Dict] = None, - group1_properties: Optional[Dict] = None, - group2_properties: Optional[Dict] = None, - group3_properties: Optional[Dict] = None, - group4_properties: Optional[Dict] = None, + group0_properties: Optional[dict] = None, + group1_properties: Optional[dict] = None, + group2_properties: Optional[dict] = None, + group3_properties: Optional[dict] = None, + group4_properties: Optional[dict] = None, group0_created_at: Optional[Union[timezone.datetime, str]] = None, group1_created_at: Optional[Union[timezone.datetime, str]] = None, group2_created_at: Optional[Union[timezone.datetime, str]] = None, @@ -105,8 +105,8 @@ def format_clickhouse_timestamp( def bulk_create_events( - events: List[Dict[str, Any]], - person_mapping: Optional[Dict[str, Person]] = None, + events: list[dict[str, Any]], + person_mapping: Optional[dict[str, Person]] = None, ) -> None: """ TEST ONLY @@ -121,7 +121,7 @@ def bulk_create_events( if not TEST: raise Exception("This function is only meant for setting up tests") inserts = [] - params: Dict[str, Any] = {} + params: dict[str, Any] = {} for index, event in enumerate(events): datetime64_default_timestamp = timezone.now().astimezone(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S") timestamp = event.get("timestamp") or dt.datetime.now() @@ -287,7 +287,7 @@ class Meta: ] -def parse_properties(properties: str, allow_list: Optional[Set[str]] = None) -> Dict: +def parse_properties(properties: str, allow_list: Optional[set[str]] = None) -> dict: # parse_constants gets called for any NaN, Infinity etc values # we just want those to be returned as None if allow_list is None: @@ -349,7 +349,7 @@ def get_elements_chain(self, event): return event["elements_chain"] -def get_agg_event_count_for_teams(team_ids: List[Union[str, int]]) -> int: +def get_agg_event_count_for_teams(team_ids: list[Union[str, int]]) -> int: result = sync_execute( """ SELECT count(1) as count @@ -362,7 +362,7 @@ def get_agg_event_count_for_teams(team_ids: List[Union[str, int]]) -> int: def 
get_agg_events_with_groups_count_for_teams_and_period( - team_ids: List[Union[str, int]], begin: timezone.datetime, end: timezone.datetime + team_ids: list[Union[str, int]], begin: timezone.datetime, end: timezone.datetime ) -> int: result = sync_execute( """ diff --git a/posthog/models/exported_asset.py b/posthog/models/exported_asset.py index ceebb2bc3db03..d07009be45b4e 100644 --- a/posthog/models/exported_asset.py +++ b/posthog/models/exported_asset.py @@ -1,6 +1,6 @@ import secrets from datetime import timedelta -from typing import List, Optional +from typing import Optional import structlog from django.conf import settings @@ -178,7 +178,7 @@ def save_content_to_exported_asset(exported_asset: ExportedAsset, content: bytes def save_content_to_object_storage(exported_asset: ExportedAsset, content: bytes) -> None: - path_parts: List[str] = [ + path_parts: list[str] = [ settings.OBJECT_STORAGE_EXPORTS_FOLDER, exported_asset.export_format.split("/")[1], f"team-{exported_asset.team.id}", diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py index 67432e0b643eb..0a46a44d53b98 100644 --- a/posthog/models/feature_flag/feature_flag.py +++ b/posthog/models/feature_flag/feature_flag.py @@ -1,7 +1,7 @@ import json from django.http import HttpRequest import structlog -from typing import Dict, List, Optional, cast +from typing import Optional, cast from django.core.cache import cache from django.db import models @@ -59,7 +59,7 @@ class Meta: # whether a feature is sending us rich analytics, like views & interactions. has_enriched_analytics: models.BooleanField = models.BooleanField(default=False, null=True, blank=True) - def get_analytics_metadata(self) -> Dict: + def get_analytics_metadata(self) -> dict: filter_count = sum(len(condition.get("properties", [])) for condition in self.conditions) variants_count = len(self.variants) payload_count = len(self._payloads) @@ -135,7 +135,7 @@ def get_filters(self): def transform_cohort_filters_for_easy_evaluation( self, using_database: str = "default", - seen_cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, + seen_cohorts_cache: Optional[dict[int, CohortOrEmpty]] = None, ): """ Expands cohort filters into person property filters when possible. 
@@ -243,7 +243,7 @@ def transform_cohort_filters_for_easy_evaluation( if target_properties.type == PropertyOperatorType.AND: return self.conditions - for prop_group in cast(List[PropertyGroup], target_properties.values): + for prop_group in cast(list[PropertyGroup], target_properties.values): if ( len(prop_group.values) == 0 or not isinstance(prop_group.values[0], Property) @@ -264,9 +264,9 @@ def transform_cohort_filters_for_easy_evaluation( def get_cohort_ids( self, using_database: str = "default", - seen_cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, + seen_cohorts_cache: Optional[dict[int, CohortOrEmpty]] = None, sort_by_topological_order=False, - ) -> List[int]: + ) -> list[int]: from posthog.models.cohort.util import get_dependent_cohorts, sort_cohorts_topologically if seen_cohorts_cache is None: @@ -398,9 +398,9 @@ class Meta: def set_feature_flags_for_team_in_cache( team_id: int, - feature_flags: Optional[List[FeatureFlag]] = None, + feature_flags: Optional[list[FeatureFlag]] = None, using_database: str = "default", -) -> List[FeatureFlag]: +) -> list[FeatureFlag]: from posthog.api.feature_flag import MinimalFeatureFlagSerializer if feature_flags is not None: @@ -422,7 +422,7 @@ def set_feature_flags_for_team_in_cache( return all_feature_flags -def get_feature_flags_for_team_in_cache(team_id: int) -> Optional[List[FeatureFlag]]: +def get_feature_flags_for_team_in_cache(team_id: int) -> Optional[list[FeatureFlag]]: try: flag_data = cache.get(f"team_feature_flags_{team_id}") except Exception: diff --git a/posthog/models/feature_flag/flag_analytics.py b/posthog/models/feature_flag/flag_analytics.py index d5f27d804ac48..f62ed1934eca8 100644 --- a/posthog/models/feature_flag/flag_analytics.py +++ b/posthog/models/feature_flag/flag_analytics.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from posthog.constants import FlagRequestType from posthog.helpers.dashboard_templates import ( add_enriched_insights_to_feature_flag_dashboard, @@ -45,7 +45,7 @@ def increment_request_count( capture_exception(error) -def _extract_total_count_for_key_from_redis_hash(client: redis.Redis, key: str) -> Tuple[int, int, int]: +def _extract_total_count_for_key_from_redis_hash(client: redis.Redis, key: str) -> tuple[int, int, int]: total_count = 0 existing_values = client.hgetall(key) time_buckets = existing_values.keys() diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py index 134af65dfdad7..0b4a6befebc94 100644 --- a/posthog/models/feature_flag/flag_matching.py +++ b/posthog/models/feature_flag/flag_matching.py @@ -3,7 +3,7 @@ from enum import Enum import time import structlog -from typing import Dict, List, Literal, Optional, Tuple, Union, cast +from typing import Literal, Optional, Union, cast from prometheus_client import Counter from django.conf import settings @@ -110,7 +110,7 @@ def __init__(self, team_id: int): self.failed_to_fetch_flags = False @cached_property - def group_types_to_indexes(self) -> Dict[GroupTypeName, GroupTypeIndex]: + def group_types_to_indexes(self) -> dict[GroupTypeName, GroupTypeIndex]: if self.failed_to_fetch_flags: raise DatabaseError("Failed to fetch group type mapping previously, not trying again.") try: @@ -124,7 +124,7 @@ def group_types_to_indexes(self) -> Dict[GroupTypeName, GroupTypeIndex]: raise err @cached_property - def group_type_index_to_name(self) -> Dict[GroupTypeIndex, GroupTypeName]: + def group_type_index_to_name(self) -> dict[GroupTypeIndex, 
GroupTypeName]: return {value: key for key, value in self.group_types_to_indexes.items()} @@ -133,15 +133,15 @@ class FeatureFlagMatcher: def __init__( self, - feature_flags: List[FeatureFlag], + feature_flags: list[FeatureFlag], distinct_id: str, - groups: Optional[Dict[GroupTypeName, str]] = None, + groups: Optional[dict[GroupTypeName, str]] = None, cache: Optional[FlagsMatcherCache] = None, - hash_key_overrides: Optional[Dict[str, str]] = None, - property_value_overrides: Optional[Dict[str, Union[str, int]]] = None, - group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None, + hash_key_overrides: Optional[dict[str, str]] = None, + property_value_overrides: Optional[dict[str, Union[str, int]]] = None, + group_property_value_overrides: Optional[dict[str, dict[str, Union[str, int]]]] = None, skip_database_flags: bool = False, - cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, + cohorts_cache: Optional[dict[int, CohortOrEmpty]] = None, ): if group_property_value_overrides is None: group_property_value_overrides = {} @@ -231,7 +231,7 @@ def get_match(self, feature_flag: FeatureFlag) -> FeatureFlagMatch: payload=None, ) - def get_matches(self) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dict[str, object], bool]: + def get_matches(self) -> tuple[dict[str, Union[str, bool]], dict[str, dict], dict[str, object], bool]: flag_values = {} flag_evaluation_reasons = {} faced_error_computing_flags = False @@ -287,7 +287,7 @@ def get_matching_payload( else: return None - def is_super_condition_match(self, feature_flag: FeatureFlag) -> Tuple[bool, bool, FeatureFlagMatchReason]: + def is_super_condition_match(self, feature_flag: FeatureFlag) -> tuple[bool, bool, FeatureFlagMatchReason]: # TODO: Right now super conditions with property overrides bork when the database is down, # because we're still going to the database in the line below. Ideally, we should not go to the database. # Don't skip test: test_super_condition_with_override_properties_doesnt_make_database_requests when this is fixed. @@ -320,8 +320,8 @@ def is_super_condition_match(self, feature_flag: FeatureFlag) -> Tuple[bool, boo return False, False, FeatureFlagMatchReason.NO_CONDITION_MATCH def is_condition_match( - self, feature_flag: FeatureFlag, condition: Dict, condition_index: int - ) -> Tuple[bool, FeatureFlagMatchReason]: + self, feature_flag: FeatureFlag, condition: dict, condition_index: int + ) -> tuple[bool, FeatureFlagMatchReason]: rollout_percentage = condition.get("rollout_percentage") if len(condition.get("properties", [])) > 0: properties = Filter(data=condition).property_groups.flat @@ -405,12 +405,12 @@ def variant_lookup_table(self, feature_flag: FeatureFlag): return lookup_table @cached_property - def query_conditions(self) -> Dict[str, bool]: + def query_conditions(self) -> dict[str, bool]: try: # Some extra wiggle room here for timeouts because this depends on the number of flags as well, # and not just the database query. 
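The matcher above evaluates each condition's rollout_percentage against a deterministic hash of the flag and distinct_id (get_hash, further down in this file's diff, returns a float for that purpose). The actual hashing code is not part of this diff; the snippet below is only a generic sketch of how such consistent percentage bucketing usually works:

import hashlib


def deterministic_bucket(flag_key: str, distinct_id: str, salt: str = "") -> float:
    # hash the flag/user pair into the unit interval; the same inputs always land in the same bucket
    digest = hashlib.sha1(f"{flag_key}.{distinct_id}{salt}".encode()).hexdigest()
    return int(digest[:15], 16) / 0xFFFFFFFFFFFFFFF


def matches_rollout(flag_key: str, distinct_id: str, rollout_percentage: float) -> bool:
    return deterministic_bucket(flag_key, distinct_id) <= rollout_percentage / 100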
with execute_with_timeout(FLAG_MATCHING_QUERY_TIMEOUT_MS * 2, DATABASE_FOR_FLAG_MATCHING): - all_conditions: Dict = {} + all_conditions: dict = {} team_id = self.feature_flags[0].team_id person_query: QuerySet = Person.objects.using(DATABASE_FOR_FLAG_MATCHING).filter( team_id=team_id, @@ -418,7 +418,7 @@ def query_conditions(self) -> Dict[str, bool]: persondistinctid__team_id=team_id, ) basic_group_query: QuerySet = Group.objects.using(DATABASE_FOR_FLAG_MATCHING).filter(team_id=team_id) - group_query_per_group_type_mapping: Dict[GroupTypeIndex, Tuple[QuerySet, List[str]]] = {} + group_query_per_group_type_mapping: dict[GroupTypeIndex, tuple[QuerySet, list[str]]] = {} # :TRICKY: Create a queryset for each group type that uniquely identifies a group, based on the groups passed in. # If no groups for a group type are passed in, we can skip querying for that group type, # since the result will always be `false`. @@ -431,7 +431,7 @@ def query_conditions(self) -> Dict[str, bool]: [], ) - person_fields: List[str] = [] + person_fields: list[str] = [] for existence_condition_key in self.has_pure_is_not_conditions: if existence_condition_key == PERSON_KEY: @@ -637,7 +637,7 @@ def get_hash(self, feature_flag: FeatureFlag, salt="") -> float: def can_compute_locally( self, - properties: List[Property], + properties: list[Property], group_type_index: Optional[GroupTypeIndex] = None, ) -> bool: target_properties = self.property_value_overrides @@ -682,10 +682,10 @@ def has_pure_is_not_conditions(self) -> set[Literal["person"] | GroupTypeIndex]: def get_feature_flag_hash_key_overrides( team_id: int, - distinct_ids: List[str], + distinct_ids: list[str], using_database: str = "default", - person_id_to_distinct_id_mapping: Optional[Dict[int, str]] = None, -) -> Dict[str, str]: + person_id_to_distinct_id_mapping: Optional[dict[int, str]] = None, +) -> dict[str, str]: feature_flag_to_key_overrides = {} # Priority to the first distinctID's values, to keep this function deterministic @@ -716,15 +716,15 @@ def get_feature_flag_hash_key_overrides( # Return a Dict with all flags and their values def _get_all_feature_flags( - feature_flags: List[FeatureFlag], + feature_flags: list[FeatureFlag], team_id: int, distinct_id: str, - person_overrides: Optional[Dict[str, str]] = None, - groups: Optional[Dict[GroupTypeName, str]] = None, - property_value_overrides: Optional[Dict[str, Union[str, int]]] = None, - group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None, + person_overrides: Optional[dict[str, str]] = None, + groups: Optional[dict[GroupTypeName, str]] = None, + property_value_overrides: Optional[dict[str, Union[str, int]]] = None, + group_property_value_overrides: Optional[dict[str, dict[str, Union[str, int]]]] = None, skip_database_flags: bool = False, -) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dict[str, object], bool]: +) -> tuple[dict[str, Union[str, bool]], dict[str, dict], dict[str, object], bool]: if group_property_value_overrides is None: group_property_value_overrides = {} if property_value_overrides is None: @@ -752,11 +752,11 @@ def _get_all_feature_flags( def get_all_feature_flags( team_id: int, distinct_id: str, - groups: Optional[Dict[GroupTypeName, str]] = None, + groups: Optional[dict[GroupTypeName, str]] = None, hash_key_override: Optional[str] = None, - property_value_overrides: Optional[Dict[str, Union[str, int]]] = None, - group_property_value_overrides: Optional[Dict[str, Dict[str, Union[str, int]]]] = None, -) -> Tuple[Dict[str, Union[str, 
bool]], Dict[str, dict], Dict[str, object], bool]: + property_value_overrides: Optional[dict[str, Union[str, int]]] = None, + group_property_value_overrides: Optional[dict[str, dict[str, Union[str, int]]]] = None, +) -> tuple[dict[str, Union[str, bool]], dict[str, dict], dict[str, object], bool]: if group_property_value_overrides is None: group_property_value_overrides = {} if property_value_overrides is None: @@ -907,7 +907,7 @@ def get_all_feature_flags( ) -def set_feature_flag_hash_key_overrides(team_id: int, distinct_ids: List[str], hash_key_override: str) -> bool: +def set_feature_flag_hash_key_overrides(team_id: int, distinct_ids: list[str], hash_key_override: str) -> bool: # As a product decision, the first override wins, i.e consistency matters for the first walkthrough. # Thus, we don't need to do upserts here. @@ -1004,7 +1004,7 @@ def parse_exception_for_error_message(err: Exception): return reason -def key_and_field_for_property(property: Property) -> Tuple[str, str]: +def key_and_field_for_property(property: Property) -> tuple[str, str]: column = "group_properties" if property.type == "group" else "properties" key = property.key sanitized_key = sanitize_property_key(key) @@ -1016,8 +1016,8 @@ def key_and_field_for_property(property: Property) -> Tuple[str, str]: def get_all_properties_with_math_operators( - properties: List[Property], cohorts_cache: Dict[int, CohortOrEmpty], team_id: int -) -> List[Tuple[str, str]]: + properties: list[Property], cohorts_cache: dict[int, CohortOrEmpty], team_id: int +) -> list[tuple[str, str]]: all_keys_and_fields = [] for prop in properties: diff --git a/posthog/models/filters/base_filter.py b/posthog/models/filters/base_filter.py index ca2ef9e4c575f..f4d46c9acaf4b 100644 --- a/posthog/models/filters/base_filter.py +++ b/posthog/models/filters/base_filter.py @@ -1,6 +1,6 @@ import inspect import json -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional from rest_framework import request @@ -17,14 +17,14 @@ class BaseFilter(BaseParamMixin): - _data: Dict + _data: dict team: Optional["Team"] - kwargs: Dict + kwargs: dict hogql_context: HogQLContext def __init__( self, - data: Optional[Dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, request: Optional[request.Request] = None, *, team: Optional["Team"] = None, @@ -69,7 +69,7 @@ def __init__( simplified_filter = self.simplify(self.team) self._data = simplified_filter._data - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: ret = {} for _, func in inspect.getmembers(self, inspect.ismethod): @@ -78,20 +78,20 @@ def to_dict(self) -> Dict[str, Any]: return ret - def to_params(self) -> Dict[str, str]: + def to_params(self) -> dict[str, str]: return encode_get_request_params(data=self.to_dict()) def toJSON(self): return json.dumps(self.to_dict(), default=lambda o: o.__dict__, sort_keys=True, indent=4) - def shallow_clone(self, overrides: Dict[str, Any]): + def shallow_clone(self, overrides: dict[str, Any]): "Clone the filter's data while sharing the HogQL context" return type(self)( data={**self._data, **overrides}, **{**self.kwargs, "team": self.team, "hogql_context": self.hogql_context}, ) - def query_tags(self) -> Dict[str, Any]: + def query_tags(self) -> dict[str, Any]: ret = {} for _, func in inspect.getmembers(self, inspect.ismethod): diff --git a/posthog/models/filters/lifecycle_filter.py b/posthog/models/filters/lifecycle_filter.py index 576cf499f30d9..34775ac98f883 100644 --- 
a/posthog/models/filters/lifecycle_filter.py +++ b/posthog/models/filters/lifecycle_filter.py @@ -1,5 +1,5 @@ import datetime -from typing import Any, Dict, Optional +from typing import Any, Optional from posthog.models import Filter from posthog.utils import relative_date_parse from rest_framework.request import Request @@ -12,7 +12,7 @@ class LifecycleFilter(Filter): def __init__( self, - data: Optional[Dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, request: Optional[Request] = None, **kwargs, ) -> None: diff --git a/posthog/models/filters/mixins/base.py b/posthog/models/filters/mixins/base.py index b0c79566f72d5..a4640f0aae129 100644 --- a/posthog/models/filters/mixins/base.py +++ b/posthog/models/filters/mixins/base.py @@ -1,4 +1,4 @@ -from typing import Dict, Literal +from typing import Literal BreakdownType = Literal["event", "person", "cohort", "group", "session", "hogql"] IntervalType = Literal["hour", "day", "week", "month"] @@ -6,4 +6,4 @@ class BaseParamMixin: - _data: Dict + _data: dict diff --git a/posthog/models/filters/mixins/common.py b/posthog/models/filters/mixins/common.py index 8ab2c1ac7fcdf..65be03514030c 100644 --- a/posthog/models/filters/mixins/common.py +++ b/posthog/models/filters/mixins/common.py @@ -2,7 +2,7 @@ import json import re from math import ceil -from typing import Any, Dict, List, Literal, Optional, Union, cast +from typing import Any, Literal, Optional, Union, cast from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta @@ -142,7 +142,7 @@ def formula_to_dict(self): class BreakdownMixin(BaseParamMixin): @cached_property - def breakdown(self) -> Optional[Union[str, List[Union[str, int]]]]: + def breakdown(self) -> Optional[Union[str, list[Union[str, int]]]]: breakdown = self._data.get(BREAKDOWN) if not isinstance(breakdown, str): @@ -171,11 +171,11 @@ def breakdown_attribution_value(self) -> Optional[int]: return int(attribution_value) if attribution_value is not None else None @cached_property - def breakdowns(self) -> Optional[List[Dict[str, Any]]]: + def breakdowns(self) -> Optional[list[dict[str, Any]]]: breakdowns = self._data.get(BREAKDOWNS) try: - if isinstance(breakdowns, List): + if isinstance(breakdowns, list): return breakdowns elif isinstance(breakdowns, str): return json.loads(breakdowns) @@ -226,7 +226,7 @@ def breakdown_hide_other_aggregation(self) -> Optional[bool]: @include_dict def breakdown_to_dict(self): - result: Dict = {} + result: dict = {} if self.breakdown: result[BREAKDOWN] = self.breakdown if self.breakdowns: @@ -346,8 +346,8 @@ def compare_to_dict(self): class DateMixin(BaseParamMixin): - date_from_delta_mapping: Optional[Dict[str, int]] - date_to_delta_mapping: Optional[Dict[str, int]] + date_from_delta_mapping: Optional[dict[str, int]] + date_to_delta_mapping: Optional[dict[str, int]] @cached_property def _date_from(self) -> Optional[Union[str, datetime.datetime]]: @@ -417,7 +417,7 @@ def use_explicit_dates(self) -> bool: return process_bool(self._data.get(EXPLICIT_DATE)) @include_dict - def date_to_dict(self) -> Dict: + def date_to_dict(self) -> dict: result_dict = {} if self._date_from: result_dict.update( @@ -455,8 +455,8 @@ def query_tags_dates(self): class EntitiesMixin(BaseParamMixin): @cached_property - def entities(self) -> List[Entity]: - processed_entities: List[Entity] = [] + def entities(self) -> list[Entity]: + processed_entities: list[Entity] = [] if self._data.get(ACTIONS): actions = self._data.get(ACTIONS, []) if isinstance(actions, str): @@ -487,20 +487,20 @@ def 
query_tags_entities(self): return {"number_of_entities": len(self.entities)} @cached_property - def actions(self) -> List[Entity]: + def actions(self) -> list[Entity]: return [entity for entity in self.entities if entity.type == TREND_FILTER_TYPE_ACTIONS] @cached_property - def events(self) -> List[Entity]: + def events(self) -> list[Entity]: return [entity for entity in self.entities if entity.type == TREND_FILTER_TYPE_EVENTS] @cached_property - def data_warehouse_entities(self) -> List[Entity]: + def data_warehouse_entities(self) -> list[Entity]: return [entity for entity in self.entities if entity.type == TREND_FILTER_TYPE_DATA_WAREHOUSE] @cached_property - def exclusions(self) -> List[ExclusionEntity]: - _exclusions: List[ExclusionEntity] = [] + def exclusions(self) -> list[ExclusionEntity]: + _exclusions: list[ExclusionEntity] = [] if self._data.get(EXCLUSIONS): exclusion_list = self._data.get(EXCLUSIONS, []) if isinstance(exclusion_list, str): diff --git a/posthog/models/filters/mixins/funnel.py b/posthog/models/filters/mixins/funnel.py index 91312a5030478..3baf5f15b50da 100644 --- a/posthog/models/filters/mixins/funnel.py +++ b/posthog/models/filters/mixins/funnel.py @@ -1,6 +1,6 @@ import datetime import json -from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Union +from typing import TYPE_CHECKING, Literal, Optional, Union from posthog.models.property import Property @@ -111,7 +111,7 @@ def funnel_window_interval_unit(self) -> Optional[FunnelWindowIntervalType]: @include_dict def funnel_window_to_dict(self): - dict_part: Dict = {} + dict_part: dict = {} if self.funnel_window_interval is not None: dict_part[FUNNEL_WINDOW_INTERVAL] = self.funnel_window_interval if self.funnel_window_interval_unit is not None: @@ -154,7 +154,7 @@ def funnel_step(self) -> Optional[int]: return int(_step_as_string) @cached_property - def funnel_custom_steps(self) -> List[int]: + def funnel_custom_steps(self) -> list[int]: """ Custom step numbers to get persons for. This overrides FunnelPersonsStepMixin::funnel_step """ @@ -176,7 +176,7 @@ def funnel_step_to_dict(self): class FunnelPersonsStepBreakdownMixin(BaseParamMixin): @cached_property - def funnel_step_breakdown(self) -> Optional[Union[List[str], int, str]]: + def funnel_step_breakdown(self) -> Optional[Union[list[str], int, str]]: """ The breakdown value for which to get persons for. 
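Most of the churn in these filter mixins is the same mechanical change as the rest of the PR: typing.List / typing.Dict / typing.Tuple annotations become the PEP 585 builtin generics (Python 3.9+), and isinstance checks against typing aliases become checks against the builtins, with PEP 604 X | Y unions used elsewhere in the diff. A minimal sketch of the target style (bucket_ids is a hypothetical helper, written only to show the annotations):

from typing import Optional, Union  # Optional/Union are still imported from typing in this PR


def bucket_ids(ids: list[int], labels: dict[int, str]) -> dict[str, list[int]]:
    out: dict[str, list[int]] = {}
    for i in ids:
        out.setdefault(labels.get(i, "unknown"), []).append(i)
    return out


value: Union[str, int] = 3
maybe: Optional[str] = None
assert maybe is None
assert isinstance(value, str | int)  # PEP 604 unions work in isinstance on Python 3.10+
assert bucket_ids([1, 2, 3], {1: "a", 2: "a"}) == {"a": [1, 2], "unknown": [3]}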
@@ -241,7 +241,7 @@ def funnel_viz_type(self) -> Optional[FunnelVizType]: @include_dict def funnel_type_to_dict(self): - result: Dict[str, str] = {} + result: dict[str, str] = {} if self.funnel_order_type: result[FUNNEL_ORDER_TYPE] = self.funnel_order_type if self.funnel_viz_type: @@ -277,7 +277,7 @@ def drop_off(self) -> Optional[bool]: @include_dict def funnel_trends_persons_to_dict(self): - result_dict: Dict = {} + result_dict: dict = {} if self.entrance_period_start: result_dict[ENTRANCE_PERIOD_START] = self.entrance_period_start.isoformat() if self.drop_off is not None: @@ -298,7 +298,7 @@ def correlation_type(self) -> Optional[FunnelCorrelationType]: return None @cached_property - def correlation_property_names(self) -> List[str]: + def correlation_property_names(self) -> list[str]: # Person Property names for which to run Person Properties correlation property_names = self._data.get(FUNNEL_CORRELATION_NAMES, []) if isinstance(property_names, str): @@ -306,7 +306,7 @@ def correlation_property_names(self) -> List[str]: return property_names @cached_property - def correlation_property_exclude_names(self) -> List[str]: + def correlation_property_exclude_names(self) -> list[str]: # Person Property names to exclude from Person Properties correlation property_names = self._data.get(FUNNEL_CORRELATION_EXCLUDE_NAMES, []) if isinstance(property_names, str): @@ -314,7 +314,7 @@ def correlation_property_exclude_names(self) -> List[str]: return property_names @cached_property - def correlation_event_names(self) -> List[str]: + def correlation_event_names(self) -> list[str]: # Event names for which to run EventWithProperties correlation event_names = self._data.get(FUNNEL_CORRELATION_EVENT_NAMES, []) if isinstance(event_names, str): @@ -322,7 +322,7 @@ def correlation_event_names(self) -> List[str]: return event_names @cached_property - def correlation_event_exclude_names(self) -> List[str]: + def correlation_event_exclude_names(self) -> list[str]: # Exclude event names from Event correlation property_names = self._data.get(FUNNEL_CORRELATION_EXCLUDE_EVENT_NAMES, []) if isinstance(property_names, str): @@ -330,7 +330,7 @@ def correlation_event_exclude_names(self) -> List[str]: return property_names @cached_property - def correlation_event_exclude_property_names(self) -> List[str]: + def correlation_event_exclude_property_names(self) -> list[str]: # Event Property names to exclude from EventWithProperties correlation property_names = self._data.get(FUNNEL_CORRELATION_EVENT_EXCLUDE_PROPERTY_NAMES, []) if isinstance(property_names, str): @@ -339,7 +339,7 @@ def correlation_event_exclude_property_names(self) -> List[str]: @include_dict def funnel_correlation_to_dict(self): - result_dict: Dict = {} + result_dict: dict = {} if self.correlation_type: result_dict[FUNNEL_CORRELATION_TYPE] = self.correlation_type if self.correlation_property_names: @@ -370,7 +370,7 @@ def correlation_person_entity(self) -> Optional["Entity"]: return Entity(event) if event else None @cached_property - def correlation_property_values(self) -> Optional[List[Property]]: + def correlation_property_values(self) -> Optional[list[Property]]: # Used for property correlations persons _props = self._data.get(FUNNEL_CORRELATION_PROPERTY_VALUES) @@ -421,7 +421,7 @@ def correlation_persons_converted(self) -> Optional[bool]: @include_dict def funnel_correlation_persons_to_dict(self): - result_dict: Dict = {} + result_dict: dict = {} if self.correlation_person_entity: result_dict[FUNNEL_CORRELATION_PERSON_ENTITY] = 
self.correlation_person_entity.to_dict() if self.correlation_property_values: diff --git a/posthog/models/filters/mixins/paths.py b/posthog/models/filters/mixins/paths.py index 393b1f7140a70..8249a7015d1bc 100644 --- a/posthog/models/filters/mixins/paths.py +++ b/posthog/models/filters/mixins/paths.py @@ -1,5 +1,5 @@ import json -from typing import Dict, List, Literal, Optional +from typing import Literal, Optional from posthog.constants import ( CUSTOM_EVENT, @@ -84,21 +84,21 @@ def paths_hogql_expression_to_dict(self): class TargetEventsMixin(BaseParamMixin): @cached_property - def target_events(self) -> List[str]: + def target_events(self) -> list[str]: target_events = self._data.get(PATHS_INCLUDE_EVENT_TYPES, []) if isinstance(target_events, str): return json.loads(target_events) return target_events @cached_property - def custom_events(self) -> List[str]: + def custom_events(self) -> list[str]: custom_events = self._data.get(PATHS_INCLUDE_CUSTOM_EVENTS, []) if isinstance(custom_events, str): return json.loads(custom_events) return custom_events @cached_property - def exclude_events(self) -> List[str]: + def exclude_events(self) -> list[str]: _exclude_events = self._data.get(PATHS_EXCLUDE_EVENTS, []) if isinstance(_exclude_events, str): return json.loads(_exclude_events) @@ -160,7 +160,7 @@ def funnel_paths_to_dict(self): class PathGroupingMixin(BaseParamMixin): @cached_property - def path_groupings(self) -> Optional[List[str]]: + def path_groupings(self) -> Optional[list[str]]: path_groupings = self._data.get(PATH_GROUPINGS, None) if isinstance(path_groupings, str): return json.loads(path_groupings) @@ -193,7 +193,7 @@ def path_replacements_to_dict(self): class LocalPathCleaningFiltersMixin(BaseParamMixin): @cached_property - def local_path_cleaning_filters(self) -> Optional[List[Dict[str, str]]]: + def local_path_cleaning_filters(self) -> Optional[list[dict[str, str]]]: local_path_cleaning_filters = self._data.get(LOCAL_PATH_CLEANING_FILTERS, None) if isinstance(local_path_cleaning_filters, str): return json.loads(local_path_cleaning_filters) diff --git a/posthog/models/filters/mixins/property.py b/posthog/models/filters/mixins/property.py index ff4cb56fee91a..2ffc984754b35 100644 --- a/posthog/models/filters/mixins/property.py +++ b/posthog/models/filters/mixins/property.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, Optional, Union, cast from rest_framework.exceptions import ValidationError @@ -15,7 +15,7 @@ class PropertyMixin(BaseParamMixin): @cached_property - def old_properties(self) -> List[Property]: + def old_properties(self) -> list[Property]: _props = self._data.get(PROPERTIES) if isinstance(_props, str): @@ -64,7 +64,7 @@ def property_groups(self) -> PropertyGroup: # old properties return PropertyGroup(type=PropertyOperatorType.AND, values=self.old_properties) - def _parse_properties(self, properties: Optional[Any]) -> List[Property]: + def _parse_properties(self, properties: Optional[Any]) -> list[Property]: if isinstance(properties, list): _properties = [] for prop_params in properties: @@ -94,19 +94,19 @@ def _parse_properties(self, properties: Optional[Any]) -> List[Property]: ) return ret - def _parse_property_group(self, group: Optional[Dict]) -> PropertyGroup: + def _parse_property_group(self, group: Optional[dict]) -> PropertyGroup: if group and "type" in group and "values" in group: return PropertyGroup( PropertyOperatorType(group["type"].upper()), 
self._parse_property_group_list(group["values"]), ) - return PropertyGroup(PropertyOperatorType.AND, cast(List[Property], [])) + return PropertyGroup(PropertyOperatorType.AND, cast(list[Property], [])) - def _parse_property_group_list(self, prop_list: Optional[List]) -> Union[List[Property], List[PropertyGroup]]: + def _parse_property_group_list(self, prop_list: Optional[list]) -> Union[list[Property], list[PropertyGroup]]: if not prop_list: # empty prop list - return cast(List[Property], []) + return cast(list[Property], []) has_property_groups = False has_simple_properties = False diff --git a/posthog/models/filters/mixins/retention.py b/posthog/models/filters/mixins/retention.py index eeec027c4f817..044278f014275 100644 --- a/posthog/models/filters/mixins/retention.py +++ b/posthog/models/filters/mixins/retention.py @@ -1,6 +1,6 @@ import json from datetime import datetime, timedelta -from typing import Literal, Optional, Tuple, Union +from typing import Literal, Optional, Union from dateutil.relativedelta import relativedelta from django.utils import timezone @@ -112,7 +112,7 @@ def period_increment(self) -> Union[timedelta, relativedelta]: @staticmethod def determine_time_delta( total_intervals: int, period: str - ) -> Tuple[Union[timedelta, relativedelta], Union[timedelta, relativedelta]]: + ) -> tuple[Union[timedelta, relativedelta], Union[timedelta, relativedelta]]: if period == "Hour": return timedelta(hours=total_intervals), timedelta(hours=1) elif period == "Week": diff --git a/posthog/models/filters/mixins/session_recordings.py b/posthog/models/filters/mixins/session_recordings.py index 8779ea92e6bec..83d9bb40245a6 100644 --- a/posthog/models/filters/mixins/session_recordings.py +++ b/posthog/models/filters/mixins/session_recordings.py @@ -1,5 +1,5 @@ import json -from typing import List, Optional, Literal +from typing import Optional, Literal from posthog.constants import PERSON_UUID_FILTER, SESSION_RECORDINGS_FILTER_IDS from posthog.models.filters.mixins.common import BaseParamMixin @@ -19,7 +19,7 @@ def console_search_query(self) -> str | None: return self._data.get("console_search_query", None) @cached_property - def console_logs_filter(self) -> List[Literal["error", "warn", "info"]]: + def console_logs_filter(self) -> list[Literal["error", "warn", "info"]]: user_value = self._data.get("console_logs", None) or [] if isinstance(user_value, str): user_value = json.loads(user_value) @@ -43,7 +43,7 @@ def recording_duration_filter(self) -> Optional[Property]: return None @cached_property - def session_ids(self) -> Optional[List[str]]: + def session_ids(self) -> Optional[list[str]]: # Can be ['a', 'b'] or "['a', 'b']" or "a,b" session_ids_str = self._data.get(SESSION_RECORDINGS_FILTER_IDS, None) diff --git a/posthog/models/filters/mixins/simplify.py b/posthog/models/filters/mixins/simplify.py index 3b1e0eb426ba1..72d8d184539ef 100644 --- a/posthog/models/filters/mixins/simplify.py +++ b/posthog/models/filters/mixins/simplify.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Literal, TypeVar, cast +from typing import TYPE_CHECKING, Any, Literal, TypeVar, cast from posthog.constants import PropertyOperatorType from posthog.models.property import GroupTypeIndex, PropertyGroup @@ -67,9 +67,9 @@ def _simplify_entity( self, team: "Team", entity_type: Literal["events", "actions", "exclusions"], - entity_params: Dict, + entity_params: dict, **kwargs, - ) -> Dict: + ) -> dict: from posthog.models.entity import Entity, ExclusionEntity EntityClass = ExclusionEntity if 
entity_type == "exclusions" else Entity @@ -82,7 +82,7 @@ def _simplify_entity( return EntityClass({**entity_params, "properties": properties}).to_dict() - def _simplify_properties(self, team: "Team", properties: List["Property"], **kwargs) -> "PropertyGroup": + def _simplify_properties(self, team: "Team", properties: list["Property"], **kwargs) -> "PropertyGroup": simplified_properties_values = [] for prop in properties: simplified_properties_values.append(self._simplify_property(team, prop, **kwargs)) diff --git a/posthog/models/filters/mixins/stickiness.py b/posthog/models/filters/mixins/stickiness.py index 0dfca1d834c83..1b659481b98ea 100644 --- a/posthog/models/filters/mixins/stickiness.py +++ b/posthog/models/filters/mixins/stickiness.py @@ -1,5 +1,6 @@ from datetime import datetime -from typing import TYPE_CHECKING, Callable, Optional, Union +from typing import TYPE_CHECKING, Optional, Union +from collections.abc import Callable from rest_framework.exceptions import ValidationError diff --git a/posthog/models/filters/mixins/utils.py b/posthog/models/filters/mixins/utils.py index a297cdcfa6320..5b5fe6d422d92 100644 --- a/posthog/models/filters/mixins/utils.py +++ b/posthog/models/filters/mixins/utils.py @@ -1,5 +1,6 @@ from functools import lru_cache -from typing import Callable, Optional, TypeVar, Union +from typing import Optional, TypeVar, Union +from collections.abc import Callable from posthog.utils import str_to_bool diff --git a/posthog/models/filters/path_filter.py b/posthog/models/filters/path_filter.py index 5ef9395d82da4..df7a0ca928581 100644 --- a/posthog/models/filters/path_filter.py +++ b/posthog/models/filters/path_filter.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional +from typing import Any, Optional from rest_framework.request import Request @@ -76,7 +76,7 @@ class PathFilter( ): def __init__( self, - data: Optional[Dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, request: Optional[Request] = None, **kwargs, ) -> None: diff --git a/posthog/models/filters/retention_filter.py b/posthog/models/filters/retention_filter.py index 338d3d87e3e64..6f73aeb69d3f5 100644 --- a/posthog/models/filters/retention_filter.py +++ b/posthog/models/filters/retention_filter.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any, Optional, Union from rest_framework.request import Request @@ -48,7 +48,7 @@ class RetentionFilter( SampleMixin, BaseFilter, ): - def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None: + def __init__(self, data: Optional[dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None: if data is None: data = {} if data: @@ -58,7 +58,7 @@ def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Requ super().__init__(data, request, **kwargs) @cached_property - def breakdown_values(self) -> Optional[Tuple[Union[str, int], ...]]: + def breakdown_values(self) -> Optional[tuple[Union[str, int], ...]]: raw_value = self._data.get("breakdown_values", None) if raw_value is None: return None diff --git a/posthog/models/filters/stickiness_filter.py b/posthog/models/filters/stickiness_filter.py index 4674c4ceeb3d9..cde6d8020928f 100644 --- a/posthog/models/filters/stickiness_filter.py +++ b/posthog/models/filters/stickiness_filter.py @@ -1,4 +1,5 @@ -from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union +from collections.abc import 
Callable from django.db.models.functions.datetime import ( TruncDay, @@ -62,7 +63,7 @@ class StickinessFilter( def __init__( self, - data: Optional[Dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, request: Optional[Request] = None, **kwargs, ) -> None: diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr index 6fd1a58ba6a05..cf3444b9cc484 100644 --- a/posthog/models/filters/test/__snapshots__/test_filter.ambr +++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr @@ -27,6 +27,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -87,6 +88,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -147,6 +149,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -207,6 +210,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -267,6 +271,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/models/filters/test/test_filter.py b/posthog/models/filters/test/test_filter.py index 63a947bca6770..eb99a3ac42941 100644 --- a/posthog/models/filters/test/test_filter.py +++ b/posthog/models/filters/test/test_filter.py @@ -1,6 +1,7 @@ import datetime import json -from typing import Any, Callable, Dict, List, Optional, cast +from typing import Any, Optional, cast +from collections.abc import Callable from django.db.models import Q, Func, F, CharField from freezegun import freeze_time @@ -993,8 +994,8 @@ def filter_persons_with_annotation(filter: Filter, team: Team): def filter_persons_with_property_group( - filter: Filter, team: Team, property_overrides: Optional[Dict[str, Any]] = None -) -> List[str]: + filter: Filter, team: Team, property_overrides: Optional[dict[str, Any]] = None +) -> list[str]: if property_overrides is None: property_overrides = {} flush_persons_and_events() diff --git a/posthog/models/filters/test/test_path_filter.py b/posthog/models/filters/test/test_path_filter.py index df8ffac45aaec..3f66e0b9b7392 100644 --- a/posthog/models/filters/test/test_path_filter.py +++ b/posthog/models/filters/test/test_path_filter.py @@ -18,7 +18,7 @@ def test_to_dict(self): } ) - self.assertEquals( + self.assertEqual( filter.to_dict(), filter.to_dict() | { @@ -51,7 +51,7 @@ def test_to_dict_hogql(self): } ) - self.assertEquals( + self.assertEqual( filter.to_dict(), filter.to_dict() | { diff --git a/posthog/models/group/util.py b/posthog/models/group/util.py index 427c883a2e920..0b9c0fb9724c3 100644 --- a/posthog/models/group/util.py 
+++ b/posthog/models/group/util.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Dict, Optional, Union +from typing import Optional, Union from zoneinfo import ZoneInfo from dateutil.parser import isoparse @@ -17,7 +17,7 @@ def raw_create_group_ch( team_id: int, group_type_index: GroupTypeIndex, group_key: str, - properties: Dict, + properties: dict, created_at: datetime.datetime, timestamp: Optional[datetime.datetime] = None, sync: bool = False, @@ -44,7 +44,7 @@ def create_group( team_id: int, group_type_index: GroupTypeIndex, group_key: str, - properties: Optional[Dict] = None, + properties: Optional[dict] = None, timestamp: Optional[Union[datetime.datetime, str]] = None, sync: bool = False, ) -> Group: diff --git a/posthog/models/instance_setting.py b/posthog/models/instance_setting.py index 749975e5d5e72..0ad0ca5bde0bf 100644 --- a/posthog/models/instance_setting.py +++ b/posthog/models/instance_setting.py @@ -1,6 +1,6 @@ import json from contextlib import contextmanager -from typing import Any, List +from typing import Any from django.db import models @@ -29,7 +29,7 @@ def get_instance_setting(key: str) -> Any: return CONSTANCE_CONFIG[key][0] # Get the default value -def get_instance_settings(keys: List[str]) -> Any: +def get_instance_settings(keys: list[str]) -> Any: for key in keys: assert key in CONSTANCE_CONFIG, f"Unknown dynamic setting: {repr(key)}" diff --git a/posthog/models/integration.py b/posthog/models/integration.py index 8ce1c9d6ef7c7..6e313ea179ff6 100644 --- a/posthog/models/integration.py +++ b/posthog/models/integration.py @@ -2,7 +2,7 @@ import hmac import time from datetime import timedelta -from typing import Dict, List, Literal +from typing import Literal from django.db import models from rest_framework.request import Request @@ -50,7 +50,7 @@ def __init__(self, integration: Integration) -> None: def client(self) -> WebClient: return WebClient(self.integration.sensitive_config["access_token"]) - def list_channels(self) -> List[Dict]: + def list_channels(self) -> list[dict]: # NOTE: Annoyingly the Slack API has no search so we have to load all channels... 
# We load public and private channels separately as when mixed, the Slack API pagination is buggy public_channels = self._list_channels_by_type("public_channel") @@ -59,7 +59,7 @@ def list_channels(self) -> List[Dict]: return sorted(channels, key=lambda x: x["name"]) - def _list_channels_by_type(self, type: Literal["public_channel", "private_channel"]) -> List[Dict]: + def _list_channels_by_type(self, type: Literal["public_channel", "private_channel"]) -> list[dict]: max_page = 10 channels = [] cursor = None @@ -76,7 +76,7 @@ def _list_channels_by_type(self, type: Literal["public_channel", "private_channe return channels @classmethod - def integration_from_slack_response(cls, team_id: str, created_by: User, params: Dict[str, str]) -> Integration: + def integration_from_slack_response(cls, team_id: str, created_by: User, params: dict[str, str]) -> Integration: client = WebClient() slack_config = cls.slack_config() diff --git a/posthog/models/organization.py b/posthog/models/organization.py index 8740a0f34c453..cdb4ee7ccd926 100644 --- a/posthog/models/organization.py +++ b/posthog/models/organization.py @@ -1,6 +1,6 @@ import json import sys -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, TypedDict, Union +from typing import TYPE_CHECKING, Any, Optional, TypedDict, Union import structlog from django.conf import settings @@ -45,7 +45,7 @@ class OrganizationUsageInfo(TypedDict): events: Optional[OrganizationUsageResource] recordings: Optional[OrganizationUsageResource] rows_synced: Optional[OrganizationUsageResource] - period: Optional[List[str]] + period: Optional[list[str]] class OrganizationManager(models.Manager): @@ -56,9 +56,9 @@ def bootstrap( self, user: Optional["User"], *, - team_fields: Optional[Dict[str, Any]] = None, + team_fields: Optional[dict[str, Any]] = None, **kwargs, - ) -> Tuple["Organization", Optional["OrganizationMembership"], "Team"]: + ) -> tuple["Organization", Optional["OrganizationMembership"], "Team"]: """Instead of doing the legwork of creating an organization yourself, delegate the details with bootstrap.""" from .project import Project # Avoiding circular import @@ -157,7 +157,7 @@ def __str__(self): __repr__ = sane_repr("name") @property - def _billing_plan_details(self) -> Tuple[Optional[str], Optional[str]]: + def _billing_plan_details(self) -> tuple[Optional[str], Optional[str]]: """ Obtains details on the billing plan for the organization. Returns a tuple with (billing_plan_key, billing_realm) @@ -176,7 +176,7 @@ def _billing_plan_details(self) -> Tuple[Optional[str], Optional[str]]: return (license.plan, "ee") return (None, None) - def update_available_features(self) -> List[Union[AvailableFeature, str]]: + def update_available_features(self) -> list[Union[AvailableFeature, str]]: """Updates field `available_features`. 
Does not `save()`.""" if is_cloud() or self.usage: # Since billing V2 we just use the available features which are updated when the billing service is called diff --git a/posthog/models/organization_domain.py b/posthog/models/organization_domain.py index 416b2d560f310..5d49d8a64ac91 100644 --- a/posthog/models/organization_domain.py +++ b/posthog/models/organization_domain.py @@ -1,5 +1,5 @@ import secrets -from typing import Optional, Tuple +from typing import Optional import dns.resolver import structlog @@ -151,13 +151,13 @@ def has_saml(self) -> bool: """ return bool(self.saml_entity_id) and bool(self.saml_acs_url) and bool(self.saml_x509_cert) - def _complete_verification(self) -> Tuple["OrganizationDomain", bool]: + def _complete_verification(self) -> tuple["OrganizationDomain", bool]: self.last_verification_retry = None self.verified_at = timezone.now() self.save() return (self, True) - def attempt_verification(self) -> Tuple["OrganizationDomain", bool]: + def attempt_verification(self) -> tuple["OrganizationDomain", bool]: """ Performs a DNS verification for a specific domain. """ diff --git a/posthog/models/person/missing_person.py b/posthog/models/person/missing_person.py new file mode 100644 index 0000000000000..428e8f4c9d3b2 --- /dev/null +++ b/posthog/models/person/missing_person.py @@ -0,0 +1,27 @@ +from uuid import uuid5, UUID + + +PERSON_UUIDV5_NAMESPACE = UUID("932979b4-65c3-4424-8467-0b66ec27bc22") + + +def uuidFromDistinctId(team_id: int, distinct_id: str) -> UUID: + """ + Deterministically create a UUIDv5 based on the (team_id, distinct_id) pair. + """ + return uuid5(PERSON_UUIDV5_NAMESPACE, f"{team_id}:{distinct_id}") + + +class MissingPerson: + uuid: UUID + properties: dict = {} + + def __init__(self, team_id: int, distinct_id: str): + """ + This is loosely based on the plugin-server `person-state.ts` file and is meant to represent a person that is "missing" + """ + self.team_id = team_id + self.distinct_id = distinct_id + self.uuid = uuidFromDistinctId(team_id, distinct_id) + + def __str__(self): + return f"MissingPerson({self.team_id}, {self.distinct_id})" diff --git a/posthog/models/person/person.py b/posthog/models/person/person.py index a04565423335b..20f9dd7675487 100644 --- a/posthog/models/person/person.py +++ b/posthog/models/person/person.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional +from typing import Any, Optional from django.db import models, transaction from django.db.models import F, Q @@ -21,15 +21,15 @@ def create(self, *args: Any, **kwargs: Any): return person @staticmethod - def distinct_ids_exist(team_id: int, distinct_ids: List[str]) -> bool: + def distinct_ids_exist(team_id: int, distinct_ids: list[str]) -> bool: return PersonDistinctId.objects.filter(team_id=team_id, distinct_id__in=distinct_ids).exists() class Person(models.Model): - _distinct_ids: Optional[List[str]] + _distinct_ids: Optional[list[str]] @property - def distinct_ids(self) -> List[str]: + def distinct_ids(self) -> list[str]: if hasattr(self, "distinct_ids_cache"): return [id.distinct_id for id in self.distinct_ids_cache] if hasattr(self, "_distinct_ids") and self._distinct_ids: @@ -46,7 +46,7 @@ def add_distinct_id(self, distinct_id: str) -> None: PersonDistinctId.objects.create(person=self, distinct_id=distinct_id, team_id=self.team_id) # :DEPRECATED: This should happen through the plugin server - def _add_distinct_ids(self, distinct_ids: List[str]) -> None: + def _add_distinct_ids(self, distinct_ids: list[str]) -> None: for distinct_id in distinct_ids: 
self.add_distinct_id(distinct_id) @@ -274,7 +274,7 @@ class Meta: ] -def get_distinct_ids_for_subquery(person: Person | None, team: Team) -> List[str]: +def get_distinct_ids_for_subquery(person: Person | None, team: Team) -> list[str]: """_summary_ Fetching distinct_ids for a person from CH is slow, so we fetch them from PG for certain queries. Therfore we need diff --git a/posthog/models/person/util.py b/posthog/models/person/util.py index f6bcc60ebc333..0e1efa7bdb2c9 100644 --- a/posthog/models/person/util.py +++ b/posthog/models/person/util.py @@ -1,7 +1,7 @@ import datetime import json from contextlib import ExitStack -from typing import Dict, List, Optional, Union +from typing import Optional, Union from uuid import UUID from zoneinfo import ZoneInfo @@ -80,7 +80,7 @@ def person_distinct_id_deleted(sender, instance: PersonDistinctId, **kwargs): except: pass - def bulk_create_persons(persons_list: List[Dict]): + def bulk_create_persons(persons_list: list[dict]): persons = [] person_mapping = {} for _person in persons_list: @@ -127,7 +127,7 @@ def create_person( team_id: int, version: int, uuid: Optional[str] = None, - properties: Optional[Dict] = None, + properties: Optional[dict] = None, sync: bool = False, is_identified: bool = False, is_deleted: bool = False, @@ -217,7 +217,7 @@ def create_person_override( ) -def get_persons_by_distinct_ids(team_id: int, distinct_ids: List[str]) -> QuerySet: +def get_persons_by_distinct_ids(team_id: int, distinct_ids: list[str]) -> QuerySet: return Person.objects.filter( team_id=team_id, persondistinctid__team_id=team_id, @@ -225,7 +225,7 @@ def get_persons_by_distinct_ids(team_id: int, distinct_ids: List[str]) -> QueryS ) -def get_persons_by_uuids(team: Team, uuids: List[str]) -> QuerySet: +def get_persons_by_uuids(team: Team, uuids: list[str]) -> QuerySet: return Person.objects.filter(team_id=team.pk, uuid__in=uuids) @@ -254,7 +254,7 @@ def _delete_person( ) -def _get_distinct_ids_with_version(person: Person) -> Dict[str, int]: +def _get_distinct_ids_with_version(person: Person) -> dict[str, int]: return { distinct_id: int(version or 0) for distinct_id, version in PersonDistinctId.objects.filter(person=person, team_id=person.team_id) diff --git a/posthog/models/personal_api_key.py b/posthog/models/personal_api_key.py index 047471f4fe8a8..23bb04e0b4242 100644 --- a/posthog/models/personal_api_key.py +++ b/posthog/models/personal_api_key.py @@ -1,4 +1,4 @@ -from typing import Optional, Literal, Tuple, get_args +from typing import Optional, Literal, get_args import hashlib from django.contrib.auth.hashers import PBKDF2PasswordHasher @@ -111,5 +111,5 @@ class PersonalAPIKey(models.Model): ] -API_SCOPE_OBJECTS: Tuple[APIScopeObject, ...] = get_args(APIScopeObject) -API_SCOPE_ACTIONS: Tuple[APIScopeActions, ...] = get_args(APIScopeActions) +API_SCOPE_OBJECTS: tuple[APIScopeObject, ...] = get_args(APIScopeObject) +API_SCOPE_ACTIONS: tuple[APIScopeActions, ...] 
= get_args(APIScopeActions) diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py index 900b1abec7741..06971c1ce7cca 100644 --- a/posthog/models/plugin.py +++ b/posthog/models/plugin.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass from enum import Enum -from typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast from uuid import UUID from django.conf import settings @@ -52,13 +52,13 @@ def raise_if_plugin_installed(url: str, organization_id: str): raise ValidationError(f'Plugin from URL "{url_without_private_key}" already installed!') -def update_validated_data_from_url(validated_data: Dict[str, Any], url: str) -> Dict[str, Any]: +def update_validated_data_from_url(validated_data: dict[str, Any], url: str) -> dict[str, Any]: """If remote plugin, download the archive and get up-to-date validated_data from there. Returns plugin.json.""" - plugin_json: Optional[Dict[str, Any]] + plugin_json: Optional[dict[str, Any]] if url.startswith("file:"): plugin_path = url[5:] plugin_json_path = os.path.join(plugin_path, "plugin.json") - plugin_json = cast(Optional[Dict[str, Any]], load_json_file(plugin_json_path)) + plugin_json = cast(Optional[dict[str, Any]], load_json_file(plugin_json_path)) if not plugin_json: raise ValidationError(f"Could not load plugin.json from: {plugin_json_path}") validated_data["plugin_type"] = "local" @@ -81,7 +81,7 @@ def update_validated_data_from_url(validated_data: Dict[str, Any], url: str) -> validated_data["latest_tag"] = parsed_url.get("tag", None) validated_data["archive"] = download_plugin_archive(validated_data["url"], validated_data["tag"]) plugin_json = cast( - Optional[Dict[str, Any]], + Optional[dict[str, Any]], get_file_from_archive(validated_data["archive"], "plugin.json"), ) if not plugin_json: @@ -124,7 +124,7 @@ class PluginManager(models.Manager): def install(self, **kwargs) -> "Plugin": if "organization_id" not in kwargs and "organization" in kwargs: kwargs["organization_id"] = kwargs["organization"].id - plugin_json: Optional[Dict[str, Any]] = None + plugin_json: Optional[dict[str, Any]] = None if kwargs.get("plugin_type", None) != Plugin.PluginType.SOURCE: plugin_json = update_validated_data_from_url(kwargs, kwargs["url"]) raise_if_plugin_installed(kwargs["url"], kwargs["organization_id"]) @@ -204,8 +204,8 @@ class PluginType(models.TextChoices): objects: PluginManager = PluginManager() - def get_default_config(self) -> Dict[str, Any]: - config: Dict[str, Any] = {} + def get_default_config(self) -> dict[str, Any]: + config: dict[str, Any] = {} config_schema = self.config_schema if isinstance(config_schema, dict): for key, config_entry in config_schema.items(): @@ -296,8 +296,8 @@ class PluginLogEntryType(str, Enum): class PluginSourceFileManager(models.Manager): def sync_from_plugin_archive( - self, plugin: Plugin, plugin_json_parsed: Optional[Dict[str, Any]] = None - ) -> Tuple[ + self, plugin: Plugin, plugin_json_parsed: Optional[dict[str, Any]] = None + ) -> tuple[ "PluginSourceFile", Optional["PluginSourceFile"], Optional["PluginSourceFile"], @@ -426,12 +426,12 @@ def fetch_plugin_log_entries( before: Optional[timezone.datetime] = None, search: Optional[str] = None, limit: Optional[int] = None, - type_filter: Optional[List[PluginLogEntryType]] = None, -) -> List[PluginLogEntry]: + type_filter: Optional[list[PluginLogEntryType]] = None, +) -> list[PluginLogEntry]: if type_filter is None: type_filter = [] - clickhouse_where_parts: List[str] = [] - clickhouse_kwargs: Dict[str, Any] = {} + 
clickhouse_where_parts: list[str] = [] + clickhouse_kwargs: dict[str, Any] = {} if team_id is not None: clickhouse_where_parts.append("team_id = %(team_id)s") clickhouse_kwargs["team_id"] = team_id @@ -457,7 +457,7 @@ def fetch_plugin_log_entries( return [PluginLogEntry(*result) for result in cast(list, sync_execute(clickhouse_query, clickhouse_kwargs))] -def validate_plugin_job_payload(plugin: Plugin, job_type: str, payload: Dict[str, Any], *, is_staff: bool): +def validate_plugin_job_payload(plugin: Plugin, job_type: str, payload: dict[str, Any], *, is_staff: bool): if not plugin.public_jobs: raise ValidationError("Plugin has no public jobs") if job_type not in plugin.public_jobs: diff --git a/posthog/models/project.py b/posthog/models/project.py index c4ead260fb780..030bd4669a6c8 100644 --- a/posthog/models/project.py +++ b/posthog/models/project.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from django.db import models from django.db import transaction from django.core.validators import MinLengthValidator @@ -8,7 +8,7 @@ class ProjectManager(models.Manager): - def create_with_team(self, team_fields: Optional[dict] = None, **kwargs) -> Tuple["Project", "Team"]: + def create_with_team(self, team_fields: Optional[dict] = None, **kwargs) -> tuple["Project", "Team"]: from .team import Team with transaction.atomic(): diff --git a/posthog/models/property/property.py b/posthog/models/property/property.py index defd098cd7ef7..74ef611e257a3 100644 --- a/posthog/models/property/property.py +++ b/posthog/models/property/property.py @@ -2,11 +2,8 @@ from enum import Enum from typing import ( Any, - Dict, - List, Literal, Optional, - Tuple, Union, cast, ) @@ -27,7 +24,7 @@ class BehavioralPropertyType(str, Enum): RESTARTED_PERFORMING_EVENT = "restarted_performing_event" -ValueT = Union[str, int, List[str]] +ValueT = Union[str, int, list[str]] PropertyType = Literal[ "event", "feature", @@ -78,7 +75,7 @@ class BehavioralPropertyType(str, Enum): OperatorInterval = Literal["day", "week", "month", "year"] GroupTypeName = str -PropertyIdentifier = Tuple[PropertyName, PropertyType, Optional[GroupTypeIndex]] +PropertyIdentifier = tuple[PropertyName, PropertyType, Optional[GroupTypeIndex]] NEGATED_OPERATORS = ["is_not", "not_icontains", "not_regex", "is_not_set"] CLICKHOUSE_ONLY_PROPERTY_TYPES = [ @@ -187,7 +184,7 @@ class Property: # Type of `key` event_type: Optional[Literal["events", "actions"]] # Any extra filters on the event - event_filters: Optional[List["Property"]] + event_filters: Optional[list["Property"]] # Query people who did event '$pageview' 20 times in the last 30 days # translates into: # key = '$pageview', value = 'performed_event_multiple' @@ -216,7 +213,7 @@ class Property: total_periods: Optional[int] min_periods: Optional[int] negation: Optional[bool] = False - _data: Dict + _data: dict def __init__( self, @@ -239,7 +236,7 @@ def __init__( seq_time_value: Optional[int] = None, seq_time_interval: Optional[OperatorInterval] = None, negation: Optional[bool] = None, - event_filters: Optional[List["Property"]] = None, + event_filters: Optional[list["Property"]] = None, **kwargs, ) -> None: self.key = key @@ -298,7 +295,7 @@ def __repr__(self): params_repr = ", ".join(f"{key}={repr(value)}" for key, value in self.to_dict().items()) return f"Property({params_repr})" - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: return {key: value for key, value in vars(self).items() if value is not None} 
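Worth noting about Property.to_dict above: it serialises only the attributes that are actually set, by filtering vars(self) for non-None values, so the annotation change to dict[str, Any] does not alter behaviour. A small self-contained sketch of that idiom (the Example class and its fields are made up for illustration and are not part of the PostHog models):

    from typing import Any, Optional


    class Example:
        def __init__(self, key: str, value: Optional[str] = None, negation: Optional[bool] = None) -> None:
            self.key = key
            self.value = value
            self.negation = negation

        def to_dict(self) -> dict[str, Any]:
            # vars(self) is the instance __dict__; dropping None-valued entries keeps
            # the serialised form minimal, mirroring Property.to_dict above.
            return {key: value for key, value in vars(self).items() if value is not None}


    assert Example("browser", "Chrome").to_dict() == {"key": "browser", "value": "Chrome"}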
@staticmethod @@ -331,17 +328,17 @@ def _parse_value(value: ValueT, convert_to_number: bool = False) -> Any: class PropertyGroup: type: PropertyOperatorType - values: Union[List[Property], List["PropertyGroup"]] + values: Union[list[Property], list["PropertyGroup"]] def __init__( self, type: PropertyOperatorType, - values: Union[List[Property], List["PropertyGroup"]], + values: Union[list[Property], list["PropertyGroup"]], ) -> None: self.type = type self.values = values - def combine_properties(self, operator: PropertyOperatorType, properties: List[Property]) -> "PropertyGroup": + def combine_properties(self, operator: PropertyOperatorType, properties: list[Property]) -> "PropertyGroup": if not properties: return self @@ -375,7 +372,7 @@ def __repr__(self): return f"PropertyGroup(type={self.type}-{params_repr})" @cached_property - def flat(self) -> List[Property]: + def flat(self) -> list[Property]: return list(self._property_groups_flat(self)) def _property_groups_flat(self, prop_group: "PropertyGroup"): diff --git a/posthog/models/property/util.py b/posthog/models/property/util.py index cae1be3340eac..de2602539e6ec 100644 --- a/posthog/models/property/util.py +++ b/posthog/models/property/util.py @@ -1,17 +1,15 @@ import re from collections import Counter -from typing import Any, Callable -from typing import Counter as TCounter +from typing import Any +from collections.abc import Callable +from collections import Counter as TCounter from typing import ( - Dict, - Iterable, - List, Literal, Optional, - Tuple, Union, cast, ) +from collections.abc import Iterable from rest_framework import exceptions @@ -88,7 +86,7 @@ def parse_prop_grouped_clauses( person_id_joined_alias: str = "person_id", group_properties_joined: bool = True, _top_level: bool = True, -) -> Tuple[str, Dict]: +) -> tuple[str, dict]: """Translate the given property filter group into an SQL condition clause (+ SQL params).""" if not property_group or len(property_group.values) == 0: return "", {} @@ -119,7 +117,7 @@ def parse_prop_grouped_clauses( _final = f"{property_group.type} ".join(group_clauses) else: _final, final_params = parse_prop_clauses( - filters=cast(List[Property], property_group.values), + filters=cast(list[Property], property_group.values), prepend=f"{prepend}", table_name=table_name, allow_denormalized_props=allow_denormalized_props, @@ -151,7 +149,7 @@ def is_property_group(group: Union[Property, "PropertyGroup"]): def parse_prop_clauses( team_id: int, - filters: List[Property], + filters: list[Property], *, hogql_context: Optional[HogQLContext], prepend: str = "global", @@ -162,10 +160,10 @@ def parse_prop_clauses( person_id_joined_alias: str = "person_id", group_properties_joined: bool = True, property_operator: PropertyOperatorType = PropertyOperatorType.AND, -) -> Tuple[str, Dict]: +) -> tuple[str, dict]: """Translate the given property filter into an SQL condition clause (+ SQL params).""" final = [] - params: Dict[str, Any] = {} + params: dict[str, Any] = {} table_formatted = table_name if table_formatted != "": @@ -411,7 +409,7 @@ def prop_filter_json_extract( property_operator: str = PropertyOperatorType.AND, table_name: Optional[str] = None, use_event_column: Optional[str] = None, -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: # TODO: Once all queries are migrated over we can get rid of allow_denormalized_props if transform_expression is not None: prop_var = transform_expression(prop_var) @@ -433,7 +431,7 @@ def prop_filter_json_extract( if prop.negation: operator = 
negate_operator(operator or "exact") - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if operator == "is_not": params = { @@ -649,7 +647,7 @@ def get_single_or_multi_property_string_expr( allow_denormalized_props=True, materialised_table_column: str = "properties", normalize_url: bool = False, -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: """ When querying for breakdown properties: * If the breakdown provided is a string, we extract the JSON from the properties object stored in the DB @@ -663,7 +661,7 @@ def get_single_or_multi_property_string_expr( no alias will be appended. """ - breakdown_params: Dict[str, Any] = {} + breakdown_params: dict[str, Any] = {} if isinstance(breakdown, str) or isinstance(breakdown, int): breakdown_key = f"breakdown_param_{len(breakdown_params) + 1}" breakdown_key = f"breakdown_param_{len(breakdown_params) + 1}" @@ -719,7 +717,7 @@ def get_property_string_expr( allow_denormalized_props: bool = True, table_alias: Optional[str] = None, materialised_table_column: str = "properties", -) -> Tuple[str, bool]: +) -> tuple[str, bool]: """ :param table: @@ -752,8 +750,8 @@ def get_property_string_expr( return trim_quotes_expr(f"JSONExtractRaw({table_string}{column}, {var})"), False -def box_value(value: Any, remove_spaces=False) -> List[Any]: - if not isinstance(value, List): +def box_value(value: Any, remove_spaces=False) -> list[Any]: + if not isinstance(value, list): value = [value] return [str(value).replace(" ", "") if remove_spaces else str(value) for value in value] @@ -764,19 +762,19 @@ def filter_element( *, operator: Optional[OperatorType] = None, prepend: str = "", -) -> Tuple[str, Dict]: +) -> tuple[str, dict]: if operator is None: operator = "exact" params = {} - combination_conditions: List[str] = [] + combination_conditions: list[str] = [] if key == "selector": if operator not in ("exact", "is_not"): raise exceptions.ValidationError( 'Filtering by element selector only supports operators "equals" and "doesn\'t equal" currently.' ) - selectors = cast(List[str | int], value) if isinstance(value, list) else [value] + selectors = cast(list[str | int], value) if isinstance(value, list) else [value] for idx, query in enumerate(selectors): if not query: # Skip empty selectors continue @@ -792,7 +790,7 @@ def filter_element( raise exceptions.ValidationError( 'Filtering by element tag only supports operators "equals" and "doesn\'t equal" currently.' ) - tag_names = cast(List[str | int], value) if isinstance(value, list) else [value] + tag_names = cast(list[str | int], value) if isinstance(value, list) else [value] for idx, tag_name in enumerate(tag_names): if not tag_name: # Skip empty tags continue @@ -824,12 +822,12 @@ def filter_element( return "0 = 191" if operator not in NEGATED_OPERATORS else "", {} -def process_ok_values(ok_values: Any, operator: OperatorType) -> List[str]: +def process_ok_values(ok_values: Any, operator: OperatorType) -> list[str]: if operator.endswith("_set"): return [r'[^"]+'] else: # Make sure ok_values is a list - ok_values = cast(List[str], [str(val) for val in ok_values]) if isinstance(ok_values, list) else [ok_values] + ok_values = cast(list[str], [str(val) for val in ok_values]) if isinstance(ok_values, list) else [ok_values] # Escape double quote characters, since e.g. 
text 'foo="bar"' is represented as text="foo=\"bar\"" # in the elements chain ok_values = [text.replace('"', r"\"") for text in ok_values] @@ -869,8 +867,8 @@ def build_selector_regex(selector: Selector) -> str: class HogQLPropertyChecker(TraversingVisitor): def __init__(self): - self.event_properties: List[str] = [] - self.person_properties: List[str] = [] + self.event_properties: list[str] = [] + self.person_properties: list[str] = [] def visit_field(self, node: ast.Field): if len(node.chain) > 1 and node.chain[0] == "properties": @@ -888,8 +886,8 @@ def visit_field(self, node: ast.Field): self.person_properties.append(node.chain[3]) -def extract_tables_and_properties(props: List[Property]) -> TCounter[PropertyIdentifier]: - counters: List[tuple] = [] +def extract_tables_and_properties(props: list[Property]) -> TCounter[PropertyIdentifier]: + counters: list[tuple] = [] for prop in props: if prop.type == "hogql": counters.extend(count_hogql_properties(prop.key)) @@ -917,7 +915,7 @@ def count_hogql_properties( return counter -def get_session_property_filter_statement(prop: Property, idx: int, prepend: str = "") -> Tuple[str, Dict[str, Any]]: +def get_session_property_filter_statement(prop: Property, idx: int, prepend: str = "") -> tuple[str, dict[str, Any]]: if prop.key == "$session_duration": try: duration = float(prop.value) # type: ignore @@ -932,7 +930,7 @@ def get_session_property_filter_statement(prop: Property, idx: int, prepend: str ) else: - raise exceptions.ValidationError(f"Property '{prop.key}' is not allowed in session property filters.") + raise exceptions.ValidationError(f"Session property '{prop.key}' is only valid in HogQL queries.") def clear_excess_levels(prop: Union["PropertyGroup", "Property"], skip=False): diff --git a/posthog/models/property_definition.py b/posthog/models/property_definition.py index 0a6f89354a639..8c8b9d6c773b4 100644 --- a/posthog/models/property_definition.py +++ b/posthog/models/property_definition.py @@ -12,6 +12,7 @@ class PropertyType(models.TextChoices): String = "String", "String" Numeric = "Numeric", "Numeric" Boolean = "Boolean", "Boolean" + Duration = "Duration", "Duration" class PropertyFormat(models.TextChoices): @@ -34,6 +35,7 @@ class Type(models.IntegerChoices): EVENT = 1, "event" PERSON = 2, "person" GROUP = 3, "group" + SESSION = 4, "session" team: models.ForeignKey = models.ForeignKey( Team, diff --git a/posthog/models/sessions/sql.py b/posthog/models/sessions/sql.py index 6bebc73e023f4..22d3431099f94 100644 --- a/posthog/models/sessions/sql.py +++ b/posthog/models/sessions/sql.py @@ -260,3 +260,44 @@ def source_column(column_name: str) -> str: GROUP BY session_id, team_id """ ) + +SELECT_SESSION_PROP_STRING_VALUES_SQL = """ +SELECT + value, + count(value) +FROM ( + SELECT + {property_expr} as value + FROM + sessions + WHERE + team_id = %(team_id)s AND + {property_expr} IS NOT NULL AND + {property_expr} != '' + ORDER BY session_id DESC + LIMIT 100000 +) +GROUP BY value +ORDER BY count(value) DESC +LIMIT 20 +""" + +SELECT_SESSION_PROP_STRING_VALUES_SQL_WITH_FILTER = """ +SELECT + value, + count(value) +FROM ( + SELECT + {property_expr} as value + FROM + sessions + WHERE + team_id = %(team_id)s AND + {property_expr} ILIKE %(value)s + ORDER BY session_id DESC + LIMIT 100000 +) +GROUP BY value +ORDER BY count(value) DESC +LIMIT 20 +""" diff --git a/posthog/models/sharing_configuration.py b/posthog/models/sharing_configuration.py index 48ea711f02a1f..7bbacc453559d 100644 --- a/posthog/models/sharing_configuration.py +++ 
b/posthog/models/sharing_configuration.py @@ -1,5 +1,5 @@ import secrets -from typing import List, cast +from typing import cast from django.db import models @@ -48,7 +48,7 @@ def can_access_object(self, obj: models.Model): return False - def get_connected_insight_ids(self) -> List[int]: + def get_connected_insight_ids(self) -> list[int]: if self.insight: if self.insight.deleted: return [] diff --git a/posthog/models/subscription.py b/posthog/models/subscription.py index f7b8a90a7e492..a0aa65ed9f668 100644 --- a/posthog/models/subscription.py +++ b/posthog/models/subscription.py @@ -1,6 +1,6 @@ from dataclasses import dataclass from datetime import timedelta -from typing import Any, Dict, Optional +from typing import Any, Optional from dateutil.rrule import ( FR, @@ -134,7 +134,7 @@ def save(self, *args, **kwargs) -> None: self.set_next_delivery_date() if "update_fields" in kwargs: kwargs["update_fields"].append("next_delivery_date") - super(Subscription, self).save(*args, **kwargs) + super().save(*args, **kwargs) @property def url(self): @@ -187,7 +187,7 @@ def summary(self): capture_exception(e) return "sent on a schedule" - def get_analytics_metadata(self) -> Dict[str, Any]: + def get_analytics_metadata(self) -> dict[str, Any]: """ Returns serialized information about the object for analytics reporting. """ diff --git a/posthog/models/tagged_item.py b/posthog/models/tagged_item.py index 612f2f39399c3..302adcdb24f23 100644 --- a/posthog/models/tagged_item.py +++ b/posthog/models/tagged_item.py @@ -1,4 +1,5 @@ -from typing import Iterable, List, Union +from typing import Union +from collections.abc import Iterable from django.core.exceptions import ValidationError from django.db import models @@ -18,7 +19,7 @@ # Checks that exactly one object field is populated def build_check(related_objects: Iterable[str]): - built_check_list: List[Union[Q, Q]] = [] + built_check_list: list[Union[Q, Q]] = [] for field in related_objects: built_check_list.append( Q( @@ -117,7 +118,7 @@ def clean(self): def save(self, *args, **kwargs): self.full_clean() - return super(TaggedItem, self).save(*args, **kwargs) + return super().save(*args, **kwargs) def __str__(self) -> str: return str(self.tag) diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 6f5f927fe000a..80271396fb1b9 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -1,7 +1,7 @@ import re from decimal import Decimal from functools import lru_cache -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional import posthoganalytics import pydantic @@ -64,7 +64,7 @@ class TeamManager(models.Manager): def get_queryset(self): return super().get_queryset().defer(*DEPRECATED_ATTRS) - def set_test_account_filters(self, organization: Optional[Any]) -> List: + def set_test_account_filters(self, organization: Optional[Any]) -> list: filters = [ { "key": "$host", @@ -150,7 +150,7 @@ def increment_id_sequence(self) -> int: return result[0] -def get_default_data_attributes() -> List[str]: +def get_default_data_attributes() -> list[str]: return ["data-attr"] @@ -218,6 +218,7 @@ class Team(UUIDClassicModel): capture_console_log_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) capture_performance_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) surveys_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) + heatmaps_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) 
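The heatmaps_opt_in field added just above is what the updated test_filter.ambr snapshots earlier in this diff pick up (the new column now appears in every posthog_team SELECT). The accompanying Django migration is not part of this excerpt; a minimal sketch of what such a migration would typically look like follows, with the migration module name and dependency left as placeholders rather than real values from the PR.

    from django.db import migrations, models


    class Migration(migrations.Migration):
        # Placeholder dependency; the real preceding migration is not shown in this diff.
        dependencies = [("posthog", "XXXX_previous_migration")]

        operations = [
            migrations.AddField(
                model_name="team",
                name="heatmaps_opt_in",
                field=models.BooleanField(blank=True, null=True),
            ),
        ]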
session_recording_version: models.CharField = models.CharField(null=True, blank=True, max_length=24) signup_token: models.CharField = models.CharField(max_length=200, null=True, blank=True) is_demo: models.BooleanField = models.BooleanField(default=False) @@ -477,7 +478,7 @@ def groups_on_events_querying_enabled(): def check_is_feature_available_for_team(team_id: int, feature_key: str, current_usage: Optional[int] = None): - available_product_features: Optional[List[Dict[str, str]]] = ( + available_product_features: Optional[list[dict[str, str]]] = ( Team.objects.select_related("organization") .values_list("organization__available_product_features", flat=True) .get(id=team_id) diff --git a/posthog/models/team/util.py b/posthog/models/team/util.py index a21b75ab80384..5756c8da211aa 100644 --- a/posthog/models/team/util.py +++ b/posthog/models/team/util.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Any, List +from typing import Any from posthog.temporal.common.client import sync_connect from posthog.batch_exports.service import batch_export_delete_schedule @@ -7,7 +7,7 @@ from posthog.models.async_migration import is_async_migration_complete -def delete_bulky_postgres_data(team_ids: List[int]): +def delete_bulky_postgres_data(team_ids: list[int]): "Efficiently delete large tables for teams from postgres. Using normal CASCADE delete here can time out" from posthog.models.cohort import CohortPeople @@ -29,7 +29,7 @@ def _raw_delete(queryset: Any): queryset._raw_delete(queryset.db) -def delete_batch_exports(team_ids: List[int]): +def delete_batch_exports(team_ids: list[int]): """Delete BatchExports for deleted teams. Using normal CASCADE doesn't trigger a delete from Temporal. diff --git a/posthog/models/test/test_dashboard_tile_model.py b/posthog/models/test/test_dashboard_tile_model.py index be13ba06975c3..79f4a085a24c7 100644 --- a/posthog/models/test/test_dashboard_tile_model.py +++ b/posthog/models/test/test_dashboard_tile_model.py @@ -1,5 +1,4 @@ import datetime -from typing import Dict, List from django.core.exceptions import ValidationError from django.db.utils import IntegrityError @@ -19,7 +18,7 @@ class TestDashboardTileModel(APIBaseTest): dashboard: Dashboard asset: ExportedAsset - tiles: List[DashboardTile] + tiles: list[DashboardTile] def setUp(self) -> None: self.dashboard = Dashboard.objects.create(team=self.team, name="private dashboard", created_by=self.user) @@ -64,7 +63,7 @@ def test_cannot_add_a_tile_with_insight_and_text_on_validation(self) -> None: DashboardTile.objects.create(dashboard=self.dashboard, insight=insight, text=text) def test_cannot_set_caching_data_for_text_tiles(self) -> None: - tile_fields: List[Dict] = [ + tile_fields: list[dict] = [ {"filters_hash": "123"}, {"refreshing": True}, {"refresh_attempt": 2}, diff --git a/posthog/models/test/test_missing_person_model.py b/posthog/models/test/test_missing_person_model.py new file mode 100644 index 0000000000000..2692b76b15652 --- /dev/null +++ b/posthog/models/test/test_missing_person_model.py @@ -0,0 +1,10 @@ +from uuid import UUID +from posthog.models.person.missing_person import MissingPerson +from posthog.test.base import BaseTest + + +class TestMissingPersonModel(BaseTest): + def test_generates_deterministic_uuid(self): + assert MissingPerson(1, "test").uuid == UUID("246f7a43-5507-564f-b687-793ee3c2dd79") + assert MissingPerson(2, "test").uuid == UUID("00ce873a-549c-548e-bbec-cc804a385dd8") + assert MissingPerson(1, "test2").uuid == UUID("45c17302-ee44-5596-916a-0eba21f4b638") diff 
--git a/posthog/models/uploaded_media.py b/posthog/models/uploaded_media.py index 0161b71beb4f6..2b31f348263cb 100644 --- a/posthog/models/uploaded_media.py +++ b/posthog/models/uploaded_media.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional import structlog from django.conf import settings @@ -72,7 +72,7 @@ def save_content( def save_content_to_object_storage(uploaded_media: UploadedMedia, content: bytes) -> None: - path_parts: List[str] = [ + path_parts: list[str] = [ settings.OBJECT_STORAGE_MEDIA_UPLOADS_FOLDER, f"team-{uploaded_media.team.pk}", f"media-{uploaded_media.pk}", diff --git a/posthog/models/user.py b/posthog/models/user.py index cb4b1063cc961..c2d5b0f8d5551 100644 --- a/posthog/models/user.py +++ b/posthog/models/user.py @@ -1,5 +1,6 @@ from functools import cached_property -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, TypedDict +from typing import Any, Optional, TypedDict +from collections.abc import Callable from django.contrib.auth.models import AbstractUser, BaseUserManager from django.db import models, transaction @@ -36,7 +37,7 @@ class UserManager(BaseUserManager): def get_queryset(self): return super().get_queryset().defer(*DEFERED_ATTRS) - model: Type["User"] + model: type["User"] use_in_migrations = True @@ -58,12 +59,12 @@ def bootstrap( email: str, password: Optional[str], first_name: str = "", - organization_fields: Optional[Dict[str, Any]] = None, - team_fields: Optional[Dict[str, Any]] = None, + organization_fields: Optional[dict[str, Any]] = None, + team_fields: Optional[dict[str, Any]] = None, create_team: Optional[Callable[["Organization", "User"], "Team"]] = None, is_staff: bool = False, **user_fields, - ) -> Tuple["Organization", "Team", "User"]: + ) -> tuple["Organization", "Team", "User"]: """Instead of doing the legwork of creating a user from scratch, delegate the details with bootstrap.""" with transaction.atomic(): organization_fields = organization_fields or {} @@ -112,7 +113,7 @@ def get_from_personal_api_key(self, key_value: str) -> Optional["User"]: return personal_api_key.user -def events_column_config_default() -> Dict[str, Any]: +def events_column_config_default() -> dict[str, Any]: return {"active": "DEFAULT"} @@ -124,7 +125,7 @@ class ThemeMode(models.TextChoices): class User(AbstractUser, UUIDClassicModel): USERNAME_FIELD = "email" - REQUIRED_FIELDS: List[str] = [] + REQUIRED_FIELDS: list[str] = [] DISABLED = "disabled" TOOLBAR = "toolbar" diff --git a/posthog/models/utils.py b/posthog/models/utils.py index a093cf1e4ebde..c832cc8f044eb 100644 --- a/posthog/models/utils.py +++ b/posthog/models/utils.py @@ -5,7 +5,8 @@ from contextlib import contextmanager from random import Random, choice from time import time -from typing import Any, Callable, Dict, Iterator, Optional, Set, Type, TypeVar +from typing import Any, Optional, TypeVar +from collections.abc import Callable, Iterator from django.db import IntegrityError, connections, models, transaction from django.db.backends.utils import CursorWrapper @@ -40,7 +41,7 @@ class UUIDT(uuid.UUID): (https://blog.twitter.com/engineering/en_us/a/2010/announcing-snowflake.html). 
""" - current_series_per_ms: Dict[int, int] = defaultdict(int) + current_series_per_ms: dict[int, int] = defaultdict(int) def __init__( self, @@ -205,10 +206,10 @@ def create_with_slug(create_func: Callable[..., T], default_slug: str = "", *arg def get_deferred_field_set_for_model( - model: Type[models.Model], - fields_not_deferred: Optional[Set[str]] = None, + model: type[models.Model], + fields_not_deferred: Optional[set[str]] = None, field_prefix: str = "", -) -> Set[str]: +) -> set[str]: """Return a set of field names to be deferred for a given model. Used with `.defer()` after `select_related` Why? `select_related` fetches the entire related objects - not allowing you to specify which fields diff --git a/posthog/plugins/site.py b/posthog/plugins/site.py index 9cb2b3023f80e..0f5feda2df2c7 100644 --- a/posthog/plugins/site.py +++ b/posthog/plugins/site.py @@ -1,6 +1,6 @@ from dataclasses import asdict, dataclass from hashlib import md5 -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: from posthog.models import Team @@ -11,7 +11,7 @@ class WebJsSource: id: int source: str token: str - config_schema: List[dict] + config_schema: list[dict] config: dict @@ -48,7 +48,7 @@ def get_transpiled_site_source(id: int, token: str) -> Optional[WebJsSource]: return WebJsSource(*(list(response))) # type: ignore -def get_decide_site_apps(team: "Team", using_database: str = "default") -> List[dict]: +def get_decide_site_apps(team: "Team", using_database: str = "default") -> list[dict]: from posthog.models import PluginConfig, PluginSourceFile sources = ( @@ -70,13 +70,13 @@ def get_decide_site_apps(team: "Team", using_database: str = "default") -> List[ ) def site_app_url(source: tuple) -> str: - hash = md5(f"{source[2]}-{source[3]}-{source[4]}".encode("utf-8")).hexdigest() + hash = md5(f"{source[2]}-{source[3]}-{source[4]}".encode()).hexdigest() return f"/site_app/{source[0]}/{source[1]}/{hash}/" return [asdict(WebJsUrl(source[0], site_app_url(source))) for source in sources] -def get_site_config_from_schema(config_schema: Optional[List[dict]], config: Optional[dict]): +def get_site_config_from_schema(config_schema: Optional[list[dict]], config: Optional[dict]): if not config or not config_schema: return {} return { diff --git a/posthog/plugins/utils.py b/posthog/plugins/utils.py index 2610d8b2eb17d..602f775447bfa 100644 --- a/posthog/plugins/utils.py +++ b/posthog/plugins/utils.py @@ -4,7 +4,7 @@ import re import tarfile from tarfile import ReadError -from typing import Any, Dict, Optional, Tuple +from typing import Any, Optional from urllib.parse import parse_qs, quote from zipfile import ZIP_DEFLATED, BadZipFile, Path, ZipFile @@ -12,7 +12,7 @@ from django.conf import settings -def parse_github_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Optional[str]]]: +def parse_github_url(url: str, get_latest_if_none=False) -> Optional[dict[str, Optional[str]]]: url, private_token = split_url_and_private_token(url) match = re.search( r"^https?://(?:www\.)?github\.com/([A-Za-z0-9_.-]+)/([A-Za-z0-9_.-]+)(/(commit|tree|releases/tag)/([A-Za-z0-9_.\-]+)/?([A-Za-z0-9_.\-/]+)?)?$", @@ -27,7 +27,7 @@ def parse_github_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O if not match: return None - parsed: Dict[str, Optional[str]] = { + parsed: dict[str, Optional[str]] = { "type": "github", "root_url": f"https://github.com/{match.group(1)}/{match.group(2)}", "user": match.group(1), @@ -76,13 +76,13 @@ def parse_github_url(url: str, 
get_latest_if_none=False) -> Optional[Dict[str, O return parsed -def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Optional[str]]]: +def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[dict[str, Optional[str]]]: url, private_token = split_url_and_private_token(url) match = re.search(r"^https?://(?:www\.)?gitlab\.com/([A-Za-z0-9_.\-/]+)$", url) if not match: return None - parsed: Dict[str, Optional[str]] = { + parsed: dict[str, Optional[str]] = { "type": "gitlab", "project": match.group(1), "tag": None, @@ -127,7 +127,7 @@ def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O return parsed -def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Optional[str]]]: +def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[dict[str, Optional[str]]]: url, private_token = split_url_and_private_token(url) match = re.search( r"^https?://(?:www\.)?npmjs\.com/package/([@a-z0-9_-]+(/[a-z0-9_-]+)?)?/?(v/([A-Za-z0-9_.-]+)/?|)$", @@ -135,7 +135,7 @@ def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Opti ) if not match: return None - parsed: Dict[str, Optional[str]] = { + parsed: dict[str, Optional[str]] = { "type": "npm", "pkg": match.group(1), "tag": match.group(4), @@ -166,7 +166,7 @@ def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Opti return parsed -def parse_url(url: str, get_latest_if_none=False) -> Dict[str, Optional[str]]: +def parse_url(url: str, get_latest_if_none=False) -> dict[str, Optional[str]]: parsed_url = parse_github_url(url, get_latest_if_none) if parsed_url: return parsed_url @@ -179,7 +179,7 @@ def parse_url(url: str, get_latest_if_none=False) -> Dict[str, Optional[str]]: raise Exception("Must be a GitHub/GitLab repository or npm package URL!") -def split_url_and_private_token(url: str) -> Tuple[str, Optional[str]]: +def split_url_and_private_token(url: str) -> tuple[str, Optional[str]]: private_token = None if "?" in url: url, query = url.split("?") @@ -242,7 +242,7 @@ def download_plugin_archive(url: str, tag: Optional[str] = None) -> bytes: def load_json_file(filename: str): try: - with open(filename, "r", encoding="utf_8") as reader: + with open(filename, encoding="utf_8") as reader: return json.loads(reader.read()) except FileNotFoundError: return None @@ -313,8 +313,8 @@ def find_index_ts_in_archive(archive: bytes, main_filename: Optional[str] = None def extract_plugin_code( - archive: bytes, plugin_json_parsed: Optional[Dict[str, Any]] = None -) -> Tuple[str, Optional[str], Optional[str], Optional[str]]: + archive: bytes, plugin_json_parsed: Optional[dict[str, Any]] = None +) -> tuple[str, Optional[str], Optional[str], Optional[str]]: """Extract plugin.json, index.ts (which can be aliased) and frontend.tsx out of an archive. 
If plugin.json has already been parsed before this is called, its value can be passed in as an optimization.""" diff --git a/posthog/queries/actor_base_query.py b/posthog/queries/actor_base_query.py index 66c476cd814cd..f23b4c4ff05da 100644 --- a/posthog/queries/actor_base_query.py +++ b/posthog/queries/actor_base_query.py @@ -2,12 +2,8 @@ from datetime import datetime, timedelta from typing import ( Any, - Dict, - List, Literal, Optional, - Set, - Tuple, TypedDict, Union, cast, @@ -34,14 +30,14 @@ class EventInfoForRecording(TypedDict): class MatchedRecording(TypedDict): session_id: str - events: List[EventInfoForRecording] + events: list[EventInfoForRecording] class CommonActor(TypedDict): id: Union[uuid.UUID, str] created_at: Optional[str] - properties: Dict[str, Any] - matched_recordings: List[MatchedRecording] + properties: dict[str, Any] + matched_recordings: list[MatchedRecording] value_at_data_point: Optional[float] @@ -50,7 +46,7 @@ class SerializedPerson(CommonActor): uuid: Union[uuid.UUID, str] is_identified: Optional[bool] name: str - distinct_ids: List[str] + distinct_ids: list[str] class SerializedGroup(CommonActor): @@ -81,7 +77,7 @@ def __init__( self.entity = entity self._filter = filter - def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: + def actor_query(self, limit_actors: Optional[bool] = True) -> tuple[str, dict]: """Implemented by subclasses. Must provide query and params. The query must return list of uuids. Can be group uuids (group_key) or person uuids""" raise NotImplementedError() @@ -96,9 +92,9 @@ def is_aggregating_by_groups(self) -> bool: def get_actors( self, - ) -> Tuple[ + ) -> tuple[ Union[QuerySet[Person], QuerySet[Group]], - Union[List[SerializedGroup], List[SerializedPerson]], + Union[list[SerializedGroup], list[SerializedPerson]], int, ]: """Get actors in data model and dict formats. 
Builds query and executes""" @@ -124,10 +120,10 @@ def get_actors( def query_for_session_ids_with_recordings( self, - session_ids: Set[str], + session_ids: set[str], date_from: datetime | None, date_to: datetime | None, - ) -> Set[str]: + ) -> set[str]: """Filters a list of session_ids to those that actually have recordings""" query = """ SELECT DISTINCT session_id @@ -166,9 +162,9 @@ def query_for_session_ids_with_recordings( def add_matched_recordings_to_serialized_actors( self, - serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]], + serialized_actors: Union[list[SerializedGroup], list[SerializedPerson]], raw_result, - ) -> Union[List[SerializedGroup], List[SerializedPerson]]: + ) -> Union[list[SerializedGroup], list[SerializedPerson]]: all_session_ids = set() session_events_column_index = 2 if self.ACTOR_VALUES_INCLUDED else 1 @@ -192,9 +188,9 @@ def add_matched_recordings_to_serialized_actors( ) session_ids_with_recordings = session_ids_with_all_recordings.difference(session_ids_with_deleted_recordings) - matched_recordings_by_actor_id: Dict[Union[uuid.UUID, str], List[MatchedRecording]] = {} + matched_recordings_by_actor_id: dict[Union[uuid.UUID, str], list[MatchedRecording]] = {} for row in raw_result: - recording_events_by_session_id: Dict[str, List[EventInfoForRecording]] = {} + recording_events_by_session_id: dict[str, list[EventInfoForRecording]] = {} if len(row) > session_events_column_index - 1: for event in row[session_events_column_index]: event_session_id = event[2] @@ -211,7 +207,7 @@ def add_matched_recordings_to_serialized_actors( # Casting Union[SerializedActor, SerializedGroup] as SerializedPerson because mypy yells # when you do an indexed assignment on a Union even if all items in the Union support it - serialized_actors = cast(List[SerializedPerson], serialized_actors) + serialized_actors = cast(list[SerializedPerson], serialized_actors) serialized_actors_with_recordings = [] for actor in serialized_actors: actor["matched_recordings"] = matched_recordings_by_actor_id[actor["id"]] @@ -221,12 +217,12 @@ def add_matched_recordings_to_serialized_actors( def get_actors_from_result( self, raw_result - ) -> Tuple[ + ) -> tuple[ Union[QuerySet[Person], QuerySet[Group]], - Union[List[SerializedGroup], List[SerializedPerson]], + Union[list[SerializedGroup], list[SerializedPerson]], ]: actors: Union[QuerySet[Person], QuerySet[Group]] - serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]] + serialized_actors: Union[list[SerializedGroup], list[SerializedPerson]] actor_ids = [row[0] for row in raw_result] value_per_actor_id = {str(row[0]): row[1] for row in raw_result} if self.ACTOR_VALUES_INCLUDED else None @@ -255,9 +251,9 @@ def get_actors_from_result( def get_groups( team_id: int, group_type_index: int, - group_ids: List[Any], - value_per_actor_id: Optional[Dict[str, float]] = None, -) -> Tuple[QuerySet[Group], List[SerializedGroup]]: + group_ids: list[Any], + value_per_actor_id: Optional[dict[str, float]] = None, +) -> tuple[QuerySet[Group], list[SerializedGroup]]: """Get groups from raw SQL results in data model and dict formats""" groups: QuerySet[Group] = Group.objects.filter( team_id=team_id, group_type_index=group_type_index, group_key__in=group_ids @@ -267,10 +263,10 @@ def get_groups( def get_people( team: Team, - people_ids: List[Any], - value_per_actor_id: Optional[Dict[str, float]] = None, + people_ids: list[Any], + value_per_actor_id: Optional[dict[str, float]] = None, distinct_id_limit=1000, -) -> Tuple[QuerySet[Person], 
List[SerializedPerson]]: +) -> tuple[QuerySet[Person], list[SerializedPerson]]: """Get people from raw SQL results in data model and dict formats""" distinct_id_subquery = Subquery( PersonDistinctId.objects.filter(person_id=OuterRef("person_id")).values_list("id", flat=True)[ @@ -294,9 +290,9 @@ def get_people( def serialize_people( team: Team, - data: Union[QuerySet[Person], List[Person]], - value_per_actor_id: Optional[Dict[str, float]] = None, -) -> List[SerializedPerson]: + data: Union[QuerySet[Person], list[Person]], + value_per_actor_id: Optional[dict[str, float]] = None, +) -> list[SerializedPerson]: from posthog.api.person import get_person_name return [ @@ -316,7 +312,7 @@ def serialize_people( ] -def serialize_groups(data: QuerySet[Group], value_per_actor_id: Optional[Dict[str, float]]) -> List[SerializedGroup]: +def serialize_groups(data: QuerySet[Group], value_per_actor_id: Optional[dict[str, float]]) -> list[SerializedGroup]: return [ SerializedGroup( id=group.group_key, diff --git a/posthog/queries/app_metrics/historical_exports.py b/posthog/queries/app_metrics/historical_exports.py index cbf22d480156b..5fd32a06ec2da 100644 --- a/posthog/queries/app_metrics/historical_exports.py +++ b/posthog/queries/app_metrics/historical_exports.py @@ -1,6 +1,6 @@ import json from datetime import timedelta -from typing import Dict, Optional +from typing import Optional from zoneinfo import ZoneInfo @@ -26,7 +26,7 @@ def historical_exports_activity(team_id: int, plugin_config_id: int, job_id: Opt **({"detail__trigger__job_id": job_id} if job_id is not None else {}), ) - by_category: Dict = {"job_triggered": {}, "export_success": {}, "export_fail": {}} + by_category: dict = {"job_triggered": {}, "export_success": {}, "export_fail": {}} for entry in entries: by_category[entry.activity][entry.detail["trigger"]["job_id"]] = entry diff --git a/posthog/queries/app_metrics/test/test_app_metrics.py b/posthog/queries/app_metrics/test/test_app_metrics.py index e6c50b08ae525..2368961b507cb 100644 --- a/posthog/queries/app_metrics/test/test_app_metrics.py +++ b/posthog/queries/app_metrics/test/test_app_metrics.py @@ -1,6 +1,6 @@ import json from datetime import datetime -from typing import Dict, Optional +from typing import Optional from freezegun.api import freeze_time @@ -34,7 +34,7 @@ def create_app_metric( failures=0, error_uuid: Optional[str] = None, error_type: Optional[str] = None, - error_details: Optional[Dict] = None, + error_details: Optional[dict] = None, ): timestamp = cast_timestamp_or_now(timestamp) data = { diff --git a/posthog/queries/base.py b/posthog/queries/base.py index 7dff88f602099..e5cf6e717444b 100644 --- a/posthog/queries/base.py +++ b/posthog/queries/base.py @@ -3,14 +3,12 @@ import re from typing import ( Any, - Callable, - Dict, - List, Optional, TypeVar, Union, cast, ) +from collections.abc import Callable from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta from dateutil import parser @@ -47,7 +45,7 @@ def determine_compared_filter(filter: FilterType) -> FilterType: return filter.shallow_clone({"date_from": date_from.isoformat(), "date_to": date_to.isoformat()}) -def convert_to_comparison(trend_entities: List[Dict[str, Any]], filter, label: str) -> List[Dict[str, Any]]: +def convert_to_comparison(trend_entities: list[dict[str, Any]], filter, label: str) -> list[dict[str, Any]]: for entity in trend_entities: labels = [ "{} {}".format(filter.interval if filter.interval is not None else "day", i) @@ -72,7 +70,7 @@ def 
convert_to_comparison(trend_entities: List[Dict[str, Any]], filter, label: s """ -def handle_compare(filter, func: Callable, team: Team, **kwargs) -> List: +def handle_compare(filter, func: Callable, team: Team, **kwargs) -> list: all_entities = [] base_entitites = func(filter=filter, team=team, **kwargs) if filter.compare: @@ -88,7 +86,7 @@ def handle_compare(filter, func: Callable, team: Team, **kwargs) -> List: return all_entities -def match_property(property: Property, override_property_values: Dict[str, Any]) -> bool: +def match_property(property: Property, override_property_values: dict[str, Any]) -> bool: # only looks for matches where key exists in override_property_values # doesn't support operator is_not_set @@ -276,8 +274,8 @@ def lookup_q(key: str, value: Any) -> Q: def property_to_Q( team_id: int, property: Property, - override_property_values: Optional[Dict[str, Any]] = None, - cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, + override_property_values: Optional[dict[str, Any]] = None, + cohorts_cache: Optional[dict[int, CohortOrEmpty]] = None, using_database: str = "default", ) -> Q: if override_property_values is None: @@ -382,8 +380,8 @@ def property_to_Q( def property_group_to_Q( team_id: int, property_group: PropertyGroup, - override_property_values: Optional[Dict[str, Any]] = None, - cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, + override_property_values: Optional[dict[str, Any]] = None, + cohorts_cache: Optional[dict[int, CohortOrEmpty]] = None, using_database: str = "default", ) -> Q: if override_property_values is None: @@ -426,9 +424,9 @@ def property_group_to_Q( def properties_to_Q( team_id: int, - properties: List[Property], - override_property_values: Optional[Dict[str, Any]] = None, - cohorts_cache: Optional[Dict[int, CohortOrEmpty]] = None, + properties: list[Property], + override_property_values: Optional[dict[str, Any]] = None, + cohorts_cache: Optional[dict[int, CohortOrEmpty]] = None, using_database: str = "default", ) -> Q: """ diff --git a/posthog/queries/breakdown_props.py b/posthog/queries/breakdown_props.py index fffb0aef0f2f0..96cf6afa9596c 100644 --- a/posthog/queries/breakdown_props.py +++ b/posthog/queries/breakdown_props.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Optional, Union, cast from django.forms import ValidationError @@ -50,7 +50,7 @@ def get_breakdown_prop_values( column_optimizer: Optional[ColumnOptimizer] = None, person_properties_mode: PersonPropertiesMode = PersonPropertiesMode.USING_PERSON_PROPERTIES_COLUMN, use_all_funnel_entities: bool = False, -) -> Tuple[List[Any], bool]: +) -> tuple[list[Any], bool]: """ Returns the top N breakdown prop values for event/person breakdown @@ -77,13 +77,13 @@ def get_breakdown_prop_values( props_to_filter = filter.property_groups person_join_clauses = "" - person_join_params: Dict = {} + person_join_params: dict = {} groups_join_clause = "" - groups_join_params: Dict = {} + groups_join_params: dict = {} sessions_join_clause = "" - sessions_join_params: Dict = {} + sessions_join_params: dict = {} null_person_filter = ( f"AND notEmpty(e.person_id)" if team.person_on_events_mode != PersonsOnEventsMode.disabled else "" @@ -248,14 +248,14 @@ def get_breakdown_prop_values( def _to_value_expression( breakdown_type: Optional[BREAKDOWN_TYPES], - breakdown: Union[str, List[Union[str, int]], None], + breakdown: Union[str, list[Union[str, int]], None], breakdown_group_type_index: Optional[GroupTypeIndex], hogql_context: 
HogQLContext, breakdown_normalize_url: bool = False, direct_on_events: bool = False, cast_as_float: bool = False, -) -> Tuple[str, Dict]: - params: Dict[str, Any] = {} +) -> tuple[str, dict]: + params: dict[str, Any] = {} if breakdown_type == "session": if breakdown == "$session_duration": # Return the session duration expression right away because it's already an number, @@ -321,7 +321,7 @@ def _to_bucketing_expression(bin_count: int) -> str: return f"arrayCompact(arrayMap(x -> floor(x, 2), {qunatile_expression}))" -def _format_all_query(team: Team, filter: Filter, **kwargs) -> Tuple[str, Dict]: +def _format_all_query(team: Team, filter: Filter, **kwargs) -> tuple[str, dict]: entity = kwargs.pop("entity", None) date_params = {} @@ -354,7 +354,7 @@ def _format_all_query(team: Team, filter: Filter, **kwargs) -> Tuple[str, Dict]: return query, {**date_params, **prop_filter_params} -def format_breakdown_cohort_join_query(team: Team, filter: Filter, **kwargs) -> Tuple[str, List, Dict]: +def format_breakdown_cohort_join_query(team: Team, filter: Filter, **kwargs) -> tuple[str, list, dict]: entity = kwargs.pop("entity", None) cohorts = ( Cohort.objects.filter(team_id=team.pk, pk__in=[b for b in filter.breakdown if b != "all"]) @@ -371,9 +371,9 @@ def format_breakdown_cohort_join_query(team: Team, filter: Filter, **kwargs) -> return " UNION ALL ".join(cohort_queries), ids, params -def _parse_breakdown_cohorts(cohorts: List[Cohort], hogql_context: HogQLContext) -> Tuple[List[str], Dict]: +def _parse_breakdown_cohorts(cohorts: list[Cohort], hogql_context: HogQLContext) -> tuple[list[str], dict]: queries = [] - params: Dict[str, Any] = {} + params: dict[str, Any] = {} for idx, cohort in enumerate(cohorts): person_id_query, cohort_filter_params = format_filter_query(cohort, idx, hogql_context) diff --git a/posthog/queries/column_optimizer/foss_column_optimizer.py b/posthog/queries/column_optimizer/foss_column_optimizer.py index 98dfb1b54c418..b3e73d3178c5e 100644 --- a/posthog/queries/column_optimizer/foss_column_optimizer.py +++ b/posthog/queries/column_optimizer/foss_column_optimizer.py @@ -1,6 +1,7 @@ from collections import Counter -from typing import Counter as TCounter -from typing import Generator, List, Set, Union, cast +from collections import Counter as TCounter +from typing import Union, cast +from collections.abc import Generator from posthog.clickhouse.materialized_columns import ColumnName, get_materialized_columns from posthog.constants import TREND_FILTER_TYPE_ACTIONS, FunnelCorrelationType @@ -48,19 +49,19 @@ def __init__( self.property_optimizer = PropertyOptimizer() @cached_property - def event_columns_to_query(self) -> Set[ColumnName]: + def event_columns_to_query(self) -> set[ColumnName]: "Returns a list of event table columns containing materialized properties that this query needs" return self.columns_to_query("events", set(self.used_properties_with_type("event"))) @cached_property - def person_on_event_columns_to_query(self) -> Set[ColumnName]: + def person_on_event_columns_to_query(self) -> set[ColumnName]: "Returns a list of event table person columns containing materialized properties that this query needs" return self.columns_to_query("events", set(self.used_properties_with_type("person")), "person_properties") @cached_property - def person_columns_to_query(self) -> Set[ColumnName]: + def person_columns_to_query(self) -> set[ColumnName]: "Returns a list of person table columns containing materialized properties that this query needs" return self.columns_to_query("person", 
set(self.used_properties_with_type("person"))) @@ -68,9 +69,9 @@ def person_columns_to_query(self) -> Set[ColumnName]: def columns_to_query( self, table: TableWithProperties, - used_properties: Set[PropertyIdentifier], + used_properties: set[PropertyIdentifier], table_column: str = "properties", - ) -> Set[ColumnName]: + ) -> set[ColumnName]: "Transforms a list of property names to what columns are needed for that query" materialized_columns = get_materialized_columns(table) @@ -92,11 +93,11 @@ def is_using_cohort_propertes(self) -> bool: ) @cached_property - def group_types_to_query(self) -> Set[GroupTypeIndex]: + def group_types_to_query(self) -> set[GroupTypeIndex]: return set() @cached_property - def group_on_event_columns_to_query(self) -> Set[ColumnName]: + def group_on_event_columns_to_query(self) -> set[ColumnName]: return set() @cached_property @@ -171,7 +172,7 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]: counter += get_action_tables_and_properties(entity.get_action()) if ( - not isinstance(self.filter, (StickinessFilter, PropertiesTimelineFilter)) + not isinstance(self.filter, StickinessFilter | PropertiesTimelineFilter) and self.filter.correlation_type == FunnelCorrelationType.PROPERTIES and self.filter.correlation_property_names ): @@ -195,7 +196,7 @@ def used_properties_with_type(self, property_type: PropertyType) -> TCounter[Pro def entities_used_in_filter(self) -> Generator[Entity, None, None]: yield from self.filter.entities - yield from cast(List[Entity], self.filter.exclusions) + yield from cast(list[Entity], self.filter.exclusions) if isinstance(self.filter, RetentionFilter): yield self.filter.target_entity diff --git a/posthog/queries/event_query/event_query.py b/posthog/queries/event_query/event_query.py index 8737876d00116..49d4565a943ec 100644 --- a/posthog/queries/event_query/event_query.py +++ b/posthog/queries/event_query/event_query.py @@ -1,5 +1,5 @@ from abc import ABCMeta, abstractmethod -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from posthog.clickhouse.materialized_columns import ColumnName from posthog.models import Cohort, Filter, Property @@ -38,9 +38,9 @@ class EventQuery(metaclass=ABCMeta): _should_join_persons = False _should_join_sessions = False _should_round_interval = False - _extra_fields: List[ColumnName] - _extra_event_properties: List[PropertyName] - _extra_person_fields: List[ColumnName] + _extra_fields: list[ColumnName] + _extra_event_properties: list[PropertyName] + _extra_person_fields: list[ColumnName] _person_id_alias: str _session_id_alias: Optional[str] @@ -60,9 +60,9 @@ def __init__( should_join_persons=False, should_join_sessions=False, # Extra events/person table columns to fetch since parent query needs them - extra_fields: Optional[List[ColumnName]] = None, - extra_event_properties: Optional[List[PropertyName]] = None, - extra_person_fields: Optional[List[ColumnName]] = None, + extra_fields: Optional[list[ColumnName]] = None, + extra_event_properties: Optional[list[PropertyName]] = None, + extra_person_fields: Optional[list[ColumnName]] = None, override_aggregate_users_by_distinct_id: Optional[bool] = None, person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled, **kwargs, @@ -79,7 +79,7 @@ def __init__( self._extra_event_properties = extra_event_properties self._column_optimizer = ColumnOptimizer(self._filter, self._team_id) self._extra_person_fields = extra_person_fields - self.params: Dict[str, Any] = { + self.params: dict[str, 
Any] = { "team_id": self._team_id, "timezone": team.timezone, } @@ -118,7 +118,7 @@ def __init__( self._person_id_alias = self._get_person_id_alias(person_on_events_mode) @abstractmethod - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: pass @abstractmethod @@ -206,7 +206,7 @@ def _person_query(self) -> PersonQuery: extra_fields=self._extra_person_fields, ) - def _get_person_query(self) -> Tuple[str, Dict]: + def _get_person_query(self) -> tuple[str, dict]: if self._should_join_persons: person_query, params = self._person_query.get_query() return ( @@ -219,7 +219,7 @@ def _get_person_query(self) -> Tuple[str, Dict]: else: return "", {} - def _get_groups_query(self) -> Tuple[str, Dict]: + def _get_groups_query(self) -> tuple[str, dict]: return "", {} @cached_property @@ -232,7 +232,7 @@ def _sessions_query(self) -> SessionQuery: session_id_alias=self._session_id_alias, ) - def _get_sessions_query(self) -> Tuple[str, Dict]: + def _get_sessions_query(self) -> tuple[str, dict]: if self._should_join_sessions: session_query, session_params = self._sessions_query.get_query() @@ -246,7 +246,7 @@ def _get_sessions_query(self) -> Tuple[str, Dict]: ) return "", {} - def _get_date_filter(self) -> Tuple[str, Dict]: + def _get_date_filter(self) -> tuple[str, dict]: date_params = {} query_date_range = QueryDateRange( filter=self._filter, team=self._team, should_round=self._should_round_interval @@ -270,7 +270,7 @@ def _get_prop_groups( person_id_joined_alias="person_id", prepend="global", allow_denormalized_props=True, - ) -> Tuple[str, Dict]: + ) -> tuple[str, dict]: if not prop_group: return "", {} diff --git a/posthog/queries/foss_cohort_query.py b/posthog/queries/foss_cohort_query.py index 352fc19ee13cf..847f6737c9f3f 100644 --- a/posthog/queries/foss_cohort_query.py +++ b/posthog/queries/foss_cohort_query.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Optional, Union, cast from zoneinfo import ZoneInfo from posthog.clickhouse.materialized_columns import ColumnName @@ -26,8 +26,8 @@ from posthog.schema import PersonsOnEventsMode from posthog.utils import relative_date_parse -Relative_Date = Tuple[int, OperatorInterval] -Event = Tuple[str, Union[str, int]] +Relative_Date = tuple[int, OperatorInterval] +Event = tuple[str, Union[str, int]] INTERVAL_TO_SECONDS = { @@ -40,7 +40,7 @@ } -def relative_date_to_seconds(date: Tuple[Optional[int], Union[OperatorInterval, None]]): +def relative_date_to_seconds(date: tuple[Optional[int], Union[OperatorInterval, None]]): if date[0] is None or date[1] is None: raise ValueError("Time value and time interval must be specified") @@ -66,7 +66,7 @@ def parse_and_validate_positive_integer(value: Optional[int], value_name: str) - return parsed_value -def validate_entity(possible_event: Tuple[Optional[str], Optional[Union[int, str]]]) -> Event: +def validate_entity(possible_event: tuple[Optional[str], Optional[Union[int, str]]]) -> Event: event_type = possible_event[0] event_val = possible_event[1] if event_type is None or event_val is None: @@ -83,7 +83,7 @@ def relative_date_is_greater(date_1: Relative_Date, date_2: Relative_Date) -> bo return relative_date_to_seconds(date_1) > relative_date_to_seconds(date_2) -def convert_to_entity_params(events: List[Event]) -> Tuple[List, List]: +def convert_to_entity_params(events: list[Event]) -> tuple[list, list]: res_events = [] res_actions = [] @@ -124,8 +124,8 @@ class 
FOSSCohortQuery(EventQuery): BEHAVIOR_QUERY_ALIAS = "behavior_query" FUNNEL_QUERY_ALIAS = "funnel_query" SEQUENCE_FIELD_ALIAS = "steps" - _fields: List[str] - _events: List[str] + _fields: list[str] + _events: list[str] _earliest_time_for_event_query: Optional[Relative_Date] _restrict_event_query_by_time: bool @@ -139,9 +139,9 @@ def __init__( should_join_distinct_ids=False, should_join_persons=False, # Extra events/person table columns to fetch since parent query needs them - extra_fields: Optional[List[ColumnName]] = None, - extra_event_properties: Optional[List[PropertyName]] = None, - extra_person_fields: Optional[List[ColumnName]] = None, + extra_fields: Optional[list[ColumnName]] = None, + extra_event_properties: Optional[list[PropertyName]] = None, + extra_person_fields: Optional[list[ColumnName]] = None, override_aggregate_users_by_distinct_id: Optional[bool] = None, **kwargs, ) -> None: @@ -187,14 +187,14 @@ def _unwrap(property_group: PropertyGroup, negate_group: bool = False) -> Proper if not negate_group: return PropertyGroup( type=property_group.type, - values=[_unwrap(v) for v in cast(List[PropertyGroup], property_group.values)], + values=[_unwrap(v) for v in cast(list[PropertyGroup], property_group.values)], ) else: return PropertyGroup( type=PropertyOperatorType.AND if property_group.type == PropertyOperatorType.OR else PropertyOperatorType.OR, - values=[_unwrap(v, True) for v in cast(List[PropertyGroup], property_group.values)], + values=[_unwrap(v, True) for v in cast(list[PropertyGroup], property_group.values)], ) elif isinstance(property_group.values[0], Property): @@ -202,7 +202,7 @@ def _unwrap(property_group: PropertyGroup, negate_group: bool = False) -> Proper # if any single one is a cohort property, unwrap it into a property group # which implies converting everything else in the list into a property group too - new_property_group_list: List[PropertyGroup] = [] + new_property_group_list: list[PropertyGroup] = [] for prop in property_group.values: prop = cast(Property, prop) current_negation = prop.negation or False @@ -258,7 +258,7 @@ def _unwrap(property_group: PropertyGroup, negate_group: bool = False) -> Proper return filter.shallow_clone({"properties": new_props.to_dict()}) # Implemented in /ee - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: if not self._outer_property_groups: # everything is pushed down, no behavioral stuff to do # thus, use personQuery directly @@ -294,7 +294,7 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: return final_query, self.params - def _build_sources(self, subq: List[Tuple[str, str]]) -> Tuple[str, str]: + def _build_sources(self, subq: list[tuple[str, str]]) -> tuple[str, str]: q = "" filtered_queries = [(q, alias) for (q, alias) in subq if q and len(q)] @@ -325,7 +325,7 @@ def _build_sources(self, subq: List[Tuple[str, str]]) -> Tuple[str, str]: return q, fields - def _get_behavior_subquery(self) -> Tuple[str, Dict[str, Any], str]: + def _get_behavior_subquery(self) -> tuple[str, dict[str, Any], str]: # # Get the subquery for the cohort query. 
# @@ -371,7 +371,7 @@ def _get_behavior_subquery(self) -> Tuple[str, Dict[str, Any], str]: return query, params, self.BEHAVIOR_QUERY_ALIAS - def _get_persons_query(self, prepend: str = "") -> Tuple[str, Dict[str, Any], str]: + def _get_persons_query(self, prepend: str = "") -> tuple[str, dict[str, Any], str]: query, params = "", {} if self._should_join_persons: person_query, person_params = self._person_query.get_query(prepend=prepend) @@ -387,9 +387,9 @@ def should_pushdown_persons(self) -> bool: prop.type for prop in getattr(self._outer_property_groups, "flat", []) ] and "static-cohort" not in [prop.type for prop in getattr(self._outer_property_groups, "flat", [])] - def _get_date_condition(self) -> Tuple[str, Dict[str, Any]]: + def _get_date_condition(self) -> tuple[str, dict[str, Any]]: date_query = "" - date_params: Dict[str, Any] = {} + date_params: dict[str, Any] = {} earliest_time_param = f"earliest_time_{self._cohort_pk}" if self._earliest_time_for_event_query and self._restrict_event_query_by_time: @@ -404,7 +404,7 @@ def _check_earliest_date(self, relative_date: Relative_Date) -> None: elif relative_date_is_greater(relative_date, self._earliest_time_for_event_query): self._earliest_time_for_event_query = relative_date - def _get_conditions(self) -> Tuple[str, Dict[str, Any]]: + def _get_conditions(self) -> tuple[str, dict[str, Any]]: def build_conditions(prop: Optional[Union[PropertyGroup, Property]], prepend="level", num=0): if not prop: return "", {} @@ -426,9 +426,9 @@ def build_conditions(prop: Optional[Union[PropertyGroup, Property]], prepend="le return f"AND ({conditions})" if conditions else "", params # Implemented in /ee - def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: res: str = "" - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if prop.type == "behavioral": if prop.value == "performed_event": @@ -446,7 +446,7 @@ def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> return res, params - def get_person_condition(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_person_condition(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: if self._outer_property_groups and len(self._outer_property_groups.flat): return prop_filter_json_extract( prop, @@ -459,7 +459,7 @@ def get_person_condition(self, prop: Property, prepend: str, idx: int) -> Tuple[ else: return "", {} - def get_static_cohort_condition(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_static_cohort_condition(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: # If we reach this stage, it means there are no cyclic dependencies # They should've been caught by API update validation # and if not there, `simplifyFilter` would've failed @@ -467,8 +467,8 @@ def get_static_cohort_condition(self, prop: Property, prepend: str, idx: int) -> query, params = format_static_cohort_query(cohort, idx, prepend) return f"id {'NOT' if prop.negation else ''} IN ({query})", params - def _get_entity_event_filters(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: - params: Dict[str, Any] = {} + def _get_entity_event_filters(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: + params: dict[str, Any] = {} if prop.event_filters: prop_query, prop_params = 
parse_prop_grouped_clauses( @@ -491,7 +491,7 @@ def _get_relative_interval_from_explicit_date(self, datetime: datetime, timezone # one extra day for any partial days return (delta.days + 1, "day") - def _get_entity_datetime_filters(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def _get_entity_datetime_filters(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: if prop.explicit_datetime: # Explicit datetime filter, can be a relative or absolute date, follows same convention # as all analytics datetime filters @@ -512,7 +512,7 @@ def _get_entity_datetime_filters(self, prop: Property, prepend: str, idx: int) - return f"timestamp > now() - INTERVAL %({date_param})s {date_interval}", {f"{date_param}": date_value} - def get_performed_event_condition(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_performed_event_condition(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: event = (prop.event_type, prop.key) column_name = f"performed_event_condition_{prepend}_{idx}" @@ -530,7 +530,7 @@ def get_performed_event_condition(self, prop: Property, prepend: str, idx: int) **entity_filters_params, } - def get_performed_event_multiple(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: + def get_performed_event_multiple(self, prop: Property, prepend: str, idx: int) -> tuple[str, dict[str, Any]]: event = (prop.event_type, prop.key) column_name = f"performed_event_multiple_condition_{prepend}_{idx}" @@ -591,12 +591,12 @@ def _validate_negations(self) -> None: def _get_entity( self, - event: Tuple[Optional[str], Optional[Union[int, str]]], + event: tuple[Optional[str], Optional[Union[int, str]]], prepend: str, idx: int, - ) -> Tuple[str, Dict[str, Any]]: + ) -> tuple[str, dict[str, Any]]: res: str = "" - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if event[0] is None or event[1] is None: raise ValueError("Event type and key must be specified") diff --git a/posthog/queries/funnels/base.py b/posthog/queries/funnels/base.py index c4258c6f6eb9f..a6de14b050cfa 100644 --- a/posthog/queries/funnels/base.py +++ b/posthog/queries/funnels/base.py @@ -1,7 +1,7 @@ import urllib.parse import uuid from abc import ABC -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Optional, Union, cast from rest_framework.exceptions import ValidationError @@ -44,9 +44,9 @@ class ClickhouseFunnelBase(ABC): _team: Team _include_timestamp: Optional[bool] _include_preceding_timestamp: Optional[bool] - _extra_event_fields: List[ColumnName] - _extra_event_properties: List[PropertyName] - _include_properties: List[str] + _extra_event_fields: list[ColumnName] + _extra_event_properties: list[PropertyName] + _include_properties: list[str] def __init__( self, @@ -55,7 +55,7 @@ def __init__( include_timestamp: Optional[bool] = None, include_preceding_timestamp: Optional[bool] = None, base_uri: str = "/", - include_properties: Optional[List[str]] = None, + include_properties: Optional[list[str]] = None, ) -> None: self._filter = filter self._team = team @@ -92,8 +92,8 @@ def __init__( self.params.update({OFFSET: self._filter.offset}) - self._extra_event_fields: List[ColumnName] = [] - self._extra_event_properties: List[PropertyName] = [] + self._extra_event_fields: list[ColumnName] = [] + self._extra_event_properties: list[PropertyName] = [] if self._filter.include_recordings: self._extra_event_fields = ["uuid"] self._extra_event_properties = 
["$session_id", "$window_id"] @@ -111,9 +111,9 @@ def _serialize_step( self, step: Entity, count: int, - people: Optional[List[uuid.UUID]] = None, + people: Optional[list[uuid.UUID]] = None, sampling_factor: Optional[float] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: if step.type == TREND_FILTER_TYPE_ACTIONS: name = step.get_action().name else: @@ -135,7 +135,7 @@ def extra_event_fields_and_properties(self): def _update_filters(self): # format default dates - data: Dict[str, Any] = {} + data: dict[str, Any] = {} if not self._filter._date_from: data.update({"date_from": relative_date_parse("-7d", self._team.timezone_info)}) @@ -153,7 +153,7 @@ def _update_filters(self): # # Once multi property breakdown is implemented in Trends this becomes unnecessary - if isinstance(self._filter.breakdowns, List) and self._filter.breakdown_type in [ + if isinstance(self._filter.breakdowns, list) and self._filter.breakdown_type in [ "person", "event", "hogql", @@ -167,7 +167,7 @@ def _update_filters(self): "hogql", None, ]: - boxed_breakdown: List[Union[str, int]] = box_value(self._filter.breakdown) + boxed_breakdown: list[Union[str, int]] = box_value(self._filter.breakdown) data.update({"breakdown": boxed_breakdown}) for exclusion in self._filter.exclusions: @@ -270,7 +270,7 @@ def _format_results(self, results): else: return self._format_single_funnel(results[0]) - def _exec_query(self) -> List[Tuple]: + def _exec_query(self) -> list[tuple]: self._filter.team = self._team query = self.get_query() return insight_sync_execute( @@ -289,7 +289,7 @@ def _get_timestamp_outer_select(self) -> str: else: return "" - def _get_timestamp_selects(self) -> Tuple[str, str]: + def _get_timestamp_selects(self) -> tuple[str, str]: """ Returns timestamp selectors for the target step and optionally the preceding step. In the former case, always returns the timestamp for the first and last step as well. 
@@ -328,7 +328,7 @@ def _get_timestamp_selects(self) -> Tuple[str, str]: return "", "" def _get_step_times(self, max_steps: int): - conditions: List[str] = [] + conditions: list[str] = [] for i in range(1, max_steps): conditions.append( f"if(isNotNull(latest_{i}) AND latest_{i} <= latest_{i-1} + INTERVAL {self._filter.funnel_window_interval} {self._filter.funnel_window_interval_unit_ch()}, " @@ -339,7 +339,7 @@ def _get_step_times(self, max_steps: int): return f", {formatted}" if formatted else "" def _get_partition_cols(self, level_index: int, max_steps: int): - cols: List[str] = [] + cols: list[str] = [] for i in range(0, max_steps): cols.append(f"step_{i}") if i < level_index: @@ -397,7 +397,7 @@ def _get_sorting_condition(self, curr_index: int, max_steps: int): if curr_index == 1: return "1" - conditions: List[str] = [] + conditions: list[str] = [] for i in range(1, curr_index): duplicate_event = ( True @@ -444,7 +444,7 @@ def _get_inner_event_query( else: steps_conditions = self._get_steps_conditions(length=len(entities_to_use)) - all_step_cols: List[str] = [] + all_step_cols: list[str] = [] for index, entity in enumerate(entities_to_use): step_cols = self._get_step_col(entity, index, entity_name) all_step_cols.extend(step_cols) @@ -521,7 +521,7 @@ def _add_breakdown_attribution_subquery(self, inner_query: str) -> str: """ def _get_steps_conditions(self, length: int) -> str: - step_conditions: List[str] = [] + step_conditions: list[str] = [] for index in range(length): step_conditions.append(f"step_{index} = 1") @@ -531,10 +531,10 @@ def _get_steps_conditions(self, length: int) -> str: return " OR ".join(step_conditions) - def _get_step_col(self, entity: Entity, index: int, entity_name: str, step_prefix: str = "") -> List[str]: + def _get_step_col(self, entity: Entity, index: int, entity_name: str, step_prefix: str = "") -> list[str]: # step prefix is used to distinguish actual steps, and exclusion steps # without the prefix, we get the same parameter binding for both, which borks things up - step_cols: List[str] = [] + step_cols: list[str] = [] condition = self._build_step_query(entity, index, entity_name, step_prefix) step_cols.append(f"if({condition}, 1, 0) as {step_prefix}step_{index}") step_cols.append(f"if({step_prefix}step_{index} = 1, timestamp, null) as {step_prefix}latest_{index}") @@ -637,7 +637,7 @@ def _get_funnel_person_step_events(self): return "" def _get_count_columns(self, max_steps: int): - cols: List[str] = [] + cols: list[str] = [] for i in range(max_steps): cols.append(f"countIf(steps = {i + 1}) step_{i + 1}") @@ -680,7 +680,7 @@ def _get_matching_events(self, max_steps: int): return "" def _get_step_time_avgs(self, max_steps: int, inner_query: bool = False): - conditions: List[str] = [] + conditions: list[str] = [] for i in range(1, max_steps): conditions.append( f"avg(step_{i}_conversion_time) step_{i}_average_conversion_time_inner" @@ -692,7 +692,7 @@ def _get_step_time_avgs(self, max_steps: int, inner_query: bool = False): return f", {formatted}" if formatted else "" def _get_step_time_median(self, max_steps: int, inner_query: bool = False): - conditions: List[str] = [] + conditions: list[str] = [] for i in range(1, max_steps): conditions.append( f"median(step_{i}_conversion_time) step_{i}_median_conversion_time_inner" @@ -720,9 +720,9 @@ def get_step_counts_query(self) -> str: def get_step_counts_without_aggregation_query(self) -> str: raise NotImplementedError() - def _get_breakdown_select_prop(self) -> Tuple[str, Dict[str, Any]]: + def 
_get_breakdown_select_prop(self) -> tuple[str, dict[str, Any]]: basic_prop_selector = "" - basic_prop_params: Dict[str, Any] = {} + basic_prop_params: dict[str, Any] = {} if not self._filter.breakdown: return basic_prop_selector, basic_prop_params @@ -837,7 +837,7 @@ def _get_cohort_breakdown_join(self) -> str: ON events.distinct_id = cohort_join.distinct_id """ - def _get_breakdown_conditions(self) -> Optional[List[str]]: + def _get_breakdown_conditions(self) -> Optional[list[str]]: """ For people, pagination sets the offset param, which is common across filters and gives us the wrong breakdown values here, so we override it. diff --git a/posthog/queries/funnels/funnel.py b/posthog/queries/funnels/funnel.py index e1ac23f00d637..c72a7f1608ea6 100644 --- a/posthog/queries/funnels/funnel.py +++ b/posthog/queries/funnels/funnel.py @@ -1,4 +1,4 @@ -from typing import List, cast +from typing import cast from posthog.queries.funnels.base import ClickhouseFunnelBase @@ -74,7 +74,7 @@ def get_step_counts_without_aggregation_query(self): """ def _get_comparison_at_step(self, index: int, level_index: int): - or_statements: List[str] = [] + or_statements: list[str] = [] for i in range(level_index, index + 1): or_statements.append(f"latest_{i} < latest_{level_index - 1}") @@ -86,7 +86,7 @@ def get_comparison_cols(self, level_index: int, max_steps: int): level_index: The current smallest comparison step. Everything before level index is already at the minimum ordered timestamps. """ - cols: List[str] = [] + cols: list[str] = [] for i in range(0, max_steps): cols.append(f"step_{i}") if i < level_index: diff --git a/posthog/queries/funnels/funnel_event_query.py b/posthog/queries/funnels/funnel_event_query.py index 2c8ad72524f70..9f0ad134257e8 100644 --- a/posthog/queries/funnels/funnel_event_query.py +++ b/posthog/queries/funnels/funnel_event_query.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Set, Tuple, Union +from typing import Any, Union from posthog.constants import TREND_FILTER_TYPE_ACTIONS from posthog.hogql.hogql import translate_hogql @@ -17,7 +17,7 @@ def get_query( entities=None, entity_name="events", skip_entity_filter=False, - ) -> Tuple[str, Dict[str, Any]]: + ) -> tuple[str, dict[str, Any]]: # Aggregating by group if self._filter.aggregation_group_type_index is not None: aggregation_target = get_aggregation_target_field( @@ -81,7 +81,7 @@ def get_query( if skip_entity_filter: entity_query = "" - entity_params: Dict[str, Any] = {} + entity_params: dict[str, Any] = {} else: entity_query, entity_params = self._get_entity_query(entities, entity_name) @@ -145,8 +145,8 @@ def _determine_should_join_persons(self) -> None: if self._person_on_events_mode != PersonsOnEventsMode.disabled: self._should_join_persons = False - def _get_entity_query(self, entities=None, entity_name="events") -> Tuple[str, Dict[str, Any]]: - events: Set[Union[int, str, None]] = set() + def _get_entity_query(self, entities=None, entity_name="events") -> tuple[str, dict[str, Any]]: + events: set[Union[int, str, None]] = set() entities_to_use = entities or self._filter.entities for entity in entities_to_use: diff --git a/posthog/queries/funnels/funnel_persons.py b/posthog/queries/funnels/funnel_persons.py index 5cebef5fb7dcd..c221727866e3a 100644 --- a/posthog/queries/funnels/funnel_persons.py +++ b/posthog/queries/funnels/funnel_persons.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.models.filters.filter import Filter from posthog.models.filters.mixins.utils import 
cached_property @@ -18,7 +18,7 @@ def aggregation_group_type_index(self): def actor_query( self, limit_actors: Optional[bool] = True, - extra_fields: Optional[List[str]] = None, + extra_fields: Optional[list[str]] = None, ): extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or [])) return ( diff --git a/posthog/queries/funnels/funnel_strict.py b/posthog/queries/funnels/funnel_strict.py index 38b5d3a4c6a09..cb9f97d191870 100644 --- a/posthog/queries/funnels/funnel_strict.py +++ b/posthog/queries/funnels/funnel_strict.py @@ -1,5 +1,3 @@ -from typing import List - from posthog.queries.funnels.base import ClickhouseFunnelBase @@ -57,7 +55,7 @@ def get_step_counts_without_aggregation_query(self): return formatted_query def _get_partition_cols(self, level_index: int, max_steps: int): - cols: List[str] = [] + cols: list[str] = [] for i in range(0, max_steps): cols.append(f"step_{i}") if i < level_index: diff --git a/posthog/queries/funnels/funnel_strict_persons.py b/posthog/queries/funnels/funnel_strict_persons.py index cca6f8e598dc8..2ad13822f5464 100644 --- a/posthog/queries/funnels/funnel_strict_persons.py +++ b/posthog/queries/funnels/funnel_strict_persons.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.models.filters.filter import Filter from posthog.models.filters.mixins.utils import cached_property @@ -18,7 +18,7 @@ def aggregation_group_type_index(self): def actor_query( self, limit_actors: Optional[bool] = True, - extra_fields: Optional[List[str]] = None, + extra_fields: Optional[list[str]] = None, ): extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or [])) return ( diff --git a/posthog/queries/funnels/funnel_trends.py b/posthog/queries/funnels/funnel_trends.py index d67b24ae78bbc..cb8ecbe7c8227 100644 --- a/posthog/queries/funnels/funnel_trends.py +++ b/posthog/queries/funnels/funnel_trends.py @@ -1,6 +1,6 @@ from datetime import datetime from itertools import groupby -from typing import List, Optional, Tuple +from typing import Optional from posthog.models.cohort import Cohort from posthog.models.filters.filter import Filter @@ -147,7 +147,7 @@ def get_query(self) -> str: return query - def get_steps_reached_conditions(self) -> Tuple[str, str, str]: + def get_steps_reached_conditions(self) -> tuple[str, str, str]: # How many steps must have been done to count for the denominator of a funnel trends data point from_step = self._filter.funnel_from_step or 0 # How many steps must have been done to count for the numerator of a funnel trends data point @@ -180,7 +180,7 @@ def _summarize_data(self, results): if breakdown_clause: if isinstance(period_row[-1], str) or ( - isinstance(period_row[-1], List) and all(isinstance(item, str) for item in period_row[-1]) + isinstance(period_row[-1], list) and all(isinstance(item, str) for item in period_row[-1]) ): serialized_result.update({"breakdown_value": (period_row[-1])}) else: diff --git a/posthog/queries/funnels/funnel_unordered.py b/posthog/queries/funnels/funnel_unordered.py index ac3a6d939b09f..ee984b9462a75 100644 --- a/posthog/queries/funnels/funnel_unordered.py +++ b/posthog/queries/funnels/funnel_unordered.py @@ -1,5 +1,5 @@ import uuid -from typing import Any, Dict, List, Optional, cast +from typing import Any, Optional, cast from rest_framework.exceptions import ValidationError @@ -40,9 +40,9 @@ def _serialize_step( self, step: Entity, count: int, - people: Optional[List[uuid.UUID]] = None, + people: 
Optional[list[uuid.UUID]] = None, sampling_factor: Optional[float] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: return { "action_id": None, "name": f"Completed {step.index+1} step{'s' if step.index != 0 else ''}", @@ -119,7 +119,7 @@ def get_step_counts_without_aggregation_query(self): return " UNION ALL ".join(union_queries) def _get_step_times(self, max_steps: int): - conditions: List[str] = [] + conditions: list[str] = [] conversion_times_elements = [] for i in range(max_steps): @@ -146,7 +146,7 @@ def get_sorting_condition(self, max_steps: int): conditions.append(f"arraySort([{','.join(event_times_elements)}]) as event_times") # replacement of latest_i for whatever query part requires it, just like conversion_times - basic_conditions: List[str] = [] + basic_conditions: list[str] = [] for i in range(1, max_steps): basic_conditions.append( f"if(latest_0 < latest_{i} AND latest_{i} <= latest_0 + INTERVAL {self._filter.funnel_window_interval} {self._filter.funnel_window_interval_unit_ch()}, 1, 0)" diff --git a/posthog/queries/funnels/funnel_unordered_persons.py b/posthog/queries/funnels/funnel_unordered_persons.py index 334798c990208..fc1e953bfb58e 100644 --- a/posthog/queries/funnels/funnel_unordered_persons.py +++ b/posthog/queries/funnels/funnel_unordered_persons.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from posthog.models.filters.filter import Filter from posthog.models.filters.mixins.utils import cached_property @@ -25,7 +25,7 @@ def _get_funnel_person_step_events(self): def actor_query( self, limit_actors: Optional[bool] = True, - extra_fields: Optional[List[str]] = None, + extra_fields: Optional[list[str]] = None, ): extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or [])) return ( diff --git a/posthog/queries/funnels/test/breakdown_cases.py b/posthog/queries/funnels/test/breakdown_cases.py index b38384c745e90..2bc977c974afc 100644 --- a/posthog/queries/funnels/test/breakdown_cases.py +++ b/posthog/queries/funnels/test/breakdown_cases.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from datetime import datetime from string import ascii_lowercase -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, Literal, Optional, Union from posthog.constants import INSIGHT_FUNNELS from posthog.models.cohort import Cohort @@ -20,7 +20,7 @@ class FunnelStepResult: name: str count: int - breakdown: Union[List[str], str] + breakdown: Union[list[str], str] average_conversion_time: Optional[float] = None median_conversion_time: Optional[float] = None type: Literal["events", "actions"] = "events" @@ -35,8 +35,8 @@ def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None): return [val["id"] for val in serialized_result] - def _assert_funnel_breakdown_result_is_correct(self, result, steps: List[FunnelStepResult]): - def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: + def _assert_funnel_breakdown_result_is_correct(self, result, steps: list[FunnelStepResult]): + def funnel_result(step: FunnelStepResult, order: int) -> dict[str, Any]: return { "action_id": step.name if step.type == "events" else step.action_id, "name": step.name, @@ -2646,11 +2646,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): return TestFunnelBreakdown -def sort_breakdown_funnel_results(results: List[Dict[int, Any]]): +def sort_breakdown_funnel_results(results: list[dict[int, Any]]): return sorted(results, key=lambda r: 
r[0]["breakdown_value"]) -def assert_funnel_results_equal(left: List[Dict[str, Any]], right: List[Dict[str, Any]]): +def assert_funnel_results_equal(left: list[dict[str, Any]], right: list[dict[str, Any]]): """ Helper to be able to compare two funnel results, but exclude people urls from the comparison, as these include: @@ -2660,7 +2660,7 @@ def assert_funnel_results_equal(left: List[Dict[str, Any]], right: List[Dict[str 2. contain timestamps which are not stable across runs """ - def _filter(steps: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def _filter(steps: list[dict[str, Any]]) -> list[dict[str, Any]]: return [{**step, "converted_people_url": None, "dropped_people_url": None} for step in steps] assert len(left) == len(right) diff --git a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py index 7994b195fca94..800cd9f46dca0 100644 --- a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py +++ b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, Optional +from typing import Optional from posthog.models import Filter from posthog.queries.funnels import ClickhouseFunnel @@ -115,7 +115,7 @@ def setUp(self): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None): + def _run(self, extra: Optional[dict] = None, events_extra: Optional[dict] = None): if events_extra is None: events_extra = {} if extra is None: diff --git a/posthog/queries/funnels/utils.py b/posthog/queries/funnels/utils.py index 68f93c2d4542e..b2c0df300ce8c 100644 --- a/posthog/queries/funnels/utils.py +++ b/posthog/queries/funnels/utils.py @@ -1,11 +1,9 @@ -from typing import Type - from posthog.constants import FunnelOrderType from posthog.models.filters import Filter from posthog.queries.funnels import ClickhouseFunnelBase -def get_funnel_order_class(filter: Filter) -> Type[ClickhouseFunnelBase]: +def get_funnel_order_class(filter: Filter) -> type[ClickhouseFunnelBase]: from posthog.queries.funnels import ( ClickhouseFunnel, ClickhouseFunnelStrict, diff --git a/posthog/queries/groups_join_query/groups_join_query.py b/posthog/queries/groups_join_query/groups_join_query.py index 2cc62849cacc3..6499d39ce1e94 100644 --- a/posthog/queries/groups_join_query/groups_join_query.py +++ b/posthog/queries/groups_join_query/groups_join_query.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, Tuple, Union +from typing import Optional, Union from posthog.models import Filter from posthog.models.filters.path_filter import PathFilter @@ -31,5 +31,5 @@ def __init__( self._join_key = join_key self._person_on_events_mode = person_on_events_mode - def get_join_query(self) -> Tuple[str, Dict]: + def get_join_query(self) -> tuple[str, dict]: return "", {} diff --git a/posthog/queries/paths/paths.py b/posthog/queries/paths/paths.py index 6a98857e3927d..21438ee6ea79f 100644 --- a/posthog/queries/paths/paths.py +++ b/posthog/queries/paths/paths.py @@ -1,6 +1,6 @@ import dataclasses from collections import defaultdict -from typing import Dict, List, Literal, Optional, Tuple, Union, cast +from typing import Literal, Optional, Union, cast from rest_framework.exceptions import ValidationError @@ -35,8 +35,8 @@ class Paths: _filter: PathFilter _funnel_filter: Optional[Filter] _team: Team - _extra_event_fields: List[ColumnName] - _extra_event_properties: List[PropertyName] + 
_extra_event_fields: list[ColumnName] + _extra_event_properties: list[PropertyName] def __init__(self, filter: PathFilter, team: Team, funnel_filter: Optional[Filter] = None) -> None: self._filter = filter @@ -50,8 +50,8 @@ def __init__(self, filter: PathFilter, team: Team, funnel_filter: Optional[Filte } self._funnel_filter = funnel_filter - self._extra_event_fields: List[ColumnName] = [] - self._extra_event_properties: List[PropertyName] = [] + self._extra_event_fields: list[ColumnName] = [] + self._extra_event_properties: list[PropertyName] = [] if self._filter.include_recordings: self._extra_event_fields = ["uuid", "timestamp"] self._extra_event_properties = ["$session_id", "$window_id"] @@ -93,7 +93,7 @@ def _format_results(self, results): ) return resp - def _exec_query(self) -> List[Tuple]: + def _exec_query(self) -> list[tuple]: query = self.get_query() return insight_sync_execute( query, @@ -225,7 +225,7 @@ def get_path_query_funnel_cte(self, funnel_filter: Filter): return "", {} # Implemented in /ee - def get_edge_weight_clause(self) -> Tuple[str, Dict]: + def get_edge_weight_clause(self) -> tuple[str, dict]: return "", {} # Implemented in /ee @@ -240,8 +240,8 @@ def get_session_threshold_clause(self) -> str: return "arraySplit(x -> if(x.3 < %(session_time_threshold)s, 0, 1), paths_tuple)" # Implemented in /ee - def get_target_clause(self) -> Tuple[str, Dict]: - params: Dict[str, Union[str, None]] = { + def get_target_clause(self) -> tuple[str, dict]: + params: dict[str, Union[str, None]] = { "target_point": None, "secondary_target_point": None, } @@ -276,7 +276,7 @@ def get_array_compacting_function(self) -> Literal["arrayResize", "arraySlice"]: return "arraySlice" # Implemented in /ee - def get_filtered_path_ordering(self) -> Tuple[str, ...]: + def get_filtered_path_ordering(self) -> tuple[str, ...]: fields_to_include = ["filtered_path", "filtered_timings"] + [ f"filtered_{field}s" for field in self.extra_event_fields_and_properties ] diff --git a/posthog/queries/paths/paths_actors.py b/posthog/queries/paths/paths_actors.py index e39a01dfee34c..ec739271795e0 100644 --- a/posthog/queries/paths/paths_actors.py +++ b/posthog/queries/paths/paths_actors.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, Tuple, cast +from typing import Optional, cast from posthog.models.filters.filter import Filter from posthog.queries.actor_base_query import ActorBaseQuery @@ -27,7 +27,7 @@ class PathsActors(Paths, ActorBaseQuery): # type: ignore QUERY_TYPE = "paths" - def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: + def actor_query(self, limit_actors: Optional[bool] = True) -> tuple[str, dict]: paths_per_person_query = self.get_paths_per_person_query() person_path_filter = self.get_person_path_filter() paths_funnel_cte = "" diff --git a/posthog/queries/paths/paths_event_query.py b/posthog/queries/paths/paths_event_query.py index 61b032aa663ec..31241cea64919 100644 --- a/posthog/queries/paths/paths_event_query.py +++ b/posthog/queries/paths/paths_event_query.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Tuple +from typing import Any from posthog.constants import ( FUNNEL_PATH_AFTER_STEP, @@ -21,7 +21,7 @@ class PathEventQuery(EventQuery): FUNNEL_PERSONS_ALIAS = "funnel_actors" _filter: PathFilter - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: funnel_paths_timestamp = "" funnel_paths_join = "" funnel_paths_filter = "" @@ -151,7 +151,7 @@ def _determine_should_join_persons(self) -> None: if 
self._person_on_events_mode != PersonsOnEventsMode.disabled: self._should_join_persons = False - def _get_grouping_fields(self) -> Tuple[List[str], Dict[str, Any]]: + def _get_grouping_fields(self) -> tuple[list[str], dict[str, Any]]: _fields = [] params = {} @@ -188,8 +188,8 @@ def _get_grouping_fields(self) -> Tuple[List[str], Dict[str, Any]]: return _fields, params - def _get_event_query(self, deep_filtering: bool) -> Tuple[str, Dict[str, Any]]: - params: Dict[str, Any] = {} + def _get_event_query(self, deep_filtering: bool) -> tuple[str, dict[str, Any]]: + params: dict[str, Any] = {} conditions = [] or_conditions = [] diff --git a/posthog/queries/person_query.py b/posthog/queries/person_query.py index cffcce890c80b..b785fcb7442e0 100644 --- a/posthog/queries/person_query.py +++ b/posthog/queries/person_query.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Optional, Union from uuid import UUID from posthog.clickhouse.materialized_columns import ColumnName @@ -45,7 +45,7 @@ class PersonQuery: _filter: Union[Filter, PathFilter, RetentionFilter, StickinessFilter] _team_id: int _column_optimizer: ColumnOptimizer - _extra_fields: Set[ColumnName] + _extra_fields: set[ColumnName] _inner_person_properties: Optional[PropertyGroup] _cohort: Optional[Cohort] _include_distinct_ids: Optional[bool] = False @@ -58,10 +58,10 @@ def __init__( cohort: Optional[Cohort] = None, *, entity: Optional[Entity] = None, - extra_fields: Optional[List[ColumnName]] = None, + extra_fields: Optional[list[ColumnName]] = None, # A sub-optimal version of the `cohort` parameter above, the difference being that # this supports multiple cohort filters, but is not as performant as the above. - cohort_filters: Optional[List[Property]] = None, + cohort_filters: Optional[list[Property]] = None, include_distinct_ids: Optional[bool] = False, ) -> None: self._filter = filter @@ -90,7 +90,7 @@ def get_query( prepend: Optional[Union[str, int]] = None, paginate: bool = False, filter_future_persons: bool = False, - ) -> Tuple[str, Dict]: + ) -> tuple[str, dict]: prepend = str(prepend) if prepend is not None else "" fields = "id" + " ".join( @@ -175,7 +175,7 @@ def get_query( ) @property - def fields(self) -> List[ColumnName]: + def fields(self) -> list[ColumnName]: "Returns person table fields this query exposes" return [alias for column_name, alias in self._get_fields()] @@ -194,7 +194,7 @@ def is_used(self): def _uses_person_id(self, prop: Property) -> bool: return prop.type in ("person", "static-cohort", "precalculated-cohort") - def _get_fields(self) -> List[Tuple[str, str]]: + def _get_fields(self) -> list[tuple[str, str]]: # :TRICKY: Figure out what fields we want to expose - minimizing this set is good for performance. # We use the result from column_optimizer to figure out counts of all properties to be filtered and queried. # Here, we remove the ones only to be used for filtering. 
@@ -207,7 +207,7 @@ def _get_fields(self) -> List[Tuple[str, str]]: return [(column_name, self.ALIASES.get(column_name, column_name)) for column_name in sorted(columns)] - def _get_person_filter_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]: + def _get_person_filter_clauses(self, prepend: str = "") -> tuple[str, str, dict]: finalization_conditions, params = parse_prop_grouped_clauses( self._team_id, self._inner_person_properties, @@ -231,7 +231,7 @@ def _get_person_filter_clauses(self, prepend: str = "") -> Tuple[str, str, Dict] params.update(prefiltering_params) return prefiltering_conditions, finalization_conditions, params - def _get_fast_single_cohort_clause(self) -> Tuple[str, Dict]: + def _get_fast_single_cohort_clause(self) -> tuple[str, dict]: if self._cohort: cohort_table = ( GET_STATIC_COHORTPEOPLE_BY_COHORT_ID if self._cohort.is_static else GET_COHORTPEOPLE_BY_COHORT_ID @@ -252,10 +252,10 @@ def _get_fast_single_cohort_clause(self) -> Tuple[str, Dict]: else: return "", {} - def _get_multiple_cohorts_clause(self, prepend: str = "") -> Tuple[str, Dict]: + def _get_multiple_cohorts_clause(self, prepend: str = "") -> tuple[str, dict]: if self._cohort_filters: query = [] - params: Dict[str, Any] = {} + params: dict[str, Any] = {} # TODO: doesn't support non-caclculated cohorts for index, property in enumerate(self._cohort_filters): @@ -274,7 +274,7 @@ def _get_multiple_cohorts_clause(self, prepend: str = "") -> Tuple[str, Dict]: else: return "", {} - def _get_limit_offset_clause(self) -> Tuple[str, Dict]: + def _get_limit_offset_clause(self) -> tuple[str, dict]: if not isinstance(self._filter, Filter): return "", {} @@ -295,7 +295,7 @@ def _get_limit_offset_clause(self) -> Tuple[str, Dict]: return clause, params - def _get_search_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]: + def _get_search_clauses(self, prepend: str = "") -> tuple[str, str, dict]: """ Return - respectively - the prefiltering search clause (not aggregated by is_deleted or version, which is great for memory usage), the final search clause (aggregated for true results, more expensive), and new params. 
@@ -365,7 +365,7 @@ def _get_search_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]: return "", "", {} - def _get_distinct_id_clause(self) -> Tuple[str, Dict]: + def _get_distinct_id_clause(self) -> tuple[str, dict]: if not isinstance(self._filter, Filter): return "", {} @@ -378,7 +378,7 @@ def _get_distinct_id_clause(self) -> Tuple[str, Dict]: return distinct_id_clause, {"distinct_id_filter": self._filter.distinct_id} return "", {} - def _add_distinct_id_join_if_needed(self, query: str, params: Dict[Any, Any]) -> Tuple[str, Dict[Any, Any]]: + def _add_distinct_id_join_if_needed(self, query: str, params: dict[Any, Any]) -> tuple[str, dict[Any, Any]]: if not self._include_distinct_ids: return query, params return ( @@ -395,7 +395,7 @@ def _add_distinct_id_join_if_needed(self, query: str, params: Dict[Any, Any]) -> params, ) - def _get_email_clause(self) -> Tuple[str, Dict]: + def _get_email_clause(self) -> tuple[str, dict]: if not isinstance(self._filter, Filter): return "", {} @@ -407,7 +407,7 @@ def _get_email_clause(self) -> Tuple[str, Dict]: ) return "", {} - def _get_updated_after_clause(self) -> Tuple[str, Dict]: + def _get_updated_after_clause(self) -> tuple[str, dict]: if not isinstance(self._filter, Filter): return "", {} diff --git a/posthog/queries/properties_timeline/properties_timeline.py b/posthog/queries/properties_timeline/properties_timeline.py index 34c392353098a..fe9191e5e1c15 100644 --- a/posthog/queries/properties_timeline/properties_timeline.py +++ b/posthog/queries/properties_timeline/properties_timeline.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Any, Dict, List, Set, TypedDict, Union, cast +from typing import Any, TypedDict, Union, cast from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter from posthog.models.group.group import Group @@ -18,13 +18,13 @@ class PropertiesTimelinePoint(TypedDict): timestamp: str - properties: Dict[str, Any] + properties: dict[str, Any] relevant_event_count: int class PropertiesTimelineResult(TypedDict): - points: List[PropertiesTimelinePoint] - crucial_property_keys: List[str] + points: list[PropertiesTimelinePoint] + crucial_property_keys: list[str] effective_date_from: str effective_date_to: str @@ -56,7 +56,7 @@ class PropertiesTimelineResult(TypedDict): class PropertiesTimeline: - def extract_crucial_property_keys(self, filter: PropertiesTimelineFilter) -> Set[str]: + def extract_crucial_property_keys(self, filter: PropertiesTimelineFilter) -> set[str]: is_filter_relevant = lambda property_type, property_group_type_index: ( (property_type == "person") if filter.aggregation_group_type_index is None @@ -76,7 +76,7 @@ def extract_crucial_property_keys(self, filter: PropertiesTimelineFilter) -> Set if filter.breakdown and filter.breakdown_type == "person": if isinstance(filter.breakdown, list): - crucial_property_keys.update(cast(List[str], filter.breakdown)) + crucial_property_keys.update(cast(list[str], filter.breakdown)) else: crucial_property_keys.add(filter.breakdown) diff --git a/posthog/queries/properties_timeline/properties_timeline_event_query.py b/posthog/queries/properties_timeline/properties_timeline_event_query.py index d3ca17eb70091..b5e9a87d07c82 100644 --- a/posthog/queries/properties_timeline/properties_timeline_event_query.py +++ b/posthog/queries/properties_timeline/properties_timeline_event_query.py @@ -1,5 +1,5 @@ import datetime as dt -from typing import Any, Dict, Optional, Tuple +from typing import Any, Optional from zoneinfo import ZoneInfo from 
posthog.models.entity.util import get_entity_filtering_params @@ -20,7 +20,7 @@ def __init__(self, filter: PropertiesTimelineFilter, *args, **kwargs): super().__init__(filter, *args, **kwargs) self._group_type_index = filter.aggregation_group_type_index - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: real_fields = [f"{self.EVENT_TABLE_ALIAS}.timestamp AS timestamp"] sentinel_fields = ["NULL AS timestamp"] @@ -72,8 +72,8 @@ def _determine_should_join_persons(self) -> None: def _determine_should_join_sessions(self) -> None: self._should_join_sessions = False - def _get_date_filter(self) -> Tuple[str, Dict]: - query_params: Dict[str, Any] = {} + def _get_date_filter(self) -> tuple[str, dict]: + query_params: dict[str, Any] = {} query_date_range = QueryDateRange(self._filter, self._team) effective_timezone = ZoneInfo(self._team.timezone) # Get effective date range from QueryDateRange @@ -92,7 +92,7 @@ def _get_date_filter(self) -> Tuple[str, Dict]: return date_filter, query_params - def _get_entity_query(self) -> Tuple[str, Dict]: + def _get_entity_query(self) -> tuple[str, dict]: entity_params, entity_format_params = get_entity_filtering_params( allowed_entities=self._filter.entities, team_id=self._team_id, diff --git a/posthog/queries/property_optimizer.py b/posthog/queries/property_optimizer.py index d69cadfe5e82b..b11be666fd684 100644 --- a/posthog/queries/property_optimizer.py +++ b/posthog/queries/property_optimizer.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import List, Optional, cast +from typing import Optional, cast from rest_framework.exceptions import ValidationError @@ -94,7 +94,7 @@ def using_only_person_properties(property_group: PropertyGroup) -> bool: elif isinstance(property_group.values[0], PropertyGroup): return all( PropertyOptimizer.using_only_person_properties(group) - for group in cast(List[PropertyGroup], property_group.values) + for group in cast(list[PropertyGroup], property_group.values) ) else: diff --git a/posthog/queries/property_values.py b/posthog/queries/property_values.py index a8b943f25d1d2..0e79d15146af6 100644 --- a/posthog/queries/property_values.py +++ b/posthog/queries/property_values.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from django.utils import timezone @@ -16,7 +16,7 @@ def get_property_values_for_key( key: str, team: Team, - event_names: Optional[List[str]] = None, + event_names: Optional[list[str]] = None, value: Optional[str] = None, ): property_field, mat_column_exists = get_property_string_expr("events", key, "%(key)s", "properties") diff --git a/posthog/queries/query_date_range.py b/posthog/queries/query_date_range.py index 2825e4e0360b0..578f2ccf041c3 100644 --- a/posthog/queries/query_date_range.py +++ b/posthog/queries/query_date_range.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta from functools import cached_property -from typing import Dict, Literal, Optional, Tuple +from typing import Literal, Optional from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta @@ -117,7 +117,7 @@ def date_from_clause(self): return self._get_timezone_aware_date_condition("date_from") @cached_property - def date_to(self) -> Tuple[str, Dict]: + def date_to(self) -> tuple[str, dict]: date_to_query = self.date_to_clause date_to = self.date_to_param @@ -129,7 +129,7 @@ def date_to(self) -> Tuple[str, Dict]: return date_to_query, date_to_param @cached_property - def date_from(self) -> Tuple[str, Dict]: + 
def date_from(self) -> tuple[str, dict]: date_from_query = self.date_from_clause date_from = self.date_from_param diff --git a/posthog/queries/retention/actors_query.py b/posthog/queries/retention/actors_query.py index 5a49c510a3240..e087b88e44fc8 100644 --- a/posthog/queries/retention/actors_query.py +++ b/posthog/queries/retention/actors_query.py @@ -1,5 +1,5 @@ import dataclasses -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from posthog.models.filters.retention_filter import RetentionFilter from posthog.models.team import Team @@ -19,7 +19,7 @@ class AppearanceRow: actor_id: str appearance_count: int # This is actually the number of days from first event to the current event. - appearances: List[float] + appearances: list[float] # Note: This class does not respect the entire flor from ActorBaseQuery because the result shape differs from other actor queries @@ -98,7 +98,7 @@ def build_actor_activity_query( selected_interval: Optional[int] = None, aggregate_users_by_distinct_id: Optional[bool] = None, retention_events_query=RetentionEventsQuery, -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: from posthog.queries.retention import ( build_returning_event_query, build_target_event_query, @@ -150,7 +150,7 @@ def _build_actor_query( filter_by_breakdown: Optional[BreakdownValues] = None, selected_interval: Optional[int] = None, retention_events_query=RetentionEventsQuery, -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: actor_activity_query, actor_activity_query_params = build_actor_activity_query( filter=filter, team=team, diff --git a/posthog/queries/retention/retention.py b/posthog/queries/retention/retention.py index 8f8b0d89254bf..d3b9f43ca5c60 100644 --- a/posthog/queries/retention/retention.py +++ b/posthog/queries/retention/retention.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from urllib.parse import urlencode from zoneinfo import ZoneInfo @@ -24,7 +24,7 @@ class Retention: def __init__(self, base_uri="/"): self._base_uri = base_uri - def run(self, filter: RetentionFilter, team: Team, *args, **kwargs) -> List[Dict[str, Any]]: + def run(self, filter: RetentionFilter, team: Team, *args, **kwargs) -> list[dict[str, Any]]: filter.team = team retention_by_breakdown = self._get_retention_by_breakdown_values(filter, team) if filter.breakdowns: @@ -34,7 +34,7 @@ def run(self, filter: RetentionFilter, team: Team, *args, **kwargs) -> List[Dict def _get_retention_by_breakdown_values( self, filter: RetentionFilter, team: Team - ) -> Dict[CohortKey, Dict[str, Any]]: + ) -> dict[CohortKey, dict[str, Any]]: actor_query, actor_query_params = build_actor_activity_query( filter=filter, team=team, retention_events_query=self.event_query ) @@ -77,7 +77,7 @@ def _construct_people_url_for_trend_breakdown_interval( ).to_params() return f"{self._base_uri}api/person/retention/?{urlencode(params)}" - def process_breakdown_table_result(self, resultset: Dict[CohortKey, Dict[str, Any]], filter: RetentionFilter): + def process_breakdown_table_result(self, resultset: dict[CohortKey, dict[str, Any]], filter: RetentionFilter): result = [ { "values": [ @@ -101,7 +101,7 @@ def process_breakdown_table_result(self, resultset: Dict[CohortKey, Dict[str, An def process_table_result( self, - resultset: Dict[CohortKey, Dict[str, Any]], + resultset: dict[CohortKey, dict[str, Any]], filter: RetentionFilter, team: Team, ): @@ -140,7 +140,7 @@ def construct_url(first_day): return result - 
def actors_in_period(self, filter: RetentionFilter, team: Team) -> Tuple[list, int]: + def actors_in_period(self, filter: RetentionFilter, team: Team) -> tuple[list, int]: """ Creates a response of the form @@ -168,7 +168,7 @@ def build_returning_event_query( aggregate_users_by_distinct_id: Optional[bool] = None, person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled, retention_events_query=RetentionEventsQuery, -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: returning_event_query_templated, returning_event_params = retention_events_query( filter=filter.shallow_clone({"breakdowns": []}), # Avoid pulling in breakdown values from returning event query team=team, @@ -186,7 +186,7 @@ def build_target_event_query( aggregate_users_by_distinct_id: Optional[bool] = None, person_on_events_mode: PersonsOnEventsMode = PersonsOnEventsMode.disabled, retention_events_query=RetentionEventsQuery, -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: target_event_query_templated, target_event_params = retention_events_query( filter=filter, team=team, diff --git a/posthog/queries/retention/retention_events_query.py b/posthog/queries/retention/retention_events_query.py index e84e4bc1e91cc..9e64b758be6e8 100644 --- a/posthog/queries/retention/retention_events_query.py +++ b/posthog/queries/retention/retention_events_query.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Literal, Optional, Tuple, Union, cast +from typing import Any, Literal, Optional, Union, cast from posthog.constants import ( PAGEVIEW_EVENT, @@ -37,7 +37,7 @@ def __init__( person_on_events_mode=person_on_events_mode, ) - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: _fields = [ self.get_timestamp_field(), self.target_field(), diff --git a/posthog/queries/retention/types.py b/posthog/queries/retention/types.py index d3f77fab7f51e..0a9e630da6a85 100644 --- a/posthog/queries/retention/types.py +++ b/posthog/queries/retention/types.py @@ -1,4 +1,4 @@ -from typing import NamedTuple, Tuple, Union +from typing import NamedTuple, Union -BreakdownValues = Tuple[Union[str, int], ...] +BreakdownValues = tuple[Union[str, int], ...] 
CohortKey = NamedTuple("CohortKey", (("breakdown_values", BreakdownValues), ("period", int))) diff --git a/posthog/queries/stickiness/stickiness.py b/posthog/queries/stickiness/stickiness.py index 50c2ff81ad987..26204b8e9964f 100644 --- a/posthog/queries/stickiness/stickiness.py +++ b/posthog/queries/stickiness/stickiness.py @@ -1,6 +1,6 @@ import copy import urllib.parse -from typing import Any, Dict, List +from typing import Any from posthog.constants import TREND_FILTER_TYPE_ACTIONS from posthog.models.action import Action @@ -19,7 +19,7 @@ class Stickiness: event_query_class = StickinessEventsQuery actor_query_class = StickinessActors - def run(self, filter: StickinessFilter, team: Team, *args, **kwargs) -> List[Dict[str, Any]]: + def run(self, filter: StickinessFilter, team: Team, *args, **kwargs) -> list[dict[str, Any]]: response = [] for entity in filter.entities: if entity.type == TREND_FILTER_TYPE_ACTIONS and entity.id is not None: @@ -29,7 +29,7 @@ def run(self, filter: StickinessFilter, team: Team, *args, **kwargs) -> List[Dic response.extend(entity_resp) return response - def stickiness(self, entity: Entity, filter: StickinessFilter, team: Team) -> Dict[str, Any]: + def stickiness(self, entity: Entity, filter: StickinessFilter, team: Team) -> dict[str, Any]: events_query, event_params = self.event_query_class( entity, filter, team, person_on_events_mode=team.person_on_events_mode ).get_query() @@ -66,8 +66,8 @@ def people( _, serialized_actors, _ = self.actor_query_class(entity=target_entity, filter=filter, team=team).get_actors() return serialized_actors - def process_result(self, counts: List, filter: StickinessFilter, entity: Entity) -> Dict[str, Any]: - response: Dict[int, int] = {} + def process_result(self, counts: list, filter: StickinessFilter, entity: Entity) -> dict[str, Any]: + response: dict[int, int] = {} for result in counts: response[result[1]] = result[0] @@ -92,8 +92,8 @@ def process_result(self, counts: List, filter: StickinessFilter, entity: Entity) "persons_urls": self._get_persons_url(filter, entity), } - def _serialize_entity(self, entity: Entity, filter: StickinessFilter, team: Team) -> List[Dict[str, Any]]: - serialized: Dict[str, Any] = { + def _serialize_entity(self, entity: Entity, filter: StickinessFilter, team: Team) -> list[dict[str, Any]]: + serialized: dict[str, Any] = { "action": entity.to_dict(), "label": entity.name, "count": 0, @@ -107,7 +107,7 @@ def _serialize_entity(self, entity: Entity, filter: StickinessFilter, team: Team response.append(new_dict) return response - def _get_persons_url(self, filter: StickinessFilter, entity: Entity) -> List[Dict[str, Any]]: + def _get_persons_url(self, filter: StickinessFilter, entity: Entity) -> list[dict[str, Any]]: persons_url = [] cache_invalidation_key = generate_short_id() for interval_idx in range(1, filter.total_intervals): @@ -119,7 +119,7 @@ def _get_persons_url(self, filter: StickinessFilter, entity: Entity) -> List[Dic "entity_math": entity.math, "entity_order": entity.order, } - parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) + parsed_params: dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) persons_url.append( { "filter": extra_params, diff --git a/posthog/queries/stickiness/stickiness_actors.py b/posthog/queries/stickiness/stickiness_actors.py index 625d3852ce536..c6c20301f2bf7 100644 --- a/posthog/queries/stickiness/stickiness_actors.py +++ b/posthog/queries/stickiness/stickiness_actors.py @@ -1,4 +1,4 @@ -from 
typing import Dict, Optional, Tuple +from typing import Optional from posthog.models.entity import Entity from posthog.models.filters.mixins.utils import cached_property @@ -22,7 +22,7 @@ def __init__(self, team: Team, entity: Entity, filter: StickinessFilter, **kwarg def aggregation_group_type_index(self): return None - def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: + def actor_query(self, limit_actors: Optional[bool] = True) -> tuple[str, dict]: events_query, event_params = self.event_query_class( entity=self.entity, filter=self._filter, diff --git a/posthog/queries/stickiness/stickiness_event_query.py b/posthog/queries/stickiness/stickiness_event_query.py index 25d68b1d6bfdf..7c8c92222ef95 100644 --- a/posthog/queries/stickiness/stickiness_event_query.py +++ b/posthog/queries/stickiness/stickiness_event_query.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Tuple +from typing import Any from posthog.constants import TREND_FILTER_TYPE_ACTIONS, PropertyOperatorType from posthog.models import Entity @@ -20,7 +20,7 @@ def __init__(self, entity: Entity, *args, **kwargs): super().__init__(*args, **kwargs) self._should_round_interval = True - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: prop_query, prop_params = self._get_prop_groups( self._filter.property_groups.combine_property_group(PropertyOperatorType.AND, self._entity.property_groups), person_properties_mode=get_person_properties_mode(self._team), @@ -95,7 +95,7 @@ def _determine_should_join_persons(self) -> None: def aggregation_target(self): return self._person_id_alias - def get_entity_query(self) -> Tuple[str, Dict[str, Any]]: + def get_entity_query(self) -> tuple[str, dict[str, Any]]: if self._entity.type == TREND_FILTER_TYPE_ACTIONS: condition, params = format_action_filter( team_id=self._team_id, diff --git a/posthog/queries/test/test_paths.py b/posthog/queries/test/test_paths.py index 45f09a9ca5787..4be8e9789810c 100644 --- a/posthog/queries/test/test_paths.py +++ b/posthog/queries/test/test_paths.py @@ -1,5 +1,4 @@ import dataclasses -from typing import Dict from dateutil.relativedelta import relativedelta from django.utils.timezone import now @@ -26,7 +25,7 @@ class MockEvent: distinct_id: str team: Team timestamp: str - properties: Dict + properties: dict class TestPaths(ClickhouseTestMixin, APIBaseTest): diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py index abb32426dd68d..333babb6ccfbf 100644 --- a/posthog/queries/test/test_trends.py +++ b/posthog/queries/test/test_trends.py @@ -1,7 +1,7 @@ import json import uuid from datetime import datetime -from typing import Dict, List, Optional, Tuple, Union +from typing import Optional, Union from unittest.mock import patch, ANY from urllib.parse import parse_qsl, urlparse @@ -56,8 +56,8 @@ from posthog.utils import generate_cache_key -def breakdown_label(entity: Entity, value: Union[str, int]) -> Dict[str, Optional[Union[str, int]]]: - ret_dict: Dict[str, Optional[Union[str, int]]] = {} +def breakdown_label(entity: Entity, value: Union[str, int]) -> dict[str, Optional[Union[str, int]]]: + ret_dict: dict[str, Optional[Union[str, int]]] = {} if not value or not isinstance(value, str) or "cohort_" not in value: label = ( value @@ -112,7 +112,7 @@ def _get_trend_people(self, filter: Filter, entity: Entity): ).json() return response["results"][0]["people"] - def _create_events(self, use_time=False) -> Tuple[Action, Person]: + def _create_events(self, 
use_time=False) -> tuple[Action, Person]: person = _create_person( team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], @@ -1788,7 +1788,7 @@ def test_trends_compare_hour_interval_relative_range(self): ], ) - def _test_events_with_dates(self, dates: List[str], result, query_time=None, **filter_params): + def _test_events_with_dates(self, dates: list[str], result, query_time=None, **filter_params): _create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"name": "John"}) for time in dates: with freeze_time(time): diff --git a/posthog/queries/time_to_see_data/hierarchy.py b/posthog/queries/time_to_see_data/hierarchy.py index b4b686b612405..260a1fad0efbb 100644 --- a/posthog/queries/time_to_see_data/hierarchy.py +++ b/posthog/queries/time_to_see_data/hierarchy.py @@ -1,6 +1,5 @@ from dataclasses import dataclass, field from enum import Enum -from typing import List class NodeType(Enum): @@ -24,7 +23,7 @@ class NodeType(Enum): class Node: type: NodeType data: dict - children: List["Node"] = field(default_factory=list) + children: list["Node"] = field(default_factory=list) def to_dict(self): return { @@ -39,7 +38,7 @@ def construct_hierarchy(session, interactions_and_events, queries) -> dict: Constructs a tree-like hierarchy for session based on interactions and queries, to expose triggered-by relationships. """ - nodes: List[Node] = [] + nodes: list[Node] = [] nodes.extend(make_empty_node(interaction_type, data) for data in interactions_and_events) nodes.extend(make_empty_node(query_type, data) for data in queries) diff --git a/posthog/queries/time_to_see_data/sessions.py b/posthog/queries/time_to_see_data/sessions.py index 8ebeeb8db36a6..709d253d5b78a 100644 --- a/posthog/queries/time_to_see_data/sessions.py +++ b/posthog/queries/time_to_see_data/sessions.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Tuple +from typing import Optional from posthog.client import query_with_columns from posthog.queries.time_to_see_data.hierarchy import construct_hierarchy @@ -58,7 +58,7 @@ def get_sessions(query: SessionsQuerySerializer) -> SessionResponseSerializer: return response_serializer -def get_session_events(query: SessionEventsQuerySerializer) -> Optional[Dict]: +def get_session_events(query: SessionEventsQuerySerializer) -> Optional[dict]: params = { "team_id": query.validated_data["team_id"], "session_id": query.validated_data["session_id"], @@ -82,12 +82,12 @@ def get_session_events(query: SessionEventsQuerySerializer) -> Optional[Dict]: return construct_hierarchy(sessions[0], events, queries) -def _fetch_sessions(query: SessionsQuerySerializer) -> List[Dict]: +def _fetch_sessions(query: SessionsQuerySerializer) -> list[dict]: condition, params = _sessions_condition(query) return query_with_columns(GET_SESSIONS.format(condition=condition), params) -def _sessions_condition(query: SessionsQuerySerializer) -> Tuple[str, Dict]: +def _sessions_condition(query: SessionsQuerySerializer) -> tuple[str, dict]: conditions = [] if "team_id" in query.validated_data: diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py index 444f045384a14..0f06984bac083 100644 --- a/posthog/queries/trends/breakdown.py +++ b/posthog/queries/trends/breakdown.py @@ -2,7 +2,8 @@ import re import urllib.parse from datetime import datetime -from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union +from collections.abc import Callable from zoneinfo import ZoneInfo from django.forms import ValidationError @@ 
-104,7 +105,7 @@ def __init__( self.filter = filter self.team = team self.team_id = team.pk - self.params: Dict[str, Any] = {"team_id": team.pk} + self.params: dict[str, Any] = {"team_id": team.pk} self.column_optimizer = column_optimizer or ColumnOptimizer(self.filter, self.team_id) self.add_person_urls = add_person_urls self.person_on_events_mode = person_on_events_mode @@ -122,7 +123,7 @@ def actor_aggregator(self) -> str: return self._person_id_alias @cached_property - def _props_to_filter(self) -> Tuple[str, Dict]: + def _props_to_filter(self) -> tuple[str, dict]: props_to_filter = self.filter.property_groups.combine_property_group( PropertyOperatorType.AND, self.entity.property_groups ) @@ -140,7 +141,7 @@ def _props_to_filter(self) -> Tuple[str, Dict]: hogql_context=self.filter.hogql_context, ) - def get_query(self) -> Tuple[str, Dict, Callable]: + def get_query(self) -> tuple[str, dict, Callable]: date_params = {} query_date_range = QueryDateRange(filter=self.filter, team=self.team) @@ -165,7 +166,7 @@ def get_query(self) -> Tuple[str, Dict, Callable]: ) action_query = "" - action_params: Dict = {} + action_params: dict = {} if self.entity.type == TREND_FILTER_TYPE_ACTIONS: action = self.entity.get_action() action_query, action_params = format_action_filter( @@ -439,7 +440,7 @@ def _breakdown_cohort_params(self): return params, breakdown_filter, breakdown_filter_params, "value" - def _breakdown_prop_params(self, aggregate_operation: str, math_params: Dict): + def _breakdown_prop_params(self, aggregate_operation: str, math_params: dict): values_arr, has_more_values = get_breakdown_prop_values( self.filter, self.entity, @@ -564,7 +565,7 @@ def _get_breakdown_value(self, breakdown: str) -> str: return breakdown_value - def _get_histogram_breakdown_values(self, raw_breakdown_value: str, buckets: List[int]): + def _get_histogram_breakdown_values(self, raw_breakdown_value: str, buckets: list[int]): multi_if_conditionals = [] values_arr = [] @@ -607,9 +608,9 @@ def breakdown_sort_function(self, value): return count_or_aggregated_value * -1, value.get("label") # reverse it def _parse_single_aggregate_result( - self, filter: Filter, entity: Entity, additional_values: Dict[str, Any] + self, filter: Filter, entity: Entity, additional_values: dict[str, Any] ) -> Callable: - def _parse(result: List) -> List: + def _parse(result: list) -> list: parsed_results = [] cache_invalidation_key = generate_short_id() for stats in result: @@ -623,7 +624,7 @@ def _parse(result: List) -> List: "breakdown_value": result_descriptors["breakdown_value"], "breakdown_type": filter.breakdown_type or "event", } - parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) + parsed_params: dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) parsed_result = { "aggregated_value": float( correct_result_for_sampling(aggregated_value, filter.sampling_factor, entity.math) @@ -647,7 +648,7 @@ def _parse(result: List) -> List: return _parse def _parse_trend_result(self, filter: Filter, entity: Entity) -> Callable: - def _parse(result: List) -> List: + def _parse(result: list) -> list: parsed_results = [] for stats in result: result_descriptors = self._breakdown_result_descriptors(stats[2], filter, entity) @@ -679,9 +680,9 @@ def _get_persons_url( filter: Filter, entity: Entity, team: Team, - point_dates: List[datetime], + point_dates: list[datetime], breakdown_value: Union[str, int], - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: persons_url = [] 
cache_invalidation_key = generate_short_id() for point_date in point_dates: @@ -705,7 +706,7 @@ def _get_persons_url( "breakdown_value": breakdown_value, "breakdown_type": filter.breakdown_type or "event", } - parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) + parsed_params: dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) persons_url.append( { "filter": extra_params, @@ -744,7 +745,7 @@ def _determine_breakdown_label( else: return str(value) or BREAKDOWN_NULL_DISPLAY - def _person_join_condition(self) -> Tuple[str, Dict]: + def _person_join_condition(self) -> tuple[str, dict]: if self.person_on_events_mode == PersonsOnEventsMode.person_id_no_override_properties_on_events: return "", {} @@ -780,7 +781,7 @@ def _person_join_condition(self) -> Tuple[str, Dict]: else: return "", {} - def _groups_join_condition(self) -> Tuple[str, Dict]: + def _groups_join_condition(self) -> tuple[str, dict]: return GroupsJoinQuery( self.filter, self.team_id, @@ -788,7 +789,7 @@ def _groups_join_condition(self) -> Tuple[str, Dict]: person_on_events_mode=self.person_on_events_mode, ).get_join_query() - def _sessions_join_condition(self) -> Tuple[str, Dict]: + def _sessions_join_condition(self) -> tuple[str, dict]: session_query = SessionQuery(filter=self.filter, team=self.team) if session_query.is_used: query, session_params = session_query.get_query() diff --git a/posthog/queries/trends/formula.py b/posthog/queries/trends/formula.py index 4f59e5b0cd794..b2fd1bcd8062c 100644 --- a/posthog/queries/trends/formula.py +++ b/posthog/queries/trends/formula.py @@ -2,7 +2,7 @@ from itertools import accumulate import re from string import ascii_uppercase -from typing import Any, Dict, List +from typing import Any from sentry_sdk import push_scope @@ -22,7 +22,7 @@ class TrendsFormula: def _run_formula_query(self, filter: Filter, team: Team): letters = [ascii_uppercase[i] for i in range(0, len(filter.entities))] queries = [] - params: Dict[str, Any] = {} + params: dict[str, Any] = {} for idx, entity in enumerate(filter.entities): _, sql, entity_params, _ = self._get_sql_for_entity(filter, team, entity) # type: ignore sql = PARAM_DISAMBIGUATION_REGEX.sub(f"%({idx}_", sql) @@ -96,7 +96,7 @@ def _run_formula_query(self, filter: Filter, team: Team): ) response = [] for item in result: - additional_values: Dict[str, Any] = {"label": self._label(filter, item)} + additional_values: dict[str, Any] = {"label": self._label(filter, item)} if filter.breakdown: additional_values["breakdown_value"] = additional_values["label"] @@ -113,7 +113,7 @@ def _run_formula_query(self, filter: Filter, team: Team): response.append(parse_response(item, filter, additional_values=additional_values)) return response - def _label(self, filter: Filter, item: List) -> str: + def _label(self, filter: Filter, item: list) -> str: if filter.breakdown: if filter.breakdown_type == "cohort": return get_breakdown_cohort_name(item[2]) diff --git a/posthog/queries/trends/lifecycle.py b/posthog/queries/trends/lifecycle.py index 2629672879e7a..199e3c57973b6 100644 --- a/posthog/queries/trends/lifecycle.py +++ b/posthog/queries/trends/lifecycle.py @@ -1,5 +1,6 @@ import urllib -from typing import Any, Callable, Dict, List, Tuple +from typing import Any +from collections.abc import Callable from posthog.models.entity import Entity from posthog.models.entity.util import get_entity_filtering_params @@ -28,7 +29,7 @@ class Lifecycle: - def _format_lifecycle_query(self, entity: Entity, filter: 
Filter, team: Team) -> Tuple[str, Dict, Callable]: + def _format_lifecycle_query(self, entity: Entity, filter: Filter, team: Team) -> tuple[str, dict, Callable]: event_query, event_params = LifecycleEventQuery( team=team, filter=filter, person_on_events_mode=team.person_on_events_mode ).get_query() @@ -40,7 +41,7 @@ def _format_lifecycle_query(self, entity: Entity, filter: Filter, team: Team) -> ) def _parse_result(self, filter: Filter, entity: Entity, team: Team) -> Callable: - def _parse(result: List) -> List: + def _parse(result: list) -> list: res = [] for val in result: label = "{} - {}".format(entity.name, val[2]) @@ -61,7 +62,7 @@ def get_people(self, filter: LifecycleFilter, team: Team): _, serialized_actors, _ = LifecycleActors(filter=filter, team=team, limit_actors=True).get_actors() return serialized_actors - def _get_persons_urls(self, filter: Filter, entity: Entity, times: List[str], status) -> List[Dict[str, Any]]: + def _get_persons_urls(self, filter: Filter, entity: Entity, times: list[str], status) -> list[dict[str, Any]]: persons_url = [] cache_invalidation_key = generate_short_id() for target_date in times: @@ -75,7 +76,7 @@ def _get_persons_urls(self, filter: Filter, entity: Entity, times: List[str], st "lifecycle_type": status, } - parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) + parsed_params: dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) persons_url.append( { "filter": extra_params, @@ -167,7 +168,7 @@ def _person_query(self): ) def _get_date_filter(self): - date_params: Dict[str, Any] = {} + date_params: dict[str, Any] = {} query_date_range = QueryDateRange(self._filter, self._team, should_round=False) _, date_from_params = query_date_range.date_from _, date_to_params = query_date_range.date_to diff --git a/posthog/queries/trends/lifecycle_actors.py b/posthog/queries/trends/lifecycle_actors.py index 2b83dbb364ddb..0e4b7446cda52 100644 --- a/posthog/queries/trends/lifecycle_actors.py +++ b/posthog/queries/trends/lifecycle_actors.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, Tuple +from typing import Optional from posthog.queries.actor_base_query import ActorBaseQuery from posthog.queries.trends.lifecycle import LifecycleEventQuery @@ -13,7 +13,7 @@ class LifecycleActors(ActorBaseQuery): QUERY_TYPE = "lifecycle" - def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: + def actor_query(self, limit_actors: Optional[bool] = True) -> tuple[str, dict]: events_query, event_params = self.event_query_class( filter=self._filter, team=self._team, diff --git a/posthog/queries/trends/test/test_breakdowns.py b/posthog/queries/trends/test/test_breakdowns.py index 78b5a01e45aaa..3b8651d541512 100644 --- a/posthog/queries/trends/test/test_breakdowns.py +++ b/posthog/queries/trends/test/test_breakdowns.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, Optional +from typing import Optional from posthog.constants import TRENDS_TABLE from posthog.models import Filter @@ -104,7 +104,7 @@ def setUp(self): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None): + def _run(self, extra: Optional[dict] = None, events_extra: Optional[dict] = None): if events_extra is None: events_extra = {} if extra is None: diff --git a/posthog/queries/trends/test/test_breakdowns_by_current_url.py b/posthog/queries/trends/test/test_breakdowns_by_current_url.py index 
26e0c40ae6404..8474d7a27bb23 100644 --- a/posthog/queries/trends/test/test_breakdowns_by_current_url.py +++ b/posthog/queries/trends/test/test_breakdowns_by_current_url.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, Optional +from typing import Optional from posthog.models import Filter from posthog.queries.trends.trends import Trends @@ -99,7 +99,7 @@ def setUp(self): journeys_for(journey, team=self.team, create_people=True) - def _run(self, extra: Optional[Dict] = None, events_extra: Optional[Dict] = None): + def _run(self, extra: Optional[dict] = None, events_extra: Optional[dict] = None): if events_extra is None: events_extra = {} if extra is None: diff --git a/posthog/queries/trends/test/test_formula.py b/posthog/queries/trends/test/test_formula.py index 01e838336e5c8..d711bbff6f827 100644 --- a/posthog/queries/trends/test/test_formula.py +++ b/posthog/queries/trends/test/test_formula.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional +from typing import Optional from freezegun.api import freeze_time @@ -129,7 +129,7 @@ def setUp(self): }, ) - def _run(self, extra: Optional[Dict] = None, run_at: Optional[str] = None): + def _run(self, extra: Optional[dict] = None, run_at: Optional[str] = None): if extra is None: extra = {} with freeze_time(run_at or "2020-01-04T13:01:01Z"): diff --git a/posthog/queries/trends/test/test_paging_breakdowns.py b/posthog/queries/trends/test/test_paging_breakdowns.py index b4040fee61897..47ea447005c1a 100644 --- a/posthog/queries/trends/test/test_paging_breakdowns.py +++ b/posthog/queries/trends/test/test_paging_breakdowns.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional +from typing import Optional from freezegun import freeze_time @@ -38,7 +38,7 @@ def setUp(self): create_people=True, ) - def _run(self, extra: Optional[Dict] = None, run_at: Optional[str] = None): + def _run(self, extra: Optional[dict] = None, run_at: Optional[str] = None): if extra is None: extra = {} with freeze_time(run_at or "2020-01-04T13:01:01Z"): diff --git a/posthog/queries/trends/total_volume.py b/posthog/queries/trends/total_volume.py index e36f6d2de7313..5e91d9272cf18 100644 --- a/posthog/queries/trends/total_volume.py +++ b/posthog/queries/trends/total_volume.py @@ -1,6 +1,7 @@ import urllib.parse from datetime import date, datetime, timedelta -from typing import Any, Callable, Dict, List, Tuple, Union +from typing import Any, Union +from collections.abc import Callable from posthog.clickhouse.query_tagging import tag_queries from posthog.constants import ( @@ -48,7 +49,7 @@ class TrendsTotalVolume: EVENT_TABLE_ALIAS = EventQuery.EVENT_TABLE_ALIAS PERSON_ID_OVERRIDES_TABLE_ALIAS = EventQuery.PERSON_ID_OVERRIDES_TABLE_ALIAS - def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> Tuple[str, Dict, Callable]: + def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> tuple[str, dict, Callable]: interval_func = get_interval_func_ch(filter.interval) person_id_alias = f"{self.DISTINCT_ID_TABLE_ALIAS}.person_id" @@ -82,7 +83,7 @@ def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> Tup "timestamp": "e.timestamp", "interval_func": interval_func, } - params: Dict = {"team_id": team.id, "timezone": team.timezone} + params: dict = {"team_id": team.id, "timezone": team.timezone} params = {**params, **math_params, **event_query_params} if filter.display in NON_TIME_SERIES_DISPLAY_TYPES: @@ -219,14 +220,14 @@ def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> Tup 
return final_query, params, self._parse_total_volume_result(filter, entity, team) def _parse_total_volume_result(self, filter: Filter, entity: Entity, team: Team) -> Callable: - def _parse(result: List) -> List: + def _parse(result: list) -> list: parsed_results = [] if result is not None: for stats in result: parsed_result = parse_response(stats, filter, entity=entity) - point_dates: List[Union[datetime, date]] = stats[0] + point_dates: list[Union[datetime, date]] = stats[0] # Ensure we have datetimes for all points - point_datetimes: List[datetime] = [ + point_datetimes: list[datetime] = [ datetime.combine(d, datetime.min.time()) if not isinstance(d, datetime) else d for d in point_dates ] @@ -238,7 +239,7 @@ def _parse(result: List) -> List: return _parse def _parse_aggregate_volume_result(self, filter: Filter, entity: Entity, team_id: int) -> Callable: - def _parse(result: List) -> List: + def _parse(result: list) -> list: aggregated_value = result[0][0] if result else 0 seconds_in_interval = TIME_IN_SECONDS[filter.interval] time_range = enumerate_time_range(filter, seconds_in_interval) @@ -249,7 +250,7 @@ def _parse(result: List) -> List: "entity_math": entity.math, "entity_order": entity.order, } - parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) + parsed_params: dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) cache_invalidation_key = generate_short_id() return [ @@ -286,8 +287,8 @@ def _get_persons_url( filter: Filter, entity: Entity, team: Team, - point_datetimes: List[datetime], - ) -> List[Dict[str, Any]]: + point_datetimes: list[datetime], + ) -> list[dict[str, Any]]: persons_url = [] cache_invalidation_key = generate_short_id() for point_datetime in point_datetimes: @@ -301,7 +302,7 @@ def _get_persons_url( "entity_order": entity.order, } - parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) + parsed_params: dict[str, str] = encode_get_request_params({**filter_params, **extra_params}) persons_url.append( { "filter": extra_params, diff --git a/posthog/queries/trends/trends.py b/posthog/queries/trends/trends.py index 81e35336138bf..da8e0ff80e1c7 100644 --- a/posthog/queries/trends/trends.py +++ b/posthog/queries/trends/trends.py @@ -2,7 +2,8 @@ import threading from datetime import datetime, timedelta from itertools import accumulate -from typing import Any, Callable, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast +from collections.abc import Callable from zoneinfo import ZoneInfo from dateutil import parser @@ -33,7 +34,7 @@ class Trends(TrendsTotalVolume, Lifecycle, TrendsFormula): - def _get_sql_for_entity(self, filter: Filter, team: Team, entity: Entity) -> Tuple[str, str, Dict, Callable]: + def _get_sql_for_entity(self, filter: Filter, team: Team, entity: Entity) -> tuple[str, str, dict, Callable]: if filter.breakdown and filter.display not in NON_BREAKDOWN_DISPLAY_TYPES: query_type = "trends_breakdown" sql, params, parse_function = TrendsBreakdown( @@ -53,7 +54,7 @@ def _get_sql_for_entity(self, filter: Filter, team: Team, entity: Entity) -> Tup return query_type, sql, params, parse_function # Use cached result even on refresh if team has strict caching enabled - def get_cached_result(self, filter: Filter, team: Team) -> Optional[List[Dict[str, Any]]]: + def get_cached_result(self, filter: Filter, team: Team) -> Optional[list[dict[str, Any]]]: if not team.strict_caching_enabled or filter.breakdown or filter.display != TRENDS_LINEAR: 
return None @@ -73,7 +74,7 @@ def get_cached_result(self, filter: Filter, team: Team) -> Optional[List[Dict[st return cached_result if _is_present else None # Determine if the current timerange is present in the cache - def is_present_timerange(self, cached_result: List[Dict[str, Any]], filter: Filter, team: Team) -> bool: + def is_present_timerange(self, cached_result: list[dict[str, Any]], filter: Filter, team: Team) -> bool: if ( len(cached_result) > 0 and cached_result[0].get("days") @@ -92,7 +93,7 @@ def is_present_timerange(self, cached_result: List[Dict[str, Any]], filter: Filt return _is_present # Use a condensed filter if a cached result exists in the current timerange - def adjusted_filter(self, filter: Filter, team: Team) -> Tuple[Filter, Optional[Dict[str, Any]]]: + def adjusted_filter(self, filter: Filter, team: Team) -> tuple[Filter, Optional[dict[str, Any]]]: cached_result = self.get_cached_result(filter, team) new_filter = filter.shallow_clone({"date_from": interval_unit(filter.interval)}) if cached_result else filter @@ -107,7 +108,7 @@ def adjusted_filter(self, filter: Filter, team: Team) -> Tuple[Filter, Optional[ def merge_results( self, result, - cached_result: Optional[Dict[str, Any]], + cached_result: Optional[dict[str, Any]], entity_order: int, filter: Filter, team: Team, @@ -129,7 +130,7 @@ def merge_results( else: return result, {} - def _run_query(self, filter: Filter, team: Team, entity: Entity) -> List[Dict[str, Any]]: + def _run_query(self, filter: Filter, team: Team, entity: Entity) -> list[dict[str, Any]]: adjusted_filter, cached_result = self.adjusted_filter(filter, team) with push_scope() as scope: query_type, sql, params, parse_function = self._get_sql_for_entity(adjusted_filter, team, entity) @@ -163,12 +164,12 @@ def _run_query(self, filter: Filter, team: Team, entity: Entity) -> List[Dict[st def _run_query_for_threading( self, - result: List, + result: list, index: int, query_type, sql, params, - query_tags: Dict, + query_tags: dict, filter: Filter, team_id: int, ): @@ -177,10 +178,10 @@ def _run_query_for_threading( scope.set_context("query", {"sql": sql, "params": params}) result[index] = insight_sync_execute(sql, params, query_type=query_type, filter=filter, team_id=team_id) - def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]: - result: List[Optional[List[Dict[str, Any]]]] = [None] * len(filter.entities) - parse_functions: List[Optional[Callable]] = [None] * len(filter.entities) - sql_statements_with_params: List[Tuple[Optional[str], Dict]] = [(None, {})] * len(filter.entities) + def _run_parallel(self, filter: Filter, team: Team) -> list[dict[str, Any]]: + result: list[Optional[list[dict[str, Any]]]] = [None] * len(filter.entities) + parse_functions: list[Optional[Callable]] = [None] * len(filter.entities) + sql_statements_with_params: list[tuple[Optional[str], dict]] = [(None, {})] * len(filter.entities) cached_result = None jobs = [] @@ -225,7 +226,7 @@ def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]: "params": sql_statements_with_params[i][1], }, ) - serialized_data = cast(List[Callable], parse_functions)[entity.index](result[entity.index]) + serialized_data = cast(list[Callable], parse_functions)[entity.index](result[entity.index]) serialized_data = self._format_serialized(entity, serialized_data) merged_results, cached_result = self.merge_results( serialized_data, @@ -237,9 +238,9 @@ def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]: result[entity.index] = 
merged_results # flatten results - flat_results: List[Dict[str, Any]] = [] + flat_results: list[dict[str, Any]] = [] for item in result: - for flat in cast(List[Dict[str, Any]], item): + for flat in cast(list[dict[str, Any]], item): flat_results.append(flat) if cached_result: @@ -248,7 +249,7 @@ def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]: return flat_results - def run(self, filter: Filter, team: Team, is_csv_export: bool = False, *args, **kwargs) -> List[Dict[str, Any]]: + def run(self, filter: Filter, team: Team, is_csv_export: bool = False, *args, **kwargs) -> list[dict[str, Any]]: self.is_csv_export = is_csv_export actions = Action.objects.filter(team_id=team.pk).order_by("-id") if len(filter.actions) > 0: @@ -274,10 +275,10 @@ def run(self, filter: Filter, team: Team, is_csv_export: bool = False, *args, ** return result - def _format_serialized(self, entity: Entity, result: List[Dict[str, Any]]): + def _format_serialized(self, entity: Entity, result: list[dict[str, Any]]): serialized_data = [] - serialized: Dict[str, Any] = { + serialized: dict[str, Any] = { "action": entity.to_dict(), "label": entity.name, "count": 0, @@ -293,7 +294,7 @@ def _format_serialized(self, entity: Entity, result: List[Dict[str, Any]]): return serialized_data - def _handle_cumulative(self, entity_metrics: List) -> List[Dict[str, Any]]: + def _handle_cumulative(self, entity_metrics: list) -> list[dict[str, Any]]: for metrics in entity_metrics: metrics.update(data=list(accumulate(metrics["data"]))) return entity_metrics diff --git a/posthog/queries/trends/trends_actors.py b/posthog/queries/trends/trends_actors.py index 9c4afa89c41a6..f7db8b36d8ac3 100644 --- a/posthog/queries/trends/trends_actors.py +++ b/posthog/queries/trends/trends_actors.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from posthog.constants import PropertyOperatorType from posthog.models.cohort import Cohort @@ -37,7 +37,7 @@ def aggregation_group_type_index(self): return self.entity.math_group_type_index return None - def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: + def actor_query(self, limit_actors: Optional[bool] = True) -> tuple[str, dict]: if self._filter.breakdown_type == "cohort" and self._filter.breakdown_value != "all": cohort = Cohort.objects.get(pk=self._filter.breakdown_value, team_id=self._team.pk) self._filter = self._filter.shallow_clone( @@ -95,7 +95,7 @@ def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: } ) - extra_fields: List[str] = ["distinct_id", "team_id"] if not self.is_aggregating_by_groups else [] + extra_fields: list[str] = ["distinct_id", "team_id"] if not self.is_aggregating_by_groups else [] if self._filter.include_recordings: extra_fields += ["uuid"] @@ -147,7 +147,7 @@ def _aggregation_actor_field(self) -> str: return "person_id" @cached_property - def _aggregation_actor_value_expression_with_params(self) -> Tuple[str, Dict[str, Any]]: + def _aggregation_actor_value_expression_with_params(self) -> tuple[str, dict[str, Any]]: if self.entity.math in PROPERTY_MATH_FUNCTIONS: math_aggregate_operation, _, math_params = process_math( self.entity, self._team, filter=self._filter, event_table_alias="e" diff --git a/posthog/queries/trends/trends_event_query.py b/posthog/queries/trends/trends_event_query.py index bc9e9b979bd00..b856cb6a035e5 100644 --- a/posthog/queries/trends/trends_event_query.py +++ b/posthog/queries/trends/trends_event_query.py @@ -1,4 
+1,4 @@ -from typing import Any, Dict, Tuple +from typing import Any from posthog.models.property.util import get_property_string_expr from posthog.queries.trends.trends_event_query_base import TrendsEventQueryBase @@ -6,7 +6,7 @@ class TrendsEventQuery(TrendsEventQueryBase): - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: person_id_field = "" if self._should_join_distinct_ids: person_id_field = f", {self._person_id_alias} as person_id" diff --git a/posthog/queries/trends/trends_event_query_base.py b/posthog/queries/trends/trends_event_query_base.py index dbeb9f17cdc3d..8fb17d3579e8f 100644 --- a/posthog/queries/trends/trends_event_query_base.py +++ b/posthog/queries/trends/trends_event_query_base.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Tuple +from typing import Any from posthog.constants import ( MONTHLY_ACTIVE, @@ -29,7 +29,7 @@ def __init__(self, entity: Entity, *args, **kwargs): self._entity = entity super().__init__(*args, **kwargs) - def get_query_base(self) -> Tuple[str, Dict[str, Any]]: + def get_query_base(self) -> tuple[str, dict[str, Any]]: """ Returns part of the event query with only FROM, JOINs and WHERE clauses. """ @@ -114,9 +114,9 @@ def _get_not_null_actor_condition(self) -> str: # If aggregating by group, exclude events that aren't associated with a group return f"""AND "$group_{self._entity.math_group_type_index}" != ''""" - def _get_date_filter(self) -> Tuple[str, Dict]: + def _get_date_filter(self) -> tuple[str, dict]: date_query = "" - date_params: Dict[str, Any] = {} + date_params: dict[str, Any] = {} query_date_range = QueryDateRange(self._filter, self._team) parsed_date_from, date_from_params = query_date_range.date_from parsed_date_to, date_to_params = query_date_range.date_to @@ -145,7 +145,7 @@ def _get_date_filter(self) -> Tuple[str, Dict]: return date_query, date_params - def _get_entity_query(self, *, deep_filtering: bool) -> Tuple[str, Dict]: + def _get_entity_query(self, *, deep_filtering: bool) -> tuple[str, dict]: entity_params, entity_format_params = get_entity_filtering_params( allowed_entities=[self._entity], team_id=self._team_id, diff --git a/posthog/queries/trends/util.py b/posthog/queries/trends/util.py index e002145de9957..3558640602e48 100644 --- a/posthog/queries/trends/util.py +++ b/posthog/queries/trends/util.py @@ -1,6 +1,6 @@ import datetime from datetime import timedelta -from typing import Any, Dict, List, Optional, Tuple, TypeVar +from typing import Any, Optional, TypeVar from zoneinfo import ZoneInfo import structlog @@ -60,10 +60,10 @@ def process_math( filter: Filter, event_table_alias: Optional[str] = None, person_id_alias: str = "person_id", -) -> Tuple[str, str, Dict[str, Any]]: +) -> tuple[str, str, dict[str, Any]]: aggregate_operation = "count(*)" join_condition = "" - params: Dict[str, Any] = {} + params: dict[str, Any] = {} if entity.math in (UNIQUE_USERS, WEEKLY_ACTIVE, MONTHLY_ACTIVE): if team.aggregate_users_by_distinct_id: @@ -100,11 +100,11 @@ def process_math( def parse_response( - stats: Dict, + stats: dict, filter: Filter, - additional_values: Optional[Dict] = None, + additional_values: Optional[dict] = None, entity: Optional[Entity] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: if additional_values is None: additional_values = {} counts = stats[1] @@ -122,7 +122,7 @@ def parse_response( } -def get_active_user_params(filter: Filter, entity: Entity, team_id: int) -> Tuple[Dict[str, Any], Dict[str, Any]]: +def get_active_user_params(filter: Filter, 
entity: Entity, team_id: int) -> tuple[dict[str, Any], dict[str, Any]]: diff = timedelta(days=7 if entity.math == WEEKLY_ACTIVE else 30) date_from: datetime.datetime @@ -155,11 +155,11 @@ def get_active_user_params(filter: Filter, entity: Entity, team_id: int) -> Tupl return format_params, query_params -def enumerate_time_range(filter: Filter, seconds_in_interval: int) -> List[str]: +def enumerate_time_range(filter: Filter, seconds_in_interval: int) -> list[str]: date_from = filter.date_from date_to = filter.date_to delta = timedelta(seconds=seconds_in_interval) - time_range: List[str] = [] + time_range: list[str] = [] if not date_from or not date_to: return time_range diff --git a/posthog/queries/util.py b/posthog/queries/util.py index e366fb1cc7833..e0d2cb9896f02 100644 --- a/posthog/queries/util.py +++ b/posthog/queries/util.py @@ -1,7 +1,7 @@ import json from datetime import datetime, timedelta from enum import Enum, auto -from typing import Any, Dict, Optional, Union +from typing import Any, Optional, Union from zoneinfo import ZoneInfo from django.utils import timezone @@ -46,21 +46,21 @@ class PersonPropertiesMode(Enum): SELECT timestamp from events WHERE team_id = %(team_id)s AND timestamp > %(earliest_timestamp)s order by timestamp limit 1 """ -TIME_IN_SECONDS: Dict[str, Any] = { +TIME_IN_SECONDS: dict[str, Any] = { "hour": 3600, "day": 3600 * 24, "week": 3600 * 24 * 7, "month": 3600 * 24 * 30, # TODO: Let's get rid of this lie! Months are not all 30 days long } -PERIOD_TO_TRUNC_FUNC: Dict[str, str] = { +PERIOD_TO_TRUNC_FUNC: dict[str, str] = { "hour": "toStartOfHour", "week": "toStartOfWeek", "day": "toStartOfDay", "month": "toStartOfMonth", } -PERIOD_TO_INTERVAL_FUNC: Dict[str, str] = { +PERIOD_TO_INTERVAL_FUNC: dict[str, str] = { "hour": "toIntervalHour", "week": "toIntervalWeek", "day": "toIntervalDay", @@ -141,7 +141,7 @@ def get_time_in_seconds_for_period(period: Optional[str]) -> str: return seconds_in_period -def deep_dump_object(params: Dict[str, Any]) -> Dict[str, Any]: +def deep_dump_object(params: dict[str, Any]) -> dict[str, Any]: for key in params: if isinstance(params[key], dict) or isinstance(params[key], list): params[key] = json.dumps(params[key]) diff --git a/posthog/rate_limit.py b/posthog/rate_limit.py index 856d1b6cceb32..d85238c3d491a 100644 --- a/posthog/rate_limit.py +++ b/posthog/rate_limit.py @@ -2,7 +2,7 @@ import re import time from functools import lru_cache -from typing import List, Optional +from typing import Optional from prometheus_client import Counter from rest_framework.throttling import SimpleRateThrottle, BaseThrottle, UserRateThrottle @@ -36,7 +36,7 @@ @lru_cache(maxsize=1) -def get_team_allow_list(_ttl: int) -> List[str]: +def get_team_allow_list(_ttl: int) -> list[str]: """ The "allow list" will change way less frequently than it will be called _ttl is passed an infrequently changing value to ensure the cache is invalidated after some delay diff --git a/posthog/renderers.py b/posthog/renderers.py index fa2d532fdce70..2c7853497ea57 100644 --- a/posthog/renderers.py +++ b/posthog/renderers.py @@ -1,10 +1,8 @@ -from typing import Dict - import orjson from rest_framework.renderers import JSONRenderer from rest_framework.utils.encoders import JSONEncoder -CleaningMarker = bool | Dict[int, "CleaningMarker"] +CleaningMarker = bool | dict[int, "CleaningMarker"] class SafeJSONRenderer(JSONRenderer): diff --git a/posthog/schema.py b/posthog/schema.py index 5673db2a3bf54..281cd8ef3a039 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -4,7 
+4,7 @@ from __future__ import annotations from enum import Enum -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, Literal, Optional, Union from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, RootModel @@ -165,8 +165,8 @@ class DatabaseSchemaQueryResponseField(BaseModel): model_config = ConfigDict( extra="forbid", ) - chain: Optional[List[str]] = None - fields: Optional[List[str]] = None + chain: Optional[list[str]] = None + fields: Optional[list[str]] = None key: str table: Optional[str] = None type: str @@ -203,9 +203,9 @@ class ElementType(BaseModel): model_config = ConfigDict( extra="forbid", ) - attr_class: Optional[List[str]] = None + attr_class: Optional[list[str]] = None attr_id: Optional[str] = None - attributes: Dict[str, str] + attributes: dict[str, str] href: Optional[str] = None nth_child: Optional[float] = None nth_of_type: Optional[float] = None @@ -232,9 +232,9 @@ class EventDefinition(BaseModel): model_config = ConfigDict( extra="forbid", ) - elements: List + elements: list event: str - properties: Dict[str, Any] + properties: dict[str, Any] class CorrelationType(str, Enum): @@ -257,9 +257,9 @@ class Person(BaseModel): model_config = ConfigDict( extra="forbid", ) - distinct_ids: List[str] + distinct_ids: list[str] is_identified: Optional[bool] = None - properties: Dict[str, Any] + properties: dict[str, Any] class EventType(BaseModel): @@ -267,12 +267,12 @@ class EventType(BaseModel): extra="forbid", ) distinct_id: str - elements: List[ElementType] + elements: list[ElementType] elements_chain: Optional[str] = None event: str id: str person: Optional[Person] = None - properties: Dict[str, Any] + properties: dict[str, Any] timestamp: str uuid: Optional[str] = None @@ -282,7 +282,7 @@ class Response(BaseModel): extra="forbid", ) next: Optional[str] = None - results: List[EventType] + results: list[EventType] class Properties(BaseModel): @@ -321,7 +321,7 @@ class FunnelCorrelationResult(BaseModel): model_config = ConfigDict( extra="forbid", ) - events: List[EventOddsRatioSerialized] + events: list[EventOddsRatioSerialized] skewed: bool @@ -374,7 +374,7 @@ class FunnelTimeToConvertResults(BaseModel): extra="forbid", ) average_conversion_time: Optional[float] = None - bins: List[List[int]] + bins: list[list[int]] class FunnelVizType(str, Enum): @@ -432,7 +432,7 @@ class HogQLQueryModifiers(BaseModel): model_config = ConfigDict( extra="forbid", ) - dataWarehouseEventsModifiers: Optional[List[DataWarehouseEventsModifier]] = None + dataWarehouseEventsModifiers: Optional[list[DataWarehouseEventsModifier]] = None inCohortVia: Optional[InCohortVia] = None materializationMode: Optional[MaterializationMode] = None personsArgMaxVersion: Optional[PersonsArgMaxVersion] = None @@ -496,12 +496,12 @@ class InsightActorsQueryOptionsResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - breakdown: Optional[List[BreakdownItem]] = None - compare: Optional[List[CompareItem]] = None - day: Optional[List[DayItem]] = None - interval: Optional[List[IntervalItem]] = None - series: Optional[List[Series]] = None - status: Optional[List[StatusItem]] = None + breakdown: Optional[list[BreakdownItem]] = None + compare: Optional[list[CompareItem]] = None + day: Optional[list[DayItem]] = None + interval: Optional[list[IntervalItem]] = None + series: Optional[list[Series]] = None + status: Optional[list[StatusItem]] = None class InsightFilterProperty(str, Enum): @@ -604,14 +604,14 @@ class PathsFilter(BaseModel): ) edgeLimit: Optional[int] = None 
endPoint: Optional[str] = None - excludeEvents: Optional[List[str]] = None - includeEventTypes: Optional[List[PathType]] = None - localPathCleaningFilters: Optional[List[PathCleaningFilter]] = None + excludeEvents: Optional[list[str]] = None + includeEventTypes: Optional[list[PathType]] = None + localPathCleaningFilters: Optional[list[PathCleaningFilter]] = None maxEdgeWeight: Optional[int] = None minEdgeWeight: Optional[int] = None pathDropoffKey: Optional[str] = Field(default=None, description="Relevant only within actors query") pathEndKey: Optional[str] = Field(default=None, description="Relevant only within actors query") - pathGroupings: Optional[List[str]] = None + pathGroupings: Optional[list[str]] = None pathReplacements: Optional[bool] = None pathStartKey: Optional[str] = Field(default=None, description="Relevant only within actors query") pathsHogQLExpression: Optional[str] = None @@ -625,14 +625,14 @@ class PathsFilterLegacy(BaseModel): ) edge_limit: Optional[int] = None end_point: Optional[str] = None - exclude_events: Optional[List[str]] = None - funnel_filter: Optional[Dict[str, Any]] = None + exclude_events: Optional[list[str]] = None + funnel_filter: Optional[dict[str, Any]] = None funnel_paths: Optional[FunnelPathType] = None - include_event_types: Optional[List[PathType]] = None - local_path_cleaning_filters: Optional[List[PathCleaningFilter]] = None + include_event_types: Optional[list[PathType]] = None + local_path_cleaning_filters: Optional[list[PathCleaningFilter]] = None max_edge_weight: Optional[int] = None min_edge_weight: Optional[int] = None - path_groupings: Optional[List[str]] = None + path_groupings: Optional[list[str]] = None path_replacements: Optional[bool] = None path_type: Optional[PathType] = None paths_hogql_expression: Optional[str] = None @@ -693,39 +693,39 @@ class QueryResponseAlternative1(BaseModel): extra="forbid", ) next: Optional[str] = None - results: List[EventType] + results: list[EventType] class QueryResponseAlternative2(BaseModel): model_config = ConfigDict( extra="forbid", ) - results: List[Dict[str, Any]] + results: list[dict[str, Any]] class QueryResponseAlternative5(BaseModel): model_config = ConfigDict( extra="forbid", ) - breakdown: Optional[List[BreakdownItem]] = None - compare: Optional[List[CompareItem]] = None - day: Optional[List[DayItem]] = None - interval: Optional[List[IntervalItem]] = None - series: Optional[List[Series]] = None - status: Optional[List[StatusItem]] = None + breakdown: Optional[list[BreakdownItem]] = None + compare: Optional[list[CompareItem]] = None + day: Optional[list[DayItem]] = None + interval: Optional[list[IntervalItem]] = None + series: Optional[list[Series]] = None + status: Optional[list[StatusItem]] = None class QueryResponseAlternative8(BaseModel): model_config = ConfigDict( extra="forbid", ) - errors: List[HogQLNotice] + errors: list[HogQLNotice] inputExpr: Optional[str] = None inputSelect: Optional[str] = None isValid: Optional[bool] = None isValidView: Optional[bool] = None - notices: List[HogQLNotice] - warnings: List[HogQLNotice] + notices: list[HogQLNotice] + warnings: list[HogQLNotice] class QueryStatus(BaseModel): @@ -822,7 +822,7 @@ class SessionPropertyFilter(BaseModel): label: Optional[str] = None operator: PropertyOperator type: Literal["session"] = "session" - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class StepOrderValue(str, Enum): @@ -837,7 +837,7 @@ class StickinessFilter(BaseModel): ) 
compare: Optional[bool] = None display: Optional[ChartDisplayType] = None - hidden_legend_indexes: Optional[List[float]] = None + hidden_legend_indexes: Optional[list[float]] = None showLegend: Optional[bool] = None showValuesOnSeries: Optional[bool] = None @@ -848,7 +848,7 @@ class StickinessFilterLegacy(BaseModel): ) compare: Optional[bool] = None display: Optional[ChartDisplayType] = None - hidden_legend_indexes: Optional[List[float]] = None + hidden_legend_indexes: Optional[list[float]] = None show_legend: Optional[bool] = None show_values_on_series: Optional[bool] = None @@ -862,8 +862,8 @@ class StickinessQueryResponse(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[Dict[str, Any]] - timings: Optional[List[QueryTiming]] = None + results: list[dict[str, Any]] + timings: Optional[list[QueryTiming]] = None class TimeToSeeDataQuery(BaseModel): @@ -874,7 +874,7 @@ class TimeToSeeDataQuery(BaseModel): modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response") + response: Optional[dict[str, Any]] = Field(default=None, description="Cached query response") sessionEnd: Optional[str] = None sessionId: Optional[str] = Field(default=None, description="Project to filter on. Defaults to current session") sessionStart: Optional[str] = Field( @@ -887,7 +887,7 @@ class TimeToSeeDataSessionsQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - results: List[Dict[str, Any]] + results: list[dict[str, Any]] class TimeToSeeDataWaterfallNode(BaseModel): @@ -902,7 +902,7 @@ class TimelineEntry(BaseModel): model_config = ConfigDict( extra="forbid", ) - events: List[EventType] + events: list[EventType] recording_duration_s: Optional[float] = Field(default=None, description="Duration of the recording in seconds.") sessionId: Optional[str] = Field(default=None, description="Session ID. 
None means out-of-session events") @@ -919,7 +919,7 @@ class TrendsFilter(BaseModel): decimalPlaces: Optional[float] = None display: Optional[ChartDisplayType] = None formula: Optional[str] = None - hidden_legend_indexes: Optional[List[float]] = None + hidden_legend_indexes: Optional[list[float]] = None showLabelsOnSeries: Optional[bool] = None showLegend: Optional[bool] = None showPercentStackView: Optional[bool] = None @@ -939,7 +939,7 @@ class TrendsFilterLegacy(BaseModel): decimal_places: Optional[float] = None display: Optional[ChartDisplayType] = None formula: Optional[str] = None - hidden_legend_indexes: Optional[List[float]] = None + hidden_legend_indexes: Optional[list[float]] = None show_labels_on_series: Optional[bool] = None show_legend: Optional[bool] = None show_percent_stack_view: Optional[bool] = None @@ -956,8 +956,8 @@ class TrendsQueryResponse(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[Dict[str, Any]] - timings: Optional[List[QueryTiming]] = None + results: list[dict[str, Any]] + timings: Optional[list[QueryTiming]] = None class ActionsPie(BaseModel): @@ -1020,9 +1020,9 @@ class WebOverviewQueryResponse(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[WebOverviewItem] + results: list[WebOverviewItem] samplingRate: Optional[SamplingRate] = None - timings: Optional[List[QueryTiming]] = None + timings: Optional[list[QueryTiming]] = None class WebStatsBreakdown(str, Enum): @@ -1047,7 +1047,7 @@ class WebStatsTableQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: Optional[List] = None + columns: Optional[list] = None hasMore: Optional[bool] = None hogql: Optional[str] = None is_cached: Optional[bool] = None @@ -1056,42 +1056,42 @@ class WebStatsTableQueryResponse(BaseModel): modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None offset: Optional[int] = None - results: List + results: list samplingRate: Optional[SamplingRate] = None - timings: Optional[List[QueryTiming]] = None - types: Optional[List] = None + timings: Optional[list[QueryTiming]] = None + types: Optional[list] = None class WebTopClicksQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: Optional[List] = None + columns: Optional[list] = None hogql: Optional[str] = None is_cached: Optional[bool] = None last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List + results: list samplingRate: Optional[SamplingRate] = None - timings: Optional[List[QueryTiming]] = None - types: Optional[List] = None + timings: Optional[list[QueryTiming]] = None + types: Optional[list] = None class ActorsQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: List + columns: list hasMore: Optional[bool] = None hogql: str limit: int missing_actors_count: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = None offset: int - results: List[List] - timings: Optional[List[QueryTiming]] = None - types: List[str] + results: list[list] + timings: Optional[list[QueryTiming]] = None + types: list[str] class AnyResponseType1(BaseModel): @@ -1099,7 +1099,7 @@ class AnyResponseType1(BaseModel): extra="forbid", ) next: Optional[str] = None - results: List[EventType] + results: list[EventType] 
class Breakdown(BaseModel): @@ -1115,14 +1115,14 @@ class BreakdownFilter(BaseModel): model_config = ConfigDict( extra="forbid", ) - breakdown: Optional[Union[str, float, List[Union[str, float]]]] = None + breakdown: Optional[Union[str, float, list[Union[str, float]]]] = None breakdown_group_type_index: Optional[int] = None breakdown_hide_other_aggregation: Optional[bool] = None breakdown_histogram_bin_count: Optional[int] = None breakdown_limit: Optional[int] = None breakdown_normalize_url: Optional[bool] = None breakdown_type: Optional[BreakdownType] = None - breakdowns: Optional[List[Breakdown]] = None + breakdowns: Optional[list[Breakdown]] = None class DataNode(BaseModel): @@ -1133,16 +1133,16 @@ class DataNode(BaseModel): modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response") + response: Optional[dict[str, Any]] = Field(default=None, description="Cached query response") class ChartSettings(BaseModel): model_config = ConfigDict( extra="forbid", ) - goalLines: Optional[List[GoalLine]] = None + goalLines: Optional[list[GoalLine]] = None xAxis: Optional[ChartAxis] = None - yAxis: Optional[List[ChartAxis]] = None + yAxis: Optional[list[ChartAxis]] = None class DataWarehousePersonPropertyFilter(BaseModel): @@ -1153,7 +1153,7 @@ class DataWarehousePersonPropertyFilter(BaseModel): label: Optional[str] = None operator: PropertyOperator type: Literal["data_warehouse_person_property"] = "data_warehouse_person_property" - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class DataWarehousePropertyFilter(BaseModel): @@ -1164,7 +1164,7 @@ class DataWarehousePropertyFilter(BaseModel): label: Optional[str] = None operator: PropertyOperator type: Literal["data_warehouse"] = "data_warehouse" - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class ElementPropertyFilter(BaseModel): @@ -1175,7 +1175,7 @@ class ElementPropertyFilter(BaseModel): label: Optional[str] = None operator: PropertyOperator type: Literal["element"] = "element" - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class EventPropertyFilter(BaseModel): @@ -1186,22 +1186,22 @@ class EventPropertyFilter(BaseModel): label: Optional[str] = None operator: Optional[PropertyOperator] = PropertyOperator("exact") type: Literal["event"] = Field(default="event", description="Event properties") - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class EventsQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: List + columns: list hasMore: Optional[bool] = None hogql: str limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = None offset: Optional[int] = None - results: List[List] - timings: Optional[List[QueryTiming]] = None - types: List[str] + results: list[list] + timings: Optional[list[QueryTiming]] = None + types: list[str] class FeaturePropertyFilter(BaseModel): @@ -1212,22 +1212,22 @@ class FeaturePropertyFilter(BaseModel): label: Optional[str] = None operator: PropertyOperator type: Literal["feature"] = Field(default="feature", description='Event property with "$feature/" prepended') - value: 
Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class FunnelCorrelationResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: Optional[List] = None + columns: Optional[list] = None hasMore: Optional[bool] = None hogql: Optional[str] = None limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = None offset: Optional[int] = None results: FunnelCorrelationResult - timings: Optional[List[QueryTiming]] = None - types: Optional[List] = None + timings: Optional[list[QueryTiming]] = None + types: Optional[list] = None class FunnelsFilterLegacy(BaseModel): @@ -1237,7 +1237,7 @@ class FunnelsFilterLegacy(BaseModel): bin_count: Optional[Union[float, str]] = None breakdown_attribution_type: Optional[BreakdownAttributionType] = None breakdown_attribution_value: Optional[float] = None - exclusions: Optional[List[FunnelExclusionLegacy]] = None + exclusions: Optional[list[FunnelExclusionLegacy]] = None funnel_aggregate_by_hogql: Optional[str] = None funnel_from_step: Optional[float] = None funnel_order_type: Optional[StepOrderValue] = None @@ -1246,7 +1246,7 @@ class FunnelsFilterLegacy(BaseModel): funnel_viz_type: Optional[FunnelVizType] = None funnel_window_interval: Optional[float] = None funnel_window_interval_unit: Optional[FunnelConversionWindowTimeUnit] = None - hidden_legend_breakdowns: Optional[List[str]] = None + hidden_legend_breakdowns: Optional[list[str]] = None layout: Optional[FunnelLayout] = None @@ -1259,8 +1259,8 @@ class FunnelsQueryResponse(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: Union[FunnelTimeToConvertResults, List[Dict[str, Any]], List[List[Dict[str, Any]]]] - timings: Optional[List[QueryTiming]] = None + results: Union[FunnelTimeToConvertResults, list[dict[str, Any]], list[list[dict[str, Any]]]] + timings: Optional[list[QueryTiming]] = None class GroupPropertyFilter(BaseModel): @@ -1272,7 +1272,7 @@ class GroupPropertyFilter(BaseModel): label: Optional[str] = None operator: PropertyOperator type: Literal["group"] = "group" - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class HogQLAutocompleteResponse(BaseModel): @@ -1280,8 +1280,8 @@ class HogQLAutocompleteResponse(BaseModel): extra="forbid", ) incomplete_list: bool = Field(..., description="Whether or not the suggestions returned are complete") - suggestions: List[AutocompleteCompletionItem] - timings: Optional[List[QueryTiming]] = Field( + suggestions: list[AutocompleteCompletionItem] + timings: Optional[list[QueryTiming]] = Field( default=None, description="Measured timings for different parts of the query generation process" ) @@ -1290,13 +1290,13 @@ class HogQLMetadataResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - errors: List[HogQLNotice] + errors: list[HogQLNotice] inputExpr: Optional[str] = None inputSelect: Optional[str] = None isValid: Optional[bool] = None isValidView: Optional[bool] = None - notices: List[HogQLNotice] - warnings: List[HogQLNotice] + notices: list[HogQLNotice] + warnings: list[HogQLNotice] class HogQLPropertyFilter(BaseModel): @@ -1306,7 +1306,7 @@ class HogQLPropertyFilter(BaseModel): key: str label: Optional[str] = None type: Literal["hogql"] = "hogql" - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, 
list[Union[str, float]]]] = None class HogQLQueryResponse(BaseModel): @@ -1314,11 +1314,11 @@ class HogQLQueryResponse(BaseModel): extra="forbid", ) clickhouse: Optional[str] = Field(default=None, description="Executed ClickHouse query") - columns: Optional[List] = Field(default=None, description="Returned columns") + columns: Optional[list] = Field(default=None, description="Returned columns") error: Optional[str] = Field( default=None, description="Query error. Returned only if 'explain' is true. Throws an error otherwise." ) - explain: Optional[List[str]] = Field(default=None, description="Query explanation output") + explain: Optional[list[str]] = Field(default=None, description="Query explanation output") hasMore: Optional[bool] = None hogql: Optional[str] = Field(default=None, description="Generated HogQL query") limit: Optional[int] = None @@ -1328,11 +1328,11 @@ class HogQLQueryResponse(BaseModel): ) offset: Optional[int] = None query: Optional[str] = Field(default=None, description="Input query string") - results: Optional[List] = Field(default=None, description="Query results") - timings: Optional[List[QueryTiming]] = Field( + results: Optional[list] = Field(default=None, description="Query results") + timings: Optional[list[QueryTiming]] = Field( default=None, description="Measured timings for different parts of the query generation process" ) - types: Optional[List] = Field(default=None, description="Types of returned columns") + types: Optional[list] = Field(default=None, description="Types of returned columns") class InsightActorsQueryBase(BaseModel): @@ -1349,7 +1349,7 @@ class LifecycleFilter(BaseModel): extra="forbid", ) showValuesOnSeries: Optional[bool] = None - toggledLifecycles: Optional[List[LifecycleToggle]] = None + toggledLifecycles: Optional[list[LifecycleToggle]] = None class LifecycleFilterLegacy(BaseModel): @@ -1357,7 +1357,7 @@ class LifecycleFilterLegacy(BaseModel): extra="forbid", ) show_values_on_series: Optional[bool] = None - toggledLifecycles: Optional[List[LifecycleToggle]] = None + toggledLifecycles: Optional[list[LifecycleToggle]] = None class LifecycleQueryResponse(BaseModel): @@ -1369,8 +1369,8 @@ class LifecycleQueryResponse(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[Dict[str, Any]] - timings: Optional[List[QueryTiming]] = None + results: list[dict[str, Any]] + timings: Optional[list[QueryTiming]] = None class Node(BaseModel): @@ -1389,8 +1389,8 @@ class PathsQueryResponse(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[Dict[str, Any]] - timings: Optional[List[QueryTiming]] = None + results: list[dict[str, Any]] + timings: Optional[list[QueryTiming]] = None class PersonPropertyFilter(BaseModel): @@ -1401,7 +1401,7 @@ class PersonPropertyFilter(BaseModel): label: Optional[str] = None operator: PropertyOperator type: Literal["person"] = Field(default="person", description="Person properties") - value: Optional[Union[str, float, List[Union[str, float]]]] = None + value: Optional[Union[str, float, list[Union[str, float]]]] = None class QueryResponse(BaseModel): @@ -1414,38 +1414,38 @@ class QueryResponse(BaseModel): modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None results: Any - timings: Optional[List[QueryTiming]] = None + timings: Optional[list[QueryTiming]] = None class 
QueryResponseAlternative3(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: List + columns: list hasMore: Optional[bool] = None hogql: str limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = None offset: Optional[int] = None - results: List[List] - timings: Optional[List[QueryTiming]] = None - types: List[str] + results: list[list] + timings: Optional[list[QueryTiming]] = None + types: list[str] class QueryResponseAlternative4(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: List + columns: list hasMore: Optional[bool] = None hogql: str limit: int missing_actors_count: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = None offset: int - results: List[List] - timings: Optional[List[QueryTiming]] = None - types: List[str] + results: list[list] + timings: Optional[list[QueryTiming]] = None + types: list[str] class QueryResponseAlternative6(BaseModel): @@ -1454,8 +1454,8 @@ class QueryResponseAlternative6(BaseModel): ) hasMore: Optional[bool] = None hogql: Optional[str] = None - results: List[TimelineEntry] - timings: Optional[List[QueryTiming]] = None + results: list[TimelineEntry] + timings: Optional[list[QueryTiming]] = None class QueryResponseAlternative7(BaseModel): @@ -1463,11 +1463,11 @@ class QueryResponseAlternative7(BaseModel): extra="forbid", ) clickhouse: Optional[str] = Field(default=None, description="Executed ClickHouse query") - columns: Optional[List] = Field(default=None, description="Returned columns") + columns: Optional[list] = Field(default=None, description="Returned columns") error: Optional[str] = Field( default=None, description="Query error. Returned only if 'explain' is true. Throws an error otherwise." ) - explain: Optional[List[str]] = Field(default=None, description="Query explanation output") + explain: Optional[list[str]] = Field(default=None, description="Query explanation output") hasMore: Optional[bool] = None hogql: Optional[str] = Field(default=None, description="Generated HogQL query") limit: Optional[int] = None @@ -1477,11 +1477,11 @@ class QueryResponseAlternative7(BaseModel): ) offset: Optional[int] = None query: Optional[str] = Field(default=None, description="Input query string") - results: Optional[List] = Field(default=None, description="Query results") - timings: Optional[List[QueryTiming]] = Field( + results: Optional[list] = Field(default=None, description="Query results") + timings: Optional[list[QueryTiming]] = Field( default=None, description="Measured timings for different parts of the query generation process" ) - types: Optional[List] = Field(default=None, description="Types of returned columns") + types: Optional[list] = Field(default=None, description="Types of returned columns") class QueryResponseAlternative9(BaseModel): @@ -1489,8 +1489,8 @@ class QueryResponseAlternative9(BaseModel): extra="forbid", ) incomplete_list: bool = Field(..., description="Whether or not the suggestions returned are complete") - suggestions: List[AutocompleteCompletionItem] - timings: Optional[List[QueryTiming]] = Field( + suggestions: list[AutocompleteCompletionItem] + timings: Optional[list[QueryTiming]] = Field( default=None, description="Measured timings for different parts of the query generation process" ) @@ -1504,16 +1504,16 @@ class QueryResponseAlternative10(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[WebOverviewItem] + results: list[WebOverviewItem] samplingRate: 
Optional[SamplingRate] = None - timings: Optional[List[QueryTiming]] = None + timings: Optional[list[QueryTiming]] = None class QueryResponseAlternative11(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: Optional[List] = None + columns: Optional[list] = None hasMore: Optional[bool] = None hogql: Optional[str] = None is_cached: Optional[bool] = None @@ -1522,26 +1522,26 @@ class QueryResponseAlternative11(BaseModel): modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None offset: Optional[int] = None - results: List + results: list samplingRate: Optional[SamplingRate] = None - timings: Optional[List[QueryTiming]] = None - types: Optional[List] = None + timings: Optional[list[QueryTiming]] = None + types: Optional[list] = None class QueryResponseAlternative12(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: Optional[List] = None + columns: Optional[list] = None hogql: Optional[str] = None is_cached: Optional[bool] = None last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List + results: list samplingRate: Optional[SamplingRate] = None - timings: Optional[List[QueryTiming]] = None - types: Optional[List] = None + timings: Optional[list[QueryTiming]] = None + types: Optional[list] = None class QueryResponseAlternative13(BaseModel): @@ -1553,23 +1553,23 @@ class QueryResponseAlternative13(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[Dict[str, Any]] - timings: Optional[List[QueryTiming]] = None + results: list[dict[str, Any]] + timings: Optional[list[QueryTiming]] = None class QueryResponseAlternative17(BaseModel): model_config = ConfigDict( extra="forbid", ) - columns: Optional[List] = None + columns: Optional[list] = None hasMore: Optional[bool] = None hogql: Optional[str] = None limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = None offset: Optional[int] = None results: FunnelCorrelationResult - timings: Optional[List[QueryTiming]] = None - types: Optional[List] = None + timings: Optional[list[QueryTiming]] = None + types: Optional[list] = None class RetentionFilter(BaseModel): @@ -1602,7 +1602,7 @@ class RetentionResult(BaseModel): ) date: AwareDatetime label: str - values: List[RetentionValue] + values: list[RetentionValue] class SavedInsightNode(BaseModel): @@ -1664,8 +1664,8 @@ class SessionsTimelineQueryResponse(BaseModel): ) hasMore: Optional[bool] = None hogql: Optional[str] = None - results: List[TimelineEntry] - timings: Optional[List[QueryTiming]] = None + results: list[TimelineEntry] + timings: Optional[list[QueryTiming]] = None class TimeToSeeDataJSONNode(BaseModel): @@ -1699,7 +1699,7 @@ class WebAnalyticsQueryBase(BaseModel): ) dateRange: Optional[DateRange] = None modifiers: Optional[HogQLQueryModifiers] = None - properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] + properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]] sampling: Optional[Sampling] = None useSessionsTable: Optional[bool] = None @@ -1712,7 +1712,7 @@ class WebOverviewQuery(BaseModel): dateRange: Optional[DateRange] = None kind: Literal["WebOverviewQuery"] = "WebOverviewQuery" modifiers: Optional[HogQLQueryModifiers] = None - properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] + properties: list[Union[EventPropertyFilter, PersonPropertyFilter, 
SessionPropertyFilter]] response: Optional[WebOverviewQueryResponse] = None sampling: Optional[Sampling] = None useSessionsTable: Optional[bool] = None @@ -1730,7 +1730,7 @@ class WebStatsTableQuery(BaseModel): kind: Literal["WebStatsTableQuery"] = "WebStatsTableQuery" limit: Optional[int] = None modifiers: Optional[HogQLQueryModifiers] = None - properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] + properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]] response: Optional[WebStatsTableQueryResponse] = None sampling: Optional[Sampling] = None useSessionsTable: Optional[bool] = None @@ -1743,7 +1743,7 @@ class WebTopClicksQuery(BaseModel): dateRange: Optional[DateRange] = None kind: Literal["WebTopClicksQuery"] = "WebTopClicksQuery" modifiers: Optional[HogQLQueryModifiers] = None - properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] + properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]] response: Optional[WebTopClicksQueryResponse] = None sampling: Optional[Sampling] = None useSessionsTable: Optional[bool] = None @@ -1752,7 +1752,7 @@ class WebTopClicksQuery(BaseModel): class AnyResponseType( RootModel[ Union[ - Dict[str, Any], + dict[str, Any], HogQLQueryResponse, HogQLMetadataResponse, HogQLAutocompleteResponse, @@ -1762,7 +1762,7 @@ class AnyResponseType( ] ): root: Union[ - Dict[str, Any], + dict[str, Any], HogQLQueryResponse, HogQLMetadataResponse, HogQLAutocompleteResponse, @@ -1778,7 +1778,7 @@ class DashboardFilter(BaseModel): date_from: Optional[str] = None date_to: Optional[str] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -1804,7 +1804,7 @@ class DataWarehouseNode(BaseModel): custom_name: Optional[str] = None distinct_id_field: str fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -1838,7 +1838,7 @@ class DataWarehouseNode(BaseModel): ) name: Optional[str] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -1855,7 +1855,7 @@ class DataWarehouseNode(BaseModel): ] ] ] = Field(default=None, description="Properties configurable in the interface") - response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response") + response: Optional[dict[str, Any]] = Field(default=None, description="Cached query response") table_name: str timestamp_field: str @@ -1868,7 +1868,7 @@ class DatabaseSchemaQuery(BaseModel): modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - response: Optional[Dict[str, List[DatabaseSchemaQueryResponseField]]] = Field( + response: Optional[dict[str, list[DatabaseSchemaQueryResponseField]]] = Field( default=None, description="Cached query response" ) @@ -1879,7 +1879,7 @@ class EntityNode(BaseModel): ) custom_name: Optional[str] = None fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -1911,7 +1911,7 @@ class EntityNode(BaseModel): ) name: Optional[str] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -1928,7 +1928,7 @@ class EntityNode(BaseModel): ] ] ] = Field(default=None, description="Properties configurable in the interface") - response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response") + response: Optional[dict[str, Any]] = Field(default=None, description="Cached query response") class 
EventsNode(BaseModel): @@ -1938,7 +1938,7 @@ class EventsNode(BaseModel): custom_name: Optional[str] = None event: Optional[str] = Field(default=None, description="The event or `null` for all events.") fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -1970,9 +1970,9 @@ class EventsNode(BaseModel): default=None, description="Modifiers used when performing the query" ) name: Optional[str] = None - orderBy: Optional[List[str]] = Field(default=None, description="Columns to order by") + orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by") properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2002,7 +2002,7 @@ class EventsQuery(BaseModel): event: Optional[str] = Field(default=None, description="Limit to events matching this string") filterTestAccounts: Optional[bool] = Field(default=None, description="Filter test accounts") fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2028,10 +2028,10 @@ class EventsQuery(BaseModel): default=None, description="Modifiers used when performing the query" ) offset: Optional[int] = Field(default=None, description="Number of rows to skip before returning rows") - orderBy: Optional[List[str]] = Field(default=None, description="Columns to order by") + orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by") personId: Optional[str] = Field(default=None, description="Show events for a given person") properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2049,8 +2049,8 @@ class EventsQuery(BaseModel): ] ] = Field(default=None, description="Properties configurable in the interface") response: Optional[EventsQueryResponse] = Field(default=None, description="Cached query response") - select: List[str] = Field(..., description="Return a limited set of data. Required.") - where: Optional[List[str]] = Field(default=None, description="HogQL filters to apply on returned data") + select: list[str] = Field(..., description="Return a limited set of data. 
Required.") + where: Optional[list[str]] = Field(default=None, description="HogQL filters to apply on returned data") class FunnelExclusionActionsNode(BaseModel): @@ -2059,7 +2059,7 @@ class FunnelExclusionActionsNode(BaseModel): ) custom_name: Optional[str] = None fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2094,7 +2094,7 @@ class FunnelExclusionActionsNode(BaseModel): ) name: Optional[str] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2111,7 +2111,7 @@ class FunnelExclusionActionsNode(BaseModel): ] ] ] = Field(default=None, description="Properties configurable in the interface") - response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response") + response: Optional[dict[str, Any]] = Field(default=None, description="Cached query response") class FunnelExclusionEventsNode(BaseModel): @@ -2121,7 +2121,7 @@ class FunnelExclusionEventsNode(BaseModel): custom_name: Optional[str] = None event: Optional[str] = Field(default=None, description="The event or `null` for all events.") fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2155,9 +2155,9 @@ class FunnelExclusionEventsNode(BaseModel): default=None, description="Modifiers used when performing the query" ) name: Optional[str] = None - orderBy: Optional[List[str]] = Field(default=None, description="Columns to order by") + orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by") properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2184,7 +2184,7 @@ class HogQLFilters(BaseModel): dateRange: Optional[DateRange] = None filterTestAccounts: Optional[bool] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2215,7 +2215,7 @@ class HogQLQuery(BaseModel): ) query: str response: Optional[HogQLQueryResponse] = Field(default=None, description="Cached query response") - values: Optional[Dict[str, Any]] = Field( + values: Optional[dict[str, Any]] = Field( default=None, description="Constant values that can be referenced with the {placeholder} syntax in the query" ) @@ -2227,7 +2227,7 @@ class PersonsNode(BaseModel): cohort: Optional[int] = None distinctId: Optional[str] = None fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2254,7 +2254,7 @@ class PersonsNode(BaseModel): ) offset: Optional[int] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2271,7 +2271,7 @@ class PersonsNode(BaseModel): ] ] ] = Field(default=None, description="Properties configurable in the interface") - response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response") + response: Optional[dict[str, Any]] = Field(default=None, description="Cached query response") search: Optional[str] = None @@ -2280,7 +2280,7 @@ class PropertyGroupFilterValue(BaseModel): extra="forbid", ) type: FilterLogicalOperator - values: List[ + values: list[ Union[ PropertyGroupFilterValue, Union[ @@ -2310,15 +2310,15 @@ class QueryResponseAlternative14(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[RetentionResult] - timings: Optional[List[QueryTiming]] = None + results: list[RetentionResult] + timings: Optional[list[QueryTiming]] = None class QueryResponseAlternative( RootModel[ 
Union[ QueryResponseAlternative1, - Dict[str, Any], + dict[str, Any], QueryResponseAlternative2, QueryResponseAlternative3, QueryResponseAlternative4, @@ -2333,13 +2333,13 @@ class QueryResponseAlternative( QueryResponseAlternative13, QueryResponseAlternative14, QueryResponseAlternative17, - Dict[str, List[DatabaseSchemaQueryResponseField]], + dict[str, list[DatabaseSchemaQueryResponseField]], ] ] ): root: Union[ QueryResponseAlternative1, - Dict[str, Any], + dict[str, Any], QueryResponseAlternative2, QueryResponseAlternative3, QueryResponseAlternative4, @@ -2354,7 +2354,7 @@ class QueryResponseAlternative( QueryResponseAlternative13, QueryResponseAlternative14, QueryResponseAlternative17, - Dict[str, List[DatabaseSchemaQueryResponseField]], + dict[str, list[DatabaseSchemaQueryResponseField]], ] @@ -2367,8 +2367,8 @@ class RetentionQueryResponse(BaseModel): last_refresh: Optional[str] = None modifiers: Optional[HogQLQueryModifiers] = None next_allowed_client_refresh: Optional[str] = None - results: List[RetentionResult] - timings: Optional[List[QueryTiming]] = None + results: list[RetentionResult] + timings: Optional[list[QueryTiming]] = None class SessionsTimelineQuery(BaseModel): @@ -2395,7 +2395,7 @@ class ActionsNode(BaseModel): ) custom_name: Optional[str] = None fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2428,7 +2428,7 @@ class ActionsNode(BaseModel): ) name: Optional[str] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2445,7 +2445,7 @@ class ActionsNode(BaseModel): ] ] ] = Field(default=None, description="Properties configurable in the interface") - response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response") + response: Optional[dict[str, Any]] = Field(default=None, description="Cached query response") class DataVisualizationNode(BaseModel): @@ -2465,7 +2465,7 @@ class FunnelsFilter(BaseModel): binCount: Optional[int] = None breakdownAttributionType: Optional[BreakdownAttributionType] = None breakdownAttributionValue: Optional[int] = None - exclusions: Optional[List[Union[FunnelExclusionEventsNode, FunnelExclusionActionsNode]]] = None + exclusions: Optional[list[Union[FunnelExclusionEventsNode, FunnelExclusionActionsNode]]] = None funnelAggregateByHogQL: Optional[str] = None funnelFromStep: Optional[int] = None funnelOrderType: Optional[StepOrderValue] = None @@ -2474,7 +2474,7 @@ class FunnelsFilter(BaseModel): funnelVizType: Optional[FunnelVizType] = None funnelWindowInterval: Optional[int] = None funnelWindowIntervalUnit: Optional[FunnelConversionWindowTimeUnit] = None - hidden_legend_breakdowns: Optional[List[str]] = None + hidden_legend_breakdowns: Optional[list[str]] = None layout: Optional[FunnelLayout] = None @@ -2508,7 +2508,7 @@ class PropertyGroupFilter(BaseModel): extra="forbid", ) type: FilterLogicalOperator - values: List[PropertyGroupFilterValue] + values: list[PropertyGroupFilterValue] class RetentionQuery(BaseModel): @@ -2526,7 +2526,7 @@ class RetentionQuery(BaseModel): ) properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2567,7 +2567,7 @@ class StickinessQuery(BaseModel): ) properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2587,7 +2587,7 @@ class StickinessQuery(BaseModel): ] ] = Field(default=None, description="Property filters for all series") samplingFactor: Optional[float] = Field(default=None, description="Sampling rate") - 
series: List[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( ..., description="Events and actions to include" ) stickinessFilter: Optional[StickinessFilter] = Field( @@ -2614,7 +2614,7 @@ class TrendsQuery(BaseModel): ) properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2635,7 +2635,7 @@ class TrendsQuery(BaseModel): ] = Field(default=None, description="Property filters for all series") response: Optional[TrendsQueryResponse] = None samplingFactor: Optional[float] = Field(default=None, description="Sampling rate") - series: List[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( ..., description="Events and actions to include" ) trendsFilter: Optional[TrendsFilter] = Field(default=None, description="Properties specific to the trends insight") @@ -2645,22 +2645,22 @@ class FilterType(BaseModel): model_config = ConfigDict( extra="forbid", ) - actions: Optional[List[Dict[str, Any]]] = None + actions: Optional[list[dict[str, Any]]] = None aggregation_group_type_index: Optional[float] = None - breakdown: Optional[Union[str, float, List[Union[str, float]]]] = None + breakdown: Optional[Union[str, float, list[Union[str, float]]]] = None breakdown_group_type_index: Optional[float] = None breakdown_hide_other_aggregation: Optional[bool] = None breakdown_limit: Optional[int] = None breakdown_normalize_url: Optional[bool] = None breakdown_type: Optional[BreakdownType] = None - breakdowns: Optional[List[Breakdown]] = None - data_warehouse: Optional[List[Dict[str, Any]]] = None + breakdowns: Optional[list[Breakdown]] = None + data_warehouse: Optional[list[dict[str, Any]]] = None date_from: Optional[str] = None date_to: Optional[str] = None entity_id: Optional[Union[str, float]] = None entity_math: Optional[str] = None entity_type: Optional[EntityType] = None - events: Optional[List[Dict[str, Any]]] = None + events: Optional[list[dict[str, Any]]] = None explicit_date: Optional[Union[bool, str]] = Field( default=None, description='Whether the `date_from` and `date_to` should be used verbatim. Disables rounding to the start and end of period. Strings are cast to bools, e.g. 
"true" -> true.', @@ -2669,10 +2669,10 @@ class FilterType(BaseModel): from_dashboard: Optional[Union[bool, float]] = None insight: Optional[InsightType] = None interval: Optional[IntervalType] = None - new_entity: Optional[List[Dict[str, Any]]] = None + new_entity: Optional[list[dict[str, Any]]] = None properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2716,7 +2716,7 @@ class FunnelsQuery(BaseModel): ) properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2736,7 +2736,7 @@ class FunnelsQuery(BaseModel): ] ] = Field(default=None, description="Property filters for all series") samplingFactor: Optional[float] = Field(default=None, description="Sampling rate") - series: List[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( ..., description="Events and actions to include" ) @@ -2756,7 +2756,7 @@ class InsightsQueryBase(BaseModel): ) properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2798,7 +2798,7 @@ class LifecycleQuery(BaseModel): ) properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2819,7 +2819,7 @@ class LifecycleQuery(BaseModel): ] = Field(default=None, description="Property filters for all series") response: Optional[LifecycleQueryResponse] = None samplingFactor: Optional[float] = Field(default=None, description="Sampling rate") - series: List[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode]] = Field( ..., description="Events and actions to include" ) @@ -2844,7 +2844,7 @@ class FunnelsActorsQuery(BaseModel): model_config = ConfigDict( extra="forbid", ) - funnelCustomSteps: Optional[List[int]] = Field( + funnelCustomSteps: Optional[list[int]] = Field( default=None, description="Custom step numbers to get persons for. This overrides `funnelStep`. Primarily for correlation use.", ) @@ -2852,7 +2852,7 @@ class FunnelsActorsQuery(BaseModel): default=None, description="Index of the step for which we want to get the timestamp for, per person. Positive for converted persons, negative for dropped of persons.", ) - funnelStepBreakdown: Optional[Union[str, float, List[Union[str, float]]]] = Field( + funnelStepBreakdown: Optional[Union[str, float, list[Union[str, float]]]] = Field( default=None, description="The breakdown value for which to get persons for. 
This is an array for person and event properties, a string for groups and an integer for cohorts.", ) @@ -2887,7 +2887,7 @@ class PathsQuery(BaseModel): pathsFilter: PathsFilter = Field(..., description="Properties specific to the paths insight") properties: Optional[ Union[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -2914,11 +2914,11 @@ class FunnelCorrelationQuery(BaseModel): model_config = ConfigDict( extra="forbid", ) - funnelCorrelationEventExcludePropertyNames: Optional[List[str]] = None - funnelCorrelationEventNames: Optional[List[str]] = None - funnelCorrelationExcludeEventNames: Optional[List[str]] = None - funnelCorrelationExcludeNames: Optional[List[str]] = None - funnelCorrelationNames: Optional[List[str]] = None + funnelCorrelationEventExcludePropertyNames: Optional[list[str]] = None + funnelCorrelationEventNames: Optional[list[str]] = None + funnelCorrelationExcludeEventNames: Optional[list[str]] = None + funnelCorrelationExcludeNames: Optional[list[str]] = None + funnelCorrelationNames: Optional[list[str]] = None funnelCorrelationType: FunnelCorrelationResultsType kind: Literal["FunnelCorrelationQuery"] = "FunnelCorrelationQuery" response: Optional[FunnelCorrelationResponse] = None @@ -2956,7 +2956,7 @@ class FunnelCorrelationActorsQuery(BaseModel): funnelCorrelationPersonConverted: Optional[bool] = None funnelCorrelationPersonEntity: Optional[Union[EventsNode, ActionsNode, DataWarehouseNode]] = None funnelCorrelationPropertyValues: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -3015,7 +3015,7 @@ class ActorsQuery(BaseModel): extra="forbid", ) fixedProperties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -3038,9 +3038,9 @@ class ActorsQuery(BaseModel): default=None, description="Modifiers used when performing the query" ) offset: Optional[int] = None - orderBy: Optional[List[str]] = None + orderBy: Optional[list[str]] = None properties: Optional[ - List[ + list[ Union[ EventPropertyFilter, PersonPropertyFilter, @@ -3059,7 +3059,7 @@ class ActorsQuery(BaseModel): ] = None response: Optional[ActorsQueryResponse] = Field(default=None, description="Cached query response") search: Optional[str] = None - select: Optional[List[str]] = None + select: Optional[list[str]] = None source: Optional[Union[InsightActorsQuery, FunnelsActorsQuery, FunnelCorrelationActorsQuery, HogQLQuery]] = None @@ -3070,7 +3070,7 @@ class DataTableNode(BaseModel): allowSorting: Optional[bool] = Field( default=None, description="Can the user click on column headers to sort the table? (default: true)" ) - columns: Optional[List[str]] = Field( + columns: Optional[list[str]] = Field( default=None, description="Columns shown in the table, unless the `source` provides them." ) embedded: Optional[bool] = Field(default=None, description="Uses the embedded version of LemonTable") @@ -3078,7 +3078,7 @@ class DataTableNode(BaseModel): default=None, description="Can expand row to show raw event data (default: true)" ) full: Optional[bool] = Field(default=None, description="Show with most visual options enabled. 
Used in scenes.") - hiddenColumns: Optional[List[str]] = Field( + hiddenColumns: Optional[list[str]] = Field( default=None, description="Columns that aren't shown in the table, even if in columns or returned data" ) kind: Literal["DataTableNode"] = "DataTableNode" diff --git a/posthog/session_recordings/models/metadata.py b/posthog/session_recordings/models/metadata.py index 98359a09f30fe..dd26fde6a3b32 100644 --- a/posthog/session_recordings/models/metadata.py +++ b/posthog/session_recordings/models/metadata.py @@ -1,7 +1,7 @@ from datetime import datetime -from typing import Dict, List, Optional, TypedDict, Union +from typing import Optional, TypedDict, Union, Literal -SnapshotData = Dict +SnapshotData = dict WindowId = Optional[str] @@ -22,7 +22,7 @@ class SessionRecordingEventSummary(TypedDict): timestamp: int type: int # keys of this object should be any of EVENT_SUMMARY_DATA_INCLUSIONS - data: Dict[str, Union[int, str]] + data: dict[str, Union[int, str]] # NOTE: MatchingSessionRecordingEvent is a minimal version of full events that is used to display events matching a filter on the frontend @@ -35,7 +35,7 @@ class MatchingSessionRecordingEvent(TypedDict): class DecompressedRecordingData(TypedDict): has_next: bool - snapshot_data_by_window_id: Dict[WindowId, List[Union[SnapshotData, SessionRecordingEventSummary]]] + snapshot_data_by_window_id: dict[WindowId, list[Union[SnapshotData, SessionRecordingEventSummary]]] class RecordingMetadata(TypedDict): @@ -51,13 +51,14 @@ class RecordingMetadata(TypedDict): first_url: str duration: int active_seconds: int + snapshot_source: Literal["web", "mobile"] class RecordingMatchingEvents(TypedDict): - events: List[MatchingSessionRecordingEvent] + events: list[MatchingSessionRecordingEvent] class PersistedRecordingV1(TypedDict): version: str # "2022-12-22" - snapshot_data_by_window_id: Dict[WindowId, List[Union[SnapshotData, SessionRecordingEventSummary]]] + snapshot_data_by_window_id: dict[WindowId, list[Union[SnapshotData, SessionRecordingEventSummary]]] distinct_id: str diff --git a/posthog/session_recordings/models/session_recording.py b/posthog/session_recordings/models/session_recording.py index 5e057b7e72150..359df2faf94e7 100644 --- a/posthog/session_recordings/models/session_recording.py +++ b/posthog/session_recordings/models/session_recording.py @@ -1,8 +1,9 @@ -from typing import Any, List, Literal, Optional +from typing import Any, Literal, Optional, Union from django.conf import settings from django.db import models +from posthog.models.person.missing_person import MissingPerson from posthog.models.person.person import Person from posthog.models.signals import mutable_receiver from posthog.models.team.team import Team @@ -57,7 +58,7 @@ class Meta: # DYNAMIC FIELDS viewed: Optional[bool] = False - person: Optional[Person] = None + _person: Optional[Person] = None matching_events: Optional[RecordingMatchingEvents] = None # Metadata can be loaded from Clickhouse or S3 @@ -107,9 +108,24 @@ def storage(self): return "object_storage_lts" - def load_person(self) -> Optional[Person]: - if self.person: - return self.person + @property + def snapshot_source(self) -> Optional[str]: + return self._metadata.get("snapshot_source", "web") if self._metadata else "web" + + @property + def person(self) -> Union[Person, MissingPerson]: + if self._person: + return self._person + + return MissingPerson(team_id=self.team_id, distinct_id=self.distinct_id) + + @person.setter + def person(self, value: Person): + self._person = value + + def load_person(self): 
+ if self._person: + return try: self.person = Person.objects.get( @@ -117,9 +133,8 @@ def load_person(self) -> Optional[Person]: persondistinctid__team_id=self.team, team=self.team, ) - return self.person except Person.DoesNotExist: - return None + pass def check_viewed_for_user(self, user: Any, save_viewed=False) -> None: if not save_viewed: @@ -132,7 +147,7 @@ def check_viewed_for_user(self, user: Any, save_viewed=False) -> None: def build_object_storage_path(self, version: Literal["2023-08-01", "2022-12-22"]) -> str: if version == "2022-12-22": - path_parts: List[str] = [ + path_parts: list[str] = [ settings.OBJECT_STORAGE_SESSION_RECORDING_LTS_FOLDER, f"team-{self.team_id}", f"session-{self.session_id}", @@ -157,7 +172,7 @@ def get_or_build(session_id: str, team: Team) -> "SessionRecording": return SessionRecording(session_id=session_id, team=team) @staticmethod - def get_or_build_from_clickhouse(team: Team, ch_recordings: List[dict]) -> "List[SessionRecording]": + def get_or_build_from_clickhouse(team: Team, ch_recordings: list[dict]) -> "list[SessionRecording]": session_ids = sorted([recording["session_id"] for recording in ch_recordings]) recordings_by_id = { @@ -189,7 +204,7 @@ def get_or_build_from_clickhouse(team: Team, ch_recordings: List[dict]) -> "List return recordings - def set_start_url_from_urls(self, urls: Optional[List[str]] = None, first_url: Optional[str] = None): + def set_start_url_from_urls(self, urls: Optional[list[str]] = None, first_url: Optional[str] = None): if first_url: self.start_url = first_url[:512] return diff --git a/posthog/session_recordings/queries/session_query.py b/posthog/session_recordings/queries/session_query.py index d0ff7b32afb4e..eb856194806de 100644 --- a/posthog/session_recordings/queries/session_query.py +++ b/posthog/session_recordings/queries/session_query.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, Tuple, Union +from typing import Optional, Union from posthog.models import Filter from posthog.models.filters.path_filter import PathFilter @@ -29,7 +29,7 @@ def __init__( self._team = team self._session_id_alias = session_id_alias - def get_query(self) -> Tuple[str, Dict]: + def get_query(self) -> tuple[str, dict]: params = {"team_id": self._team.pk} query_date_range = QueryDateRange(filter=self._filter, team=self._team, should_round=False) diff --git a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py index 4f64fff7f8ab3..b9458c597c9fc 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py +++ b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py @@ -1,7 +1,7 @@ import dataclasses import re from datetime import datetime, timedelta -from typing import Any, Dict, List, Literal, NamedTuple, Tuple, Union +from typing import Any, Literal, NamedTuple, Union from django.conf import settings from sentry_sdk import capture_exception @@ -25,15 +25,15 @@ class SummaryEventFiltersSQL: having_conditions: str having_select: str where_conditions: str - params: Dict[str, Any] + params: dict[str, Any] class SessionRecordingQueryResult(NamedTuple): - results: List + results: list has_more_recording: bool -def _get_recording_start_time_clause(recording_filters: SessionRecordingsFilter) -> Tuple[str, Dict[str, Any]]: +def _get_recording_start_time_clause(recording_filters: SessionRecordingsFilter) -> tuple[str, dict[str, Any]]: start_time_clause = "" start_time_params 
= {} if recording_filters.date_from: @@ -52,7 +52,7 @@ def _get_order_by_clause(filter_order: str | None) -> str: def _get_filter_by_log_text_session_ids_clause( team: Team, recording_filters: SessionRecordingsFilter, column_name="session_id" -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: if not recording_filters.console_search_query: return "", {} @@ -66,7 +66,7 @@ def _get_filter_by_log_text_session_ids_clause( def _get_filter_by_provided_session_ids_clause( recording_filters: SessionRecordingsFilter, column_name="session_id" -) -> Tuple[str, Dict[str, Any]]: +) -> tuple[str, dict[str, Any]]: if recording_filters.session_ids is None: return "", {} @@ -111,7 +111,7 @@ def ttl_days(self): # a recording spans the time boundaries # TODO This is just copied from below @cached_property - def _get_events_timestamp_clause(self) -> Tuple[str, Dict[str, Any]]: + def _get_events_timestamp_clause(self) -> tuple[str, dict[str, Any]]: timestamp_clause = "" timestamp_params = {} if self._filter.date_from: @@ -124,8 +124,8 @@ def _get_events_timestamp_clause(self) -> Tuple[str, Dict[str, Any]]: @staticmethod def _get_console_log_clause( - console_logs_filter: List[Literal["error", "warn", "info"]], - ) -> Tuple[str, Dict[str, Any]]: + console_logs_filter: list[Literal["error", "warn", "info"]], + ) -> tuple[str, dict[str, Any]]: return ( ( f"AND level in %(console_logs_levels)s", @@ -135,7 +135,7 @@ def _get_console_log_clause( else ("", {}) ) - def get_query(self) -> Tuple[str, Dict]: + def get_query(self) -> tuple[str, dict]: if not self._filter.console_search_query: return "", {} @@ -177,7 +177,7 @@ def _determine_should_join_distinct_ids(self) -> None: pass # we have to implement this from EventQuery but don't need it - def _data_to_return(self, results: List[Any]) -> List[Dict[str, Any]]: + def _data_to_return(self, results: list[Any]) -> list[dict[str, Any]]: pass _raw_persons_query = """ @@ -195,7 +195,7 @@ def _data_to_return(self, results: List[Any]) -> List[Dict[str, Any]]: {filter_by_person_uuid_condition} """ - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: # we don't support PoE V1 - hopefully that's ok if self._person_on_events_mode == PersonsOnEventsMode.person_id_override_properties_on_events: return "", {} @@ -280,7 +280,7 @@ def _determine_should_join_distinct_ids(self) -> None: pass # we have to implement this from EventQuery but don't need it - def _data_to_return(self, results: List[Any]) -> List[Dict[str, Any]]: + def _data_to_return(self, results: list[Any]) -> list[dict[str, Any]]: pass def _determine_should_join_events(self): @@ -354,7 +354,7 @@ def ttl_days(self): HAVING 1=1 {event_filter_having_events_condition} """ - def format_event_filter(self, entity: Entity, prepend: str, team_id: int) -> Tuple[str, Dict[str, Any]]: + def format_event_filter(self, entity: Entity, prepend: str, team_id: int) -> tuple[str, dict[str, Any]]: filter_sql, params = format_entity_filter( team_id=team_id, entity=entity, @@ -382,8 +382,8 @@ def format_event_filter(self, entity: Entity, prepend: str, team_id: int) -> Tup @cached_property def build_event_filters(self) -> SummaryEventFiltersSQL: - event_names_to_filter: List[Union[int, str]] = [] - params: Dict = {} + event_names_to_filter: list[Union[int, str]] = [] + params: dict = {} condition_sql = "" for index, entity in enumerate(self._filter.entities): @@ -432,7 +432,7 @@ def build_event_filters(self) -> SummaryEventFiltersSQL: params=params, ) - def 
_get_groups_query(self) -> Tuple[str, Dict]: + def _get_groups_query(self) -> tuple[str, dict]: try: from ee.clickhouse.queries.groups_join_query import GroupsJoinQuery except ImportError: @@ -449,7 +449,7 @@ def _get_groups_query(self) -> Tuple[str, Dict]: # We want to select events beyond the range of the recording to handle the case where # a recording spans the time boundaries @cached_property - def _get_events_timestamp_clause(self) -> Tuple[str, Dict[str, Any]]: + def _get_events_timestamp_clause(self) -> tuple[str, dict[str, Any]]: timestamp_clause = "" timestamp_params = {} if self._filter.date_from: @@ -460,7 +460,7 @@ def _get_events_timestamp_clause(self) -> Tuple[str, Dict[str, Any]]: timestamp_params["event_end_time"] = self._filter.date_to + timedelta(hours=12) return timestamp_clause, timestamp_params - def get_query(self, select_event_ids: bool = False) -> Tuple[str, Dict[str, Any]]: + def get_query(self, select_event_ids: bool = False) -> tuple[str, dict[str, Any]]: if not self._determine_should_join_events(): return "", {} @@ -564,7 +564,7 @@ def _persons_join_or_subquery(self, event_filters, prop_query): return persons_join, persons_select_params, persons_sub_query @cached_property - def _get_person_id_clause(self) -> Tuple[str, Dict[str, Any]]: + def _get_person_id_clause(self) -> tuple[str, dict[str, Any]]: person_id_clause = "" person_id_params = {} if self._filter.person_uuid: @@ -572,7 +572,7 @@ def _get_person_id_clause(self) -> Tuple[str, Dict[str, Any]]: person_id_params = {"person_uuid": self._filter.person_uuid} return person_id_clause, person_id_params - def matching_events(self) -> List[str]: + def matching_events(self) -> list[str]: self._filter.hogql_context.modifiers.personsOnEventsMode = self._person_on_events_mode query, query_params = self.get_query(select_event_ids=True) query_results = sync_execute(query, {**query_params, **self._filter.hogql_context.values}) @@ -644,7 +644,7 @@ def ttl_days(self): """ @staticmethod - def _data_to_return(results: List[Any]) -> List[Dict[str, Any]]: + def _data_to_return(results: list[Any]) -> list[dict[str, Any]]: default_columns = [ "session_id", "team_id", @@ -694,7 +694,7 @@ def run(self) -> SessionRecordingQueryResult: def limit(self): return self._filter.limit or self.SESSION_RECORDINGS_DEFAULT_LIMIT - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: offset = self._filter.offset or 0 base_params = { @@ -758,7 +758,7 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: def duration_clause( self, duration_filter_type: Literal["duration", "active_seconds", "inactive_seconds"], - ) -> Tuple[str, Dict[str, Any]]: + ) -> tuple[str, dict[str, Any]]: duration_clause = "" duration_params = {} if self._filter.recording_duration_filter: @@ -775,7 +775,7 @@ def duration_clause( return duration_clause, duration_params @staticmethod - def _get_console_log_clause(console_logs_filter: List[Literal["error", "warn", "info"]]) -> str: + def _get_console_log_clause(console_logs_filter: list[Literal["error", "warn", "info"]]) -> str: # to avoid a CH migration we map from info to log when constructing the query here filters = [f"console_{'log' if log == 'info' else log}_count > 0" for log in console_logs_filter] return f"AND ({' OR '.join(filters)})" if filters else "" diff --git a/posthog/session_recordings/queries/session_recording_properties.py b/posthog/session_recordings/queries/session_recording_properties.py index e7c5544f14fe7..2d2ef187c0407 100644 --- 
a/posthog/session_recordings/queries/session_recording_properties.py +++ b/posthog/session_recordings/queries/session_recording_properties.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import TYPE_CHECKING, Any, Dict, List, NamedTuple, Tuple +from typing import TYPE_CHECKING, Any, NamedTuple from posthog.client import sync_execute from posthog.models.event.util import parse_properties @@ -14,12 +14,12 @@ class EventFiltersSQL(NamedTuple): aggregate_select_clause: str aggregate_having_clause: str where_conditions: str - params: Dict[str, Any] + params: dict[str, Any] class SessionRecordingProperties(EventQuery): _filter: SessionRecordingsFilter - _session_ids: List[str] + _session_ids: list[str] SESSION_RECORDING_PROPERTIES_ALLOWLIST = { "$os", @@ -47,7 +47,7 @@ class SessionRecordingProperties(EventQuery): GROUP BY session_id """ - def __init__(self, team: "Team", session_ids: List[str], filter: SessionRecordingsFilter): + def __init__(self, team: "Team", session_ids: list[str], filter: SessionRecordingsFilter): super().__init__(team=team, filter=filter) self._session_ids = sorted(session_ids) # Sort for stable queries @@ -56,7 +56,7 @@ def _determine_should_join_distinct_ids(self) -> None: # We want to select events beyond the range of the recording to handle the case where # a recording spans the time boundaries - def _get_events_timestamp_clause(self) -> Tuple[str, Dict[str, Any]]: + def _get_events_timestamp_clause(self) -> tuple[str, dict[str, Any]]: timestamp_clause = "" timestamp_params = {} if self._filter.date_from: @@ -67,11 +67,11 @@ def _get_events_timestamp_clause(self) -> Tuple[str, Dict[str, Any]]: timestamp_params["event_end_time"] = self._filter.date_to + timedelta(hours=12) return timestamp_clause, timestamp_params - def format_session_recording_id_filters(self) -> Tuple[str, Dict]: + def format_session_recording_id_filters(self) -> tuple[str, dict]: where_conditions = "AND session_id IN %(session_ids)s" return where_conditions, {"session_ids": self._session_ids} - def get_query(self) -> Tuple[str, Dict[str, Any]]: + def get_query(self) -> tuple[str, dict[str, Any]]: base_params = {"team_id": self._team_id} ( events_timestamp_clause, @@ -90,7 +90,7 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: {**base_params, **events_timestamp_params, **session_ids_params}, ) - def _data_to_return(self, results: List[Any]) -> List[Dict[str, Any]]: + def _data_to_return(self, results: list[Any]) -> list[dict[str, Any]]: return [ { "session_id": row[0], @@ -99,7 +99,7 @@ def _data_to_return(self, results: List[Any]) -> List[Dict[str, Any]]: for row in results ] - def run(self) -> List: + def run(self) -> list: query, query_params = self.get_query() query_results = sync_execute(query, query_params) session_recording_properties = self._data_to_return(query_results) diff --git a/posthog/session_recordings/queries/session_replay_events.py b/posthog/session_recordings/queries/session_replay_events.py index f6ebb417df84b..1607aad167176 100644 --- a/posthog/session_recordings/queries/session_replay_events.py +++ b/posthog/session_recordings/queries/session_replay_events.py @@ -1,7 +1,9 @@ from datetime import datetime, timedelta -from typing import Optional, Tuple, List +from typing import Optional +import pytz from django.conf import settings +from django.core.cache import cache from posthog.clickhouse.client import sync_execute from posthog.cloud_utils import is_cloud @@ -15,26 +17,47 @@ ) +def seconds_until_midnight(): + now = datetime.now(pytz.timezone("UTC")) + 
midnight = (now + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0) + difference = midnight - now + return difference.seconds + + class SessionReplayEvents: def exists(self, session_id: str, team: Team) -> bool: - # TODO we could cache this result when its result is True. + cache_key = f"summarize_recording_existence_team_{team.pk}_id_{session_id}" + cached_response = cache.get(cache_key) + if isinstance(cached_response, bool): + return cached_response + # Once we know that session exists we don't need to check again (until the end of the day since TTL might apply) + existence = self._check_exists_within_days(ttl_days(team), session_id, team) or self._check_exists_within_days( + 370, session_id, team + ) + + if existence: + # let's be cautious and not cache non-existence + # in case we manage to check existence just before the first event hits ClickHouse + # that should be impossible but cache invalidation is hard etc etc + cache.set(cache_key, existence, timeout=seconds_until_midnight()) + return existence + + @staticmethod + def _check_exists_within_days(days: int, session_id: str, team: Team) -> bool: result = sync_execute( """ - SELECT count(1) + SELECT count() FROM session_replay_events - WHERE team_id = %(team_id)s + PREWHERE team_id = %(team_id)s AND session_id = %(session_id)s - -- we should check for the `ttl_days(team)` TTL here, - -- but for a shared/pinned recording - -- the TTL effectively becomes 1 year - -- and we don't know which we're dealing with - AND min_first_timestamp >= now() - INTERVAL 370 DAY + AND min_first_timestamp >= now() - INTERVAL %(days)s DAY + AND min_first_timestamp <= now() """, { "team_id": team.pk, "session_id": session_id, - "recording_ttl_days": ttl_days(team), + "days": days, }, ) return result[0][0] > 0 @@ -58,7 +81,8 @@ def get_metadata( sum(active_milliseconds)/1000 as active_seconds, sum(console_log_count) as console_log_count, sum(console_warn_count) as console_warn_count, - sum(console_error_count) as console_error_count + sum(console_error_count) as console_error_count, + argMinMerge(snapshot_source) as snapshot_source FROM session_replay_events PREWHERE @@ -74,7 +98,7 @@ def get_metadata( ) ) - replay_response: List[Tuple] = sync_execute( + replay_response: list[tuple] = sync_execute( query, { "team_id": team.pk, @@ -102,11 +126,12 @@ def get_metadata( console_log_count=replay[9], console_warn_count=replay[10], console_error_count=replay[11], + snapshot_source=replay[12] or "web", ) def get_events( - self, session_id: str, team: Team, metadata: RecordingMetadata, events_to_ignore: List[str] | None - ) -> Tuple[List | None, List | None]: + self, session_id: str, team: Team, metadata: RecordingMetadata, events_to_ignore: list[str] | None + ) -> tuple[list | None, list | None]: from posthog.schema import HogQLQuery, HogQLQueryResponse from posthog.hogql_queries.hogql_query_runner import HogQLQueryRunner @@ -142,7 +167,6 @@ def get_events( def ttl_days(team: Team) -> int: - ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7 if is_cloud(): # NOTE: We use Playlists as a proxy to see if they are subbed to Recordings is_paid = team.organization.is_feature_available(AvailableFeature.RECORDINGS_PLAYLISTS) @@ -153,5 +177,6 @@ def ttl_days(team: Team) -> int: if days_since_blob_ingestion < ttl_days: ttl_days = days_since_blob_ingestion - + else: + ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7 return ttl_days diff --git a/posthog/session_recordings/queries/test/session_replay_sql.py 
b/posthog/session_recordings/queries/test/session_replay_sql.py index b72c64dbc0f68..fbec2ea065036 100644 --- a/posthog/session_recordings/queries/test/session_replay_sql.py +++ b/posthog/session_recordings/queries/test/session_replay_sql.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Optional, List, Dict +from typing import Optional from uuid import uuid4 from dateutil.parser import parse @@ -113,7 +113,7 @@ def produce_replay_summary( console_log_count: Optional[int] = None, console_warn_count: Optional[int] = None, console_error_count: Optional[int] = None, - log_messages: Dict[str, List[str]] | None = None, + log_messages: dict[str, list[str]] | None = None, snapshot_source: str | None = None, ): if log_messages is None: diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py index 1af1554415de3..5abfc3727fe7f 100644 --- a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py +++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import Dict from uuid import uuid4 from dateutil.relativedelta import relativedelta @@ -76,7 +75,7 @@ def create_event( properties=properties, ) - def _filter_recordings_by(self, recordings_filter: Dict) -> SessionRecordingQueryResult: + def _filter_recordings_by(self, recordings_filter: dict) -> SessionRecordingQueryResult: the_filter = SessionRecordingsFilter(team=self.team, data=recordings_filter) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=the_filter, team=self.team) return session_recording_list_instance.run() diff --git a/posthog/session_recordings/queries/test/test_session_replay_events.py b/posthog/session_recordings/queries/test/test_session_replay_events.py index 04393f8500c07..d6b80f208edb2 100644 --- a/posthog/session_recordings/queries/test/test_session_replay_events.py +++ b/posthog/session_recordings/queries/test/test_session_replay_events.py @@ -52,6 +52,7 @@ def test_get_metadata(self) -> None: "keypress_count": 2, "mouse_activity_count": 2, "start_time": self.base_time, + "snapshot_source": "web", } def test_get_nonexistent_metadata(self) -> None: diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py index d6890c63517e1..8e943db34a600 100644 --- a/posthog/session_recordings/realtime_snapshots.py +++ b/posthog/session_recordings/realtime_snapshots.py @@ -1,6 +1,6 @@ import json from time import sleep -from typing import Dict, List, Optional +from typing import Optional import structlog from prometheus_client import Counter @@ -54,7 +54,7 @@ def publish_subscription(team_id: str, session_id: str) -> None: raise e -def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Optional[List[Dict]]: +def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Optional[list[dict]]: try: redis = get_client(settings.SESSION_RECORDING_REDIS_URL) key = get_key(team_id, session_id) diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index 9249d92ae1ccb..6d4a638b4d051 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -1,9 +1,9 @@ import os import time from datetime import datetime, timedelta, 
timezone - +from prometheus_client import Histogram import json -from typing import Any, List, Type, cast, Dict, Tuple +from typing import Any, cast from django.conf import settings @@ -50,9 +50,7 @@ from ee.session_recordings.session_summary.summarize_session import summarize_recording from ee.session_recordings.ai.similar_recordings import similar_recordings from ee.session_recordings.ai.error_clustering import error_clustering -from posthog.session_recordings.snapshots.convert_legacy_snapshots import ( - convert_original_version_lts_recording, -) +from posthog.session_recordings.snapshots.convert_legacy_snapshots import convert_original_version_lts_recording from posthog.storage import object_storage from prometheus_client import Counter @@ -63,6 +61,21 @@ labelnames=["source"], ) +GENERATE_PRE_SIGNED_URL_HISTOGRAM = Histogram( + "session_snapshots_generate_pre_signed_url_histogram", + "Time taken to generate a pre-signed URL for a session snapshot", +) + +GET_REALTIME_SNAPSHOTS_FROM_REDIS = Histogram( + "session_snapshots_get_realtime_snapshots_from_redis_histogram", + "Time taken to get realtime snapshots from Redis", +) + +STREAM_RESPONSE_TO_CLIENT_HISTOGRAM = Histogram( + "session_snapshots_stream_response_to_client_histogram", + "Time taken to stream a session snapshot to the client", +) + class SurrogatePairSafeJSONEncoder(JSONEncoder): def encode(self, o): @@ -134,6 +147,7 @@ class Meta: "start_url", "person", "storage", + "snapshot_source", ] read_only_fields = [ @@ -153,6 +167,7 @@ class Meta: "console_error_count", "start_url", "storage", + "snapshot_source", ] @@ -189,7 +204,7 @@ class SessionRecordingSnapshotsSerializer(serializers.Serializer): def list_recordings_response( - filter: SessionRecordingsFilter, request: request.Request, serializer_context: Dict[str, Any] + filter: SessionRecordingsFilter, request: request.Request, serializer_context: dict[str, Any] ) -> Response: (recordings, timings) = list_recordings(filter, request, context=serializer_context) response = Response(recordings) @@ -209,7 +224,7 @@ class SessionRecordingViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet): sharing_enabled_actions = ["retrieve", "snapshots", "snapshot_file"] - def get_serializer_class(self) -> Type[serializers.Serializer]: + def get_serializer_class(self) -> type[serializers.Serializer]: if isinstance(self.request.successful_authenticator, SharingAccessTokenAuthentication): return SessionRecordingSharedSerializer else: @@ -250,7 +265,7 @@ def matching_events(self, request: request.Request, *args: Any, **kwargs: Any) - "Must specify at least one event or action filter", ) - matching_events: List[str] = SessionIdEventsQuery(filter=filter, team=self.team).matching_events() + matching_events: list[str] = SessionIdEventsQuery(filter=filter, team=self.team).matching_events() return JsonResponse(data={"results": matching_events}) # Returns metadata about the recording @@ -340,9 +355,9 @@ def snapshots(self, request: request.Request, **kwargs): SNAPSHOT_SOURCE_REQUESTED.labels(source=source).inc() if not source: - sources: List[dict] = [] + sources: list[dict] = [] - blob_keys: List[str] | None = None + blob_keys: list[str] | None = None if recording.object_storage_path: if recording.storage_version == "2023-08-01": blob_prefix = recording.object_storage_path @@ -405,7 +420,8 @@ def snapshots(self, request: request.Request, **kwargs): response_data["sources"] = sources elif source == "realtime": - snapshots = get_realtime_snapshots(team_id=self.team.pk, 
session_id=str(recording.session_id)) or [] + with GET_REALTIME_SNAPSHOTS_FROM_REDIS.time(): + snapshots = get_realtime_snapshots(team_id=self.team.pk, session_id=str(recording.session_id)) or [] event_properties["source"] = "realtime" event_properties["snapshots_length"] = len(snapshots) @@ -418,36 +434,7 @@ def snapshots(self, request: request.Request, **kwargs): response_data["snapshots"] = snapshots elif source == "blob": - blob_key = request.GET.get("blob_key", "") - self._validate_blob_key(blob_key) - - # very short-lived pre-signed URL - if recording.object_storage_path: - if recording.storage_version == "2023-08-01": - file_key = f"{recording.object_storage_path}/{blob_key}" - else: - # this is a legacy recording, we need to load the file from the old path - file_key = convert_original_version_lts_recording(recording) - else: - blob_prefix = settings.OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER - file_key = f"{blob_prefix}/team_id/{self.team.pk}/session_id/{recording.session_id}/data/{blob_key}" - url = object_storage.get_presigned_url(file_key, expiration=60) - if not url: - raise exceptions.NotFound("Snapshot file not found") - - event_properties["source"] = "blob" - event_properties["blob_key"] = blob_key - posthoganalytics.capture( - self._distinct_id_from_request(request), - "session recording snapshots v2 loaded", - event_properties, - ) - - with requests.get(url=url, stream=True) as r: - r.raise_for_status() - response = HttpResponse(content=r.raw, content_type="application/json") - response["Content-Disposition"] = "inline" - return response + return self._stream_blob_to_client(recording, request, event_properties) else: raise exceptions.ValidationError("Invalid source must be one of [realtime, blob]") @@ -599,10 +586,46 @@ def error_clusters(self, request: request.Request, **kwargs): r = Response(clusters, headers={"Cache-Control": "max-age=15"}) return r + def _stream_blob_to_client( + self, recording: SessionRecording, request: request.Request, event_properties: dict + ) -> HttpResponse: + blob_key = request.GET.get("blob_key", "") + self._validate_blob_key(blob_key) + + # very short-lived pre-signed URL + with GENERATE_PRE_SIGNED_URL_HISTOGRAM.time(): + if recording.object_storage_path: + if recording.storage_version == "2023-08-01": + file_key = f"{recording.object_storage_path}/{blob_key}" + else: + # this is a legacy recording, we need to load the file from the old path + file_key = convert_original_version_lts_recording(recording) + else: + blob_prefix = settings.OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER + file_key = f"{blob_prefix}/team_id/{self.team.pk}/session_id/{recording.session_id}/data/{blob_key}" + url = object_storage.get_presigned_url(file_key, expiration=60) + if not url: + raise exceptions.NotFound("Snapshot file not found") + + event_properties["source"] = "blob" + event_properties["blob_key"] = blob_key + posthoganalytics.capture( + self._distinct_id_from_request(request), + "session recording snapshots v2 loaded", + event_properties, + ) + + with STREAM_RESPONSE_TO_CLIENT_HISTOGRAM.time(): + with requests.get(url=url, stream=True) as r: + r.raise_for_status() + response = HttpResponse(content=r.raw, content_type="application/json") + response["Content-Disposition"] = "inline" + return response + def list_recordings( - filter: SessionRecordingsFilter, request: request.Request, context: Dict[str, Any] -) -> Tuple[Dict, Dict]: + filter: SessionRecordingsFilter, request: request.Request, context: dict[str, Any] +) -> tuple[dict, 
dict]: """ As we can store recordings in S3 or in Clickhouse we need to do a few things here @@ -615,7 +638,7 @@ def list_recordings( all_session_ids = filter.session_ids - recordings: List[SessionRecording] = [] + recordings: list[SessionRecording] = [] more_recordings_available = False team = context["get_team"]() @@ -653,7 +676,7 @@ def list_recordings( if all_session_ids: recordings = sorted( recordings, - key=lambda x: cast(List[str], all_session_ids).index(x.session_id), + key=lambda x: cast(list[str], all_session_ids).index(x.session_id), ) if not request.user.is_authenticated: # for mypy @@ -681,7 +704,9 @@ def list_recordings( for recording in recordings: recording.viewed = recording.session_id in viewed_session_recordings - recording.person = distinct_id_to_person.get(recording.distinct_id) + person = distinct_id_to_person.get(recording.distinct_id) + if person: + recording.person = person session_recording_serializer = SessionRecordingSerializer(recordings, context=context, many=True) results = session_recording_serializer.data diff --git a/posthog/session_recordings/session_recording_helpers.py b/posthog/session_recordings/session_recording_helpers.py index 1eccc2be26e32..8dfb1c0ad2396 100644 --- a/posthog/session_recordings/session_recording_helpers.py +++ b/posthog/session_recordings/session_recording_helpers.py @@ -3,7 +3,8 @@ import json from collections import defaultdict from datetime import datetime, timezone -from typing import Any, Callable, Dict, Generator, List, Tuple +from typing import Any +from collections.abc import Callable, Generator from dateutil.parser import parse from prometheus_client import Counter @@ -89,10 +90,10 @@ class RRWEB_MAP_EVENT_DATA_TYPE: ] -Event = Dict[str, Any] +Event = dict[str, Any] -def split_replay_events(events: List[Event]) -> Tuple[List[Event], List[Event]]: +def split_replay_events(events: list[Event]) -> tuple[list[Event], list[Event]]: replay, other = [], [] for event in events: @@ -102,12 +103,12 @@ def split_replay_events(events: List[Event]) -> Tuple[List[Event], List[Event]]: # TODO is this covered by enough tests post-blob ingester rollout -def preprocess_replay_events_for_blob_ingestion(events: List[Event], max_size_bytes=1024 * 1024) -> List[Event]: +def preprocess_replay_events_for_blob_ingestion(events: list[Event], max_size_bytes=1024 * 1024) -> list[Event]: return _process_windowed_events(events, lambda x: preprocess_replay_events(x, max_size_bytes=max_size_bytes)) def preprocess_replay_events( - _events: List[Event] | Generator[Event, None, None], max_size_bytes=1024 * 1024 + _events: list[Event] | Generator[Event, None, None], max_size_bytes=1024 * 1024 ) -> Generator[Event, None, None]: """ The events going to blob ingestion are uncompressed (the compression happens in the Kafka producer) @@ -135,7 +136,7 @@ def preprocess_replay_events( window_id = events[0]["properties"].get("$window_id") snapshot_source = events[0]["properties"].get("$snapshot_source", "web") - def new_event(items: List[dict] | None = None) -> Event: + def new_event(items: list[dict] | None = None) -> Event: return { **events[0], "event": "$snapshot_items", # New event name to avoid confusion with the old $snapshot event @@ -151,7 +152,7 @@ def new_event(items: List[dict] | None = None) -> Event: # 1. 
Group by $snapshot_bytes if any of the events have it if events[0]["properties"].get("$snapshot_bytes"): - current_event: Dict | None = None + current_event: dict | None = None current_event_size = 0 for event in events: @@ -208,13 +209,13 @@ def new_event(items: List[dict] | None = None) -> Event: def _process_windowed_events( - events: List[Event], fn: Callable[[List[Any]], Generator[Event, None, None]] -) -> List[Event]: + events: list[Event], fn: Callable[[list[Any]], Generator[Event, None, None]] +) -> list[Event]: """ Helper method to simplify grouping events by window_id and session_id, processing them with the given function, and then returning the flattened list """ - result: List[Event] = [] + result: list[Event] = [] snapshots_by_session_and_window_id = defaultdict(list) for event in events: @@ -228,7 +229,7 @@ def _process_windowed_events( return result -def is_unprocessed_snapshot_event(event: Dict) -> bool: +def is_unprocessed_snapshot_event(event: dict) -> bool: try: is_snapshot = event["event"] == "$snapshot" except KeyError: @@ -274,5 +275,5 @@ def convert_to_timestamp(source: str) -> int: return int(parse(source).timestamp() * 1000) -def byte_size_dict(x: Dict | List) -> int: +def byte_size_dict(x: dict | list) -> int: return len(json.dumps(x)) diff --git a/posthog/session_recordings/snapshots/convert_legacy_snapshots.py b/posthog/session_recordings/snapshots/convert_legacy_snapshots.py index 963016d0e869a..d2d4ba2c4b4bd 100644 --- a/posthog/session_recordings/snapshots/convert_legacy_snapshots.py +++ b/posthog/session_recordings/snapshots/convert_legacy_snapshots.py @@ -1,5 +1,4 @@ import json -from typing import Dict import structlog from prometheus_client import Histogram @@ -67,7 +66,7 @@ def _prepare_legacy_content(content: str) -> str: return _convert_legacy_format_from_lts_storage(json_content) -def _convert_legacy_format_from_lts_storage(lts_formatted_data: Dict) -> str: +def _convert_legacy_format_from_lts_storage(lts_formatted_data: dict) -> str: """ The latest version is JSONL formatted data. Each line is json containing a window_id and a data array. 
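To make the JSONL target format described in that docstring concrete, here is a small illustrative sketch. It is not the converter from this diff (whose body is unchanged and omitted here); the helper name and the sample snapshot events are hypothetical, and only the output shape — one JSON object per line, each carrying a window_id and a data array — is taken from the docstring above.

```python
# Illustrative sketch only: demonstrates the JSONL shape described in the
# _convert_legacy_format_from_lts_storage docstring. The function name and the
# sample rrweb-style events are hypothetical, not PostHog's production code.
import json


def legacy_to_jsonl(snapshot_data_by_window_id: dict[str, list[dict]]) -> str:
    # One line per window: {"window_id": ..., "data": [...snapshot events...]}
    lines = [
        json.dumps({"window_id": window_id, "data": snapshots})
        for window_id, snapshots in snapshot_data_by_window_id.items()
    ]
    return "\n".join(lines)


if __name__ == "__main__":
    # Two windows, each holding a couple of snapshot events
    print(legacy_to_jsonl({"window-1": [{"type": 2}, {"type": 3}], "window-2": [{"type": 4}]}))
```

In the diff itself, `_convert_legacy_format_from_lts_storage` likewise returns a string, which `_prepare_legacy_content` hands back to the snapshot-loading path.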
diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr index 002cb37519f7f..3287735b9a436 100644 --- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr @@ -27,6 +27,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -87,6 +88,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -147,6 +149,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -207,6 +210,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -267,6 +271,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -358,6 +363,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -487,6 +493,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -657,6 +664,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -782,30 +790,6 @@ AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_get_session_recordings.36 - ''' - SELECT "posthog_persondistinctid"."id", - "posthog_persondistinctid"."team_id", - "posthog_persondistinctid"."person_id", - "posthog_persondistinctid"."distinct_id", - "posthog_persondistinctid"."version", - "posthog_person"."id", - "posthog_person"."created_at", - "posthog_person"."properties_last_updated_at", - "posthog_person"."properties_last_operation", - "posthog_person"."team_id", - "posthog_person"."properties", - "posthog_person"."is_user_id", - "posthog_person"."is_identified", - "posthog_person"."uuid", - "posthog_person"."version" - FROM "posthog_persondistinctid" - INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") - WHERE ("posthog_persondistinctid"."distinct_id" IN ('user2', - 'user_one_0') - AND "posthog_persondistinctid"."team_id" 
= 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- # name: TestSessionRecordings.test_get_session_recordings.4 ''' SELECT "posthog_team"."id", @@ -834,6 +818,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -894,6 +879,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -954,6 +940,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1014,6 +1001,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1074,6 +1062,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1134,6 +1123,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1225,6 +1215,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1474,6 +1465,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1565,6 +1557,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1914,6 +1907,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2005,6 +1999,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2356,6 +2351,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", 
"posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2458,6 +2454,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2811,6 +2808,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -2902,6 +2900,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3286,6 +3285,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3377,6 +3377,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3473,6 +3474,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3803,6 +3805,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3894,6 +3897,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -3947,6 +3951,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4299,130 +4304,6 @@ AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.251 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.252 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 
'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.253 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.254 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.255 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.256 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.257 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.258 - ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.259 - ''' - SELECT "posthog_sessionrecording"."id", - "posthog_sessionrecording"."session_id", - "posthog_sessionrecording"."team_id", - "posthog_sessionrecording"."created_at", - "posthog_sessionrecording"."deleted", - "posthog_sessionrecording"."object_storage_path", - "posthog_sessionrecording"."distinct_id", - "posthog_sessionrecording"."duration", - "posthog_sessionrecording"."active_seconds", - "posthog_sessionrecording"."inactive_seconds", - "posthog_sessionrecording"."start_time", - "posthog_sessionrecording"."end_time", - "posthog_sessionrecording"."click_count", - "posthog_sessionrecording"."keypress_count", - "posthog_sessionrecording"."mouse_activity_count", - "posthog_sessionrecording"."console_log_count", - "posthog_sessionrecording"."console_warn_count", - "posthog_sessionrecording"."console_error_count", - "posthog_sessionrecording"."start_url", - "posthog_sessionrecording"."storage_version" - FROM "posthog_sessionrecording" - WHERE ("posthog_sessionrecording"."session_id" IN ('1', - '10', - '2', - '3', - '4', - '5', - '6', - '7', - '8', - '9') - AND "posthog_sessionrecording"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- # name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.26 ''' SELECT "posthog_instancesetting"."id", @@ -4434,46 +4315,6 @@ LIMIT 1 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.260 - ''' - SELECT "posthog_sessionrecordingviewed"."session_id" - FROM "posthog_sessionrecordingviewed" - WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 - AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.261 - ''' - SELECT "posthog_persondistinctid"."id", - "posthog_persondistinctid"."team_id", - "posthog_persondistinctid"."person_id", - "posthog_persondistinctid"."distinct_id", - "posthog_persondistinctid"."version", - "posthog_person"."id", - "posthog_person"."created_at", - "posthog_person"."properties_last_updated_at", - "posthog_person"."properties_last_operation", - "posthog_person"."team_id", - "posthog_person"."properties", - "posthog_person"."is_user_id", - "posthog_person"."is_identified", - "posthog_person"."uuid", - "posthog_person"."version" - FROM "posthog_persondistinctid" - INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") - WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', - 'user10', - 'user2', - 'user3', - 'user4', - 'user5', - 'user6', - 'user7', - 'user8', - 'user9') - AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ''' -# --- # name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.27 ''' SELECT "posthog_instancesetting"."id", @@ -4736,6 +4577,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -4827,6 +4669,7 
@@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5170,6 +5013,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5261,6 +5105,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5617,6 +5462,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -5708,6 +5554,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/session_recordings/test/test_lts_session_recordings.py b/posthog/session_recordings/test/test_lts_session_recordings.py index 7d60d07defb2c..bd6dfc39d246d 100644 --- a/posthog/session_recordings/test/test_lts_session_recordings.py +++ b/posthog/session_recordings/test/test_lts_session_recordings.py @@ -1,5 +1,4 @@ import uuid -from typing import List from unittest.mock import patch, MagicMock, call, Mock from rest_framework import status @@ -32,7 +31,7 @@ def test_2023_08_01_version_stored_snapshots_can_be_gathered( session_id = str(uuid.uuid4()) lts_storage_path = "purposefully/not/what/we/would/calculate/to/prove/this/is/used" - def list_objects_func(path: str) -> List[str]: + def list_objects_func(path: str) -> list[str]: # this mock simulates a recording whose blob storage has been deleted by TTL # but which has been stored in LTS blob storage if path == lts_storage_path: @@ -88,7 +87,7 @@ def test_original_version_stored_snapshots_can_be_gathered( session_id = str(uuid.uuid4()) lts_storage_path = "1234-5678" - def list_objects_func(_path: str) -> List[str]: + def list_objects_func(_path: str) -> list[str]: return [] mock_list_objects.side_effect = list_objects_func @@ -138,7 +137,7 @@ def test_2023_08_01_version_stored_snapshots_can_be_loaded( session_id = str(uuid.uuid4()) lts_storage_path = "purposefully/not/what/we/would/calculate/to/prove/this/is/used" - def list_objects_func(path: str) -> List[str]: + def list_objects_func(path: str) -> list[str]: # this mock simulates a recording whose blob storage has been deleted by TTL # but which has been stored in LTS blob storage if path == lts_storage_path: @@ -208,7 +207,7 @@ def test_original_version_stored_snapshots_can_be_loaded_without_upversion( session_id = str(uuid.uuid4()) lts_storage_path = "1234-5678" - def list_objects_func(path: str) -> List[str]: + def list_objects_func(path: str) -> list[str]: return [] mock_list_objects.side_effect = list_objects_func diff --git a/posthog/session_recordings/test/test_session_recording_helpers.py b/posthog/session_recordings/test/test_session_recording_helpers.py index b6b83e02c28d9..a13b131fb3160 100644 
--- a/posthog/session_recordings/test/test_session_recording_helpers.py +++ b/posthog/session_recordings/test/test_session_recording_helpers.py @@ -3,7 +3,7 @@ import random import string from datetime import datetime -from typing import Any, List, Tuple +from typing import Any import pytest from pytest_mock import MockerFixture @@ -27,7 +27,7 @@ def create_activity_data(timestamp: datetime, is_active: bool): ) -def mock_capture_flow(events: List[dict], max_size_bytes=512 * 1024) -> Tuple[List[dict], List[dict]]: +def mock_capture_flow(events: list[dict], max_size_bytes=512 * 1024) -> tuple[list[dict], list[dict]]: """ Returns the legacy events and the new flow ones """ @@ -422,7 +422,7 @@ def test_new_ingestion_groups_using_snapshot_bytes_if_possible(raw_snapshot_even "something": "small", } - events: List[Any] = [ + events: list[Any] = [ { "event": "$snapshot", "properties": { diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index 12085f55925eb..2038b39cacb2b 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -1,7 +1,6 @@ import time import uuid from datetime import datetime, timedelta, timezone -from typing import List from unittest.mock import ANY, patch, MagicMock, call from urllib.parse import urlencode @@ -42,7 +41,7 @@ def setUp(self): # TODO this is pretty slow, we should change assertions so that we don't need it self.team = Team.objects.create(organization=self.organization, name="New Team") - def create_snapshot( + def produce_replay_summary( self, distinct_id, session_id, @@ -65,7 +64,7 @@ def create_snapshot( # because we use `now()` in the CH queries which don't know about any frozen time # @snapshot_clickhouse_queries def test_get_session_recordings(self): - twelve_distinct_ids: List[str] = [f"user_one_{i}" for i in range(12)] + twelve_distinct_ids: list[str] = [f"user_one_{i}" for i in range(12)] user = Person.objects.create( team=self.team, @@ -80,11 +79,11 @@ def test_get_session_recordings(self): base_time = (now() - relativedelta(days=1)).replace(microsecond=0) session_id_one = f"test_get_session_recordings-1" - self.create_snapshot("user_one_0", session_id_one, base_time) - self.create_snapshot("user_one_0", session_id_one, base_time + relativedelta(seconds=10)) - self.create_snapshot("user_one_0", session_id_one, base_time + relativedelta(seconds=30)) + self.produce_replay_summary("user_one_0", session_id_one, base_time) + self.produce_replay_summary("user_one_0", session_id_one, base_time + relativedelta(seconds=10)) + self.produce_replay_summary("user_one_0", session_id_one, base_time + relativedelta(seconds=30)) session_id_two = f"test_get_session_recordings-2" - self.create_snapshot("user2", session_id_two, base_time + relativedelta(seconds=20)) + self.produce_replay_summary("user2", session_id_two, base_time + relativedelta(seconds=20)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -132,7 +131,7 @@ def test_can_list_recordings_even_when_the_person_has_multiple_distinct_ids(self # almost duplicate of test_get_session_recordings above # but if we have multiple distinct ids on a recording the snapshot # varies which makes the snapshot useless - twelve_distinct_ids: List[str] = [f"user_one_{i}" for i in range(12)] + twelve_distinct_ids: list[str] = [f"user_one_{i}" for i in range(12)] 
Person.objects.create( team=self.team, @@ -147,11 +146,11 @@ def test_can_list_recordings_even_when_the_person_has_multiple_distinct_ids(self base_time = (now() - relativedelta(days=1)).replace(microsecond=0) session_id_one = f"test_get_session_recordings-1" - self.create_snapshot("user_one_0", session_id_one, base_time) - self.create_snapshot("user_one_1", session_id_one, base_time + relativedelta(seconds=10)) - self.create_snapshot("user_one_2", session_id_one, base_time + relativedelta(seconds=30)) + self.produce_replay_summary("user_one_0", session_id_one, base_time) + self.produce_replay_summary("user_one_1", session_id_one, base_time + relativedelta(seconds=10)) + self.produce_replay_summary("user_one_2", session_id_one, base_time + relativedelta(seconds=30)) session_id_two = f"test_get_session_recordings-2" - self.create_snapshot("user2", session_id_two, base_time + relativedelta(seconds=20)) + self.produce_replay_summary("user2", session_id_two, base_time + relativedelta(seconds=20)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -201,8 +200,8 @@ def _person_with_snapshots(self, base_time: datetime, distinct_id: str = "user", distinct_ids=[distinct_id], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot(distinct_id, session_id, base_time) - self.create_snapshot(distinct_id, session_id, base_time + relativedelta(seconds=10)) + self.produce_replay_summary(distinct_id, session_id, base_time) + self.produce_replay_summary(distinct_id, session_id, base_time + relativedelta(seconds=10)) flush_persons_and_events() def test_session_recordings_dont_leak_teams(self) -> None: @@ -219,8 +218,8 @@ def test_session_recordings_dont_leak_teams(self) -> None: ) base_time = (now() - relativedelta(days=1)).replace(microsecond=0) - self.create_snapshot("user", "1", base_time, team_id=another_team.pk) - self.create_snapshot("user", "2", base_time) + self.produce_replay_summary("user", "1", base_time, team_id=another_team.pk) + self.produce_replay_summary("user", "2", base_time) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -235,8 +234,8 @@ def test_session_recording_for_user_with_multiple_distinct_ids(self) -> None: distinct_ids=["d1", "d2"], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot("d1", "1", base_time) - self.create_snapshot("d2", "2", base_time + relativedelta(seconds=30)) + self.produce_replay_summary("d1", "1", base_time) + self.produce_replay_summary("d2", "2", base_time + relativedelta(seconds=30)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") response_data = response.json() @@ -251,8 +250,8 @@ def test_viewed_state_of_session_recording_version_1(self): ) base_time = (now() - timedelta(days=1)).replace(microsecond=0) SessionRecordingViewed.objects.create(team=self.team, user=self.user, session_id="1") - self.create_snapshot("u1", "1", base_time) - self.create_snapshot("u1", "2", base_time + relativedelta(seconds=30)) + self.produce_replay_summary("u1", "1", base_time) + self.produce_replay_summary("u1", "2", base_time + relativedelta(seconds=30)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") response_data = response.json() self.assertEqual(len(response_data["results"]), 2) @@ -272,8 +271,8 @@ def test_viewed_state_of_session_recording_version_3(self): 
session_id_two = "2" SessionRecordingViewed.objects.create(team=self.team, user=self.user, session_id=session_id_one) - self.create_snapshot("u1", session_id_one, base_time) - self.create_snapshot("u1", session_id_two, base_time + relativedelta(seconds=30)) + self.produce_replay_summary("u1", session_id_one, base_time) + self.produce_replay_summary("u1", session_id_two, base_time + relativedelta(seconds=30)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") response_data = response.json() @@ -383,11 +382,12 @@ def test_get_single_session_recording_metadata(self): "uuid": ANY, }, "storage": "object_storage", + "snapshot_source": "web", } def test_single_session_recording_doesnt_leak_teams(self): another_team = Team.objects.create(organization=self.organization) - self.create_snapshot( + self.produce_replay_summary( "user", "id_no_team_leaking", now() - relativedelta(days=1), @@ -410,7 +410,18 @@ def test_session_recording_with_no_person(self): response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/id_no_person") response_data = response.json() - self.assertEqual(response_data["person"], None) + + self.assertEqual( + response_data["person"], + { + "id": None, + "name": None, + "distinct_ids": ["d1"], + "properties": {}, + "created_at": None, + "uuid": response_data["person"]["uuid"], + }, + ) def test_session_recording_doesnt_exist(self): response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/non_existent_id") @@ -422,7 +433,7 @@ def test_session_recording_doesnt_exist(self): def test_request_to_another_teams_endpoint_returns_401(self): org = Organization.objects.create(name="Separate Org") another_team = Team.objects.create(organization=org) - self.create_snapshot( + self.produce_replay_summary( "user", "id_no_team_leaking", now() - relativedelta(days=1), @@ -444,17 +455,17 @@ def test_session_ids_filter(self, use_recording_events: bool, api_version: int): distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot( + self.produce_replay_summary( "user", "1", now() - relativedelta(days=1), ) - self.create_snapshot( + self.produce_replay_summary( "user", "2", now() - relativedelta(days=2), ) - self.create_snapshot( + self.produce_replay_summary( "user", "3", now() - relativedelta(days=3), @@ -478,9 +489,9 @@ def test_empty_list_session_ids_filter_returns_no_recordings(self): distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot("user", "1", now() - relativedelta(days=1)) - self.create_snapshot("user", "2", now() - relativedelta(days=2)) - self.create_snapshot("user", "3", now() - relativedelta(days=3)) + self.produce_replay_summary("user", "1", now() - relativedelta(days=1)) + self.produce_replay_summary("user", "2", now() - relativedelta(days=2)) + self.produce_replay_summary("user", "3", now() - relativedelta(days=3)) # Fetch playlist params_string = urlencode({"session_ids": "[]"}) @@ -491,7 +502,7 @@ def test_empty_list_session_ids_filter_returns_no_recordings(self): self.assertEqual(len(response_data["results"]), 0) def test_delete_session_recording(self): - self.create_snapshot("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) + self.produce_replay_summary("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) response = self.client.delete(f"/api/projects/{self.team.id}/session_recordings/1") self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) # Trying to delete 
same recording again returns 404 @@ -503,7 +514,7 @@ def test_delete_session_recording(self): return_value=2, ) def test_persist_session_recording(self, _mock_copy_objects: MagicMock) -> None: - self.create_snapshot("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) + self.produce_replay_summary("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/1") assert response.status_code == status.HTTP_200_OK @@ -576,7 +587,7 @@ def test_get_snapshots_v2_from_lts(self, mock_list_objects: MagicMock, _mock_exi object_storage_path="an lts stored object path", ) - def list_objects_func(path: str) -> List[str]: + def list_objects_func(path: str) -> list[str]: # this mock simulates a recording whose blob storage has been deleted by TTL # but which has been stored in LTS blob storage if path == "an lts stored object path": @@ -827,7 +838,7 @@ def test_get_via_sharing_token(self, mock_copy_objects: MagicMock) -> None: session_id = str(uuid.uuid4()) with freeze_time("2023-01-01T12:00:00Z"): - self.create_snapshot( + self.produce_replay_summary( "user", session_id, now() - relativedelta(days=1), @@ -866,10 +877,12 @@ def test_get_via_sharing_token(self, mock_copy_objects: MagicMock) -> None: } # now create a snapshot record that doesn't have a fixed date, as it needs to be within TTL for the request below to complete - self.create_snapshot( + self.produce_replay_summary( "user", session_id, - now(), + # a little before now, since the DB checks if the snapshot is within TTL and before now + # if the test runs too quickly it looks like the snapshot is not there + now() - relativedelta(seconds=1), team_id=self.team.pk, ) @@ -936,7 +949,7 @@ def test_get_matching_events(self) -> None: # the matching session session_id = f"test_get_matching_events-1-{uuid.uuid4()}" - self.create_snapshot("user", session_id, base_time) + self.produce_replay_summary("user", session_id, base_time) event_id = _create_event( event="$pageview", properties={"$session_id": session_id}, @@ -946,7 +959,7 @@ def test_get_matching_events(self) -> None: # a non-matching session non_matching_session_id = f"test_get_matching_events-2-{uuid.uuid4()}" - self.create_snapshot("user", non_matching_session_id, base_time) + self.produce_replay_summary("user", non_matching_session_id, base_time) _create_event( event="$pageview", properties={"$session_id": non_matching_session_id}, diff --git a/posthog/settings/__init__.py b/posthog/settings/__init__.py index 455b7e8dc34a1..faf2e466764d2 100644 --- a/posthog/settings/__init__.py +++ b/posthog/settings/__init__.py @@ -13,7 +13,6 @@ # isort: skip_file import os -from typing import Dict, List # :TRICKY: Imported before anything else to support overloads from posthog.settings.overrides import * @@ -68,7 +67,7 @@ DISABLE_MMDB = get_from_env( "DISABLE_MMDB", TEST, type_cast=str_to_bool ) # plugin server setting disabling GeoIP feature -PLUGINS_PREINSTALLED_URLS: List[str] = ( +PLUGINS_PREINSTALLED_URLS: list[str] = ( os.getenv( "PLUGINS_PREINSTALLED_URLS", "https://www.npmjs.com/package/@posthog/geoip-plugin", @@ -100,7 +99,7 @@ # Wether to use insight queries converted to HogQL. HOGQL_INSIGHTS_OVERRIDE = get_from_env("HOGQL_INSIGHTS_OVERRIDE", optional=True, type_cast=str_to_bool) -HOOK_EVENTS: Dict[str, str] = {} +HOOK_EVENTS: dict[str, str] = {} # Support creating multiple organizations in a single instance. Requires a premium license. 
MULTI_ORG_ENABLED = get_from_env("MULTI_ORG_ENABLED", False, type_cast=str_to_bool) diff --git a/posthog/settings/data_stores.py b/posthog/settings/data_stores.py index f3402a748111f..d175f04f07c2a 100644 --- a/posthog/settings/data_stores.py +++ b/posthog/settings/data_stores.py @@ -1,6 +1,5 @@ import json import os -from typing import List from urllib.parse import urlparse import dj_database_url @@ -173,7 +172,7 @@ def postgres_config(host: str) -> dict: READONLY_CLICKHOUSE_PASSWORD = os.getenv("READONLY_CLICKHOUSE_PASSWORD", None) -def _parse_kafka_hosts(hosts_string: str) -> List[str]: +def _parse_kafka_hosts(hosts_string: str) -> list[str]: hosts = [] for host in hosts_string.split(","): if "://" in host: diff --git a/posthog/settings/ingestion.py b/posthog/settings/ingestion.py index 8a3a24d981ce0..7f6296e559114 100644 --- a/posthog/settings/ingestion.py +++ b/posthog/settings/ingestion.py @@ -17,6 +17,9 @@ # partitioning-related settings below. CAPTURE_ALLOW_RANDOM_PARTITIONING = get_from_env("CAPTURE_ALLOW_RANDOM_PARTITIONING", True, type_cast=str_to_bool) +# TODO: make default after rollout on both prods: remove the superfluous hashing of the Kafka message key +CAPTURE_SKIP_KEY_HASHING = get_from_env("CAPTURE_SKIP_KEY_HASHING", type_cast=bool, default=False) + # A list of pairs (in the format 2:myLovelyId) that we should use # random partitioning for when producing events to the Kafka topic consumed by the plugin server. # This is a measure to handle hot partitions in ad-hoc cases. diff --git a/posthog/settings/logs.py b/posthog/settings/logs.py index 8f41f3e6c21e6..f8f21294e37a3 100644 --- a/posthog/settings/logs.py +++ b/posthog/settings/logs.py @@ -1,7 +1,6 @@ import logging import os import threading -from typing import List import structlog @@ -27,7 +26,7 @@ def add_pid_and_tid( # To enable standard library logs to be formatted via structlog, we add this # `foreign_pre_chain` to both formatters.
-foreign_pre_chain: List[structlog.types.Processor] = [ +foreign_pre_chain: list[structlog.types.Processor] = [ structlog.contextvars.merge_contextvars, structlog.processors.TimeStamper(fmt="iso"), structlog.stdlib.add_logger_name, diff --git a/posthog/settings/sentry.py b/posthog/settings/sentry.py index 0d3fcee485506..d279af33c6a94 100644 --- a/posthog/settings/sentry.py +++ b/posthog/settings/sentry.py @@ -9,6 +9,7 @@ from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.redis import RedisIntegration +from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration from posthog.git import get_git_commit_full from posthog.settings import get_from_env @@ -141,8 +142,6 @@ def traces_sampler(sampling_context: dict) -> float: def sentry_init() -> None: if not TEST and os.getenv("SENTRY_DSN"): - sentry_sdk.utils.MAX_STRING_LENGTH = 10_000_000 - # Setting this on enables more visibility, at the risk of capturing personal information we should not: # - standard sentry "client IP" field, through send_default_pii # - django access logs (info level) @@ -151,7 +150,6 @@ def sentry_init() -> None: send_pii = get_from_env("SENTRY_SEND_PII", type_cast=bool, default=False) sentry_logging_level = logging.INFO if send_pii else logging.ERROR - sentry_logging = LoggingIntegration(level=sentry_logging_level, event_level=None) profiles_sample_rate = get_from_env("SENTRY_PROFILES_SAMPLE_RATE", type_cast=float, default=0.0) release = get_git_commit_full() @@ -164,9 +162,11 @@ def sentry_init() -> None: DjangoIntegration(), CeleryIntegration(), RedisIntegration(), - sentry_logging, + ClickhouseDriverIntegration(), + LoggingIntegration(level=sentry_logging_level, event_level=None), ], - request_bodies="always" if send_pii else "never", + max_request_body_size="always" if send_pii else "never", + max_value_length=8192, # Increased from the default of 1024 to capture SQL statements in full sample_rate=1.0, # Configures the sample rate for error events, in the range of 0.0 to 1.0 (default). # If set to 0.1 only 10% of error events will be sent. Events are picked randomly. 
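Taken together, the sentry.py hunks above reshape the `sentry_sdk.init` call: the ClickHouse driver integration is added, the logging integration is inlined, the deprecated `request_bodies` argument becomes `max_request_body_size`, and the removed `MAX_STRING_LENGTH` monkey-patch is replaced by `max_value_length=8192`. The snippet below is a minimal sketch of the resulting call for review purposes only; the DSN, PII flag, and sampling values are placeholders, not the values computed inside `sentry_init`.

```python
import logging

import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.redis import RedisIntegration

# Placeholders for illustration; the real settings module reads these from the environment.
send_pii = False
sentry_logging_level = logging.INFO if send_pii else logging.ERROR

sentry_sdk.init(
    dsn="https://publickey@example.ingest.sentry.io/0",  # placeholder DSN
    integrations=[
        DjangoIntegration(),
        CeleryIntegration(),
        RedisIntegration(),
        ClickhouseDriverIntegration(),  # newly added: traces clickhouse-driver queries
        LoggingIntegration(level=sentry_logging_level, event_level=None),  # previously held in a named variable
    ],
    # renamed from the removed `request_bodies` argument
    max_request_body_size="always" if send_pii else "never",
    # replaces the sentry_sdk.utils.MAX_STRING_LENGTH override; large enough to keep SQL statements intact
    max_value_length=8192,
    sample_rate=1.0,
)
```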
diff --git a/posthog/settings/session_replay.py b/posthog/settings/session_replay.py index 4cd8a429aa028..429f3207dccf7 100644 --- a/posthog/settings/session_replay.py +++ b/posthog/settings/session_replay.py @@ -1,5 +1,3 @@ -from typing import List - from posthog.settings import get_from_env, get_list from posthog.utils import str_to_bool @@ -18,7 +16,7 @@ "REALTIME_SNAPSHOTS_FROM_REDIS_ATTEMPT_TIMEOUT_SECONDS", 0.2, type_cast=float ) -REPLAY_EMBEDDINGS_ALLOWED_TEAMS: List[str] = get_list(get_from_env("REPLAY_EMBEDDINGS_ALLOWED_TEAM", "", type_cast=str)) +REPLAY_EMBEDDINGS_ALLOWED_TEAMS: list[str] = get_list(get_from_env("REPLAY_EMBEDDINGS_ALLOWED_TEAM", "", type_cast=str)) REPLAY_EMBEDDINGS_BATCH_SIZE = get_from_env("REPLAY_EMBEDDINGS_BATCH_SIZE", 10, type_cast=int) REPLAY_EMBEDDINGS_MIN_DURATION_SECONDS = get_from_env("REPLAY_EMBEDDINGS_MIN_DURATION_SECONDS", 30, type_cast=int) REPLAY_EMBEDDINGS_CALCULATION_CELERY_INTERVAL_SECONDS = get_from_env( diff --git a/posthog/settings/temporal.py b/posthog/settings/temporal.py index ccb5fbfb0db3f..ce0e72172eabb 100644 --- a/posthog/settings/temporal.py +++ b/posthog/settings/temporal.py @@ -1,5 +1,4 @@ import os -from typing import Dict from posthog.settings.utils import get_list, get_from_env @@ -24,6 +23,6 @@ CLICKHOUSE_MAX_EXECUTION_TIME = get_from_env("CLICKHOUSE_MAX_EXECUTION_TIME", 0, type_cast=int) CLICKHOUSE_MAX_BLOCK_SIZE_DEFAULT = get_from_env("CLICKHOUSE_MAX_BLOCK_SIZE_DEFAULT", 10000, type_cast=int) # Comma separated list of overrides in the format "team_id:block_size" -CLICKHOUSE_MAX_BLOCK_SIZE_OVERRIDES: Dict[int, int] = dict( +CLICKHOUSE_MAX_BLOCK_SIZE_OVERRIDES: dict[int, int] = dict( [map(int, o.split(":")) for o in os.getenv("CLICKHOUSE_MAX_BLOCK_SIZE_OVERRIDES", "").split(",") if o] # type: ignore ) diff --git a/posthog/settings/utils.py b/posthog/settings/utils.py index 6dd22dbf97cf8..eead270c7bd7d 100644 --- a/posthog/settings/utils.py +++ b/posthog/settings/utils.py @@ -1,5 +1,6 @@ import os -from typing import Any, Callable, List, Optional, Set +from typing import Any, Optional +from collections.abc import Callable from django.core.exceptions import ImproperlyConfigured @@ -28,13 +29,13 @@ def get_from_env( return value -def get_list(text: str) -> List[str]: +def get_list(text: str) -> list[str]: if not text: return [] return [item.strip() for item in text.split(",")] -def get_set(text: str) -> Set[str]: +def get_set(text: str) -> set[str]: if not text: return set() return {item.strip() for item in text.split(",")} diff --git a/posthog/settings/web.py b/posthog/settings/web.py index ee6961de70e79..b80c1baab02d6 100644 --- a/posthog/settings/web.py +++ b/posthog/settings/web.py @@ -1,7 +1,6 @@ # Web app specific settings/middleware/apps setup import os from datetime import timedelta -from typing import List from corsheaders.defaults import default_headers @@ -160,7 +159,7 @@ SOCIAL_AUTH_USER_MODEL = "posthog.User" SOCIAL_AUTH_REDIRECT_IS_HTTPS = get_from_env("SOCIAL_AUTH_REDIRECT_IS_HTTPS", not DEBUG, type_cast=str_to_bool) -AUTHENTICATION_BACKENDS: List[str] = [ +AUTHENTICATION_BACKENDS: list[str] = [ "axes.backends.AxesBackend", "social_core.backends.github.GithubOAuth2", "social_core.backends.gitlab.GitLabOAuth2", diff --git a/posthog/storage/object_storage.py b/posthog/storage/object_storage.py index a1ff639b1c293..147b02436fa6e 100644 --- a/posthog/storage/object_storage.py +++ b/posthog/storage/object_storage.py @@ -1,5 +1,5 @@ import abc -from typing import Optional, Union, List, Dict +from typing import Optional, 
Union import structlog from boto3 import client @@ -26,7 +26,7 @@ def get_presigned_url(self, bucket: str, file_key: str, expiration: int = 3600) pass @abc.abstractmethod - def list_objects(self, bucket: str, prefix: str) -> Optional[List[str]]: + def list_objects(self, bucket: str, prefix: str) -> Optional[list[str]]: pass @abc.abstractmethod @@ -38,11 +38,11 @@ def read_bytes(self, bucket: str, key: str) -> Optional[bytes]: pass @abc.abstractmethod - def tag(self, bucket: str, key: str, tags: Dict[str, str]) -> None: + def tag(self, bucket: str, key: str, tags: dict[str, str]) -> None: pass @abc.abstractmethod - def write(self, bucket: str, key: str, content: Union[str, bytes], extras: Dict | None) -> None: + def write(self, bucket: str, key: str, content: Union[str, bytes], extras: dict | None) -> None: pass @abc.abstractmethod @@ -60,7 +60,7 @@ def head_bucket(self, bucket: str): def get_presigned_url(self, bucket: str, file_key: str, expiration: int = 3600) -> Optional[str]: pass - def list_objects(self, bucket: str, prefix: str) -> Optional[List[str]]: + def list_objects(self, bucket: str, prefix: str) -> Optional[list[str]]: pass def read(self, bucket: str, key: str) -> Optional[str]: @@ -69,10 +69,10 @@ def read(self, bucket: str, key: str) -> Optional[str]: def read_bytes(self, bucket: str, key: str) -> Optional[bytes]: pass - def tag(self, bucket: str, key: str, tags: Dict[str, str]) -> None: + def tag(self, bucket: str, key: str, tags: dict[str, str]) -> None: pass - def write(self, bucket: str, key: str, content: Union[str, bytes], extras: Dict | None) -> None: + def write(self, bucket: str, key: str, content: Union[str, bytes], extras: dict | None) -> None: pass def copy_objects(self, bucket: str, source_prefix: str, target_prefix: str) -> int | None: @@ -103,7 +103,7 @@ def get_presigned_url(self, bucket: str, file_key: str, expiration: int = 3600) capture_exception(e) return None - def list_objects(self, bucket: str, prefix: str) -> Optional[List[str]]: + def list_objects(self, bucket: str, prefix: str) -> Optional[list[str]]: try: s3_response = self.aws_client.list_objects_v2(Bucket=bucket, Prefix=prefix) if s3_response.get("Contents"): @@ -143,7 +143,7 @@ def read_bytes(self, bucket: str, key: str) -> Optional[bytes]: capture_exception(e) raise ObjectStorageError("read failed") from e - def tag(self, bucket: str, key: str, tags: Dict[str, str]) -> None: + def tag(self, bucket: str, key: str, tags: dict[str, str]) -> None: try: self.aws_client.put_object_tagging( Bucket=bucket, @@ -155,7 +155,7 @@ def tag(self, bucket: str, key: str, tags: Dict[str, str]) -> None: capture_exception(e) raise ObjectStorageError("tag failed") from e - def write(self, bucket: str, key: str, content: Union[str, bytes], extras: Dict | None) -> None: + def write(self, bucket: str, key: str, content: Union[str, bytes], extras: dict | None) -> None: s3_response = {} try: s3_response = self.aws_client.put_object(Bucket=bucket, Body=content, Key=key, **(extras or {})) @@ -218,7 +218,7 @@ def object_storage_client() -> ObjectStorageClient: return _client -def write(file_name: str, content: Union[str, bytes], extras: Dict | None = None) -> None: +def write(file_name: str, content: Union[str, bytes], extras: dict | None = None) -> None: return object_storage_client().write( bucket=settings.OBJECT_STORAGE_BUCKET, key=file_name, @@ -227,7 +227,7 @@ def write(file_name: str, content: Union[str, bytes], extras: Dict | None = None ) -def tag(file_name: str, tags: Dict[str, str]) -> None: +def tag(file_name: 
str, tags: dict[str, str]) -> None: return object_storage_client().tag(bucket=settings.OBJECT_STORAGE_BUCKET, key=file_name, tags=tags) @@ -239,7 +239,7 @@ def read_bytes(file_name: str) -> Optional[bytes]: return object_storage_client().read_bytes(bucket=settings.OBJECT_STORAGE_BUCKET, key=file_name) -def list_objects(prefix: str) -> Optional[List[str]]: +def list_objects(prefix: str) -> Optional[list[str]]: return object_storage_client().list_objects(bucket=settings.OBJECT_STORAGE_BUCKET, prefix=prefix) diff --git a/posthog/storage/test/test_object_storage.py b/posthog/storage/test/test_object_storage.py index f24114911ba9e..3737ca155ee6f 100644 --- a/posthog/storage/test/test_object_storage.py +++ b/posthog/storage/test/test_object_storage.py @@ -57,7 +57,7 @@ def test_write_and_read_works_with_known_byte_content(self) -> None: chunk_id = uuid.uuid4() name = f"{session_id}/{0}-{chunk_id}" file_name = f"{TEST_BUCKET}/test_write_and_read_works_with_known_content/{name}" - write(file_name, "my content".encode("utf-8")) + write(file_name, b"my content") self.assertEqual(read(file_name), "my content") def test_can_generate_presigned_url_for_existing_file(self) -> None: @@ -66,7 +66,7 @@ def test_can_generate_presigned_url_for_existing_file(self) -> None: chunk_id = uuid.uuid4() name = f"{session_id}/{0}-{chunk_id}" file_name = f"{TEST_BUCKET}/test_can_generate_presigned_url_for_existing_file/{name}" - write(file_name, "my content".encode("utf-8")) + write(file_name, b"my content") presigned_url = get_presigned_url(file_name) assert presigned_url is not None @@ -93,7 +93,7 @@ def test_can_list_objects_with_prefix(self) -> None: for file in ["a", "b", "c"]: file_name = f"{TEST_BUCKET}/{shared_prefix}/{file}" - write(file_name, "my content".encode("utf-8")) + write(file_name, b"my content") listing = list_objects(prefix=f"{TEST_BUCKET}/{shared_prefix}") @@ -117,7 +117,7 @@ def test_can_copy_objects_between_prefixes(self) -> None: for file in ["a", "b", "c"]: file_name = f"{TEST_BUCKET}/{shared_prefix}/{file}" - write(file_name, "my content".encode("utf-8")) + write(file_name, b"my content") copied_count = copy_objects( source_prefix=f"{TEST_BUCKET}/{shared_prefix}", @@ -142,7 +142,7 @@ def test_can_safely_copy_objects_from_unknown_prefix(self) -> None: for file in ["a", "b", "c"]: file_name = f"{TEST_BUCKET}/{shared_prefix}/{file}" - write(file_name, "my content".encode("utf-8")) + write(file_name, b"my content") copied_count = copy_objects( source_prefix=f"nothing_here", diff --git a/posthog/tasks/calculate_cohort.py b/posthog/tasks/calculate_cohort.py index 7dba512d6c86c..35ccc8fe9ece6 100644 --- a/posthog/tasks/calculate_cohort.py +++ b/posthog/tasks/calculate_cohort.py @@ -1,5 +1,5 @@ import time -from typing import Any, Dict, List, Optional +from typing import Any, Optional import structlog from celery import shared_task @@ -53,7 +53,7 @@ def calculate_cohort_ch(cohort_id: int, pending_version: int, initiating_user_id @shared_task(ignore_result=True, max_retries=1) -def calculate_cohort_from_list(cohort_id: int, items: List[str]) -> None: +def calculate_cohort_from_list(cohort_id: int, items: list[str]) -> None: start_time = time.time() cohort = Cohort.objects.get(pk=cohort_id) @@ -62,7 +62,7 @@ def calculate_cohort_from_list(cohort_id: int, items: List[str]) -> None: @shared_task(ignore_result=True, max_retries=1) -def insert_cohort_from_insight_filter(cohort_id: int, filter_data: Dict[str, Any]) -> None: +def insert_cohort_from_insight_filter(cohort_id: int, filter_data: dict[str, Any]) 
-> None: from posthog.api.cohort import ( insert_cohort_actors_into_ch, insert_cohort_people_into_pg, diff --git a/posthog/tasks/email.py b/posthog/tasks/email.py index d06d15ee12ace..2d7198dc2d8ca 100644 --- a/posthog/tasks/email.py +++ b/posthog/tasks/email.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime -from typing import List, Optional +from typing import Optional import posthoganalytics import structlog @@ -281,7 +281,7 @@ def send_async_migration_errored_email(migration_key: str, time: str, error: str send_message_to_all_staff_users(message) -def get_users_for_orgs_with_no_ingested_events(org_created_from: datetime, org_created_to: datetime) -> List[User]: +def get_users_for_orgs_with_no_ingested_events(org_created_from: datetime, org_created_to: datetime) -> list[User]: # Get all users for organization that haven't ingested any events users = [] recently_created_organizations = Organization.objects.filter( diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py index 1030bb84a96ea..489bf64e74036 100644 --- a/posthog/tasks/exports/csv_exporter.py +++ b/posthog/tasks/exports/csv_exporter.py @@ -1,6 +1,7 @@ import datetime import io -from typing import Any, Dict, List, Optional, Tuple, Generator +from typing import Any, Optional +from collections.abc import Generator from urllib.parse import parse_qsl, quote, urlencode, urlparse, urlunparse import requests @@ -53,14 +54,14 @@ # 5. We save the final blob output and update the ExportedAsset -def add_query_params(url: str, params: Dict[str, str]) -> str: +def add_query_params(url: str, params: dict[str, str]) -> str: """ Uses parse_qsl because parse_qs turns all values into lists but doesn't unbox them when re-encoded """ parsed = urlparse(url) query_params = parse_qsl(parsed.query, keep_blank_values=True) - update_params: List[Tuple[str, Any]] = [] + update_params: list[tuple[str, Any]] = [] for param, value in query_params: if param in params: update_params.append((param, params.pop(param))) @@ -78,7 +79,12 @@ def add_query_params(url: str, params: Dict[str, str]) -> str: def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: if isinstance(data.get("results"), list): results = data.get("results") + elif isinstance(data.get("result"), list): + results = data.get("result") + else: + return None + if isinstance(data.get("results"), list): # query like if len(results) > 0 and (isinstance(results[0], list) or isinstance(results[0], tuple)) and "types" in data: # e.g. 
{'columns': ['count()'], 'hasMore': False, 'results': [[1775]], 'types': ['UInt64']} @@ -93,31 +99,26 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: yield row_dict return + if isinstance(results, list): + first_result = results[0] + # persons modal like if len(results) == 1 and set(results[0].keys()) == {"people", "count"}: yield from results[0].get("people") return - - # Pagination object - yield from results - return - elif data.get("result") and isinstance(data.get("result"), list): - items = data["result"] - first_result = items[0] - - if isinstance(first_result, list) or first_result.get("action_id"): - multiple_items = items if isinstance(first_result, list) else [items] + elif isinstance(first_result, list) or first_result.get("action_id"): + multiple_items = results if isinstance(first_result, list) else [results] # FUNNELS LIKE for items in multiple_items: yield from ( { - "name": x["custom_name"] or x["action_id"], + "name": x.get("custom_name") or x.get("action_id", ""), "breakdown_value": "::".join(x.get("breakdown_value", [])), - "action_id": x["action_id"], - "count": x["count"], - "median_conversion_time (seconds)": x["median_conversion_time"], - "average_conversion_time (seconds)": x["average_conversion_time"], + "action_id": x.get("action_id", ""), + "count": x.get("count", ""), + "median_conversion_time (seconds)": x.get("median_conversion_time", ""), + "average_conversion_time (seconds)": x.get("average_conversion_time", ""), } for x in items ) @@ -125,7 +126,7 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: elif first_result.get("appearances") and first_result.get("person"): # RETENTION PERSONS LIKE period = data["filters"]["period"] or "Day" - for item in items: + for item in results: line = {"person": item["person"]["name"]} for index, data in enumerate(item["appearances"]): line[f"{period} {index}"] = data @@ -134,7 +135,7 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: return elif first_result.get("values") and first_result.get("label"): # RETENTION LIKE - for item in items: + for item in results: if item.get("date"): # Dated means we create a grid line = { @@ -142,7 +143,7 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: "cohort size": item["values"][0]["count"], } for index, data in enumerate(item["values"]): - line[items[index]["label"]] = data["count"] + line[results[index]["label"]] = data["count"] else: # Otherwise we just specify "Period" for titles line = { @@ -156,7 +157,7 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: return elif isinstance(first_result.get("data"), list): # TRENDS LIKE - for index, item in enumerate(items): + for index, item in enumerate(results): line = {"series": item.get("label", f"Series #{index + 1}")} if item.get("action", {}).get("custom_name"): line["custom name"] = item.get("action").get("custom_name") @@ -169,17 +170,15 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: yield line return - else: - return items - elif data.get("result") and isinstance(data.get("result"), dict): - result = data["result"] - - if "bins" not in result: + elif results and isinstance(results, dict): + if "bins" in results: + for key, value in results["bins"]: + yield {"bin": key, "value": value} return - for key, value in result["bins"]: - yield {"bin": key, "value": value} - return None + # Pagination object + yield from results + return class 
UnexpectedEmptyJsonResponse(Exception): @@ -267,7 +266,7 @@ def get_from_hogql_query(exported_asset: ExportedAsset, limit: int, resource: di def _export_to_dict(exported_asset: ExportedAsset, limit: int) -> Any: resource = exported_asset.export_context - columns: List[str] = resource.get("columns", []) + columns: list[str] = resource.get("columns", []) returned_rows: Generator[Any, None, None] if resource.get("source"): @@ -312,7 +311,7 @@ def _export_to_excel(exported_asset: ExportedAsset, limit: int) -> None: for row_num, row_data in enumerate(renderer.tablize(all_csv_rows, header=render_context.get("header"))): for col_num, value in enumerate(row_data): - if value is not None and not isinstance(value, (str, int, float, bool)): + if value is not None and not isinstance(value, str | int | float | bool): value = str(value) worksheet.cell(row=row_num + 1, column=col_num + 1, value=value) diff --git a/posthog/tasks/exports/ordered_csv_renderer.py b/posthog/tasks/exports/ordered_csv_renderer.py index d183ee874b2bc..5b70e9bed911c 100644 --- a/posthog/tasks/exports/ordered_csv_renderer.py +++ b/posthog/tasks/exports/ordered_csv_renderer.py @@ -1,6 +1,7 @@ import itertools from collections import OrderedDict -from typing import Any, Dict, Generator +from typing import Any +from collections.abc import Generator from more_itertools import unique_everseen from rest_framework_csv.renderers import CSVRenderer @@ -28,7 +29,7 @@ def tablize(self, data: Any, header: Any = None, labels: Any = None) -> Generato # Get the set of all unique headers, and sort them. unique_fields = list(unique_everseen(itertools.chain(*(item.keys() for item in data)))) - ordered_fields: Dict[str, Any] = OrderedDict() + ordered_fields: dict[str, Any] = OrderedDict() for item in unique_fields: field = item.split(".") field = field[0] diff --git a/posthog/tasks/exports/test/test_csv_exporter.py b/posthog/tasks/exports/test/test_csv_exporter.py index 87d731dd6a192..d1c03ea5a3eeb 100644 --- a/posthog/tasks/exports/test/test_csv_exporter.py +++ b/posthog/tasks/exports/test/test_csv_exporter.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Any, Dict, Optional +from typing import Any, Optional from unittest import mock from unittest.mock import MagicMock, Mock, patch, ANY @@ -97,7 +97,7 @@ def patched_request(self): patched_request.return_value = mock_response yield patched_request - def _create_asset(self, extra_context: Optional[Dict] = None) -> ExportedAsset: + def _create_asset(self, extra_context: Optional[dict] = None) -> ExportedAsset: if extra_context is None: extra_context = {} @@ -588,7 +588,7 @@ def test_csv_exporter_empty_result(self, mocked_uuidt: Any) -> None: self.assertEqual(lines[0], "error") self.assertEqual(lines[1], "No data available or unable to format for export.") - def _split_to_dict(self, url: str) -> Dict[str, Any]: + def _split_to_dict(self, url: str) -> dict[str, Any]: first_split_parts = url.split("?") assert len(first_split_parts) == 2 return {bits[0]: bits[1] for bits in [param.split("=") for param in first_split_parts[1].split("&")]} diff --git a/posthog/tasks/exports/test/test_csv_exporter_renders.py b/posthog/tasks/exports/test/test_csv_exporter_renders.py index b6de162b0b66d..400cf5dbcff7b 100644 --- a/posthog/tasks/exports/test/test_csv_exporter_renders.py +++ b/posthog/tasks/exports/test/test_csv_exporter_renders.py @@ -21,10 +21,12 @@ @pytest.mark.parametrize("filename", fixtures) +@pytest.mark.parametrize("mode", ("legacy", "hogql")) @pytest.mark.django_db 
@patch("posthog.tasks.exports.csv_exporter.requests.request") +@patch("posthog.tasks.exports.csv_exporter.process_query") @patch("posthog.models.exported_asset.settings") -def test_csv_rendering(mock_settings, mock_request, filename): +def test_csv_rendering(mock_settings, mock_process_query, mock_request, filename, mode): mock_settings.OBJECT_STORAGE_ENABLED = False org = Organization.objects.create(name="org") team = Team.objects.create(organization=org, name="team") @@ -41,11 +43,27 @@ def test_csv_rendering(mock_settings, mock_request, filename): asset.export_context["columns"] = fixture["response"]["columns"] asset.save() - mock = Mock() - mock.status_code = 200 - mock.json.return_value = fixture["response"] - mock_request.return_value = mock - csv_exporter.export_tabular(asset) - csv_rows = asset.content.decode("utf-8").split("\r\n") + if mode == "legacy": + mock = Mock() + mock.status_code = 200 + mock.json.return_value = fixture["response"] + mock_request.return_value = mock + csv_exporter.export_tabular(asset) + csv_rows = asset.content.decode("utf-8").split("\r\n") - assert csv_rows == fixture["csv_rows"] + assert csv_rows == fixture["csv_rows"] + + if mode == "hogql": + asset.export_context["source"] = {"some": "query"} + asset.save() + if fixture.get("hogql_response"): + # If HogQL has a different response structure, add it to the fixture as `hogql_response` + mock_process_query.return_value = fixture["hogql_response"] + elif fixture["response"].get("results"): + mock_process_query.return_value = fixture["response"] + else: + mock_process_query.return_value = {"results": fixture["response"].pop("result"), **fixture["response"]} + csv_exporter.export_tabular(asset) + csv_rows = asset.content.decode("utf-8").split("\r\n") + + assert csv_rows == fixture["csv_rows"] diff --git a/posthog/tasks/sync_all_organization_available_features.py b/posthog/tasks/sync_all_organization_available_features.py index 87e425fa5ca81..ec16a0e0a5a91 100644 --- a/posthog/tasks/sync_all_organization_available_features.py +++ b/posthog/tasks/sync_all_organization_available_features.py @@ -1,4 +1,5 @@ -from typing import Sequence, cast +from typing import cast +from collections.abc import Sequence from posthog.models.organization import Organization diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr index dcf75cb638fac..6f4ac17e6a872 100644 --- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr +++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr @@ -98,6 +98,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -355,6 +356,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/tasks/test/__snapshots__/test_usage_report.ambr b/posthog/tasks/test/__snapshots__/test_usage_report.ambr index 81a832aad0398..d70a35b720584 100644 --- a/posthog/tasks/test/__snapshots__/test_usage_report.ambr +++ b/posthog/tasks/test/__snapshots__/test_usage_report.ambr @@ -30,26 +30,22 @@ FROM events WHERE team_id = 2 AND event='local evaluation usage' 
- AND timestamp between '2022-01-01 00:00:00' AND '2022-01-10 23:59:59' + AND timestamp between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' AND has(['correct'], replaceRegexpAll(JSONExtractRaw(properties, 'token'), '^"|"$', '')) GROUP BY team ''' # --- # name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.11 ''' - WITH JSONExtractInt(log_comment, 'team_id') as team_id, - JSONExtractString(log_comment, 'query_type') as query_type, - JSONExtractString(log_comment, 'access_method') as access_method - SELECT team_id, - sum(read_bytes) as count - FROM clusterAllReplicas(posthog, system.query_log) - WHERE (type = 'QueryFinish' - OR type = 'ExceptionWhileProcessing') - AND is_initial_query = 1 - AND query_type IN (['hogql_query', 'HogQLQuery']) - AND query_start_time between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' - AND access_method = '' - GROUP BY team_id + + SELECT distinct_id as team, + sum(JSONExtractInt(properties, 'count')) as sum + FROM events + WHERE team_id = 2 + AND event='local evaluation usage' + AND timestamp between '2022-01-01 00:00:00' AND '2022-01-10 23:59:59' + AND has(['correct'], replaceRegexpAll(JSONExtractRaw(properties, 'token'), '^"|"$', '')) + GROUP BY team ''' # --- # name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.12 @@ -58,7 +54,7 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(read_rows) as count + sum(read_bytes) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 'ExceptionWhileProcessing') @@ -75,7 +71,7 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(query_duration_ms) as count + sum(read_rows) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 'ExceptionWhileProcessing') @@ -92,14 +88,14 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(read_bytes) as count + sum(query_duration_ms) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 'ExceptionWhileProcessing') AND is_initial_query = 1 AND query_type IN (['hogql_query', 'HogQLQuery']) AND query_start_time between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' - AND access_method = 'personal_api_key' + AND access_method = '' GROUP BY team_id ''' # --- @@ -109,7 +105,7 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(read_rows) as count + sum(read_bytes) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 'ExceptionWhileProcessing') @@ -126,7 +122,7 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(query_duration_ms) as count + sum(read_rows) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 'ExceptionWhileProcessing') @@ -143,14 +139,14 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(read_bytes) as count + sum(query_duration_ms) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 
'ExceptionWhileProcessing') AND is_initial_query = 1 - AND query_type IN (['EventsQuery']) + AND query_type IN (['hogql_query', 'HogQLQuery']) AND query_start_time between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' - AND access_method = '' + AND access_method = 'personal_api_key' GROUP BY team_id ''' # --- @@ -160,7 +156,7 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(read_rows) as count + sum(read_bytes) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 'ExceptionWhileProcessing') @@ -177,7 +173,7 @@ JSONExtractString(log_comment, 'query_type') as query_type, JSONExtractString(log_comment, 'access_method') as access_method SELECT team_id, - sum(query_duration_ms) as count + sum(read_rows) as count FROM clusterAllReplicas(posthog, system.query_log) WHERE (type = 'QueryFinish' OR type = 'ExceptionWhileProcessing') @@ -204,6 +200,23 @@ ''' # --- # name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.20 + ''' + WITH JSONExtractInt(log_comment, 'team_id') as team_id, + JSONExtractString(log_comment, 'query_type') as query_type, + JSONExtractString(log_comment, 'access_method') as access_method + SELECT team_id, + sum(query_duration_ms) as count + FROM clusterAllReplicas(posthog, system.query_log) + WHERE (type = 'QueryFinish' + OR type = 'ExceptionWhileProcessing') + AND is_initial_query = 1 + AND query_type IN (['EventsQuery']) + AND query_start_time between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' + AND access_method = '' + GROUP BY team_id + ''' +# --- +# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.21 ''' WITH JSONExtractInt(log_comment, 'team_id') as team_id, JSONExtractString(log_comment, 'query_type') as query_type, @@ -220,7 +233,7 @@ GROUP BY team_id ''' # --- -# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.21 +# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.22 ''' WITH JSONExtractInt(log_comment, 'team_id') as team_id, JSONExtractString(log_comment, 'query_type') as query_type, @@ -237,7 +250,7 @@ GROUP BY team_id ''' # --- -# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.22 +# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.23 ''' WITH JSONExtractInt(log_comment, 'team_id') as team_id, JSONExtractString(log_comment, 'query_type') as query_type, @@ -254,7 +267,7 @@ GROUP BY team_id ''' # --- -# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.23 +# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.24 ''' SELECT team_id, @@ -265,7 +278,7 @@ GROUP BY team_id ''' # --- -# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.24 +# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.25 ''' SELECT team_id, @@ -276,7 +289,7 @@ GROUP BY team_id ''' # --- -# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.25 +# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.26 ''' SELECT team, @@ -329,12 +342,6 @@ SELECT team_id, count(distinct session_id) as count FROM session_replay_events - WHERE min_first_timestamp BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' - AND session_id NOT IN - (SELECT DISTINCT session_id - FROM session_replay_events - WHERE min_first_timestamp BETWEEN '2022-01-09 00:00:00' AND '2022-01-10 00:00:00' - GROUP BY session_id) GROUP BY team_id ''' # --- @@ -343,21 +350,39 @@ SELECT 
team_id, count(distinct session_id) as count - FROM session_replay_events + FROM + (SELECT any(team_id) as team_id, + session_id + FROM session_replay_events + WHERE min_first_timestamp BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' + GROUP BY session_id + HAVING ifNull(argMinMerge(snapshot_source), 'web') == 'web') + WHERE session_id NOT IN + (SELECT DISTINCT session_id + FROM session_replay_events + WHERE min_first_timestamp BETWEEN '2022-01-09 00:00:00' AND '2022-01-10 00:00:00' + GROUP BY session_id) GROUP BY team_id ''' # --- # name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.7 ''' - SELECT distinct_id as team, - sum(JSONExtractInt(properties, 'count')) as sum - FROM events - WHERE team_id = 2 - AND event='decide usage' - AND timestamp between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' - AND has(['correct'], replaceRegexpAll(JSONExtractRaw(properties, 'token'), '^"|"$', '')) - GROUP BY team + SELECT team_id, + count(distinct session_id) as count + FROM + (SELECT any(team_id) as team_id, + session_id + FROM session_replay_events + WHERE min_first_timestamp BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' + GROUP BY session_id + HAVING ifNull(argMinMerge(snapshot_source), 'web') == 'mobile') + WHERE session_id NOT IN + (SELECT DISTINCT session_id + FROM session_replay_events + WHERE min_first_timestamp BETWEEN '2022-01-09 00:00:00' AND '2022-01-10 00:00:00' + GROUP BY session_id) + GROUP BY team_id ''' # --- # name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.8 @@ -368,7 +393,7 @@ FROM events WHERE team_id = 2 AND event='decide usage' - AND timestamp between '2022-01-01 00:00:00' AND '2022-01-10 23:59:59' + AND timestamp between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' AND has(['correct'], replaceRegexpAll(JSONExtractRaw(properties, 'token'), '^"|"$', '')) GROUP BY team ''' @@ -380,8 +405,8 @@ sum(JSONExtractInt(properties, 'count')) as sum FROM events WHERE team_id = 2 - AND event='local evaluation usage' - AND timestamp between '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' + AND event='decide usage' + AND timestamp between '2022-01-01 00:00:00' AND '2022-01-10 23:59:59' AND has(['correct'], replaceRegexpAll(JSONExtractRaw(properties, 'token'), '^"|"$', '')) GROUP BY team ''' diff --git a/posthog/tasks/test/test_calculate_cohort.py b/posthog/tasks/test/test_calculate_cohort.py index 0c81076c8fa81..ff2c534a91039 100644 --- a/posthog/tasks/test/test_calculate_cohort.py +++ b/posthog/tasks/test/test_calculate_cohort.py @@ -1,4 +1,4 @@ -from typing import Callable +from collections.abc import Callable from unittest.mock import MagicMock, patch from freezegun import freeze_time diff --git a/posthog/tasks/test/test_email.py b/posthog/tasks/test/test_email.py index 447d0d442bfc8..b89127b48b7a5 100644 --- a/posthog/tasks/test/test_email.py +++ b/posthog/tasks/test/test_email.py @@ -1,5 +1,4 @@ import datetime as dt -from typing import Tuple from unittest.mock import MagicMock, patch import pytest @@ -28,7 +27,7 @@ from posthog.test.base import APIBaseTest, ClickhouseTestMixin -def create_org_team_and_user(creation_date: str, email: str, ingested_event: bool = False) -> Tuple[Organization, User]: +def create_org_team_and_user(creation_date: str, email: str, ingested_event: bool = False) -> tuple[Organization, User]: with freeze_time(creation_date): org = Organization.objects.create(name="too_late_org") Team.objects.create(organization=org, name="Default Project", ingested_event=ingested_event) diff --git 
a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index d977f27560b51..43955612e5544 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Any, Dict, List +from typing import Any from unittest.mock import ANY, MagicMock, Mock, call, patch from uuid import uuid4 @@ -56,6 +56,81 @@ logger = structlog.get_logger(__name__) +def _setup_replay_data(team_id: int, include_mobile_replay: bool) -> None: + # recordings in period - 5 sessions + for i in range(1, 6): + session_id = str(i) + timestamp = now() - relativedelta(hours=12) + produce_replay_summary( + team_id=team_id, + session_id=session_id, + distinct_id=str(uuid4()), + first_timestamp=timestamp, + last_timestamp=timestamp, + ) + + if include_mobile_replay: + timestamp = now() - relativedelta(hours=12) + produce_replay_summary( + team_id=team_id, + session_id="a-single-mobile-recording", + distinct_id=str(uuid4()), + first_timestamp=timestamp, + last_timestamp=timestamp, + snapshot_source="mobile", + ) + + # recordings out of period - 11 sessions + for i in range(1, 11): + id1 = str(i + 10) + timestamp1 = now() - relativedelta(hours=48) + produce_replay_summary( + team_id=team_id, + session_id=id1, + distinct_id=str(uuid4()), + first_timestamp=timestamp1, + last_timestamp=timestamp1, + ) + # we maybe also include a single mobile recording out of period + if i == 1 and include_mobile_replay: + produce_replay_summary( + team_id=team_id, + session_id=f"{id1}-mobile", + distinct_id=str(uuid4()), + first_timestamp=timestamp1, + last_timestamp=timestamp1, + snapshot_source="mobile", + ) + + # ensure there is a recording that starts before the period and ends during the period + # report is going to be for "yesterday" relative to the test so... 
+ start_of_day = datetime.combine(now().date(), datetime.min.time()) - relativedelta(days=1) + session_that_will_not_match = "session-that-will-not-match-because-it-starts-before-the-period" + timestamp2 = start_of_day - relativedelta(hours=1) + produce_replay_summary( + team_id=team_id, + session_id=session_that_will_not_match, + distinct_id=str(uuid4()), + first_timestamp=timestamp2, + last_timestamp=timestamp2, + ) + produce_replay_summary( + team_id=team_id, + session_id=session_that_will_not_match, + distinct_id=str(uuid4()), + first_timestamp=start_of_day, + last_timestamp=start_of_day, + ) + timestamp3 = start_of_day + relativedelta(hours=1) + produce_replay_summary( + team_id=team_id, + session_id=session_that_will_not_match, + distinct_id=str(uuid4()), + first_timestamp=timestamp3, + last_timestamp=timestamp3, + ) + + @freeze_time("2022-01-10T00:01:00Z") class UsageReport(APIBaseTest, ClickhouseTestMixin, ClickhouseDestroyTablesMixin): def setUp(self) -> None: @@ -232,59 +307,8 @@ def _create_sample_usage_data(self) -> None: team=self.org_1_team_2, ) - # recordings in period - 5 sessions with 5 snapshots each - for i in range(1, 6): - for _ in range(0, 5): - session_id = str(i) - timestamp = now() - relativedelta(hours=12) - produce_replay_summary( - team_id=self.org_1_team_2.id, - session_id=session_id, - distinct_id=distinct_id, - first_timestamp=timestamp, - last_timestamp=timestamp, - ) + _setup_replay_data(team_id=self.org_1_team_2.id, include_mobile_replay=False) - # recordings out of period - 5 sessions with 5 snapshots each - for i in range(1, 11): - for _ in range(0, 5): - id1 = str(i + 10) - timestamp1 = now() - relativedelta(hours=48) - produce_replay_summary( - team_id=self.org_1_team_2.id, - session_id=id1, - distinct_id=distinct_id, - first_timestamp=timestamp1, - last_timestamp=timestamp1, - ) - - # ensure there is a recording that starts before the period and ends during the period - # report is going to be for "yesterday" relative to the test so... 
- start_of_day = datetime.combine(now().date(), datetime.min.time()) - relativedelta(days=1) - session_that_will_not_match = "session-that-will-not-match-because-it-starts-before-the-period" - timestamp2 = start_of_day - relativedelta(hours=1) - produce_replay_summary( - team_id=self.org_1_team_2.id, - session_id=session_that_will_not_match, - distinct_id=distinct_id, - first_timestamp=timestamp2, - last_timestamp=timestamp2, - ) - produce_replay_summary( - team_id=self.org_1_team_2.id, - session_id=session_that_will_not_match, - distinct_id=distinct_id, - first_timestamp=start_of_day, - last_timestamp=start_of_day, - ) - timestamp3 = start_of_day + relativedelta(hours=1) - produce_replay_summary( - team_id=self.org_1_team_2.id, - session_id=session_that_will_not_match, - distinct_id=distinct_id, - first_timestamp=timestamp3, - last_timestamp=timestamp3, - ) _create_event( distinct_id=distinct_id, event="$feature_flag_called", @@ -324,14 +348,14 @@ def _create_sample_usage_data(self) -> None: flush_persons_and_events() - def _select_report_by_org_id(self, org_id: str, reports: List[Dict]) -> Dict: + def _select_report_by_org_id(self, org_id: str, reports: list[dict]) -> dict: return next(report for report in reports if report["organization_id"] == org_id) def _create_plugin(self, name: str, enabled: bool) -> None: plugin = Plugin.objects.create(organization_id=self.team.organization.pk, name=name) PluginConfig.objects.create(plugin=plugin, enabled=enabled, order=1) - def _test_usage_report(self) -> List[dict]: + def _test_usage_report(self) -> list[dict]: with self.settings(SITE_URL="http://test.posthog.com"): self._create_sample_usage_data() self._create_plugin("Installed but not enabled", False) @@ -383,6 +407,7 @@ def _test_usage_report(self) -> List[dict]: "event_count_with_groups_in_period": 2, "recording_count_in_period": 5, "recording_count_total": 16, + "mobile_recording_count_in_period": 0, "group_types_total": 2, "dashboard_count": 2, "dashboard_template_count": 0, @@ -426,6 +451,7 @@ def _test_usage_report(self) -> List[dict]: "event_count_with_groups_in_period": 2, "recording_count_in_period": 0, "recording_count_total": 0, + "mobile_recording_count_in_period": 0, "group_types_total": 2, "dashboard_count": 2, "dashboard_template_count": 0, @@ -463,6 +489,7 @@ def _test_usage_report(self) -> List[dict]: "event_count_with_groups_in_period": 0, "recording_count_in_period": 5, "recording_count_total": 16, + "mobile_recording_count_in_period": 0, "group_types_total": 0, "dashboard_count": 0, "dashboard_template_count": 0, @@ -523,6 +550,7 @@ def _test_usage_report(self) -> List[dict]: "event_count_with_groups_in_period": 0, "recording_count_in_period": 0, "recording_count_total": 0, + "mobile_recording_count_in_period": 0, "group_types_total": 0, "dashboard_count": 0, "dashboard_template_count": 0, @@ -566,6 +594,7 @@ def _test_usage_report(self) -> List[dict]: "event_count_with_groups_in_period": 0, "recording_count_in_period": 0, "recording_count_total": 0, + "mobile_recording_count_in_period": 0, "group_types_total": 0, "dashboard_count": 0, "dashboard_template_count": 0, @@ -658,6 +687,41 @@ def test_unlicensed_usage_report(self, mock_post: MagicMock, mock_client: MagicM mock_posthog.capture.assert_has_calls(calls, any_order=True) +@freeze_time("2022-01-09T00:01:00Z") +class ReplayUsageReport(APIBaseTest, ClickhouseTestMixin, ClickhouseDestroyTablesMixin): + def test_usage_report_replay(self) -> None: + _setup_replay_data(self.team.pk, include_mobile_replay=False) + + period = 
get_previous_day() + period_start, period_end = period + + all_reports = _get_all_usage_data_as_team_rows(period_start, period_end) + report = _get_team_report(all_reports, self.team) + + assert all_reports["teams_with_recording_count_total"] == {self.team.pk: 16} + assert report.recording_count_in_period == 5 + assert report.recording_count_total == 16 + + assert report.mobile_recording_count_in_period == 0 + + def test_usage_report_replay_with_mobile(self) -> None: + _setup_replay_data(self.team.pk, include_mobile_replay=True) + + period = get_previous_day() + period_start, period_end = period + + all_reports = _get_all_usage_data_as_team_rows(period_start, period_end) + report = _get_team_report(all_reports, self.team) + + # we don't split mobile recordings out of the total since that field is not used + assert all_reports["teams_with_recording_count_total"] == {self.team.pk: 18} + assert report.recording_count_total == 18 + + # but we do split them out of the daily usage since that field is used + assert report.recording_count_in_period == 5 + assert report.mobile_recording_count_in_period == 1 + + class HogQLUsageReport(APIBaseTest, ClickhouseTestMixin, ClickhouseDestroyTablesMixin): def test_usage_report_hogql_queries(self) -> None: for _ in range(0, 100): diff --git a/posthog/tasks/test/utils_email_tests.py b/posthog/tasks/test/utils_email_tests.py index d9be8cdd3bc7e..ccb998b3dc38d 100644 --- a/posthog/tasks/test/utils_email_tests.py +++ b/posthog/tasks/test/utils_email_tests.py @@ -1,12 +1,12 @@ import os -from typing import Any, List +from typing import Any from unittest.mock import MagicMock from posthog.email import EmailMessage from posthog.utils import get_absolute_path -def mock_email_messages(MockEmailMessage: MagicMock, path: str = "tasks/test/__emails__/") -> List[Any]: +def mock_email_messages(MockEmailMessage: MagicMock, path: str = "tasks/test/__emails__/") -> list[Any]: """ Takes a mocked EmailMessage class and returns a list of all subsequently created EmailMessage instances The "send" method is spyed on to write the generated email to a file diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index 958601d1ec3ca..d7f01fae36c45 100644 --- a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -4,16 +4,13 @@ from datetime import datetime from typing import ( Any, - Dict, - List, Literal, Optional, - Sequence, - Tuple, TypedDict, Union, cast, ) +from collections.abc import Sequence import requests import structlog @@ -52,8 +49,15 @@ logger = structlog.get_logger(__name__) -Period = TypedDict("Period", {"start_inclusive": str, "end_inclusive": str}) -TableSizes = TypedDict("TableSizes", {"posthog_event": int, "posthog_sessionrecordingevent": int}) + +class Period(TypedDict): + start_inclusive: str + end_inclusive: str + + +class TableSizes(TypedDict): + posthog_event: int + posthog_sessionrecordingevent: int CH_BILLING_SETTINGS = { @@ -81,9 +85,12 @@ class UsageReportCounters: event_count_with_groups_in_period: int # event_count_by_lib: Dict # event_count_by_name: Dict + # Recordings recording_count_in_period: int recording_count_total: int + mobile_recording_count_in_period: int + # Persons and Groups group_types_total: int # person_count_total: int @@ -133,13 +140,13 @@ class InstanceMetadata: product: str helm: Optional[dict] clickhouse_version: Optional[str] - users_who_logged_in: Optional[List[Dict[str, Union[str, int]]]] + users_who_logged_in: Optional[list[dict[str, Union[str, int]]]] users_who_logged_in_count: Optional[int] 
- users_who_signed_up: Optional[List[Dict[str, Union[str, int]]]] + users_who_signed_up: Optional[list[dict[str, Union[str, int]]]] users_who_signed_up_count: Optional[int] table_sizes: Optional[TableSizes] - plugins_installed: Optional[Dict] - plugins_enabled: Optional[Dict] + plugins_installed: Optional[dict] + plugins_enabled: Optional[dict] instance_tag: str @@ -151,7 +158,7 @@ class OrgReport(UsageReportCounters): organization_created_at: str organization_user_count: int team_count: int - teams: Dict[str, UsageReportCounters] + teams: dict[str, UsageReportCounters] @dataclasses.dataclass @@ -163,7 +170,7 @@ def fetch_table_size(table_name: str) -> int: return fetch_sql("SELECT pg_total_relation_size(%s) as size", (table_name,))[0].size -def fetch_sql(sql_: str, params: Tuple[Any, ...]) -> List[Any]: +def fetch_sql(sql_: str, params: tuple[Any, ...]) -> list[Any]: with connection.cursor() as cursor: cursor.execute(sql.SQL(sql_), params) return namedtuplefetchall(cursor) @@ -178,7 +185,7 @@ def get_product_name(realm: str, has_license: bool) -> str: return "unknown" -def get_instance_metadata(period: Tuple[datetime, datetime]) -> InstanceMetadata: +def get_instance_metadata(period: tuple[datetime, datetime]) -> InstanceMetadata: has_license = False if settings.EE_AVAILABLE: @@ -288,7 +295,7 @@ def get_org_owner_or_first_user(organization_id: str) -> Optional[User]: @shared_task(**USAGE_REPORT_TASK_KWARGS, max_retries=3) -def send_report_to_billing_service(org_id: str, report: Dict[str, Any]) -> None: +def send_report_to_billing_service(org_id: str, report: dict[str, Any]) -> None: if not settings.EE_AVAILABLE: return @@ -340,7 +347,7 @@ def capture_event( pha_client: Client, name: str, organization_id: str, - properties: Dict[str, Any], + properties: dict[str, Any], timestamp: Optional[Union[datetime, str]] = None, ) -> None: if timestamp and isinstance(timestamp, str): @@ -373,7 +380,7 @@ def capture_event( @timed_log() @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) -def get_teams_with_event_count_lifetime() -> List[Tuple[int, int]]: +def get_teams_with_event_count_lifetime() -> list[tuple[int, int]]: result = sync_execute( """ SELECT team_id, count(1) as count @@ -390,7 +397,7 @@ def get_teams_with_event_count_lifetime() -> List[Tuple[int, int]]: @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) def get_teams_with_billable_event_count_in_period( begin: datetime, end: datetime, count_distinct: bool = False -) -> List[Tuple[int, int]]: +) -> list[tuple[int, int]]: # count only unique events # Duplicate events will be eventually removed by ClickHouse and likely came from our library or pipeline. # We shouldn't bill for these. However counting unique events is more expensive, and likely to fail on longer time ranges. @@ -420,7 +427,7 @@ def get_teams_with_billable_event_count_in_period( @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) def get_teams_with_billable_enhanced_persons_event_count_in_period( begin: datetime, end: datetime, count_distinct: bool = False -) -> List[Tuple[int, int]]: +) -> list[tuple[int, int]]: # count only unique events # Duplicate events will be eventually removed by ClickHouse and likely came from our library or pipeline. # We shouldn't bill for these. However counting unique events is more expensive, and likely to fail on longer time ranges. 
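
The typing changes that run through this file (and the rest of the diff) follow one mechanical pattern: typing.Dict / List / Tuple become the builtin generics, Sequence / Iterator / Iterable move to collections.abc, and functional TypedDicts become class-based. A minimal before/after sketch of that pattern, reusing the Period shape from above (select_periods is an invented helper, shown only to exercise the new annotations):

from typing import TypedDict

# old functional form, as removed above:
# Period = TypedDict("Period", {"start_inclusive": str, "end_inclusive": str})

# new class-based form, as added above:
class Period(TypedDict):
    start_inclusive: str
    end_inclusive: str

def select_periods(periods: list[Period]) -> dict[str, Period]:
    # builtin generics (list / dict) replace typing.List / typing.Dict
    return {p["start_inclusive"]: p for p in periods}
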
@@ -448,7 +455,7 @@ def get_teams_with_billable_enhanced_persons_event_count_in_period( @timed_log() @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) -def get_teams_with_event_count_with_groups_in_period(begin: datetime, end: datetime) -> List[Tuple[int, int]]: +def get_teams_with_event_count_with_groups_in_period(begin: datetime, end: datetime) -> list[tuple[int, int]]: result = sync_execute( """ SELECT team_id, count(1) as count @@ -466,7 +473,7 @@ def get_teams_with_event_count_with_groups_in_period(begin: datetime, end: datet @timed_log() @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) -def get_teams_with_event_count_by_lib(begin: datetime, end: datetime) -> List[Tuple[int, str, int]]: +def get_teams_with_event_count_by_lib(begin: datetime, end: datetime) -> list[tuple[int, str, int]]: results = sync_execute( """ SELECT team_id, JSONExtractString(properties, '$lib') as lib, COUNT(1) as count @@ -483,7 +490,7 @@ def get_teams_with_event_count_by_lib(begin: datetime, end: datetime) -> List[Tu @timed_log() @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) -def get_teams_with_event_count_by_name(begin: datetime, end: datetime) -> List[Tuple[int, str, int]]: +def get_teams_with_event_count_by_name(begin: datetime, end: datetime) -> list[tuple[int, str, int]]: results = sync_execute( """ SELECT team_id, event, COUNT(1) as count @@ -500,15 +507,22 @@ def get_teams_with_event_count_by_name(begin: datetime, end: datetime) -> List[T @timed_log() @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) -def get_teams_with_recording_count_in_period(begin: datetime, end: datetime) -> List[Tuple[int, int]]: +def get_teams_with_recording_count_in_period( + begin: datetime, end: datetime, snapshot_source: Literal["mobile", "web"] = "web" +) -> list[tuple[int, int]]: previous_begin = begin - (end - begin) result = sync_execute( """ SELECT team_id, count(distinct session_id) as count - FROM session_replay_events - WHERE min_first_timestamp BETWEEN %(begin)s AND %(end)s - AND session_id NOT IN ( + FROM ( + SELECT any(team_id) as team_id, session_id + FROM session_replay_events + WHERE min_first_timestamp BETWEEN %(begin)s AND %(end)s + GROUP BY session_id + HAVING ifNull(argMinMerge(snapshot_source), 'web') == %(snapshot_source)s + ) + WHERE session_id NOT IN ( -- we want to exclude sessions that might have events with timestamps -- before the period we are interested in SELECT DISTINCT session_id @@ -521,7 +535,7 @@ def get_teams_with_recording_count_in_period(begin: datetime, end: datetime) -> ) GROUP BY team_id """, - {"previous_begin": previous_begin, "begin": begin, "end": end}, + {"previous_begin": previous_begin, "begin": begin, "end": end, "snapshot_source": snapshot_source}, workload=Workload.OFFLINE, settings=CH_BILLING_SETTINGS, ) @@ -531,7 +545,7 @@ def get_teams_with_recording_count_in_period(begin: datetime, end: datetime) -> @timed_log() @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) -def get_teams_with_recording_count_total() -> List[Tuple[int, int]]: +def get_teams_with_recording_count_total() -> list[tuple[int, int]]: result = sync_execute( """ SELECT team_id, count(distinct session_id) as count @@ -549,10 +563,10 @@ def get_teams_with_recording_count_total() -> List[Tuple[int, int]]: def get_teams_with_hogql_metric( begin: datetime, end: datetime, - query_types: List[str], + query_types: list[str], access_method: str = "", 
metric: Literal["read_bytes", "read_rows", "query_duration_ms"] = "read_bytes", -) -> List[Tuple[int, int]]: +) -> list[tuple[int, int]]: if metric not in ["read_bytes", "read_rows", "query_duration_ms"]: # :TRICKY: Inlined into the query below. raise ValueError(f"Invalid metric {metric}") @@ -586,7 +600,7 @@ def get_teams_with_hogql_metric( @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) def get_teams_with_feature_flag_requests_count_in_period( begin: datetime, end: datetime, request_type: FlagRequestType -) -> List[Tuple[int, int]]: +) -> list[tuple[int, int]]: # depending on the region, events are stored in different teams team_to_query = 1 if get_instance_region() == "EU" else 2 validity_token = settings.DECIDE_BILLING_ANALYTICS_TOKEN @@ -620,7 +634,7 @@ def get_teams_with_feature_flag_requests_count_in_period( def get_teams_with_survey_responses_count_in_period( begin: datetime, end: datetime, -) -> List[Tuple[int, int]]: +) -> list[tuple[int, int]]: results = sync_execute( """ SELECT team_id, COUNT() as count @@ -638,7 +652,7 @@ def get_teams_with_survey_responses_count_in_period( @timed_log() @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) -def get_teams_with_rows_synced_in_period(begin: datetime, end: datetime) -> List[Tuple[int, int]]: +def get_teams_with_rows_synced_in_period(begin: datetime, end: datetime) -> list[tuple[int, int]]: team_to_query = 1 if get_instance_region() == "EU" else 2 # dedup by job id incase there were duplicates sent @@ -668,7 +682,7 @@ def get_teams_with_rows_synced_in_period(begin: datetime, end: datetime) -> List def capture_report( capture_event_name: str, org_id: str, - full_report_dict: Dict[str, Any], + full_report_dict: dict[str, Any], at_date: Optional[datetime] = None, ) -> None: pha_client = Client("sTMFPsFhdP1Ssg") @@ -689,13 +703,14 @@ def has_non_zero_usage(report: FullUsageReport) -> bool: report.event_count_in_period > 0 or report.enhanced_persons_event_count_in_period > 0 or report.recording_count_in_period > 0 + # explicitly not including mobile_recording_count_in_period for now or report.decide_requests_count_in_period > 0 or report.local_evaluation_requests_count_in_period > 0 or report.survey_responses_count_in_period > 0 ) -def convert_team_usage_rows_to_dict(rows: List[Union[dict, Tuple[int, int]]]) -> Dict[int, int]: +def convert_team_usage_rows_to_dict(rows: list[Union[dict, tuple[int, int]]]) -> dict[int, int]: team_id_map = {} for row in rows: if isinstance(row, dict) and "team_id" in row: @@ -708,7 +723,7 @@ def convert_team_usage_rows_to_dict(rows: List[Union[dict, Tuple[int, int]]]) -> return team_id_map -def _get_all_usage_data(period_start: datetime, period_end: datetime) -> Dict[str, Any]: +def _get_all_usage_data(period_start: datetime, period_end: datetime) -> dict[str, Any]: """ Gets all usage data for the specified period. 
Clickhouse is good at counting things so we count across all teams rather than doing it one by one @@ -729,8 +744,13 @@ def _get_all_usage_data(period_start: datetime, period_end: datetime) -> Dict[st ), # teams_with_event_count_by_lib=get_teams_with_event_count_by_lib(period_start, period_end), # teams_with_event_count_by_name=get_teams_with_event_count_by_name(period_start, period_end), - "teams_with_recording_count_in_period": get_teams_with_recording_count_in_period(period_start, period_end), "teams_with_recording_count_total": get_teams_with_recording_count_total(), + "teams_with_recording_count_in_period": get_teams_with_recording_count_in_period( + period_start, period_end, snapshot_source="web" + ), + "teams_with_mobile_recording_count_in_period": get_teams_with_recording_count_in_period( + period_start, period_end, snapshot_source="mobile" + ), "teams_with_decide_requests_count_in_period": get_teams_with_feature_flag_requests_count_in_period( period_start, period_end, FlagRequestType.DECIDE ), @@ -867,7 +887,7 @@ def _get_all_usage_data(period_start: datetime, period_end: datetime) -> Dict[st } -def _get_all_usage_data_as_team_rows(period_start: datetime, period_end: datetime) -> Dict[str, Any]: +def _get_all_usage_data_as_team_rows(period_start: datetime, period_end: datetime) -> dict[str, Any]: """ Gets all usage data for the specified period as a map of team_id -> value. This makes it faster to access the data than looping over all_data to find what we want. @@ -887,7 +907,7 @@ def _get_teams_for_usage_reports() -> Sequence[Team]: ) -def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounters: +def _get_team_report(all_data: dict[str, Any], team: Team) -> UsageReportCounters: decide_requests_count_in_month = all_data["teams_with_decide_requests_count_in_month"].get(team.id, 0) decide_requests_count_in_period = all_data["teams_with_decide_requests_count_in_period"].get(team.id, 0) local_evaluation_requests_count_in_period = all_data["teams_with_local_evaluation_requests_count_in_period"].get( @@ -906,8 +926,9 @@ def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounter event_count_with_groups_in_period=all_data["teams_with_event_count_with_groups_in_period"].get(team.id, 0), # event_count_by_lib: Di all_data["teams_with_#"].get(team.id, 0), # event_count_by_name: Di all_data["teams_with_#"].get(team.id, 0), - recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0), recording_count_total=all_data["teams_with_recording_count_total"].get(team.id, 0), + recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0), + mobile_recording_count_in_period=all_data["teams_with_mobile_recording_count_in_period"].get(team.id, 0), group_types_total=all_data["teams_with_group_types_total"].get(team.id, 0), decide_requests_count_in_period=decide_requests_count_in_period, decide_requests_count_in_month=decide_requests_count_in_month, @@ -942,7 +963,7 @@ def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounter def _add_team_report_to_org_reports( - org_reports: Dict[str, OrgReport], + org_reports: dict[str, OrgReport], team: Team, team_report: UsageReportCounters, period_start: datetime, @@ -975,12 +996,12 @@ def _add_team_report_to_org_reports( ) -def _get_all_org_reports(period_start: datetime, period_end: datetime) -> Dict[str, OrgReport]: +def _get_all_org_reports(period_start: datetime, period_end: datetime) -> dict[str, OrgReport]: all_data = 
_get_all_usage_data_as_team_rows(period_start, period_end) teams = _get_teams_for_usage_reports() - org_reports: Dict[str, OrgReport] = {} + org_reports: dict[str, OrgReport] = {} print("Generating reports for teams...") # noqa T201 time_now = datetime.now() @@ -1000,7 +1021,7 @@ def _get_full_org_usage_report(org_report: OrgReport, instance_metadata: Instanc ) -def _get_full_org_usage_report_as_dict(full_report: FullUsageReport) -> Dict[str, Any]: +def _get_full_org_usage_report_as_dict(full_report: FullUsageReport) -> dict[str, Any]: return dataclasses.asdict(full_report) diff --git a/posthog/tasks/verify_persons_data_in_sync.py b/posthog/tasks/verify_persons_data_in_sync.py index 02a53b0176c7b..5ed2a3ec074db 100644 --- a/posthog/tasks/verify_persons_data_in_sync.py +++ b/posthog/tasks/verify_persons_data_in_sync.py @@ -1,7 +1,7 @@ import json from collections import Counter, defaultdict from datetime import timedelta -from typing import Any, Dict, List +from typing import Any import structlog from celery import shared_task @@ -80,7 +80,7 @@ def verify_persons_data_in_sync( return results -def _team_integrity_statistics(person_data: List[Any]) -> Counter: +def _team_integrity_statistics(person_data: list[Any]) -> Counter: person_ids = [id for id, _, _ in person_data] person_uuids = [uuid for _, uuid, _ in person_data] team_ids = list({team_id for _, _, team_id in person_data}) @@ -159,8 +159,8 @@ def _emit_metrics(integrity_results: Counter) -> None: statsd.gauge(f"posthog_person_integrity_{key}", value) -def _index_by(collection: List[Any], key_fn: Any, flat: bool = True) -> Dict: - result: Dict = {} if flat else defaultdict(list) +def _index_by(collection: list[Any], key_fn: Any, flat: bool = True) -> dict: + result: dict = {} if flat else defaultdict(list) for item in collection: if flat: result[key_fn(item)] = item diff --git a/posthog/templates/email/batch_export_run_failure.html b/posthog/templates/email/batch_export_run_failure.html index 64d6b0afb1487..04cf2021e342c 100644 --- a/posthog/templates/email/batch_export_run_failure.html +++ b/posthog/templates/email/batch_export_run_failure.html @@ -9,7 +9,7 @@

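
For the usage report changes above: get_teams_with_recording_count_in_period() is now queried twice, once per snapshot_source ("web" and "mobile"), and the two per-team maps feed recording_count_in_period and the new mobile_recording_count_in_period respectively, while has_non_zero_usage() deliberately keeps ignoring the mobile count. A minimal sketch of how those maps are consumed; the variable names, team id and counts below are invented:

# teams_with_recording_count_in_period, i.e. snapshot_source="web"
web_rows_by_team = {42: 5}
# teams_with_mobile_recording_count_in_period, i.e. snapshot_source="mobile"
mobile_rows_by_team = {42: 1}

team_id = 42
recording_count_in_period = web_rows_by_team.get(team_id, 0)            # -> 5
mobile_recording_count_in_period = mobile_rows_by_team.get(team_id, 0)  # -> 1

# has_non_zero_usage() still only checks the web count, so a team with only
# mobile recordings in the period is still treated as having zero replay usage.
assert recording_count_in_period > 0
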
diff --git a/posthog/templatetags/posthog_assets.py b/posthog/templatetags/posthog_assets.py index 422bd687d9a07..dd8a1c1bb1981 100644 --- a/posthog/templatetags/posthog_assets.py +++ b/posthog/templatetags/posthog_assets.py @@ -1,5 +1,4 @@ import re -from typing import List from django.conf import settings from django.template import Library @@ -26,7 +25,7 @@ def absolute_asset_url(path: str) -> str: @register.simple_tag -def human_social_providers(providers: List[str]) -> str: +def human_social_providers(providers: list[str]) -> str: """ Returns a human-friendly name for a social login provider. Example: diff --git a/posthog/temporal/batch_exports/batch_exports.py b/posthog/temporal/batch_exports/batch_exports.py index 4e4b3ac11a445..68ea47c19c6a8 100644 --- a/posthog/temporal/batch_exports/batch_exports.py +++ b/posthog/temporal/batch_exports/batch_exports.py @@ -51,7 +51,7 @@ -- These 'timestamp' checks are a heuristic to exploit the sort key. -- Ideally, we need a schema that serves our needs, i.e. with a sort key on the _timestamp field used for batch exports. -- As a side-effect, this heuristic will discard historical loads older than a day. -AND timestamp >= toDateTime64({data_interval_start}, 6, 'UTC') - INTERVAL 2 DAY +AND timestamp >= toDateTime64({data_interval_start}, 6, 'UTC') - INTERVAL 4 DAY AND timestamp < toDateTime64({data_interval_end}, 6, 'UTC') + INTERVAL 1 DAY """ @@ -188,14 +188,14 @@ def iter_records( timestamp_predicates = "" if fields is None: - query_fields = ",".join((f"{field['expression']} AS {field['alias']}" for field in default_fields())) + query_fields = ",".join(f"{field['expression']} AS {field['alias']}" for field in default_fields()) else: if "_inserted_at" not in [field["alias"] for field in fields]: control_fields = [BatchExportField(expression="COALESCE(inserted_at, _timestamp)", alias="_inserted_at")] else: control_fields = [] - query_fields = ",".join((f"{field['expression']} AS {field['alias']}" for field in fields + control_fields)) + query_fields = ",".join(f"{field['expression']} AS {field['alias']}" for field in fields + control_fields) query = SELECT_QUERY_TEMPLATE.substitute( fields=query_fields, @@ -219,8 +219,7 @@ def iter_records( else: query_parameters = base_query_parameters - for record_batch in client.stream_query_as_arrow(query, query_parameters=query_parameters): - yield record_batch + yield from client.stream_query_as_arrow(query, query_parameters=query_parameters) def get_data_interval(interval: str, data_interval_end: str | None) -> tuple[dt.datetime, dt.datetime]: @@ -593,10 +592,10 @@ async def execute_batch_export_insert_activity( inputs, non_retryable_error_types: list[str], finish_inputs: FinishBatchExportRunInputs, - start_to_close_timeout_seconds: int = 3600, + interval: str, heartbeat_timeout_seconds: int | None = 120, - maximum_attempts: int = 10, - initial_retry_interval_seconds: int = 10, + maximum_attempts: int = 15, + initial_retry_interval_seconds: int = 30, maximum_retry_interval_seconds: int = 120, ) -> None: """Execute the main insert activity of a batch export handling any errors. @@ -610,7 +609,7 @@ async def execute_batch_export_insert_activity( inputs: The inputs to the activity. non_retryable_error_types: A list of errors to not retry on when executing the activity. finish_inputs: Inputs to the 'finish_batch_export_run' to run at the end. - start_to_close_timeout: A timeout for the 'insert_into_*' activity function. + interval: The interval of the batch export used to set the start to close timeout. 
maximum_attempts: Maximum number of retries for the 'insert_into_*' activity function. Assuming the error that triggered the retry is not in non_retryable_error_types. initial_retry_interval_seconds: When retrying, seconds until the first retry. @@ -624,11 +623,23 @@ async def execute_batch_export_insert_activity( non_retryable_error_types=non_retryable_error_types, ) + if interval == "hour": + start_to_close_timeout = dt.timedelta(hours=1) + elif interval == "day": + start_to_close_timeout = dt.timedelta(days=1) + elif interval.startswith("every"): + _, value, unit = interval.split(" ") + kwargs = {unit: int(value)} + # TODO: Consider removing this 10 minute minimum once we are more confident about hitting 5 minute or lower SLAs. + start_to_close_timeout = max(dt.timedelta(minutes=10), dt.timedelta(**kwargs)) + else: + raise ValueError(f"Unsupported interval: '{interval}'") + try: records_completed = await workflow.execute_activity( activity, inputs, - start_to_close_timeout=dt.timedelta(seconds=start_to_close_timeout_seconds), + start_to_close_timeout=start_to_close_timeout, heartbeat_timeout=dt.timedelta(seconds=heartbeat_timeout_seconds) if heartbeat_timeout_seconds else None, retry_policy=retry_policy, ) diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py index 93a2e522e1e7f..9e81aafe13883 100644 --- a/posthog/temporal/batch_exports/bigquery_batch_export.py +++ b/posthog/temporal/batch_exports/bigquery_batch_export.py @@ -432,6 +432,7 @@ async def run(self, inputs: BigQueryBatchExportInputs): await execute_batch_export_insert_activity( insert_into_bigquery_activity, insert_inputs, + interval=inputs.interval, non_retryable_error_types=[ # Raised on missing permissions. "Forbidden", diff --git a/posthog/temporal/batch_exports/http_batch_export.py b/posthog/temporal/batch_exports/http_batch_export.py index f86703f3cf792..623cc53bed622 100644 --- a/posthog/temporal/batch_exports/http_batch_export.py +++ b/posthog/temporal/batch_exports/http_batch_export.py @@ -373,6 +373,7 @@ async def run(self, inputs: HttpBatchExportInputs): await execute_batch_export_insert_activity( insert_into_http_activity, insert_inputs, + interval=inputs.interval, non_retryable_error_types=[ "NonRetryableResponseError", ], diff --git a/posthog/temporal/batch_exports/postgres_batch_export.py b/posthog/temporal/batch_exports/postgres_batch_export.py index 6281862a72f21..a4c1712a12e3c 100644 --- a/posthog/temporal/batch_exports/postgres_batch_export.py +++ b/posthog/temporal/batch_exports/postgres_batch_export.py @@ -98,7 +98,7 @@ async def copy_tsv_to_postgres( # TODO: Switch to binary encoding as CSV has a million edge cases. sql.SQL("COPY {table_name} ({fields}) FROM STDIN WITH (FORMAT CSV, DELIMITER '\t')").format( table_name=sql.Identifier(table_name), - fields=sql.SQL(",").join((sql.Identifier(column) for column in schema_columns)), + fields=sql.SQL(",").join(sql.Identifier(column) for column in schema_columns), ) ) as copy: while data := tsv_file.read(): @@ -439,6 +439,7 @@ async def run(self, inputs: PostgresBatchExportInputs): await execute_batch_export_insert_activity( insert_into_postgres_activity, insert_inputs, + interval=inputs.interval, non_retryable_error_types=[ # Raised on errors that are related to database operation. # For example: unexpected disconnect, database or other object not found. 
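
The batch export workflows now pass interval=inputs.interval instead of a fixed start_to_close_timeout_seconds, and execute_batch_export_insert_activity() derives the Temporal start-to-close timeout from that string. A standalone sketch of the mapping under the same assumptions as the code above ("hour", "day", or "every <value> <unit>", with a 10 minute floor); start_to_close_timeout_for is an invented name, not a function in the codebase:

import datetime as dt

def start_to_close_timeout_for(interval: str) -> dt.timedelta:
    # mirrors the branching added to execute_batch_export_insert_activity above
    if interval == "hour":
        return dt.timedelta(hours=1)
    if interval == "day":
        return dt.timedelta(days=1)
    if interval.startswith("every"):
        _, value, unit = interval.split(" ")  # e.g. "every 5 minutes"
        # keep a 10 minute minimum until shorter SLAs are proven out
        return max(dt.timedelta(minutes=10), dt.timedelta(**{unit: int(value)}))
    raise ValueError(f"Unsupported interval: '{interval}'")

assert start_to_close_timeout_for("hour") == dt.timedelta(hours=1)
assert start_to_close_timeout_for("every 5 minutes") == dt.timedelta(minutes=10)
assert start_to_close_timeout_for("every 30 minutes") == dt.timedelta(minutes=30)
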
diff --git a/posthog/temporal/batch_exports/redshift_batch_export.py b/posthog/temporal/batch_exports/redshift_batch_export.py index e98fa9106c15f..61e5ad7c02655 100644 --- a/posthog/temporal/batch_exports/redshift_batch_export.py +++ b/posthog/temporal/batch_exports/redshift_batch_export.py @@ -469,6 +469,7 @@ async def run(self, inputs: RedshiftBatchExportInputs): await execute_batch_export_insert_activity( insert_into_redshift_activity, insert_inputs, + interval=inputs.interval, non_retryable_error_types=[ # Raised on errors that are related to database operation. # For example: unexpected disconnect, database or other object not found. diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index febdac88b45cd..39a2755a72139 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -687,6 +687,7 @@ async def run(self, inputs: S3BatchExportInputs): await execute_batch_export_insert_activity( insert_into_s3_activity, insert_inputs, + interval=inputs.interval, non_retryable_error_types=[ # S3 parameter validation failed. "ParamValidationError", diff --git a/posthog/temporal/batch_exports/snowflake_batch_export.py b/posthog/temporal/batch_exports/snowflake_batch_export.py index 2d782c1f94d5c..373312303be2e 100644 --- a/posthog/temporal/batch_exports/snowflake_batch_export.py +++ b/posthog/temporal/batch_exports/snowflake_batch_export.py @@ -283,7 +283,7 @@ async def create_table_in_snowflake( table_name: fields: An iterable of (name, type) tuples representing the fields of the table. """ - field_ddl = ", ".join((f'"{field[0]}" {field[1]}' for field in fields)) + field_ddl = ", ".join(f'"{field[0]}" {field[1]}' for field in fields) await execute_async_query( connection, @@ -631,6 +631,7 @@ async def run(self, inputs: SnowflakeBatchExportInputs): await execute_batch_export_insert_activity( insert_into_snowflake_activity, insert_inputs, + interval=inputs.interval, non_retryable_error_types=[ # Raised when we cannot connect to Snowflake. 
"DatabaseError", diff --git a/posthog/temporal/batch_exports/utils.py b/posthog/temporal/batch_exports/utils.py index f165ae070a83f..c10ede32d778c 100644 --- a/posthog/temporal/batch_exports/utils.py +++ b/posthog/temporal/batch_exports/utils.py @@ -24,8 +24,7 @@ def peek_first_and_rewind( def rewind_gen() -> collections.abc.Generator[T, None, None]: """Yield the item we popped to rewind the generator.""" yield first - for i in gen: - yield i + yield from gen return (first, rewind_gen()) diff --git a/posthog/temporal/common/__init__.py b/posthog/temporal/common/__init__.py index e69de29bb2d1d..792d6005489eb 100644 --- a/posthog/temporal/common/__init__.py +++ b/posthog/temporal/common/__init__.py @@ -0,0 +1 @@ +# diff --git a/posthog/temporal/common/clickhouse.py b/posthog/temporal/common/clickhouse.py index d548d3871d805..2640bf95c1f97 100644 --- a/posthog/temporal/common/clickhouse.py +++ b/posthog/temporal/common/clickhouse.py @@ -24,7 +24,7 @@ def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: return b"NULL" case uuid.UUID(): - return f"{quote_char}{data}{quote_char}".encode("utf-8") + return f"{quote_char}{data}{quote_char}".encode() case int() | float(): return b"%d" % data @@ -35,8 +35,8 @@ def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: timezone_arg = f", '{data:%Z}'" if data.microsecond == 0: - return f"toDateTime('{data:%Y-%m-%d %H:%M:%S}'{timezone_arg})".encode("utf-8") - return f"toDateTime64('{data:%Y-%m-%d %H:%M:%S.%f}', 6{timezone_arg})".encode("utf-8") + return f"toDateTime('{data:%Y-%m-%d %H:%M:%S}'{timezone_arg})".encode() + return f"toDateTime64('{data:%Y-%m-%d %H:%M:%S.%f}', 6{timezone_arg})".encode() case list(): encoded_data = [encode_clickhouse_data(value) for value in data] @@ -62,7 +62,7 @@ def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: value = str(value) encoded_data.append( - f'"{str(key)}"'.encode("utf-8") + b":" + encode_clickhouse_data(value, quote_char=quote_char) + f'"{str(key)}"'.encode() + b":" + encode_clickhouse_data(value, quote_char=quote_char) ) result = b"{" + b",".join(encoded_data) + b"}" @@ -71,7 +71,7 @@ def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: case _: str_data = str(data) str_data = str_data.replace("\\", "\\\\").replace("'", "\\'") - return f"{quote_char}{str_data}{quote_char}".encode("utf-8") + return f"{quote_char}{str_data}{quote_char}".encode() class ClickHouseError(Exception): @@ -355,8 +355,7 @@ def stream_query_as_arrow( """ with self.post_query(query, *data, query_parameters=query_parameters, query_id=query_id) as response: with pa.ipc.open_stream(pa.PythonFile(response.raw)) as reader: - for batch in reader: - yield batch + yield from reader async def __aenter__(self): """Enter method part of the AsyncContextManager protocol.""" diff --git a/posthog/temporal/common/codec.py b/posthog/temporal/common/codec.py index faf91c31173cb..42e775a24ba3b 100644 --- a/posthog/temporal/common/codec.py +++ b/posthog/temporal/common/codec.py @@ -1,5 +1,5 @@ import base64 -from typing import Iterable +from collections.abc import Iterable from cryptography.fernet import Fernet from temporalio.api.common.v1 import Payload diff --git a/posthog/temporal/common/sentry.py b/posthog/temporal/common/sentry.py index 290cc0182d2d8..81af9367914cb 100644 --- a/posthog/temporal/common/sentry.py +++ b/posthog/temporal/common/sentry.py @@ -1,5 +1,5 @@ from dataclasses import is_dataclass -from typing import Any, Optional, Type, Union +from typing import Any, Optional, 
Union from temporalio import activity, workflow from temporalio.worker import ( @@ -83,5 +83,5 @@ def intercept_activity(self, next: ActivityInboundInterceptor) -> ActivityInboun def workflow_interceptor_class( self, input: WorkflowInterceptorClassInput - ) -> Optional[Type[WorkflowInboundInterceptor]]: + ) -> Optional[type[WorkflowInboundInterceptor]]: return _SentryWorkflowInterceptor diff --git a/posthog/temporal/common/utils.py b/posthog/temporal/common/utils.py index 022c8270d7748..e8e03332c1a98 100644 --- a/posthog/temporal/common/utils.py +++ b/posthog/temporal/common/utils.py @@ -103,7 +103,7 @@ def from_activity(cls, activity): async def should_resume_from_activity_heartbeat( - activity, heartbeat_type: typing.Type[HeartbeatType], logger + activity, heartbeat_type: type[HeartbeatType], logger ) -> tuple[bool, HeartbeatType | None]: """Check if a batch export should resume from an activity's heartbeat details. diff --git a/posthog/temporal/data_imports/__init__.py b/posthog/temporal/data_imports/__init__.py index 3259e91f002cf..2b162efa4c538 100644 --- a/posthog/temporal/data_imports/__init__.py +++ b/posthog/temporal/data_imports/__init__.py @@ -4,7 +4,6 @@ create_source_templates, import_data_activity, update_external_data_job_model, - validate_schema_activity, check_schedule_activity, ) @@ -14,7 +13,6 @@ create_external_data_job_model_activity, update_external_data_job_model, import_data_activity, - validate_schema_activity, create_source_templates, check_schedule_activity, ] diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index 62a47092d81bd..9529113928238 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -4,7 +4,6 @@ import uuid from asgiref.sync import sync_to_async -from dlt.common.schema.typing import TSchemaTables from temporalio import activity, exceptions, workflow from temporalio.common import RetryPolicy @@ -24,7 +23,6 @@ ) from posthog.warehouse.data_load.source_templates import create_warehouse_templates_for_source -from posthog.warehouse.data_load.validate_schema import validate_schema_and_update_table from posthog.warehouse.external_data_source.jobs import ( update_external_job_status, ) @@ -34,7 +32,6 @@ ExternalDataSource, ) from posthog.temporal.common.logger import bind_temporal_worker_logger -from typing import Dict @dataclasses.dataclass @@ -61,31 +58,6 @@ async def update_external_data_job_model(inputs: UpdateExternalDataJobStatusInpu ) -@dataclasses.dataclass -class ValidateSchemaInputs: - run_id: str - team_id: int - schema_id: uuid.UUID - table_schema: TSchemaTables - table_row_counts: Dict[str, int] - - -@activity.defn -async def validate_schema_activity(inputs: ValidateSchemaInputs) -> None: - await validate_schema_and_update_table( - run_id=inputs.run_id, - team_id=inputs.team_id, - schema_id=inputs.schema_id, - table_schema=inputs.table_schema, - table_row_counts=inputs.table_row_counts, - ) - - logger = await bind_temporal_worker_logger(team_id=inputs.team_id) - logger.info( - f"Validated schema for external data job {inputs.run_id}", - ) - - @dataclasses.dataclass class CreateSourceTemplateInputs: team_id: int @@ -157,7 +129,7 @@ async def run(self, inputs: ExternalDataWorkflowInputs): team_id=inputs.team_id, schema_id=inputs.external_data_schema_id, source_id=inputs.external_data_source_id ) - run_id = await workflow.execute_activity( + run_id, incremental = await workflow.execute_activity( 
create_external_data_job_model_activity, create_external_data_job_inputs, start_to_close_timeout=dt.timedelta(minutes=1), @@ -181,29 +153,18 @@ async def run(self, inputs: ExternalDataWorkflowInputs): source_id=inputs.external_data_source_id, ) + timeout_params = ( + {"start_to_close_timeout": dt.timedelta(weeks=1), "retry_policy": RetryPolicy(maximum_attempts=1)} + if incremental + else {"start_to_close_timeout": dt.timedelta(hours=5), "retry_policy": RetryPolicy(maximum_attempts=3)} + ) + table_schemas, table_row_counts = await workflow.execute_activity( import_data_activity, job_inputs, - start_to_close_timeout=dt.timedelta(hours=30), - retry_policy=RetryPolicy(maximum_attempts=5), heartbeat_timeout=dt.timedelta(minutes=1), - ) - - # check schema first - validate_inputs = ValidateSchemaInputs( - run_id=run_id, - team_id=inputs.team_id, - schema_id=inputs.external_data_schema_id, - table_schema=table_schemas, - table_row_counts=table_row_counts, - ) - - await workflow.execute_activity( - validate_schema_activity, - validate_inputs, - start_to_close_timeout=dt.timedelta(minutes=10), - retry_policy=RetryPolicy(maximum_attempts=2), - ) + **timeout_params, + ) # type: ignore # Create source templates await workflow.execute_activity( diff --git a/posthog/temporal/data_imports/pipelines/hubspot/__init__.py b/posthog/temporal/data_imports/pipelines/hubspot/__init__.py index 3ffa3c8ffa161..49d84aa41f2d9 100644 --- a/posthog/temporal/data_imports/pipelines/hubspot/__init__.py +++ b/posthog/temporal/data_imports/pipelines/hubspot/__init__.py @@ -23,7 +23,8 @@ >>> resources = hubspot(api_key="hubspot_access_code") """ -from typing import Literal, Sequence, Iterator, Iterable +from typing import Literal +from collections.abc import Sequence, Iterator, Iterable import dlt from dlt.common.typing import TDataItems @@ -114,13 +115,11 @@ def crm_objects( if len(props) > 10000: raise ValueError( - ( - "Your request to Hubspot is too long to process. " - "Maximum allowed query length is 10000 symbols, while " - f"your list of properties `{props[:200]}`... is {len(props)} " - "symbols long. Use the `props` argument of the resource to " - "set the list of properties to extract from the endpoint." - ) + "Your request to Hubspot is too long to process. " + "Maximum allowed query length is 10000 symbols, while " + f"your list of properties `{props[:200]}`... is {len(props)} " + "symbols long. Use the `props` argument of the resource to " + "set the list of properties to extract from the endpoint." 
) params = {"properties": props, "limit": 100} diff --git a/posthog/temporal/data_imports/pipelines/hubspot/auth.py b/posthog/temporal/data_imports/pipelines/hubspot/auth.py index 490552cfe237d..b88aa731499bf 100644 --- a/posthog/temporal/data_imports/pipelines/hubspot/auth.py +++ b/posthog/temporal/data_imports/pipelines/hubspot/auth.py @@ -1,6 +1,5 @@ import requests from django.conf import settings -from typing import Tuple def refresh_access_token(refresh_token: str) -> str: @@ -21,7 +20,7 @@ def refresh_access_token(refresh_token: str) -> str: return res.json()["access_token"] -def get_access_token_from_code(code: str, redirect_uri: str) -> Tuple[str, str]: +def get_access_token_from_code(code: str, redirect_uri: str) -> tuple[str, str]: res = requests.post( "https://api.hubapi.com/oauth/v1/token", data={ diff --git a/posthog/temporal/data_imports/pipelines/hubspot/helpers.py b/posthog/temporal/data_imports/pipelines/hubspot/helpers.py index 0ef03b6db23d6..d47616f251abb 100644 --- a/posthog/temporal/data_imports/pipelines/hubspot/helpers.py +++ b/posthog/temporal/data_imports/pipelines/hubspot/helpers.py @@ -1,7 +1,8 @@ """Hubspot source helpers""" import urllib.parse -from typing import Iterator, Dict, Any, List, Optional +from typing import Any, Optional +from collections.abc import Iterator from dlt.sources.helpers import requests import requests as http_requests @@ -16,7 +17,7 @@ def get_url(endpoint: str) -> str: return urllib.parse.urljoin(BASE_URL, endpoint) -def _get_headers(api_key: str) -> Dict[str, str]: +def _get_headers(api_key: str) -> dict[str, str]: """ Return a dictionary of HTTP headers to use for API requests, including the specified API key. @@ -32,7 +33,7 @@ def _get_headers(api_key: str) -> Dict[str, str]: return {"authorization": f"Bearer {api_key}"} -def extract_property_history(objects: List[Dict[str, Any]]) -> Iterator[Dict[str, Any]]: +def extract_property_history(objects: list[dict[str, Any]]) -> Iterator[dict[str, Any]]: for item in objects: history = item.get("propertiesWithHistory") if not history: @@ -49,8 +50,8 @@ def fetch_property_history( endpoint: str, api_key: str, props: str, - params: Optional[Dict[str, Any]] = None, -) -> Iterator[List[Dict[str, Any]]]: + params: Optional[dict[str, Any]] = None, +) -> Iterator[list[dict[str, Any]]]: """Fetch property history from the given CRM endpoint. Args: @@ -91,8 +92,8 @@ def fetch_property_history( def fetch_data( - endpoint: str, api_key: str, refresh_token: str, params: Optional[Dict[str, Any]] = None -) -> Iterator[List[Dict[str, Any]]]: + endpoint: str, api_key: str, refresh_token: str, params: Optional[dict[str, Any]] = None +) -> Iterator[list[dict[str, Any]]]: """ Fetch data from HUBSPOT endpoint using a specified API key and yield the properties of each result. For paginated endpoint this function yields item from all pages. @@ -141,7 +142,7 @@ def fetch_data( # Yield the properties of each result in the API response while _data is not None: if "results" in _data: - _objects: List[Dict[str, Any]] = [] + _objects: list[dict[str, Any]] = [] for _result in _data["results"]: _obj = _result.get("properties", _result) if "id" not in _obj and "id" in _result: @@ -176,7 +177,7 @@ def fetch_data( _data = None -def _get_property_names(api_key: str, refresh_token: str, object_type: str) -> List[str]: +def _get_property_names(api_key: str, refresh_token: str, object_type: str) -> list[str]: """ Retrieve property names for a given entity from the HubSpot API. 
diff --git a/posthog/temporal/data_imports/pipelines/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline.py index d91ce311808a5..25842753dfda7 100644 --- a/posthog/temporal/data_imports/pipelines/pipeline.py +++ b/posthog/temporal/data_imports/pipelines/pipeline.py @@ -1,16 +1,20 @@ from dataclasses import dataclass -from typing import Dict, Literal +from typing import Literal from uuid import UUID import dlt from django.conf import settings from dlt.pipeline.exceptions import PipelineStepFailed +from asgiref.sync import async_to_sync import asyncio import os from posthog.settings.base_variables import TEST from structlog.typing import FilteringBoundLogger from dlt.sources import DltSource +from collections import Counter + +from posthog.warehouse.data_load.validate_schema import validate_schema_and_update_table @dataclass @@ -26,16 +30,42 @@ class PipelineInputs: class DataImportPipeline: loader_file_format: Literal["parquet"] = "parquet" - def __init__(self, inputs: PipelineInputs, source: DltSource, logger: FilteringBoundLogger): + def __init__( + self, inputs: PipelineInputs, source: DltSource, logger: FilteringBoundLogger, incremental: bool = False + ): self.inputs = inputs self.logger = logger - self.source = source + if incremental: + # Incremental syncs: Assuming each page is 100 items for now so bound each run at 50_000 items + self.source = source.add_limit(500) + else: + self.source = source + + self._incremental = incremental + + @property + def _get_pipeline_name_base(self): + return f"{self.inputs.job_type}_pipeline_{self.inputs.team_id}_run" def _get_pipeline_name(self): - return f"{self.inputs.job_type}_pipeline_{self.inputs.team_id}_run_{self.inputs.run_id}" + base = self._get_pipeline_name_base + + if self._incremental: + return f"{base}_{self.inputs.source_id}" + + return f"{base}_{self.inputs.run_id}" + + @property + def _get_pipelines_dir_base(self): + return f"{os.getcwd()}/.dlt/{self.inputs.team_id}" def _get_pipelines_dir(self): - return f"{os.getcwd()}/.dlt/{self.inputs.team_id}/{self.inputs.run_id}/{self.inputs.job_type}" + base = self._get_pipelines_dir_base + + if self._incremental: + return f"{base}/{self.inputs.source_id}/{self.inputs.job_type}" + + return f"{base}/{self.inputs.run_id}/{self.inputs.job_type}" def _get_destination(self): if TEST: @@ -68,17 +98,49 @@ def _create_pipeline(self): dataset_name=self.inputs.dataset_name, ) - def _run(self) -> Dict[str, int]: + def _run(self) -> dict[str, int]: pipeline = self._create_pipeline() - pipeline.run(self.source, loader_file_format=self.loader_file_format) - row_counts = pipeline.last_trace.last_normalize_info.row_counts - # Remove any DLT tables from the counts - filtered_rows = filter(lambda pair: not pair[0].startswith("_dlt"), row_counts.items()) + total_counts: Counter[str] = Counter({}) - return dict(filtered_rows) + if self._incremental: + # will get overwritten + counts: Counter[str] = Counter({"start": 1}) - async def run(self) -> Dict[str, int]: + while counts: + pipeline.run(self.source, loader_file_format=self.loader_file_format) + + row_counts = pipeline.last_trace.last_normalize_info.row_counts + # Remove any DLT tables from the counts + filtered_rows = dict(filter(lambda pair: not pair[0].startswith("_dlt"), row_counts.items())) + counts = Counter(filtered_rows) + total_counts = counts + total_counts + + async_to_sync(validate_schema_and_update_table)( + run_id=self.inputs.run_id, + team_id=self.inputs.team_id, + schema_id=self.inputs.schema_id, + 
table_schema=self.source.schema.tables, + table_row_counts=filtered_rows, + ) + else: + pipeline.run(self.source, loader_file_format=self.loader_file_format) + row_counts = pipeline.last_trace.last_normalize_info.row_counts + filtered_rows = dict(filter(lambda pair: not pair[0].startswith("_dlt"), row_counts.items())) + counts = Counter(filtered_rows) + total_counts = total_counts + counts + + async_to_sync(validate_schema_and_update_table)( + run_id=self.inputs.run_id, + team_id=self.inputs.team_id, + schema_id=self.inputs.schema_id, + table_schema=self.source.schema.tables, + table_row_counts=filtered_rows, + ) + + return dict(total_counts) + + async def run(self) -> dict[str, int]: try: return await asyncio.to_thread(self._run) except PipelineStepFailed: diff --git a/posthog/temporal/data_imports/pipelines/postgres/__init__.py b/posthog/temporal/data_imports/pipelines/postgres/__init__.py index 438b25fbe9dac..07a368ed572e2 100644 --- a/posthog/temporal/data_imports/pipelines/postgres/__init__.py +++ b/posthog/temporal/data_imports/pipelines/postgres/__init__.py @@ -1,7 +1,8 @@ """Source that loads tables form any SQLAlchemy supported database, supports batching requests and incremental loads.""" -from typing import List, Optional, Union, Iterable, Any -from sqlalchemy import MetaData, Table, text +from typing import Optional, Union, List # noqa: UP035 +from collections.abc import Iterable +from sqlalchemy import MetaData, Table from sqlalchemy.engine import Engine import dlt @@ -35,7 +36,7 @@ def sql_database( credentials: Union[ConnectionStringCredentials, Engine, str] = dlt.secrets.value, schema: Optional[str] = dlt.config.value, metadata: Optional[MetaData] = None, - table_names: Optional[List[str]] = dlt.config.value, + table_names: Optional[List[str]] = dlt.config.value, # noqa: UP006 ) -> Iterable[DltResource]: """ A DLT source which loads data from an SQL database using SQLAlchemy. 
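
The DataImportPipeline changes above (pipelines/pipeline.py) make incremental sources loop: each pass is capped via source.add_limit(500), the pipeline name and pipelines dir are keyed on source_id instead of run_id so dlt state survives between runs, and validate_schema_and_update_table() is called after every pass now that the separate validate_schema_activity is gone. A rough sketch of that control flow with the dlt machinery stubbed out; run_incremental, run_once and validate are invented stand-ins for pipeline.run() plus the validation call:

from collections import Counter

def run_incremental(run_once, validate) -> dict[str, int]:
    # keep running bounded passes until a pass loads no new rows
    total: Counter[str] = Counter()
    counts = Counter({"start": 1})  # primed so the loop runs at least once
    while counts:
        counts = Counter(run_once())   # {table: rows loaded in this pass}
        total += counts
        validate(dict(counts))         # publish tables after every pass
    return dict(total)

# e.g. two passes with data, then an empty pass ends the loop
passes = iter([{"invoice": 500}, {"invoice": 120}, {}])
print(run_incremental(lambda: next(passes), lambda rows: None))  # {'invoice': 620}
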
diff --git a/posthog/temporal/data_imports/pipelines/postgres/helpers.py b/posthog/temporal/data_imports/pipelines/postgres/helpers.py index a288205063f15..5805e7899189a 100644 --- a/posthog/temporal/data_imports/pipelines/postgres/helpers.py +++ b/posthog/temporal/data_imports/pipelines/postgres/helpers.py @@ -2,11 +2,10 @@ from typing import ( Any, - List, Optional, - Iterator, Union, ) +from collections.abc import Iterator import operator import dlt @@ -63,7 +62,7 @@ def make_query(self) -> Select[Any]: return query return query.where(filter_op(self.cursor_column, self.last_value)) # type: ignore - def load_rows(self) -> Iterator[List[TDataItem]]: + def load_rows(self) -> Iterator[list[TDataItem]]: query = self.make_query() with self.engine.connect() as conn: result = conn.execution_options(yield_per=self.chunk_size).execute(query) @@ -104,7 +103,7 @@ def engine_from_credentials(credentials: Union[ConnectionStringCredentials, Engi return create_engine(credentials) -def get_primary_key(table: Table) -> List[str]: +def get_primary_key(table: Table) -> list[str]: return [c.name for c in table.primary_key] diff --git a/posthog/temporal/data_imports/pipelines/schemas.py b/posthog/temporal/data_imports/pipelines/schemas.py index 1caea1364899a..8ee837695dca8 100644 --- a/posthog/temporal/data_imports/pipelines/schemas.py +++ b/posthog/temporal/data_imports/pipelines/schemas.py @@ -1,6 +1,9 @@ from posthog.temporal.data_imports.pipelines.zendesk.settings import BASE_ENDPOINTS, SUPPORT_ENDPOINTS from posthog.warehouse.models import ExternalDataSource -from posthog.temporal.data_imports.pipelines.stripe.settings import ENDPOINTS as STRIPE_ENDPOINTS +from posthog.temporal.data_imports.pipelines.stripe.settings import ( + ENDPOINTS as STRIPE_ENDPOINTS, + INCREMENTAL_ENDPOINTS as STRIPE_INCREMENTAL_ENDPOINTS, +) from posthog.temporal.data_imports.pipelines.hubspot.settings import ENDPOINTS as HUBSPOT_ENDPOINTS PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING = { @@ -11,3 +14,10 @@ ), ExternalDataSource.Type.POSTGRES: (), } + +PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING = { + ExternalDataSource.Type.STRIPE: STRIPE_INCREMENTAL_ENDPOINTS, + ExternalDataSource.Type.HUBSPOT: (), + ExternalDataSource.Type.ZENDESK: (), + ExternalDataSource.Type.POSTGRES: (), +} diff --git a/posthog/temporal/data_imports/pipelines/stripe/helpers.py b/posthog/temporal/data_imports/pipelines/stripe/helpers.py index 0646b28238a9e..75bb0c7ed7044 100644 --- a/posthog/temporal/data_imports/pipelines/stripe/helpers.py +++ b/posthog/temporal/data_imports/pipelines/stripe/helpers.py @@ -1,6 +1,7 @@ """Stripe analytics source helpers""" -from typing import Any, Dict, Optional, Union, Iterable, Tuple +from typing import Any, Optional, Union +from collections.abc import Iterable import stripe import dlt @@ -10,6 +11,7 @@ from asgiref.sync import sync_to_async from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.data_imports.pipelines.helpers import check_limit +from posthog.temporal.data_imports.pipelines.stripe.settings import INCREMENTAL_ENDPOINTS from posthog.warehouse.models import ExternalDataJob stripe.api_version = "2022-11-15" @@ -31,7 +33,7 @@ async def stripe_get_data( start_date: Optional[Any] = None, end_date: Optional[Any] = None, **kwargs: Any, -) -> Dict[Any, Any]: +) -> dict[Any, Any]: if start_date: start_date = transform_date(start_date) if end_date: @@ -60,7 +62,8 @@ async def stripe_pagination( endpoint: str, team_id: int, job_id: str, - starting_after: Optional[str] = None, + schema_id: 
str, + starting_after: Optional[Any] = None, start_date: Optional[Any] = None, end_date: Optional[Any] = None, ): @@ -79,9 +82,21 @@ async def stripe_pagination( logger = await bind_temporal_worker_logger(team_id) logger.info(f"Stripe: getting {endpoint}") + if endpoint in INCREMENTAL_ENDPOINTS: + _cursor_state = dlt.current.resource_state(f"team_{team_id}_{schema_id}_{endpoint}").setdefault( + "cursors", {"ending_before": None, "starting_after": None} + ) + _starting_after = _cursor_state.get("starting_after", None) + _ending_before = _cursor_state.get("ending_before", None) if _starting_after is None else None + else: + _starting_after = starting_after + _ending_before = None + while True: - if starting_after is not None: - logger.info(f"Stripe: getting {endpoint} after {starting_after}") + if _ending_before is not None: + logger.info(f"Stripe: getting {endpoint} before {_ending_before}") + elif _starting_after is not None: + logger.info(f"Stripe: getting {endpoint} after {_starting_after}") count = 0 @@ -89,13 +104,35 @@ async def stripe_pagination( api_key, account_id, endpoint, - starting_after=starting_after, + ending_before=_ending_before, + starting_after=_starting_after, start_date=start_date, end_date=end_date, ) if len(response["data"]) > 0: - starting_after = response["data"][-1]["id"] + latest_value_in_response = response["data"][0]["id"] + earliest_value_in_response = response["data"][-1]["id"] + + if endpoint in INCREMENTAL_ENDPOINTS: + # First pass, store the latest value + if _starting_after is None and _ending_before is None: + _cursor_state["ending_before"] = latest_value_in_response + + # currently scrolling from past to present + if _ending_before is not None: + _cursor_state["ending_before"] = latest_value_in_response + _ending_before = latest_value_in_response + # otherwise scrolling from present to past + else: + _starting_after = earliest_value_in_response + _cursor_state["starting_after"] = earliest_value_in_response + else: + _starting_after = earliest_value_in_response + else: + if endpoint in INCREMENTAL_ENDPOINTS: + _cursor_state["starting_after"] = None + yield response["data"] count, status = await check_limit( @@ -112,9 +149,10 @@ async def stripe_pagination( def stripe_source( api_key: str, account_id: str, - endpoints: Tuple[str, ...], + endpoints: tuple[str, ...], team_id, job_id, + schema_id, starting_after: Optional[str] = None, start_date: Optional[Any] = None, end_date: Optional[Any] = None, @@ -124,12 +162,16 @@ def stripe_source( stripe_pagination, name=endpoint, write_disposition="append", + columns={ + "description": {"data_type": "text", "nullable": True}, + }, )( api_key=api_key, account_id=account_id, endpoint=endpoint, team_id=team_id, job_id=job_id, + schema_id=schema_id, starting_after=starting_after, start_date=start_date, end_date=end_date, diff --git a/posthog/temporal/data_imports/pipelines/stripe/settings.py b/posthog/temporal/data_imports/pipelines/stripe/settings.py index 70de092e09412..a1cd9ffefe8b9 100644 --- a/posthog/temporal/data_imports/pipelines/stripe/settings.py +++ b/posthog/temporal/data_imports/pipelines/stripe/settings.py @@ -4,3 +4,5 @@ # Full list of the Stripe API endpoints you can find here: https://stripe.com/docs/api. # These endpoints are converted into ExternalDataSchema objects when a source is linked. 
ENDPOINTS = ("BalanceTransaction", "Subscription", "Customer", "Product", "Price", "Invoice", "Charge") + +INCREMENTAL_ENDPOINTS = "Invoice" diff --git a/posthog/temporal/data_imports/pipelines/test/test_pipeline.py b/posthog/temporal/data_imports/pipelines/test/test_pipeline.py new file mode 100644 index 0000000000000..e83ae21d78d55 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/test/test_pipeline.py @@ -0,0 +1,106 @@ +from typing import Any +from unittest.mock import MagicMock, PropertyMock, patch +import uuid + +import pytest +import structlog +from asgiref.sync import sync_to_async +from posthog.temporal.data_imports.pipelines.pipeline import DataImportPipeline, PipelineInputs +from posthog.temporal.data_imports.pipelines.stripe.helpers import stripe_source +from posthog.test.base import APIBaseTest +from posthog.warehouse.models.external_data_job import ExternalDataJob +from posthog.warehouse.models.external_data_schema import ExternalDataSchema +from posthog.warehouse.models.external_data_source import ExternalDataSource + + +class TestDataImportPipeline(APIBaseTest): + async def _create_pipeline(self, schema_name: str, incremental: bool): + source = await sync_to_async(ExternalDataSource.objects.create)( + source_id=str(uuid.uuid4()), + connection_id=str(uuid.uuid4()), + destination_id=str(uuid.uuid4()), + team=self.team, + status="running", + source_type="Stripe", + ) + schema = await sync_to_async(ExternalDataSchema.objects.create)( + name=schema_name, + team_id=self.team.pk, + source_id=source.pk, + source=source, + ) + job = await sync_to_async(ExternalDataJob.objects.create)( + team_id=self.team.pk, + pipeline_id=source.pk, + pipeline=source, + schema_id=schema.pk, + schema=schema, + status=ExternalDataJob.Status.RUNNING, + rows_synced=0, + workflow_id=str(uuid.uuid4()), + ) + + pipeline = DataImportPipeline( + inputs=PipelineInputs( + source_id=source.pk, + run_id=job.pk, + schema_id=schema.pk, + dataset_name=job.folder_path, + job_type="Stripe", + team_id=self.team.pk, + ), + source=stripe_source( + api_key="", + account_id="", + endpoints=tuple(schema_name), + team_id=self.team.pk, + job_id=job.pk, + schema_id=schema.pk, + start_date=None, + end_date=None, + ), + logger=structlog.get_logger(), + incremental=incremental, + ) + + return pipeline + + @pytest.mark.django_db(transaction=True) + @pytest.mark.asyncio + async def test_pipeline_non_incremental(self): + def mock_create_pipeline(local_self: Any): + mock = MagicMock() + mock.last_trace.last_normalize_info.row_counts = {"customer": 1} + return mock + + with ( + patch.object(DataImportPipeline, "_create_pipeline", mock_create_pipeline), + patch( + "posthog.temporal.data_imports.pipelines.pipeline.validate_schema_and_update_table" + ) as mock_validate_schema_and_update_table, + ): + pipeline = await self._create_pipeline("Customer", False) + res = await pipeline.run() + + assert res.get("customer") == 1 + assert mock_validate_schema_and_update_table.call_count == 1 + + @pytest.mark.django_db(transaction=True) + @pytest.mark.asyncio + async def test_pipeline_incremental(self): + def mock_create_pipeline(local_self: Any): + mock = MagicMock() + type(mock.last_trace.last_normalize_info).row_counts = PropertyMock(side_effect=[{"customer": 1}, {}]) + return mock + + with ( + patch.object(DataImportPipeline, "_create_pipeline", mock_create_pipeline), + patch( + "posthog.temporal.data_imports.pipelines.pipeline.validate_schema_and_update_table" + ) as mock_validate_schema_and_update_table, + ): + pipeline = await 
self._create_pipeline("Customer", True) + res = await pipeline.run() + + assert res.get("customer") == 1 + assert mock_validate_schema_and_update_table.call_count == 2 diff --git a/posthog/temporal/data_imports/pipelines/zendesk/api_helpers.py b/posthog/temporal/data_imports/pipelines/zendesk/api_helpers.py index a1747d96c78aa..c478060940d4f 100644 --- a/posthog/temporal/data_imports/pipelines/zendesk/api_helpers.py +++ b/posthog/temporal/data_imports/pipelines/zendesk/api_helpers.py @@ -1,4 +1,4 @@ -from typing import Optional, TypedDict, Dict +from typing import Optional, TypedDict from dlt.common import pendulum from dlt.common.time import ensure_pendulum_datetime @@ -18,7 +18,7 @@ def _parse_date_or_none(value: Optional[str]) -> Optional[pendulum.DateTime]: def process_ticket( ticket: DictStrAny, - custom_fields: Dict[str, TCustomFieldInfo], + custom_fields: dict[str, TCustomFieldInfo], pivot_custom_fields: bool = True, ) -> DictStrAny: """ @@ -78,7 +78,7 @@ def process_ticket( return ticket -def process_ticket_field(field: DictStrAny, custom_fields_state: Dict[str, TCustomFieldInfo]) -> TDataItem: +def process_ticket_field(field: DictStrAny, custom_fields_state: dict[str, TCustomFieldInfo]) -> TDataItem: """Update custom field mapping in dlt state for the given field.""" # grab id and update state dict # if the id is new, add a new key to indicate that this is the initial value for title diff --git a/posthog/temporal/data_imports/pipelines/zendesk/credentials.py b/posthog/temporal/data_imports/pipelines/zendesk/credentials.py index 88a0659b7ce1a..d056528059530 100644 --- a/posthog/temporal/data_imports/pipelines/zendesk/credentials.py +++ b/posthog/temporal/data_imports/pipelines/zendesk/credentials.py @@ -2,7 +2,8 @@ This module handles how credentials are read in dlt sources """ -from typing import ClassVar, List, Union +from typing import ClassVar, Union +import dlt from dlt.common.configuration import configspec from dlt.common.configuration.specs import CredentialsConfiguration from dlt.common.typing import TSecretValue @@ -14,8 +15,8 @@ class ZendeskCredentialsBase(CredentialsConfiguration): The Base version of all the ZendeskCredential classes. 
""" - subdomain: str = "" - __config_gen_annotations__: ClassVar[List[str]] = [] + subdomain: str + __config_gen_annotations__: ClassVar[list[str]] = [] @configspec @@ -25,7 +26,7 @@ class ZendeskCredentialsEmailPass(ZendeskCredentialsBase): """ email: str = "" - password: TSecretValue + password: TSecretValue = dlt.secrets.value @configspec @@ -34,7 +35,7 @@ class ZendeskCredentialsOAuth(ZendeskCredentialsBase): This class is used to store credentials for OAuth Token Authentication """ - oauth_token: TSecretValue + oauth_token: TSecretValue = dlt.secrets.value @configspec @@ -44,7 +45,7 @@ class ZendeskCredentialsToken(ZendeskCredentialsBase): """ email: str = "" - token: TSecretValue + token: TSecretValue = dlt.secrets.value TZendeskCredentials = Union[ZendeskCredentialsEmailPass, ZendeskCredentialsToken, ZendeskCredentialsOAuth] diff --git a/posthog/temporal/data_imports/pipelines/zendesk/helpers.py b/posthog/temporal/data_imports/pipelines/zendesk/helpers.py index 8c0e0427c3fbb..c29f41279a06b 100644 --- a/posthog/temporal/data_imports/pipelines/zendesk/helpers.py +++ b/posthog/temporal/data_imports/pipelines/zendesk/helpers.py @@ -1,4 +1,5 @@ -from typing import Iterator, Optional, Iterable, Tuple +from typing import Optional +from collections.abc import Iterator, Iterable from itertools import chain import dlt @@ -211,7 +212,7 @@ def chats_table_resource( def zendesk_support( team_id: int, credentials: TZendeskCredentials = dlt.secrets.value, - endpoints: Tuple[str, ...] = (), + endpoints: tuple[str, ...] = (), pivot_ticket_fields: bool = True, start_date: Optional[TAnyDateTime] = DEFAULT_START_DATE, end_date: Optional[TAnyDateTime] = None, diff --git a/posthog/temporal/data_imports/pipelines/zendesk/talk_api.py b/posthog/temporal/data_imports/pipelines/zendesk/talk_api.py index 5db9a28eafc74..4ebf375bf7050 100644 --- a/posthog/temporal/data_imports/pipelines/zendesk/talk_api.py +++ b/posthog/temporal/data_imports/pipelines/zendesk/talk_api.py @@ -1,5 +1,6 @@ from enum import Enum -from typing import Dict, Iterator, Optional, Tuple, Any +from typing import Optional, Any +from collections.abc import Iterator from dlt.common.typing import DictStrStr, TDataItems, TSecretValue from dlt.sources.helpers.requests import client @@ -27,7 +28,7 @@ class ZendeskAPIClient: subdomain: str = "" url: str = "" headers: Optional[DictStrStr] - auth: Optional[Tuple[str, TSecretValue]] + auth: Optional[tuple[str, TSecretValue]] def __init__(self, credentials: TZendeskCredentials, url_prefix: Optional[str] = None) -> None: """ @@ -64,7 +65,7 @@ def get_pages( endpoint: str, data_point_name: str, pagination: PaginationType, - params: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, ) -> Iterator[TDataItems]: """ Makes a request to a paginated endpoint and returns a generator of data items per page. 
diff --git a/posthog/temporal/data_imports/workflow_activities/create_job_model.py b/posthog/temporal/data_imports/workflow_activities/create_job_model.py index d03e4173e2de5..e6407e9f78598 100644 --- a/posthog/temporal/data_imports/workflow_activities/create_job_model.py +++ b/posthog/temporal/data_imports/workflow_activities/create_job_model.py @@ -10,10 +10,7 @@ from posthog.warehouse.external_data_source.jobs import ( create_external_data_job, ) -from posthog.warehouse.models import ( - sync_old_schemas_with_new_schemas, - ExternalDataSource, -) +from posthog.warehouse.models import sync_old_schemas_with_new_schemas, ExternalDataSource, aget_schema_by_id from posthog.warehouse.models.external_data_schema import ExternalDataSchema, get_postgres_schemas from posthog.temporal.common.logger import bind_temporal_worker_logger @@ -26,7 +23,7 @@ class CreateExternalDataJobModelActivityInputs: @activity.defn -async def create_external_data_job_model_activity(inputs: CreateExternalDataJobModelActivityInputs) -> str: +async def create_external_data_job_model_activity(inputs: CreateExternalDataJobModelActivityInputs) -> tuple[str, bool]: run = await sync_to_async(create_external_data_job)( team_id=inputs.team_id, external_data_source_id=inputs.source_id, @@ -62,7 +59,11 @@ async def create_external_data_job_model_activity(inputs: CreateExternalDataJobM logger = await bind_temporal_worker_logger(team_id=inputs.team_id) logger.info( - f"Created external data job with for external data source {inputs.source_id}", + f"Created external data job for external data source {inputs.source_id}", ) - return str(run.id) + schema_model = await aget_schema_by_id(inputs.schema_id, inputs.team_id) + if schema_model is None: + raise ValueError(f"Schema with ID {inputs.schema_id} not found") + + return str(run.id), schema_model.is_incremental diff --git a/posthog/temporal/data_imports/workflow_activities/import_data.py b/posthog/temporal/data_imports/workflow_activities/import_data.py index bd2a9d4cf85ba..5da7e1fc1b109 100644 --- a/posthog/temporal/data_imports/workflow_activities/import_data.py +++ b/posthog/temporal/data_imports/workflow_activities/import_data.py @@ -17,7 +17,6 @@ get_external_data_job, ) from posthog.temporal.common.logger import bind_temporal_worker_logger -from typing import Dict, Tuple import asyncio from django.conf import settings from django.utils import timezone @@ -34,7 +33,7 @@ class ImportDataActivityInputs: @activity.defn -async def import_data_activity(inputs: ImportDataActivityInputs) -> Tuple[TSchemaTables, Dict[str, int]]: # noqa: F821 +async def import_data_activity(inputs: ImportDataActivityInputs) -> tuple[TSchemaTables, dict[str, int]]: # noqa: F821 model: ExternalDataJob = await get_external_data_job( job_id=inputs.run_id, ) @@ -68,9 +67,8 @@ async def import_data_activity(inputs: ImportDataActivityInputs) -> Tuple[TSchem # Hacky just for specific user region = get_instance_region() if region == "EU" and inputs.team_id == 11870: - prev_day = timezone.now() - dt.timedelta(days=1) - start_date = prev_day.replace(hour=0, minute=0, second=0, microsecond=0) - end_date = start_date + dt.timedelta(1) + start_date = timezone.now().replace(day=1, hour=0, minute=0, second=0, microsecond=0) + end_date = start_date + dt.timedelta(weeks=5) else: start_date = None end_date = None @@ -81,6 +79,7 @@ async def import_data_activity(inputs: ImportDataActivityInputs) -> Tuple[TSchem endpoints=tuple(endpoints), team_id=inputs.team_id, job_id=inputs.run_id, + schema_id=str(inputs.schema_id), 
start_date=start_date, end_date=end_date, ) @@ -125,10 +124,11 @@ async def import_data_activity(inputs: ImportDataActivityInputs) -> Tuple[TSchem from posthog.temporal.data_imports.pipelines.zendesk.helpers import zendesk_support # NOTE: this line errors on CI mypy but not locally. Putting arguments within the function causes the opposite error - credentials = ZendeskCredentialsToken() - credentials.token = model.pipeline.job_inputs.get("zendesk_api_key") - credentials.subdomain = model.pipeline.job_inputs.get("zendesk_subdomain") - credentials.email = model.pipeline.job_inputs.get("zendesk_email_address") + credentials = ZendeskCredentialsToken( + token=model.pipeline.job_inputs.get("zendesk_api_key"), + subdomain=model.pipeline.job_inputs.get("zendesk_subdomain"), + email=model.pipeline.job_inputs.get("zendesk_email_address"), + ) data_support = zendesk_support(credentials=credentials, endpoints=tuple(endpoints), team_id=inputs.team_id) # Uncomment to support zendesk chat and talk @@ -148,7 +148,7 @@ async def heartbeat() -> None: heartbeat_task = asyncio.create_task(heartbeat()) try: - table_row_counts = await DataImportPipeline(job_inputs, source, logger).run() + table_row_counts = await DataImportPipeline(job_inputs, source, logger, schema.is_incremental).run() total_rows_synced = sum(table_row_counts.values()) await aupdate_job_count(inputs.run_id, inputs.team_id, total_rows_synced) diff --git a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py index b0163b8fee798..7b7e2b566743f 100644 --- a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py @@ -190,8 +190,9 @@ async def test_insert_into_http_activity_inserts_data_into_http_endpoint( ) mock_server = MockServer() - with aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, override_settings( - BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2 + with ( + aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, + override_settings(BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2), ): m.post(TEST_URL, status=200, callback=mock_server.post, repeat=True) await activity_environment.run(insert_into_http_activity, insert_inputs) @@ -239,22 +240,25 @@ async def test_insert_into_http_activity_throws_on_bad_http_status( **http_config, ) - with aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, override_settings( - BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2 + with ( + aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, + override_settings(BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2), ): m.post(TEST_URL, status=400, repeat=True) with pytest.raises(NonRetryableResponseError): await activity_environment.run(insert_into_http_activity, insert_inputs) - with aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, override_settings( - BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2 + with ( + aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, + override_settings(BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2), ): m.post(TEST_URL, status=429, repeat=True) with pytest.raises(RetryableResponseError): await activity_environment.run(insert_into_http_activity, insert_inputs) - with aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, override_settings( - BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2 + with ( + 
aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, + override_settings(BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2), ): m.post(TEST_URL, status=500, repeat=True) with pytest.raises(RetryableResponseError): @@ -352,8 +356,9 @@ async def test_http_export_workflow( ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, override_settings( - BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2 + with ( + aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, + override_settings(BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2), ): m.post(TEST_URL, status=200, callback=mock_server.post, repeat=True) @@ -589,8 +594,9 @@ def assert_heartbeat_details(*raw_details): ) mock_server = MockServer() - with aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, override_settings( - BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2 + with ( + aioresponses(passthrough=[settings.CLICKHOUSE_HTTP_URL]) as m, + override_settings(BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2), ): m.post(TEST_URL, status=200, callback=mock_server.post, repeat=True) await activity_environment.run(insert_into_http_activity, insert_inputs) diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index 16605492ea442..f05c4db6e9fe4 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -292,7 +292,7 @@ async def assert_clickhouse_records_in_s3( assert len(s3_data) == len(expected_records) assert s3_data[0] == expected_records[0] - assert s3_data == expected_records + assert s3_data == expected_records, f"Not all s3 records match expected records. Not printing due to large size." TEST_S3_SCHEMAS: list[BatchExportSchema | None] = [ diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py index 459dff8dc3c00..6652ac224b22a 100644 --- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py @@ -175,7 +175,7 @@ def query_request_handler(request: PreparedRequest): # contents as a string in `staged_files`. 
if match := re.match(r"^PUT file://(?P.*) @%(?P.*)$", sql_text): file_path = match.group("file_path") - with open(file_path, "r") as f: + with open(file_path) as f: staged_files.append(f.read()) if fail == "put": @@ -414,9 +414,12 @@ async def test_snowflake_export_workflow_exports_events( ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with unittest.mock.patch( - "posthog.temporal.batch_exports.snowflake_batch_export.snowflake.connector.connect", - ) as mock, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + with ( + unittest.mock.patch( + "posthog.temporal.batch_exports.snowflake_batch_export.snowflake.connector.connect", + ) as mock, + override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1), + ): fake_conn = FakeSnowflakeConnection() mock.return_value = fake_conn @@ -482,10 +485,13 @@ async def test_snowflake_export_workflow_without_events(ateam, snowflake_batch_e ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with responses.RequestsMock( - target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send", - assert_all_requests_are_fired=False, - ) as rsps, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2): + with ( + responses.RequestsMock( + target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send", + assert_all_requests_are_fired=False, + ) as rsps, + override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2), + ): queries, staged_files = add_mock_snowflake_api(rsps) await activity_environment.client.execute_workflow( SnowflakeBatchExportWorkflow.run, diff --git a/posthog/temporal/tests/external_data/test_external_data_job.py b/posthog/temporal/tests/external_data/test_external_data_job.py index 44470724a9c5f..a8b499a500500 100644 --- a/posthog/temporal/tests/external_data/test_external_data_job.py +++ b/posthog/temporal/tests/external_data/test_external_data_job.py @@ -7,11 +7,9 @@ from posthog.temporal.data_imports.external_data_job import ( UpdateExternalDataJobStatusInputs, - ValidateSchemaInputs, check_schedule_activity, create_source_templates, update_external_data_job_model, - validate_schema_activity, ) from posthog.temporal.data_imports.external_data_job import ( ExternalDataJobWorkflow, @@ -25,11 +23,9 @@ from posthog.warehouse.external_data_source.jobs import create_external_data_job from posthog.warehouse.models import ( get_latest_run_if_exists, - DataWarehouseTable, ExternalDataJob, ExternalDataSource, ExternalDataSchema, - DataWarehouseCredential, ) from posthog.temporal.data_imports.pipelines.schemas import ( @@ -154,7 +150,7 @@ async def test_create_external_job_activity(activity_environment, team, **kwargs team_id=team.id, source_id=new_source.pk, schema_id=test_1_schema.id ) - run_id = await activity_environment.run(create_external_data_job_model_activity, inputs) + run_id, _ = await activity_environment.run(create_external_data_job_model_activity, inputs) runs = ExternalDataJob.objects.filter(id=run_id) assert await sync_to_async(runs.exists)() @@ -180,7 +176,7 @@ async def test_create_external_job_activity_schemas_exist(activity_environment, inputs = CreateExternalDataJobModelActivityInputs(team_id=team.id, source_id=new_source.pk, schema_id=schema.id) - run_id = await activity_environment.run(create_external_data_job_model_activity, inputs) + run_id, _ = await activity_environment.run(create_external_data_job_model_activity, inputs) runs = ExternalDataJob.objects.filter(id=run_id) assert await sync_to_async(runs.exists)() @@ -207,7 +203,7 @@ async def 
test_create_external_job_activity_update_schemas(activity_environment, inputs = CreateExternalDataJobModelActivityInputs(team_id=team.id, source_id=new_source.pk, schema_id=schema.id) - run_id = await activity_environment.run(create_external_data_job_model_activity, inputs) + run_id, _ = await activity_environment.run(create_external_data_job_model_activity, inputs) runs = ExternalDataJob.objects.filter(id=run_id) assert await sync_to_async(runs.exists)() @@ -316,13 +312,13 @@ async def setup_job_2(): new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)() - invoice_schema = await _create_schema("Invoice", new_source, team) + charge_schema = await _create_schema("Charge", new_source, team) inputs = ImportDataActivityInputs( team_id=team.id, run_id=new_job.pk, source_id=new_source.pk, - schema_id=invoice_schema.id, + schema_id=charge_schema.id, ) return new_job, inputs @@ -330,12 +326,14 @@ async def setup_job_2(): job_1, job_1_inputs = await setup_job_1() job_2, job_2_inputs = await setup_job_2() - with mock.patch("stripe.Customer.list") as mock_customer_list, mock.patch( - "stripe.Invoice.list" - ) as mock_invoice_list, override_settings( - BUCKET_URL=f"s3://{BUCKET_NAME}", - AIRBYTE_BUCKET_KEY=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - AIRBYTE_BUCKET_SECRET=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, + with ( + mock.patch("stripe.Customer.list") as mock_customer_list, + mock.patch("stripe.Charge.list") as mock_charge_list, + override_settings( + BUCKET_URL=f"s3://{BUCKET_NAME}", + AIRBYTE_BUCKET_KEY=settings.OBJECT_STORAGE_ACCESS_KEY_ID, + AIRBYTE_BUCKET_SECRET=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, + ), ): mock_customer_list.return_value = { "data": [ @@ -347,10 +345,10 @@ async def setup_job_2(): "has_more": False, } - mock_invoice_list.return_value = { + mock_charge_list.return_value = { "data": [ { - "id": "inv_123", + "id": "chg_123", "customer": "cus_1", } ], @@ -366,10 +364,10 @@ async def setup_job_2(): ) assert len(job_1_customer_objects["Contents"]) == 1 - job_2_invoice_objects = await minio_client.list_objects_v2( - Bucket=BUCKET_NAME, Prefix=f"{job_2.folder_path}/invoice/" + job_2_charge_objects = await minio_client.list_objects_v2( + Bucket=BUCKET_NAME, Prefix=f"{job_2.folder_path}/charge/" ) - assert len(job_2_invoice_objects["Contents"]) == 1 + assert len(job_2_charge_objects["Contents"]) == 1 @pytest.mark.django_db(transaction=True) @@ -410,10 +408,13 @@ async def setup_job_1(): job_1, job_1_inputs = await setup_job_1() - with mock.patch("stripe.Customer.list") as mock_customer_list, override_settings( - BUCKET_URL=f"s3://{BUCKET_NAME}", - AIRBYTE_BUCKET_KEY=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - AIRBYTE_BUCKET_SECRET=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, + with ( + mock.patch("stripe.Customer.list") as mock_customer_list, + override_settings( + BUCKET_URL=f"s3://{BUCKET_NAME}", + AIRBYTE_BUCKET_KEY=settings.OBJECT_STORAGE_ACCESS_KEY_ID, + AIRBYTE_BUCKET_SECRET=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, + ), ): mock_customer_list.return_value = { "data": [ @@ -475,12 +476,14 @@ async def setup_job_1(): job_1, job_1_inputs = await setup_job_1() - with mock.patch("stripe.Customer.list") as mock_customer_list, mock.patch( - "posthog.temporal.data_imports.pipelines.helpers.CHUNK_SIZE", 0 - ), override_settings( - BUCKET_URL=f"s3://{BUCKET_NAME}", - AIRBYTE_BUCKET_KEY=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - AIRBYTE_BUCKET_SECRET=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, + with ( + 
mock.patch("stripe.Customer.list") as mock_customer_list, + mock.patch("posthog.temporal.data_imports.pipelines.helpers.CHUNK_SIZE", 0), + override_settings( + BUCKET_URL=f"s3://{BUCKET_NAME}", + AIRBYTE_BUCKET_KEY=settings.OBJECT_STORAGE_ACCESS_KEY_ID, + AIRBYTE_BUCKET_SECRET=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, + ), ): mock_customer_list.return_value = { "data": [ @@ -505,232 +508,6 @@ async def setup_job_1(): assert job_1.rows_synced == 1 -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_validate_schema_and_update_table_activity(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - test_1_schema = await _create_schema("test-1", new_source, team) - - with mock.patch( - "posthog.warehouse.models.table.DataWarehouseTable.get_columns" - ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): - mock_get_columns.return_value = {"id": "string"} - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=test_1_schema.id, - table_schema={ - "test-1": {"name": "test-1", "resource": "test-1", "columns": {"id": {"data_type": "text"}}}, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 2 - assert ( - await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 1 - ) - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_validate_schema_and_update_table_activity_with_existing(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - prefix="stripe_", - ) - - old_job: ExternalDataJob = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.COMPLETED, - rows_synced=0, - ) - - old_credential = await sync_to_async(DataWarehouseCredential.objects.create)( - team=team, - access_key=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - access_secret=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - ) - - url_pattern = await sync_to_async(old_job.url_pattern_by_schema)("test-1") - - existing_table = await sync_to_async(DataWarehouseTable.objects.create)( - credential=old_credential, - name="stripe_test-1", - format="Parquet", - url_pattern=url_pattern, - team_id=team.pk, - external_data_source_id=new_source.pk, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - test_1_schema = await _create_schema("test-1", new_source, team, table_id=existing_table.id) - - with mock.patch( - "posthog.warehouse.models.table.DataWarehouseTable.get_columns" - ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): - mock_get_columns.return_value = {"id": "string"} - await activity_environment.run( - validate_schema_activity, - 
ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=test_1_schema.id, - table_schema={ - "test-1": {"name": "test-1", "resource": "test-1", "columns": {"id": {"data_type": "text"}}}, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 2 - assert ( - await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 1 - ) - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_validate_schema_and_update_table_activity_half_run(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - with mock.patch("posthog.warehouse.models.table.DataWarehouseTable.get_columns") as mock_get_columns, mock.patch( - "posthog.warehouse.data_load.validate_schema.validate_schema", - ) as mock_validate, override_settings(**AWS_BUCKET_MOCK_SETTINGS): - mock_get_columns.return_value = {"id": "string"} - credential = await sync_to_async(DataWarehouseCredential.objects.create)( - team=team, - access_key=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - access_secret=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - ) - - mock_validate.side_effect = [ - Exception, - { - "credential": credential, - "format": "Parquet", - "name": "test_schema", - "url_pattern": "test_url_pattern", - "team_id": team.pk, - }, - ] - - broken_schema = await _create_schema("broken_schema", new_source, team) - - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=broken_schema.id, - table_schema={ - "broken_schema": { - "name": "broken_schema", - "resource": "broken_schema", - "columns": {"id": {"data_type": "text"}}, - }, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 0 - assert ( - await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 0 - ) - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_create_schema_activity(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - test_1_schema = await _create_schema("test-1", new_source, team) - - with mock.patch( - "posthog.warehouse.models.table.DataWarehouseTable.get_columns" - ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): - mock_get_columns.return_value = {"id": "string"} - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=test_1_schema.id, - table_schema={ - "test-1": {"name": "test-1", "resource": "test-1", "columns": {"id": {"data_type": "text"}}}, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 2 - all_tables = 
DataWarehouseTable.objects.all() - table_length = await sync_to_async(len)(all_tables) - assert table_length == 1 - - @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_external_data_job_workflow_with_schema(team, **kwargs): @@ -763,9 +540,10 @@ async def test_external_data_job_workflow_with_schema(team, **kwargs): async def mock_async_func(inputs): return {} - with mock.patch( - "posthog.warehouse.models.table.DataWarehouseTable.get_columns", return_value={"id": "string"} - ), mock.patch.object(DataImportPipeline, "run", mock_async_func): + with ( + mock.patch("posthog.warehouse.models.table.DataWarehouseTable.get_columns", return_value={"id": "string"}), + mock.patch.object(DataImportPipeline, "run", mock_async_func), + ): with override_settings(AIRBYTE_BUCKET_KEY="test-key", AIRBYTE_BUCKET_SECRET="test-secret"): async with await WorkflowEnvironment.start_time_skipping() as activity_environment: async with Worker( @@ -777,7 +555,6 @@ async def mock_async_func(inputs): create_external_data_job_model_activity, update_external_data_job_model, import_data_activity, - validate_schema_activity, create_source_templates, ], workflow_runner=UnsandboxedWorkflowRunner(), @@ -795,8 +572,6 @@ async def mock_async_func(inputs): assert run is not None assert run.status == ExternalDataJob.Status.COMPLETED - assert await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 1 - @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio @@ -910,13 +685,17 @@ async def test_check_schedule_activity_with_missing_schema_id_but_with_schedule( should_sync=True, ) - with mock.patch( - "posthog.temporal.data_imports.external_data_job.a_external_data_workflow_exists", return_value=True - ), mock.patch( - "posthog.temporal.data_imports.external_data_job.a_delete_external_data_schedule", return_value=True - ), mock.patch( - "posthog.temporal.data_imports.external_data_job.a_trigger_external_data_workflow" - ) as mock_a_trigger_external_data_workflow: + with ( + mock.patch( + "posthog.temporal.data_imports.external_data_job.a_external_data_workflow_exists", return_value=True + ), + mock.patch( + "posthog.temporal.data_imports.external_data_job.a_delete_external_data_schedule", return_value=True + ), + mock.patch( + "posthog.temporal.data_imports.external_data_job.a_trigger_external_data_workflow" + ) as mock_a_trigger_external_data_workflow, + ): should_exit = await activity_environment.run( check_schedule_activity, ExternalDataWorkflowInputs( @@ -950,13 +729,17 @@ async def test_check_schedule_activity_with_missing_schema_id_and_no_schedule(ac should_sync=True, ) - with mock.patch( - "posthog.temporal.data_imports.external_data_job.a_external_data_workflow_exists", return_value=False - ), mock.patch( - "posthog.temporal.data_imports.external_data_job.a_delete_external_data_schedule", return_value=True - ), mock.patch( - "posthog.temporal.data_imports.external_data_job.a_sync_external_data_job_workflow" - ) as mock_a_sync_external_data_job_workflow: + with ( + mock.patch( + "posthog.temporal.data_imports.external_data_job.a_external_data_workflow_exists", return_value=False + ), + mock.patch( + "posthog.temporal.data_imports.external_data_job.a_delete_external_data_schedule", return_value=True + ), + mock.patch( + "posthog.temporal.data_imports.external_data_job.a_sync_external_data_job_workflow" + ) as mock_a_sync_external_data_job_workflow, + ): should_exit = await activity_environment.run( check_schedule_activity, ExternalDataWorkflowInputs( 
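# Illustration only, not part of this change: the test edits above and below rewrite
# chained context managers into the parenthesized form (official grammar since Python
# 3.10) so that each manager sits on its own line. A minimal before/after using
# stand-in managers:
from contextlib import nullcontext

# before
with nullcontext() as a, nullcontext() as b:
    pass

# after
with (
    nullcontext() as a,
    nullcontext() as b,
):
    pass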
diff --git a/posthog/temporal/tests/utils/events.py b/posthog/temporal/tests/utils/events.py index 71ce7f7f61615..ce48257381801 100644 --- a/posthog/temporal/tests/utils/events.py +++ b/posthog/temporal/tests/utils/events.py @@ -44,6 +44,7 @@ def generate_test_events( site_url: str | None = "", set_field: dict | None = None, set_once: dict | None = None, + start: int = 0, ): """Generate a list of events for testing.""" _timestamp = random.choice(possible_datetimes) @@ -77,7 +78,7 @@ def generate_test_events( "set": set_field, "set_once": set_once, } - for i in range(count) + for i in range(start, count + start) ] return events @@ -138,6 +139,7 @@ async def generate_test_events_in_clickhouse( person_properties: dict | None = None, inserted_at: str | dt.datetime | None = "_timestamp", duplicate: bool = False, + batch_size: int = 10000, ) -> tuple[list[EventValues], list[EventValues], list[EventValues]]: """Insert test events into the sharded_events table. @@ -165,20 +167,27 @@ async def generate_test_events_in_clickhouse( possible_datetimes = list(date_range(start_time, end_time, dt.timedelta(minutes=1))) # Base events - events = generate_test_events( - count=count, - team_id=team_id, - possible_datetimes=possible_datetimes, - event_name=event_name, - properties=properties, - person_properties=person_properties, - inserted_at=inserted_at, - ) + events: list[EventValues] = [] + while len(events) < count: + events_to_insert = generate_test_events( + count=min(count - len(events), batch_size), + team_id=team_id, + possible_datetimes=possible_datetimes, + event_name=event_name, + properties=properties, + person_properties=person_properties, + inserted_at=inserted_at, + start=len(events), + ) + + # Add duplicates if required + duplicate_events = [] + if duplicate is True: + duplicate_events = events_to_insert - # Add duplicates if required - duplicate_events = [] - if duplicate is True: - duplicate_events = events + await insert_event_values_in_clickhouse(client=client, events=events_to_insert + duplicate_events) + + events.extend(events_to_insert) # Events outside original date range delta = end_time - start_time @@ -207,7 +216,5 @@ async def generate_test_events_in_clickhouse( inserted_at=inserted_at, ) - await insert_event_values_in_clickhouse( - client=client, events=events + events_outside_range + events_from_other_team + duplicate_events - ) + await insert_event_values_in_clickhouse(client=client, events=events_outside_range + events_from_other_team) return (events, events_outside_range, events_from_other_team) diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index df9d02c4049c2..e58f1d580d257 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -151,6 +151,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -317,6 +318,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -700,6 +702,7 @@ "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", "posthog_team"."surveys_opt_in", + 
"posthog_team"."heatmaps_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/test/base.py b/posthog/test/base.py index c96738aafa139..2ebfa6178e259 100644 --- a/posthog/test/base.py +++ b/posthog/test/base.py @@ -7,7 +7,8 @@ import uuid from contextlib import contextmanager from functools import wraps -from typing import Any, Dict, List, Optional, Tuple, Union, Generator +from typing import Any, Optional, Union +from collections.abc import Generator from unittest.mock import patch import freezegun @@ -86,8 +87,8 @@ freezegun.configure(extend_ignore_list=["posthog.test.assert_faster_than"]) # type: ignore -persons_cache_tests: List[Dict[str, Any]] = [] -events_cache_tests: List[Dict[str, Any]] = [] +persons_cache_tests: list[dict[str, Any]] = [] +events_cache_tests: list[dict[str, Any]] = [] persons_ordering_int: int = 1 @@ -124,7 +125,7 @@ class FuzzyInt(int): highest: int def __new__(cls, lowest, highest): - obj = super(FuzzyInt, cls).__new__(cls, highest) + obj = super().__new__(cls, highest) obj.lowest = lowest obj.highest = highest return obj @@ -144,7 +145,7 @@ class ErrorResponsesMixin: "attr": None, } - def not_found_response(self, message: str = "Not found.") -> Dict[str, Optional[str]]: + def not_found_response(self, message: str = "Not found.") -> dict[str, Optional[str]]: return { "type": "invalid_request", "code": "not_found", @@ -154,7 +155,7 @@ def not_found_response(self, message: str = "Not found.") -> Dict[str, Optional[ def permission_denied_response( self, message: str = "You do not have permission to perform this action." - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: return { "type": "authentication_error", "code": "permission_denied", @@ -162,7 +163,7 @@ def permission_denied_response( "attr": None, } - def method_not_allowed_response(self, method: str) -> Dict[str, Optional[str]]: + def method_not_allowed_response(self, method: str) -> dict[str, Optional[str]]: return { "type": "invalid_request", "code": "method_not_allowed", @@ -174,7 +175,7 @@ def unauthenticated_response( self, message: str = "Authentication credentials were not provided.", code: str = "not_authenticated", - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: return { "type": "authentication_error", "code": code, @@ -187,7 +188,7 @@ def validation_error_response( message: str = "Malformed request", code: str = "invalid_input", attr: Optional[str] = None, - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: return { "type": "validation_error", "code": code, @@ -820,7 +821,7 @@ def capture_select_queries(self): return self.capture_queries(("SELECT", "WITH", "select", "with")) @contextmanager - def capture_queries(self, query_prefixes: Union[str, Tuple[str, ...]]): + def capture_queries(self, query_prefixes: Union[str, tuple[str, ...]]): queries = [] original_get_client = ch_pool.get_client @@ -863,7 +864,7 @@ def raise_hook(args: threading.ExceptHookArgs): threading.excepthook = old_hook -def run_clickhouse_statement_in_parallel(statements: List[str]): +def run_clickhouse_statement_in_parallel(statements: list[str]): jobs = [] with failhard_threadhook_context(): for item in statements: @@ -1063,8 +1064,8 @@ def fn_with_poe_v2(self, *args, **kwargs): def _create_insight( - team: Team, insight_filters: Dict[str, Any], dashboard_filters: Dict[str, Any] -) -> Tuple[Insight, Dashboard, DashboardTile]: + team: Team, insight_filters: dict[str, Any], dashboard_filters: dict[str, Any] +) 
-> tuple[Insight, Dashboard, DashboardTile]: dashboard = Dashboard.objects.create(team=team, filters=dashboard_filters) insight = Insight.objects.create(team=team, filters=insight_filters) dashboard_tile = DashboardTile.objects.create(dashboard=dashboard, insight=insight) @@ -1088,7 +1089,7 @@ def create_person_id_override_by_distinct_id( """ ) - person_id_from, person_id_to = [row[1] for row in person_ids_result] + person_id_from, person_id_to = (row[1] for row in person_ids_result) sync_execute( f""" diff --git a/posthog/test/db_context_capturing.py b/posthog/test/db_context_capturing.py index 6060023545637..44c1b05d23cc0 100644 --- a/posthog/test/db_context_capturing.py +++ b/posthog/test/db_context_capturing.py @@ -1,5 +1,5 @@ from contextlib import contextmanager -from typing import Generator +from collections.abc import Generator from django.db import DEFAULT_DB_ALIAS, connections from django.test.utils import CaptureQueriesContext diff --git a/posthog/test/test_feature_flag.py b/posthog/test/test_feature_flag.py index 38afbe7dbbcd7..91db555b31b9e 100644 --- a/posthog/test/test_feature_flag.py +++ b/posthog/test/test_feature_flag.py @@ -2784,8 +2784,9 @@ def test_multiple_flags(self): key="variant", ) - with self.assertNumQueries(10), snapshot_postgres_queries_context( - self + with ( + self.assertNumQueries(10), + snapshot_postgres_queries_context(self), ): # 1 to fill group cache, 2 to match feature flags with group properties (of each type), 1 to match feature flags with person properties matches, reasons, payloads, _ = FeatureFlagMatcher( [ @@ -2859,8 +2860,9 @@ def test_multiple_flags(self): self.assertEqual(payloads, {"variant": {"color": "blue"}}) - with self.assertNumQueries(9), snapshot_postgres_queries_context( - self + with ( + self.assertNumQueries(9), + snapshot_postgres_queries_context(self), ): # 1 to fill group cache, 1 to match feature flags with group properties (only 1 group provided), 1 to match feature flags with person properties matches, reasons, payloads, _ = FeatureFlagMatcher( [ @@ -6016,8 +6018,9 @@ def __call__(self, execute, sql, *args, **kwargs): properties={"email": "tim@posthog.com", "team": "posthog"}, ) - with snapshot_postgres_queries_context(self, capture_all_queries=True), connection.execute_wrapper( - InsertFailOnce() + with ( + snapshot_postgres_queries_context(self, capture_all_queries=True), + connection.execute_wrapper(InsertFailOnce()), ): flags, reasons, payloads, errors = get_all_feature_flags( team.pk, "other_id", {}, hash_key_override="example_id" diff --git a/posthog/test/test_feature_flag_analytics.py b/posthog/test/test_feature_flag_analytics.py index f5a5f37e0ac0a..ed8228ff21170 100644 --- a/posthog/test/test_feature_flag_analytics.py +++ b/posthog/test/test_feature_flag_analytics.py @@ -77,8 +77,9 @@ def test_capture_team_decide_usage(self): team_uuid = "team-uuid" other_team_uuid = "other-team-uuid" - with freeze_time("2022-05-07 12:23:07") as frozen_datetime, self.settings( - DECIDE_BILLING_ANALYTICS_TOKEN="token" + with ( + freeze_time("2022-05-07 12:23:07") as frozen_datetime, + self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="token"), ): for _ in range(10): # 10 requests in first bucket @@ -299,8 +300,9 @@ def test_no_interference_between_different_types_of_new_incoming_increments(self other_team_id = 1243 team_uuid = "team-uuid" - with freeze_time("2022-05-07 12:23:07") as frozen_datetime, self.settings( - DECIDE_BILLING_ANALYTICS_TOKEN="token" + with ( + freeze_time("2022-05-07 12:23:07") as frozen_datetime, + 
self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="token"), ): for _ in range(10): # 10 requests in first bucket @@ -400,8 +402,9 @@ def test_locking_works_for_capture_team_decide_usage(self): team_uuid = "team-uuid" other_team_uuid = "other-team-uuid" - with freeze_time("2022-05-07 12:23:07") as frozen_datetime, self.settings( - DECIDE_BILLING_ANALYTICS_TOKEN="token" + with ( + freeze_time("2022-05-07 12:23:07") as frozen_datetime, + self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="token"), ): for _ in range(10): # 10 requests in first bucket @@ -489,8 +492,9 @@ def test_locking_in_redis_doesnt_block_new_incoming_increments(self): other_team_id = 1243 team_uuid = "team-uuid" - with freeze_time("2022-05-07 12:23:07") as frozen_datetime, self.settings( - DECIDE_BILLING_ANALYTICS_TOKEN="token" + with ( + freeze_time("2022-05-07 12:23:07") as frozen_datetime, + self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="token"), ): for _ in range(10): # 10 requests in first bucket diff --git a/posthog/test/test_health.py b/posthog/test/test_health.py index 89611fb11ee31..2ce4e464e8cf7 100644 --- a/posthog/test/test_health.py +++ b/posthog/test/test_health.py @@ -1,7 +1,7 @@ import logging from contextlib import contextmanager import random -from typing import List, Optional +from typing import Optional from unittest import mock from unittest.mock import patch @@ -70,7 +70,13 @@ def test_livez_returns_200_and_doesnt_require_any_dependencies(client: Client): just be an indicator that the python process hasn't hung. """ - with simulate_postgres_error(), simulate_kafka_cannot_connect(), simulate_clickhouse_cannot_connect(), simulate_celery_cannot_connect(), simulate_cache_cannot_connect(): + with ( + simulate_postgres_error(), + simulate_kafka_cannot_connect(), + simulate_clickhouse_cannot_connect(), + simulate_celery_cannot_connect(), + simulate_cache_cannot_connect(), + ): resp = get_livez(client) assert resp.status_code == 200, resp.content @@ -263,7 +269,7 @@ def test_readyz_complains_if_role_does_not_exist(client: Client): assert data["error"] == "InvalidRole" -def get_readyz(client: Client, exclude: Optional[List[str]] = None, role: Optional[str] = None) -> HttpResponse: +def get_readyz(client: Client, exclude: Optional[list[str]] = None, role: Optional[str] = None) -> HttpResponse: return client.get("/_readyz", data={"exclude": exclude or [], "role": role or ""}) diff --git a/posthog/test/test_journeys.py b/posthog/test/test_journeys.py index 0e535437076e9..69bb2050d8f3b 100644 --- a/posthog/test/test_journeys.py +++ b/posthog/test/test_journeys.py @@ -3,7 +3,7 @@ import json from datetime import datetime import os -from typing import Any, Dict, List +from typing import Any from uuid import UUID, uuid4 from django.utils import timezone @@ -15,10 +15,10 @@ def journeys_for( - events_by_person: Dict[str, List[Dict[str, Any]]], + events_by_person: dict[str, list[dict[str, Any]]], team: Team, create_people: bool = True, -) -> Dict[str, Person]: +) -> dict[str, Person]: """ Helper for creating specific events for a team. 
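# Illustration only: the typing edits in this file and across the diff apply PEP 585
# (Python 3.9+) style, replacing the deprecated typing.List/Dict/Tuple aliases with
# builtin generics and importing ABCs from collections.abc instead of typing. A
# made-up function in the new style:
from collections.abc import Iterator
from typing import Any, Optional

def load(ids: list[str], opts: Optional[dict[str, Any]] = None) -> Iterator[tuple[int, str]]:
    prefix = (opts or {}).get("prefix", "")
    for index, value in enumerate(ids):
        yield index, f"{prefix}{value}"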
@@ -115,11 +115,11 @@ def journeys_for( return people -def _create_all_events_raw(all_events: List[Dict]): +def _create_all_events_raw(all_events: list[dict]): parsed = "" for event in all_events: timestamp = timezone.now() - data: Dict[str, Any] = { + data: dict[str, Any] = { "properties": {}, "timestamp": timestamp.strftime("%Y-%m-%d %H:%M:%S.%f"), "person_id": str(uuid4()), @@ -162,7 +162,7 @@ def _create_all_events_raw(all_events: List[Dict]): ) -def create_all_events(all_events: List[dict]): +def create_all_events(all_events: list[dict]): for event in all_events: _create_event(**event) @@ -175,15 +175,15 @@ class InMemoryEvent: distinct_id: str team: Team timestamp: str - properties: Dict + properties: dict person_id: str person_created_at: datetime - person_properties: Dict - group0_properties: Dict - group1_properties: Dict - group2_properties: Dict - group3_properties: Dict - group4_properties: Dict + person_properties: dict + group0_properties: dict + group1_properties: dict + group2_properties: dict + group3_properties: dict + group4_properties: dict group0_created_at: datetime group1_created_at: datetime group2_created_at: datetime @@ -191,7 +191,7 @@ class InMemoryEvent: group4_created_at: datetime -def update_or_create_person(distinct_ids: List[str], team_id: int, **kwargs): +def update_or_create_person(distinct_ids: list[str], team_id: int, **kwargs): (person, _) = Person.objects.update_or_create( persondistinctid__distinct_id__in=distinct_ids, persondistinctid__team_id=team_id, diff --git a/posthog/test/test_middleware.py b/posthog/test/test_middleware.py index e0f5283dd3cae..728ce4936a645 100644 --- a/posthog/test/test_middleware.py +++ b/posthog/test/test_middleware.py @@ -325,6 +325,15 @@ def test_project_redirects_to_current_team_when_accessing_inaccessible_project_b assert res.status_code == 302 assert res.headers["Location"] == f"/project/{self.team.pk}/home" + def test_project_redirects_including_query_params(self): + res = self.client.get(f"/project/phc_123?t=1") + assert res.status_code == 302 + assert res.headers["Location"] == f"/project/{self.team.pk}?t=1" + + res = self.client.get(f"/project/phc_123/home?t=1") + assert res.status_code == 302 + assert res.headers["Location"] == f"/project/{self.team.pk}/home?t=1" + @override_settings(CLOUD_DEPLOYMENT="US") # As PostHog Cloud class TestPostHogTokenCookieMiddleware(APIBaseTest): diff --git a/posthog/test/test_utils.py b/posthog/test/test_utils.py index 827c5dd1de851..dab6a4d1e0ea7 100644 --- a/posthog/test/test_utils.py +++ b/posthog/test/test_utils.py @@ -434,7 +434,7 @@ def test_should_not_refresh_with_refresh_gibberish(self): def test_refresh_requested_by_client_with_data_true(self): drf_request = Request(HttpRequest()) drf_request._full_data = {"refresh": True} # type: ignore - self.assertTrue(refresh_requested_by_client((drf_request))) + self.assertTrue(refresh_requested_by_client(drf_request)) def test_should_not_refresh_with_data_false(self): drf_request = Request(HttpRequest()) diff --git a/posthog/urls.py b/posthog/urls.py index b047f897307e4..3681f4a1ca4f2 100644 --- a/posthog/urls.py +++ b/posthog/urls.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, List, Optional, cast +from typing import Any, Optional, cast +from collections.abc import Callable from posthog.models.instance_setting import get_instance_setting from urllib.parse import urlparse @@ -60,7 +61,7 @@ logger = structlog.get_logger(__name__) -ee_urlpatterns: List[Any] = [] +ee_urlpatterns: list[Any] = [] try: from ee.urls import 
extend_api_router from ee.urls import urlpatterns as ee_urlpatterns diff --git a/posthog/user_permissions.py b/posthog/user_permissions.py index 30a6bfca298b1..7b4d9b07728ca 100644 --- a/posthog/user_permissions.py +++ b/posthog/user_permissions.py @@ -1,5 +1,5 @@ from functools import cached_property -from typing import Any, Dict, List, Optional, cast +from typing import Any, Optional, cast from uuid import UUID from posthog.constants import AvailableFeature @@ -32,10 +32,10 @@ def __init__(self, user: User, team: Optional[Team] = None): self.user = user self._current_team = team - self._tiles: Optional[List[DashboardTile]] = None - self._team_permissions: Dict[int, UserTeamPermissions] = {} - self._dashboard_permissions: Dict[int, UserDashboardPermissions] = {} - self._insight_permissions: Dict[int, UserInsightPermissions] = {} + self._tiles: Optional[list[DashboardTile]] = None + self._team_permissions: dict[int, UserTeamPermissions] = {} + self._dashboard_permissions: dict[int, UserDashboardPermissions] = {} + self._insight_permissions: dict[int, UserInsightPermissions] = {} @cached_property def current_team(self) -> "UserTeamPermissions": @@ -68,7 +68,7 @@ def insight(self, insight: Insight) -> "UserInsightPermissions": return self._insight_permissions[insight.pk] @cached_property - def team_ids_visible_for_user(self) -> List[int]: + def team_ids_visible_for_user(self) -> list[int]: candidate_teams = Team.objects.filter(organization_id__in=self.organizations.keys()).only( "pk", "organization_id", "access_control" ) @@ -86,16 +86,16 @@ def get_organization(self, organization_id: UUID) -> Optional[Organization]: return self.organizations.get(organization_id) @cached_property - def organizations(self) -> Dict[UUID, Organization]: + def organizations(self) -> dict[UUID, Organization]: return {member.organization_id: member.organization for member in self.organization_memberships.values()} @cached_property - def organization_memberships(self) -> Dict[UUID, OrganizationMembership]: + def organization_memberships(self) -> dict[UUID, OrganizationMembership]: memberships = OrganizationMembership.objects.filter(user=self.user).select_related("organization") return {membership.organization_id: membership for membership in memberships} @cached_property - def explicit_team_memberships(self) -> Dict[int, Any]: + def explicit_team_memberships(self) -> dict[int, Any]: try: from ee.models import ExplicitTeamMembership except ImportError: @@ -107,7 +107,7 @@ def explicit_team_memberships(self) -> Dict[int, Any]: return {membership.team_id: membership.level for membership in memberships} @cached_property - def dashboard_privileges(self) -> Dict[int, Dashboard.PrivilegeLevel]: + def dashboard_privileges(self) -> dict[int, Dashboard.PrivilegeLevel]: try: from ee.models import DashboardPrivilege @@ -116,14 +116,14 @@ def dashboard_privileges(self) -> Dict[int, Dashboard.PrivilegeLevel]: except ImportError: return {} - def set_preloaded_dashboard_tiles(self, tiles: List[DashboardTile]): + def set_preloaded_dashboard_tiles(self, tiles: list[DashboardTile]): """ Allows for speeding up insight-related permissions code """ self._tiles = tiles @cached_property - def preloaded_insight_dashboards(self) -> Optional[List[Dashboard]]: + def preloaded_insight_dashboards(self) -> Optional[list[Dashboard]]: if self._tiles is None: return None diff --git a/posthog/utils.py b/posthog/utils.py index 19e110507ab9b..cdc0a4ed48fd2 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -19,15 +19,11 @@ from typing import ( 
TYPE_CHECKING, Any, - Dict, - Generator, - List, - Mapping, Optional, - Tuple, Union, cast, ) +from collections.abc import Generator, Mapping from urllib.parse import urljoin, urlparse from zoneinfo import ZoneInfo @@ -46,6 +42,7 @@ from django.http import HttpRequest, HttpResponse from django.template.loader import get_template from django.utils import timezone +from django.utils.cache import patch_cache_control from rest_framework.request import Request from sentry_sdk import configure_scope from sentry_sdk.api import capture_exception @@ -124,7 +121,7 @@ def absolute_uri(url: Optional[str] = None) -> str: return urljoin(settings.SITE_URL.rstrip("/") + "/", url.lstrip("/")) -def get_previous_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.datetime, datetime.datetime]: +def get_previous_day(at: Optional[datetime.datetime] = None) -> tuple[datetime.datetime, datetime.datetime]: """ Returns a pair of datetimes, representing the start and end of the preceding day. `at` is the datetime to use as a reference point. @@ -148,7 +145,7 @@ def get_previous_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.d return (period_start, period_end) -def get_current_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.datetime, datetime.datetime]: +def get_current_day(at: Optional[datetime.datetime] = None) -> tuple[datetime.datetime, datetime.datetime]: """ Returns a pair of datetimes, representing the start and end of the current day. `at` is the datetime to use as a reference point. @@ -178,7 +175,7 @@ def relative_date_parse_with_delta_mapping( *, always_truncate: bool = False, now: Optional[datetime.datetime] = None, -) -> Tuple[datetime.datetime, Optional[Dict[str, int]], str | None]: +) -> tuple[datetime.datetime, Optional[dict[str, int]], str | None]: """Returns the parsed datetime, along with the period mapping - if the input was a relative datetime string.""" try: try: @@ -201,7 +198,7 @@ def relative_date_parse_with_delta_mapping( regex = r"\-?(?P[0-9]+)?(?P[a-z])(?PStart|End)?" 
match = re.search(regex, input) parsed_dt = (now or dt.datetime.now()).astimezone(timezone_info) - delta_mapping: Dict[str, int] = {} + delta_mapping: dict[str, int] = {} if not match: return parsed_dt, delta_mapping, None if match.group("type") == "h": @@ -275,7 +272,7 @@ def get_js_url(request: HttpRequest) -> str: def render_template( template_name: str, request: HttpRequest, - context: Optional[Dict] = None, + context: Optional[dict] = None, *, team_for_public_context: Optional["Team"] = None, ) -> HttpResponse: @@ -330,13 +327,13 @@ def render_template( except: year_in_hog_url = None - posthog_app_context: Dict[str, Any] = { + posthog_app_context: dict[str, Any] = { "persisted_feature_flags": settings.PERSISTED_FEATURE_FLAGS, "anonymous": not request.user or not request.user.is_authenticated, "year_in_hog_url": year_in_hog_url, } - posthog_bootstrap: Dict[str, Any] = {} + posthog_bootstrap: dict[str, Any] = {} posthog_distinct_id: Optional[str] = None # Set the frontend app context @@ -418,7 +415,10 @@ def render_template( context["posthog_js_uuid_version"] = settings.POSTHOG_JS_UUID_VERSION html = template.render(context, request=request) - return HttpResponse(html) + response = HttpResponse(html) + if not request.user.is_anonymous: + patch_cache_control(response, no_store=True) + return response def get_self_capture_api_token(request: Optional[HttpRequest]) -> Optional[str]: @@ -449,7 +449,7 @@ def get_default_event_name(team: "Team"): return "$pageview" -def get_frontend_apps(team_id: int) -> Dict[int, Dict[str, Any]]: +def get_frontend_apps(team_id: int) -> dict[int, dict[str, Any]]: from posthog.models import Plugin, PluginSourceFile plugin_configs = ( @@ -537,10 +537,10 @@ def convert_property_value(input: Union[str, bool, dict, list, int, Optional[str def get_compare_period_dates( date_from: datetime.datetime, date_to: datetime.datetime, - date_from_delta_mapping: Optional[Dict[str, int]], - date_to_delta_mapping: Optional[Dict[str, int]], + date_from_delta_mapping: Optional[dict[str, int]], + date_to_delta_mapping: Optional[dict[str, int]], interval: str, -) -> Tuple[datetime.datetime, datetime.datetime]: +) -> tuple[datetime.datetime, datetime.datetime]: diff = date_to - date_from new_date_from = date_from - diff if interval == "hour": @@ -779,7 +779,7 @@ def get_plugin_server_version() -> Optional[str]: return None -def get_plugin_server_job_queues() -> Optional[List[str]]: +def get_plugin_server_job_queues() -> Optional[list[str]]: cache_key_value = get_client().get("@posthog-plugin-server/enabled-job-queues") if cache_key_value: qs = cache_key_value.decode("utf-8").replace('"', "") @@ -857,13 +857,13 @@ def get_can_create_org(user: Union["AbstractBaseUser", "AnonymousUser"]) -> bool return False -def get_instance_available_sso_providers() -> Dict[str, bool]: +def get_instance_available_sso_providers() -> dict[str, bool]: """ Returns a dictionary containing final determination to which SSO providers are available. SAML is not included in this method as it can only be configured domain-based and not instance-based (see `OrganizationDomain` for details) Validates configuration settings and license validity (if applicable). 
""" - output: Dict[str, bool] = { + output: dict[str, bool] = { "github": bool(settings.SOCIAL_AUTH_GITHUB_KEY and settings.SOCIAL_AUTH_GITHUB_SECRET), "gitlab": bool(settings.SOCIAL_AUTH_GITLAB_KEY and settings.SOCIAL_AUTH_GITLAB_SECRET), "google-oauth2": False, @@ -893,7 +893,7 @@ def get_instance_available_sso_providers() -> Dict[str, bool]: return output -def flatten(i: Union[List, Tuple], max_depth=10) -> Generator: +def flatten(i: Union[list, tuple], max_depth=10) -> Generator: for el in i: if isinstance(el, list) and max_depth > 0: yield from flatten(el, max_depth=max_depth - 1) @@ -905,7 +905,7 @@ def get_daterange( start_date: Optional[datetime.datetime], end_date: Optional[datetime.datetime], frequency: str, -) -> List[Any]: +) -> list[Any]: """ Returns list of a fixed frequency Datetime objects between given bounds. @@ -977,7 +977,7 @@ class GenericEmails: """ def __init__(self): - with open(get_absolute_path("helpers/generic_emails.txt"), "r") as f: + with open(get_absolute_path("helpers/generic_emails.txt")) as f: self.emails = {x.rstrip(): True for x in f} def is_generic(self, email: str) -> bool: @@ -988,7 +988,7 @@ def is_generic(self, email: str) -> bool: @lru_cache(maxsize=1) -def get_available_timezones_with_offsets() -> Dict[str, float]: +def get_available_timezones_with_offsets() -> dict[str, float]: now = dt.datetime.now() result = {} for tz in pytz.common_timezones: @@ -1062,7 +1062,7 @@ def get_milliseconds_between_dates(d1: dt.datetime, d2: dt.datetime) -> int: return abs(int((d1 - d2).total_seconds() * 1000)) -def encode_get_request_params(data: Dict[str, Any]) -> Dict[str, str]: +def encode_get_request_params(data: dict[str, Any]) -> dict[str, str]: return { key: encode_value_as_param(value=value) for key, value in data.items() @@ -1079,7 +1079,7 @@ def default(self, o): def encode_value_as_param(value: Union[str, list, dict, datetime.datetime]) -> str: - if isinstance(value, (list, dict, tuple)): + if isinstance(value, list | dict | tuple): return json.dumps(value, cls=DataclassJSONEncoder) elif isinstance(value, Enum): return value.value @@ -1307,7 +1307,7 @@ def patch(wrapper): def label_for_team_id_to_track(team_id: int) -> str: - team_id_filter: List[str] = settings.DECIDE_TRACK_TEAM_IDS + team_id_filter: list[str] = settings.DECIDE_TRACK_TEAM_IDS team_id_as_string = str(team_id) diff --git a/posthog/version_requirement.py b/posthog/version_requirement.py index 0f60d553e762e..ad0979abc3b32 100644 --- a/posthog/version_requirement.py +++ b/posthog/version_requirement.py @@ -1,5 +1,3 @@ -from typing import Tuple - from semantic_version.base import SimpleSpec, Version from posthog import redis @@ -24,7 +22,7 @@ def __init__(self, service, supported_version): f"The provided supported_version for service {service} is invalid. 
See the Docs for SimpleSpec: https://pypi.org/project/semantic-version/" ) - def is_service_in_accepted_version(self) -> Tuple[bool, Version]: + def is_service_in_accepted_version(self) -> tuple[bool, Version]: service_version = self.get_service_version() return service_version in self.supported_version, service_version diff --git a/posthog/views.py b/posthog/views.py index b9cae80fde3d7..6797b3ab7f823 100644 --- a/posthog/views.py +++ b/posthog/views.py @@ -1,6 +1,6 @@ import os from functools import wraps -from typing import Dict, Union +from typing import Union import sentry_sdk from django.conf import settings @@ -70,7 +70,7 @@ def health(request): def stats(request): - stats_response: Dict[str, Union[int, str]] = {} + stats_response: dict[str, Union[int, str]] = {} stats_response["worker_heartbeat"] = get_celery_heartbeat() return JsonResponse(stats_response) diff --git a/posthog/warehouse/api/external_data_schema.py b/posthog/warehouse/api/external_data_schema.py index 41cd9bff2dbdc..c02f6c146f7c9 100644 --- a/posthog/warehouse/api/external_data_schema.py +++ b/posthog/warehouse/api/external_data_schema.py @@ -1,26 +1,42 @@ from rest_framework import serializers -from posthog.warehouse.models import ExternalDataSchema -from typing import Optional, Dict, Any +import structlog +import temporalio +from posthog.warehouse.models import ExternalDataSchema, ExternalDataJob +from typing import Optional, Any from posthog.api.routing import TeamAndOrgViewSetMixin -from rest_framework import viewsets, filters +from rest_framework import viewsets, filters, status +from rest_framework.decorators import action from rest_framework.exceptions import NotAuthenticated +from rest_framework.request import Request +from rest_framework.response import Response from posthog.models import User from posthog.hogql.database.database import create_hogql_database + from posthog.warehouse.data_load.service import ( external_data_workflow_exists, + is_any_external_data_job_paused, sync_external_data_job_workflow, pause_external_data_schedule, + trigger_external_data_workflow, unpause_external_data_schedule, + cancel_external_data_workflow, + delete_data_import_folder, ) +logger = structlog.get_logger(__name__) + class ExternalDataSchemaSerializer(serializers.ModelSerializer): table = serializers.SerializerMethodField(read_only=True) + incremental = serializers.SerializerMethodField(read_only=True) class Meta: model = ExternalDataSchema - fields = ["id", "name", "table", "should_sync", "last_synced_at", "latest_error"] + fields = ["id", "name", "table", "should_sync", "last_synced_at", "latest_error", "incremental", "status"] + + def get_incremental(self, schema: ExternalDataSchema) -> bool: + return schema.is_incremental def get_table(self, schema: ExternalDataSchema) -> Optional[dict]: from posthog.warehouse.api.table import SimpleTableSerializer @@ -31,7 +47,7 @@ def get_table(self, schema: ExternalDataSchema) -> Optional[dict]: return SimpleTableSerializer(schema.table, context={"database": hogql_context}).data or None - def update(self, instance: ExternalDataSchema, validated_data: Dict[str, Any]) -> ExternalDataSchema: + def update(self, instance: ExternalDataSchema, validated_data: dict[str, Any]) -> ExternalDataSchema: should_sync = validated_data.get("should_sync", None) schedule_exists = external_data_workflow_exists(str(instance.id)) @@ -61,7 +77,7 @@ class ExternalDataSchemaViewset(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): search_fields = ["name"] ordering = "-created_at" - def 
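The new `incremental` field above is a read-only `SerializerMethodField` resolved from the model's `is_incremental` property. A generic sketch of that DRF pattern (the serializer and object here are illustrative):

```python
from rest_framework import serializers


class ScheduleSerializer(serializers.Serializer):
    name = serializers.CharField()
    incremental = serializers.SerializerMethodField(read_only=True)

    def get_incremental(self, obj) -> bool:
        # DRF resolves a SerializerMethodField via the get_<field_name>() hook;
        # the resulting field is always read-only.
        return bool(getattr(obj, "is_incremental", False))
```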
get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: context = super().get_serializer_context() context["database"] = create_hogql_database(team_id=self.team_id) return context @@ -74,3 +90,66 @@ def get_queryset(self): return self.queryset.filter(team_id=self.team_id).prefetch_related("created_by").order_by(self.ordering) return self.queryset.filter(team_id=self.team_id).prefetch_related("created_by").order_by(self.ordering) + + @action(methods=["POST"], detail=True) + def reload(self, request: Request, *args: Any, **kwargs: Any): + instance: ExternalDataSchema = self.get_object() + + if is_any_external_data_job_paused(self.team_id): + return Response( + status=status.HTTP_400_BAD_REQUEST, + data={"message": "Monthly sync limit reached. Please contact PostHog support to increase your limit."}, + ) + + try: + trigger_external_data_workflow(instance) + except temporalio.service.RPCError as e: + logger.exception(f"Could not trigger external data job for schema {instance.id}", exc_info=e) + + except Exception as e: + logger.exception(f"Could not trigger external data job for schema {instance.id}", exc_info=e) + raise + + instance.status = ExternalDataSchema.Status.RUNNING + instance.save() + return Response(status=status.HTTP_200_OK) + + @action(methods=["POST"], detail=True) + def resync(self, request: Request, *args: Any, **kwargs: Any): + instance: ExternalDataSchema = self.get_object() + + if is_any_external_data_job_paused(self.team_id): + return Response( + status=status.HTTP_400_BAD_REQUEST, + data={"message": "Monthly sync limit reached. Please contact PostHog support to increase your limit."}, + ) + + latest_running_job = ( + ExternalDataJob.objects.filter(schema_id=instance.pk, team_id=instance.team_id) + .order_by("-created_at") + .first() + ) + + if latest_running_job and latest_running_job.workflow_id and latest_running_job.status == "Running": + cancel_external_data_workflow(latest_running_job.workflow_id) + + all_jobs = ExternalDataJob.objects.filter( + schema_id=instance.pk, team_id=instance.team_id, status="Completed" + ).all() + + # Unnecessary to iterate for incremental jobs since they'll all by identified by the schema_id. 
Be over eager just to clear remnants + for job in all_jobs: + try: + delete_data_import_folder(job.folder_path) + except Exception as e: + logger.exception(f"Could not clean up data import folder: {job.folder_path}", exc_info=e) + pass + + try: + trigger_external_data_workflow(instance) + except temporalio.service.RPCError as e: + logger.exception(f"Could not trigger external data job for schema {instance.id}", exc_info=e) + + instance.status = ExternalDataSchema.Status.RUNNING + instance.save() + return Response(status=status.HTTP_200_OK) diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 4d6eee2f08779..12d3f27c3270b 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -1,5 +1,5 @@ import uuid -from typing import Any, List, Tuple, Dict +from typing import Any import structlog from rest_framework import filters, serializers, status, viewsets @@ -71,7 +71,7 @@ def get_last_run_at(self, instance: ExternalDataSource) -> str: return latest_completed_run.created_at if latest_completed_run else None def get_status(self, instance: ExternalDataSource) -> str: - active_schemas: List[ExternalDataSchema] = list(instance.schemas.filter(should_sync=True).all()) + active_schemas: list[ExternalDataSchema] = list(instance.schemas.filter(should_sync=True).all()) any_failures = any(schema.status == ExternalDataSchema.Status.ERROR for schema in active_schemas) any_cancelled = any(schema.status == ExternalDataSchema.Status.CANCELLED for schema in active_schemas) any_paused = any(schema.status == ExternalDataSchema.Status.PAUSED for schema in active_schemas) @@ -122,7 +122,7 @@ class ExternalDataSourceViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): search_fields = ["source_id"] ordering = "-created_at" - def get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: context = super().get_serializer_context() context["database"] = create_hogql_database(team_id=self.team_id) return context @@ -193,7 +193,7 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Response: disabled_schemas = [schema for schema in default_schemas if schema not in enabled_schemas] - active_schemas: List[ExternalDataSchema] = [] + active_schemas: list[ExternalDataSchema] = [] for schema in enabled_schemas: active_schemas.append( @@ -289,7 +289,7 @@ def _handle_hubspot_source(self, request: Request, *args: Any, **kwargs: Any) -> def _handle_postgres_source( self, request: Request, *args: Any, **kwargs: Any - ) -> Tuple[ExternalDataSource, List[Any]]: + ) -> tuple[ExternalDataSource, list[Any]]: payload = request.data["payload"] prefix = request.data.get("prefix", None) source_type = request.data["source_type"] @@ -348,18 +348,14 @@ def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: if latest_running_job and latest_running_job.workflow_id and latest_running_job.status == "Running": cancel_external_data_workflow(latest_running_job.workflow_id) - latest_completed_job = ( - ExternalDataJob.objects.filter(pipeline_id=instance.pk, team_id=instance.team_id, status="Completed") - .order_by("-created_at") - .first() - ) - if latest_completed_job: + all_jobs = ExternalDataJob.objects.filter( + pipeline_id=instance.pk, team_id=instance.team_id, status="Completed" + ).all() + for job in all_jobs: try: - delete_data_import_folder(latest_completed_job.folder_path) + delete_data_import_folder(job.folder_path) except Exception as e: - 
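The `reload` and `resync` endpoints added above follow DRF's `@action` pattern for extra detail routes on a viewset. A stripped-down, generic sketch of that pattern (the viewset, helpers and route prefix are illustrative, not PostHog's):

```python
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response


class ScheduleViewSet(viewsets.ViewSet):
    # detail=True adds the object id to the route, e.g. POST /schedules/<pk>/reload/
    @action(methods=["POST"], detail=True)
    def reload(self, request: Request, pk: str | None = None) -> Response:
        if self._is_paused(pk):
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"message": "Sync limit reached."},
            )
        self._trigger_workflow(pk)  # hypothetical trigger helper
        return Response(status=status.HTTP_200_OK)

    def _is_paused(self, pk: str | None) -> bool:
        return False

    def _trigger_workflow(self, pk: str | None) -> None:
        ...
```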
logger.exception( - f"Could not clean up data import folder: {latest_completed_job.folder_path}", exc_info=e - ) + logger.exception(f"Could not clean up data import folder: {job.folder_path}", exc_info=e) pass for schema in ExternalDataSchema.objects.filter( @@ -448,7 +444,7 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any): }, ) - result_mapped_to_options = [{"table": row, "should_sync": False} for row in result] + result_mapped_to_options = [{"table": row, "should_sync": True} for row in result] return Response(status=status.HTTP_200_OK, data=result_mapped_to_options) # Return the possible endpoints for all other source types @@ -459,7 +455,7 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any): data={"message": "Invalid parameter: source_type"}, ) - options = [{"table": row, "should_sync": False} for row in schemas] + options = [{"table": row, "should_sync": True} for row in schemas] return Response(status=status.HTTP_200_OK, data=options) @action(methods=["POST"], detail=False) diff --git a/posthog/warehouse/api/saved_query.py b/posthog/warehouse/api/saved_query.py index f341b5779d0b3..581593377f299 100644 --- a/posthog/warehouse/api/saved_query.py +++ b/posthog/warehouse/api/saved_query.py @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any from django.conf import settings from rest_framework import exceptions, filters, serializers, viewsets @@ -33,7 +33,7 @@ class Meta: ] read_only_fields = ["id", "created_by", "created_at", "columns"] - def get_columns(self, view: DataWarehouseSavedQuery) -> List[SerializedField]: + def get_columns(self, view: DataWarehouseSavedQuery) -> list[SerializedField]: team_id = self.context["team_id"] context = HogQLContext(team_id=team_id, database=create_hogql_database(team_id=team_id)) diff --git a/posthog/warehouse/api/table.py b/posthog/warehouse/api/table.py index fcfdd7eee8843..7e149b0faba19 100644 --- a/posthog/warehouse/api/table.py +++ b/posthog/warehouse/api/table.py @@ -1,4 +1,4 @@ -from typing import Any, List, Dict +from typing import Any from rest_framework import filters, request, response, serializers, status, viewsets from rest_framework.exceptions import NotAuthenticated @@ -53,7 +53,7 @@ class Meta: ] read_only_fields = ["id", "created_by", "created_at", "columns", "external_data_source", "external_schema"] - def get_columns(self, table: DataWarehouseTable) -> List[SerializedField]: + def get_columns(self, table: DataWarehouseTable) -> list[SerializedField]: hogql_context = self.context.get("database", None) if not hogql_context: hogql_context = create_hogql_database(team_id=self.context["team_id"]) @@ -91,7 +91,7 @@ class Meta: fields = ["id", "name", "columns", "row_count"] read_only_fields = ["id", "name", "columns", "row_count"] - def get_columns(self, table: DataWarehouseTable) -> List[SerializedField]: + def get_columns(self, table: DataWarehouseTable) -> list[SerializedField]: hogql_context = self.context.get("database", None) if not hogql_context: hogql_context = create_hogql_database(team_id=self.context["team_id"]) @@ -111,7 +111,7 @@ class TableViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): search_fields = ["name"] ordering = "-created_at" - def get_serializer_context(self) -> Dict[str, Any]: + def get_serializer_context(self) -> dict[str, Any]: context = super().get_serializer_context() context["database"] = create_hogql_database(team_id=self.team_id) return context diff --git a/posthog/warehouse/api/test/test_external_data_source.py 
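Both the `resync` action and the `destroy` cleanup above iterate over every completed job and log-and-continue on failures, so one bad folder does not abort the rest of the cleanup. A minimal sketch of that pattern, with an illustrative deletion helper:

```python
import structlog

logger = structlog.get_logger(__name__)


def delete_folders(paths: list[str]) -> None:
    for path in paths:
        try:
            remove_folder(path)  # hypothetical bucket/folder deletion helper
        except Exception as e:
            # Record the failure with its traceback, then keep going.
            logger.exception(f"Could not clean up data import folder: {path}", exc_info=e)


def remove_folder(path: str) -> None:
    ...
```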
b/posthog/warehouse/api/test/test_external_data_source.py index ceb9f3f62bea3..1c23807b82328 100644 --- a/posthog/warehouse/api/test/test_external_data_source.py +++ b/posthog/warehouse/api/test/test_external_data_source.py @@ -109,10 +109,12 @@ def test_get_external_data_source_with_schema(self): [ { "id": str(schema.pk), + "incremental": False, "last_synced_at": schema.last_synced_at, "name": schema.name, "should_sync": schema.should_sync, "latest_error": schema.latest_error, + "status": schema.status, "table": schema.table, } ], @@ -226,7 +228,7 @@ def test_internal_postgres(self, patch_get_postgres_schemas): }, ) self.assertEqual(response.status_code, 200) - self.assertEqual(response.json(), [{"should_sync": False, "table": "table_1"}]) + self.assertEqual(response.json(), [{"should_sync": True, "table": "table_1"}]) new_team = Team.objects.create(name="new_team", organization=self.team.organization) @@ -260,7 +262,7 @@ def test_internal_postgres(self, patch_get_postgres_schemas): }, ) self.assertEqual(response.status_code, 200) - self.assertEqual(response.json(), [{"should_sync": False, "table": "table_1"}]) + self.assertEqual(response.json(), [{"should_sync": True, "table": "table_1"}]) new_team = Team.objects.create(name="new_team", organization=self.team.organization) diff --git a/posthog/warehouse/data_load/validate_schema.py b/posthog/warehouse/data_load/validate_schema.py index dcfbb69595aa3..bda0f6ebf3368 100644 --- a/posthog/warehouse/data_load/validate_schema.py +++ b/posthog/warehouse/data_load/validate_schema.py @@ -23,17 +23,18 @@ get_table_by_schema_id, aget_schema_by_id, ) + +from posthog.temporal.data_imports.pipelines.schemas import PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING from posthog.warehouse.models.external_data_job import ExternalDataJob from posthog.temporal.common.logger import bind_temporal_worker_logger from clickhouse_driver.errors import ServerException from asgiref.sync import sync_to_async -from typing import Dict, Type from posthog.utils import camel_to_snake_case from posthog.warehouse.models.external_data_schema import ExternalDataSchema def dlt_to_hogql_type(dlt_type: TDataType | None) -> str: - hogql_type: Type[DatabaseField] = DatabaseField + hogql_type: type[DatabaseField] = DatabaseField if dlt_type is None: hogql_type = StringDatabaseField @@ -67,7 +68,7 @@ def dlt_to_hogql_type(dlt_type: TDataType | None) -> str: async def validate_schema( credential: DataWarehouseCredential, table_name: str, new_url_pattern: str, team_id: int, row_count: int -) -> Dict: +) -> dict: params = { "credential": credential, "name": table_name, @@ -95,7 +96,7 @@ async def validate_schema_and_update_table( team_id: int, schema_id: uuid.UUID, table_schema: TSchemaTables, - table_row_counts: Dict[str, int], + table_row_counts: dict[str, int], ) -> None: """ @@ -125,9 +126,11 @@ async def validate_schema_and_update_table( _schema_id = external_data_schema.id _schema_name: str = external_data_schema.name + incremental = _schema_name in PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING[job.pipeline.source_type] table_name = f"{job.pipeline.prefix or ''}{job.pipeline.source_type}_{_schema_name}".lower() new_url_pattern = job.url_pattern_by_schema(camel_to_snake_case(_schema_name)) + row_count = table_row_counts.get(_schema_name.lower(), 0) # Check @@ -141,18 +144,14 @@ async def validate_schema_and_update_table( ) # create or update - table_created = None - if last_successful_job: - try: - table_created = await get_table_by_schema_id(_schema_id, team_id) - if not table_created: - 
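`validate_schema_and_update_table` is async, so the blocking table calls it needs (`get_columns`, and now `get_count`) are wrapped with asgiref's `sync_to_async`, imported above; the wrapped calls appear just below. A minimal sketch of that wrapping, with a placeholder blocking call:

```python
import asyncio

from asgiref.sync import sync_to_async


def blocking_count() -> int:
    # Placeholder for a synchronous ClickHouse/ORM call.
    return 42


async def refresh_row_count() -> int:
    # sync_to_async returns an async wrapper that runs the callable in a thread.
    return await sync_to_async(blocking_count)()


if __name__ == "__main__":
    print(asyncio.run(refresh_row_count()))
```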
raise DataWarehouseTable.DoesNotExist - except Exception: - table_created = None + table_created: DataWarehouseTable | None = await get_table_by_schema_id(_schema_id, team_id) + if table_created: + table_created.url_pattern = new_url_pattern + if incremental: + table_created.row_count = await sync_to_async(table_created.get_count)() else: - table_created.url_pattern = new_url_pattern table_created.row_count = row_count - await asave_datawarehousetable(table_created) + await asave_datawarehousetable(table_created) if not table_created: table_created = await acreate_datawarehousetable(external_data_source_id=job.pipeline.id, **data) @@ -160,7 +159,7 @@ async def validate_schema_and_update_table( for schema in table_schema.values(): if schema.get("resource") == _schema_name: schema_columns = schema.get("columns") or {} - db_columns: Dict[str, str] = await sync_to_async(table_created.get_columns)() + db_columns: dict[str, str] = await sync_to_async(table_created.get_columns)() columns = {} for column_name, db_column_type in db_columns.items(): @@ -201,7 +200,10 @@ async def validate_schema_and_update_table( exc_info=e, ) - if last_successful_job: + if ( + last_successful_job + and _schema_name not in PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING[job.pipeline.source_type] + ): try: last_successful_job.delete_data_in_bucket() except Exception as e: diff --git a/posthog/warehouse/external_data_source/source.py b/posthog/warehouse/external_data_source/source.py index f722bae1f33b4..99e49a39a1df0 100644 --- a/posthog/warehouse/external_data_source/source.py +++ b/posthog/warehouse/external_data_source/source.py @@ -1,5 +1,5 @@ import datetime as dt -from typing import Dict, Optional +from typing import Optional from pydantic import BaseModel, field_validator @@ -71,7 +71,7 @@ def create_stripe_source(payload: StripeSourcePayload, workspace_id: str) -> Ext return _create_source(payload) -def _create_source(payload: Dict) -> ExternalDataSource: +def _create_source(payload: dict) -> ExternalDataSource: response = send_request(AIRBYTE_SOURCE_URL, method="POST", payload=payload) return ExternalDataSource( source_id=response["sourceId"], diff --git a/posthog/warehouse/models/datawarehouse_saved_query.py b/posthog/warehouse/models/datawarehouse_saved_query.py index ffa890ba45b8a..0513cc3b7d1c2 100644 --- a/posthog/warehouse/models/datawarehouse_saved_query.py +++ b/posthog/warehouse/models/datawarehouse_saved_query.py @@ -1,5 +1,4 @@ import re -from typing import Dict from sentry_sdk import capture_exception from django.core.exceptions import ValidationError from django.db import models @@ -47,7 +46,7 @@ class Meta: ) ] - def get_columns(self) -> Dict[str, str]: + def get_columns(self) -> dict[str, str]: from posthog.api.services.query import process_query # TODO: catch and raise error diff --git a/posthog/warehouse/models/external_data_job.py b/posthog/warehouse/models/external_data_job.py index b095f8667d934..161780a63639e 100644 --- a/posthog/warehouse/models/external_data_job.py +++ b/posthog/warehouse/models/external_data_job.py @@ -1,4 +1,5 @@ from django.db import models +from django.db.models import Prefetch from django.conf import settings from posthog.models.team import Team from posthog.models.utils import CreatedMetaFields, UUIDModel, sane_repr @@ -31,6 +32,9 @@ class Status(models.TextChoices): @property def folder_path(self) -> str: + if self.schema and self.schema.is_incremental: + return f"team_{self.team_id}_{self.pipeline.source_type}_{str(self.schema.pk)}".lower().replace("-", "_") + 
return f"team_{self.team_id}_{self.pipeline.source_type}_{str(self.pk)}".lower().replace("-", "_") def url_pattern_by_schema(self, schema: str) -> str: @@ -43,7 +47,11 @@ def delete_data_in_bucket(self) -> None: @database_sync_to_async def get_external_data_job(job_id: UUID) -> ExternalDataJob: - return ExternalDataJob.objects.prefetch_related("pipeline").get(pk=job_id) + from posthog.warehouse.models import ExternalDataSchema + + return ExternalDataJob.objects.prefetch_related( + "pipeline", Prefetch("schema", queryset=ExternalDataSchema.objects.prefetch_related("source")) + ).get(pk=job_id) @database_sync_to_async diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index add9350230593..d42ac79e10e1c 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -1,10 +1,10 @@ -from typing import Any, List +from typing import Any from django.db import models from posthog.models.team import Team from posthog.models.utils import CreatedMetaFields, UUIDModel, sane_repr import uuid -import psycopg +import psycopg2 from django.conf import settings from posthog.warehouse.util import database_sync_to_async @@ -34,6 +34,12 @@ class Status(models.TextChoices): __repr__ = sane_repr("name") + @property + def is_incremental(self): + from posthog.temporal.data_imports.pipelines.schemas import PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING + + return self.name in PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING[self.source.source_type] + @database_sync_to_async def asave_external_data_schema(schema: ExternalDataSchema) -> None: @@ -52,7 +58,7 @@ def aget_schema_if_exists(schema_name: str, team_id: int, source_id: uuid.UUID) @database_sync_to_async def aget_schema_by_id(schema_id: str, team_id: int) -> ExternalDataSchema | None: - return ExternalDataSchema.objects.get(id=schema_id, team_id=team_id) + return ExternalDataSchema.objects.prefetch_related("source").get(id=schema_id, team_id=team_id) @database_sync_to_async @@ -74,8 +80,8 @@ def sync_old_schemas_with_new_schemas(new_schemas: list, source_id: uuid.UUID, t ExternalDataSchema.objects.create(name=schema, team_id=team_id, source_id=source_id, should_sync=False) -def get_postgres_schemas(host: str, port: str, database: str, user: str, password: str, schema: str) -> List[Any]: - connection = psycopg.Connection.connect( +def get_postgres_schemas(host: str, port: str, database: str, user: str, password: str, schema: str) -> list[Any]: + connection = psycopg2.connect( host=host, port=int(port), dbname=database, diff --git a/posthog/warehouse/models/external_table_definitions.py b/posthog/warehouse/models/external_table_definitions.py index 405ffa150e6ae..6a684d96eca60 100644 --- a/posthog/warehouse/models/external_table_definitions.py +++ b/posthog/warehouse/models/external_table_definitions.py @@ -1,4 +1,3 @@ -from typing import Dict from posthog.hogql import ast from posthog.hogql.database.models import ( BooleanDatabaseField, @@ -10,7 +9,7 @@ ) -external_tables: Dict[str, Dict[str, FieldOrTable]] = { +external_tables: dict[str, dict[str, FieldOrTable]] = { "*": { "__dlt_id": StringDatabaseField(name="_dlt_id", hidden=True), "__dlt_load_id": StringDatabaseField(name="_dlt_load_id", hidden=True), diff --git a/posthog/warehouse/models/join.py b/posthog/warehouse/models/join.py index 5a3e46658fdbb..d3edfb864c434 100644 --- a/posthog/warehouse/models/join.py +++ b/posthog/warehouse/models/join.py @@ -1,4 +1,4 @@ -from typing import Any, Dict 
+from typing import Any from warnings import warn from django.db import models @@ -45,7 +45,7 @@ def join_function(self): def _join_function( from_table: str, to_table: str, - requested_fields: Dict[str, Any], + requested_fields: dict[str, Any], context: HogQLContext, node: SelectQuery, ): diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index f0d2c67c046d4..229c81168a8d3 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional +from typing import Optional from django.db import models from posthog.client import sync_execute @@ -111,7 +111,7 @@ def table_name_without_prefix(self) -> str: prefix = "" return self.name[len(prefix) :] - def get_columns(self, safe_expose_ch_error=True) -> Dict[str, str]: + def get_columns(self, safe_expose_ch_error=True) -> dict[str, str]: try: result = sync_execute( """DESCRIBE TABLE ( @@ -135,11 +135,32 @@ def get_columns(self, safe_expose_ch_error=True) -> Dict[str, str]: return {item[0]: item[1] for item in result} + def get_count(self, safe_expose_ch_error=True) -> int: + try: + result = sync_execute( + """SELECT count() FROM + s3(%(url_pattern)s, %(access_key)s, %(access_secret)s, %(format)s)""", + { + "url_pattern": self.url_pattern, + "access_key": self.credential.access_key, + "access_secret": self.credential.access_secret, + "format": self.format, + }, + ) + except Exception as err: + capture_exception(err) + if safe_expose_ch_error: + self._safe_expose_ch_error(err) + else: + raise err + + return result[0][0] + def hogql_definition(self) -> S3Table: if not self.columns: raise Exception("Columns must be fetched and saved to use in HogQL.") - fields: Dict[str, FieldOrTable] = {} + fields: dict[str, FieldOrTable] = {} structure = [] for column, type in self.columns.items(): # Support for 'old' style columns diff --git a/posthog/year_in_posthog/calculate_2023.py b/posthog/year_in_posthog/calculate_2023.py index 29477cfd15007..03428d2711d30 100644 --- a/posthog/year_in_posthog/calculate_2023.py +++ b/posthog/year_in_posthog/calculate_2023.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Dict, Optional +from typing import Optional from django.conf import settings from django.db import connection @@ -147,7 +147,7 @@ def dictfetchall(cursor): @cache_for(timedelta(seconds=0 if settings.DEBUG else 30)) -def calculate_year_in_posthog_2023(user_uuid: str) -> Optional[Dict]: +def calculate_year_in_posthog_2023(user_uuid: str) -> Optional[dict]: with connection.cursor() as cursor: cursor.execute(query, {"user_uuid": user_uuid}) rows = dictfetchall(cursor) diff --git a/posthog/year_in_posthog/year_in_posthog.py b/posthog/year_in_posthog/year_in_posthog.py index 3bf05d821c27e..a6ac65fa2fdaa 100644 --- a/posthog/year_in_posthog/year_in_posthog.py +++ b/posthog/year_in_posthog/year_in_posthog.py @@ -2,7 +2,7 @@ from django.template.loader import get_template from django.views.decorators.cache import cache_control import os -from typing import Dict, List, Union +from typing import Union import structlog @@ -58,7 +58,7 @@ } -def stats_for_user(data: Dict) -> List[Dict[str, Union[int, str]]]: +def stats_for_user(data: dict) -> list[dict[str, Union[int, str]]]: stats = data["stats"] return [ @@ -75,7 +75,7 @@ def stats_for_user(data: Dict) -> List[Dict[str, Union[int, str]]]: ] -def sort_list_based_on_preference(badges: List[str]) -> str: +def sort_list_based_on_preference(badges: list[str]) -> str: """sort a list based on its order in 
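The new `DataWarehouseTable.get_count` above counts rows straight from object storage via ClickHouse's `s3()` table function. A hedged sketch of the same query issued through clickhouse-driver directly; the host, credentials and format are placeholders:

```python
from clickhouse_driver import Client


def count_s3_rows(url_pattern: str, access_key: str, access_secret: str, fmt: str = "Parquet") -> int:
    client = Client(host="localhost")
    result = client.execute(
        """SELECT count() FROM
           s3(%(url_pattern)s, %(access_key)s, %(access_secret)s, %(format)s)""",
        {
            "url_pattern": url_pattern,
            "access_key": access_key,
            "access_secret": access_secret,
            "format": fmt,
        },
    )
    # execute() returns a list of row tuples; count() yields a single cell.
    return result[0][0]
```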
badge_preferences and then choose the last one""" if len(badges) >= 3: return "champion" diff --git a/production.Dockerfile b/production.Dockerfile index 0d4c8880a4556..4802bb7e51da7 100644 --- a/production.Dockerfile +++ b/production.Dockerfile @@ -131,7 +131,7 @@ RUN apt-get update && \ && \ rm -rf /var/lib/apt/lists/* && \ mkdir share && \ - ( curl -s -L "https://mmdbcdn.posthog.net/" | brotli --decompress --output=./share/GeoLite2-City.mmdb ) && \ + ( curl -s -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=./share/GeoLite2-City.mmdb ) && \ chmod -R 755 ./share/GeoLite2-City.mmdb diff --git a/pyproject.toml b/pyproject.toml index 2701b5a74d699..cb19ccadb8178 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,6 @@ +[project] +requires-python = ">=3.10" + [tool.black] line-length = 120 target-version = ['py310'] @@ -28,6 +31,8 @@ ignore = [ "F403", "F541", "F601", + "UP007", + "UP032", ] select = [ "B", @@ -40,6 +45,7 @@ select = [ "RUF015", "RUF019", "T2", + "UP", "W", ] diff --git a/requirements-dev.in b/requirements-dev.in index 44bd424b1bcc4..b27d179b06e98 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -1,8 +1,8 @@ # In order to add, delete or modify a dependency, please update # the reference here and then run: # -# - `pip-compile --rebuild requirements.in` -# - `pip-compile --rebuild requirements-dev.in` +# - `uv pip compile requirements.in -o requirements.txt` +# - `uv pip compile requirements-dev.in -o requirements-dev.txt` # # Make sure we use production deps for constraining installed dev packages. This # is important as otherwise we could be running tests with different versions @@ -12,7 +12,6 @@ -c requirements.txt ruff~=0.3.7 -pip-tools==7.3.0 mypy~=1.8.0 mypy-baseline~=0.6.1 mypy-extensions==1.0.0 @@ -23,7 +22,8 @@ Faker==17.5.0 fakeredis[lua]==2.11.0 freezegun==1.2.2 packaging==23.1 -black~=22.8.0 +black~=23.9.1 +boto3-stubs[s3] types-markdown==3.3.9 types-PyYAML==6.0.1 types-freezegun==1.1.10 @@ -48,3 +48,5 @@ responses==0.23.1 syrupy~=4.6.0 flaky==3.7.0 aioresponses==0.7.6 +prance==23.06.21.0 +openapi-spec-validator==0.7.1 # Needed for prance as a validation backend diff --git a/requirements-dev.txt b/requirements-dev.txt index 5b8893ea256bb..ddca38248e65e 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,107 +1,70 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile requirements-dev.in -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements-dev.in -o requirements-dev.txt aiohttp==3.9.3 - # via - # -c requirements.txt - # aioresponses + # via aioresponses aioresponses==0.7.6 - # via -r requirements-dev.in aiosignal==1.2.0 - # via - # -c requirements.txt - # aiohttp + # via aiohttp annotated-types==0.5.0 - # via - # -c requirements.txt - # pydantic + # via pydantic argcomplete==2.0.0 # via datamodel-code-generator asgiref==3.7.2 - # via - # -c requirements.txt - # django + # via django async-timeout==4.0.2 # via - # -c requirements.txt # aiohttp # redis attrs==23.2.0 # via - # -c requirements.txt # aiohttp -black==22.8.0 - # via - # -r requirements-dev.in - # datamodel-code-generator -build==0.10.0 - # via pip-tools + # jsonschema + # referencing +black==23.9.1 + # via datamodel-code-generator +boto3-stubs==1.34.84 +botocore-stubs==1.34.84 + # via boto3-stubs certifi==2019.11.28 - # via - # -c requirements.txt - # requests + # via requests +chardet==5.2.0 + # via prance charset-normalizer==2.1.0 - # via 
- # -c requirements.txt - # requests + # via requests click==8.1.7 - # via - # -c requirements.txt - # black - # pip-tools + # via black colorama==0.4.4 # via pytest-watch coreapi==2.3.3 # via djangorestframework-stubs coreschema==0.0.4 # via coreapi -coverage[toml]==5.5 - # via - # coverage - # pytest-cov +coverage==5.5 + # via pytest-cov datamodel-code-generator==0.25.2 - # via -r requirements-dev.in django==4.2.11 # via - # -c requirements.txt # django-stubs # django-stubs-ext django-stubs==1.8.0 - # via - # -r requirements-dev.in - # djangorestframework-stubs + # via djangorestframework-stubs django-stubs-ext==0.7.0 # via django-stubs djangorestframework-stubs==1.4.0 - # via -r requirements-dev.in dnspython==2.2.1 - # via - # -c requirements.txt - # email-validator + # via email-validator docopt==0.6.2 # via pytest-watch email-validator==2.0.0.post2 # via pydantic -exceptiongroup==1.2.0 - # via - # -c requirements.txt - # pytest +exceptiongroup==1.2.1 + # via pytest faker==17.5.0 - # via -r requirements-dev.in -fakeredis[lua]==2.11.0 - # via - # -r requirements-dev.in - # fakeredis +fakeredis==2.11.0 flaky==3.7.0 - # via -r requirements-dev.in freezegun==1.2.2 - # via -r requirements-dev.in frozenlist==1.3.0 # via - # -c requirements.txt # aiohttp # aiosignal genson==1.2.2 @@ -110,7 +73,6 @@ icdiff==2.0.5 # via pytest-icdiff idna==2.8 # via - # -c requirements.txt # email-validator # requests # yarl @@ -126,62 +88,64 @@ jinja2==3.1.3 # via # coreschema # datamodel-code-generator +jsonschema==4.20.0 + # via + # openapi-schema-validator + # openapi-spec-validator +jsonschema-path==0.3.2 + # via openapi-spec-validator +jsonschema-specifications==2023.12.1 + # via + # jsonschema + # openapi-schema-validator +lazy-object-proxy==1.10.0 + # via openapi-spec-validator lupa==1.14.1 # via fakeredis markupsafe==2.1.5 # via jinja2 multidict==6.0.2 # via - # -c requirements.txt # aiohttp # yarl mypy==1.8.0 # via - # -r requirements-dev.in # django-stubs # djangorestframework-stubs mypy-baseline==0.6.1 - # via -r requirements-dev.in +mypy-boto3-s3==1.34.65 + # via boto3-stubs mypy-extensions==1.0.0 # via - # -r requirements-dev.in # black # mypy +openapi-schema-validator==0.6.2 + # via openapi-spec-validator +openapi-spec-validator==0.7.1 packaging==23.1 # via - # -c requirements.txt - # -r requirements-dev.in - # build + # black # datamodel-code-generator + # prance # pytest parameterized==0.9.0 - # via -r requirements-dev.in -pathspec==0.9.0 +pathable==0.4.3 + # via jsonschema-path +pathspec==0.12.1 # via black -pip-tools==7.3.0 - # via -r requirements-dev.in platformdirs==3.11.0 - # via - # -c requirements.txt - # black + # via black pluggy==0.13.1 # via pytest pprintpp==0.4.0 # via pytest-icdiff -pydantic[email]==2.5.3 - # via - # -c requirements.txt - # datamodel-code-generator - # pydantic +prance==23.6.21.0 +pydantic==2.5.3 + # via datamodel-code-generator pydantic-core==2.14.6 - # via - # -c requirements.txt - # pydantic -pyproject-hooks==1.0.0 - # via build + # via pydantic pytest==7.4.4 # via - # -r requirements-dev.in # pytest-asyncio # pytest-cov # pytest-django @@ -192,124 +156,101 @@ pytest==7.4.4 # pytest-watch # syrupy pytest-asyncio==0.21.1 - # via -r requirements-dev.in pytest-cov==4.1.0 - # via -r requirements-dev.in pytest-django==4.5.2 - # via -r requirements-dev.in pytest-env==0.8.2 - # via -r requirements-dev.in pytest-icdiff==0.6 - # via -r requirements-dev.in pytest-mock==3.11.1 - # via -r requirements-dev.in pytest-split==0.8.1 - # via -r requirements-dev.in 
pytest-watch==4.2.0 - # via -r requirements-dev.in python-dateutil==2.8.2 # via - # -c requirements.txt - # -r requirements-dev.in # faker # freezegun pyyaml==6.0.1 # via - # -c requirements.txt # datamodel-code-generator + # jsonschema-path # responses redis==4.5.4 + # via fakeredis +referencing==0.31.1 # via - # -c requirements.txt - # fakeredis + # jsonschema + # jsonschema-path + # jsonschema-specifications requests==2.31.0 # via - # -c requirements.txt # coreapi # djangorestframework-stubs + # jsonschema-path + # prance # responses responses==0.23.1 - # via -r requirements-dev.in +rfc3339-validator==0.1.4 + # via openapi-schema-validator +rpds-py==0.16.2 + # via + # jsonschema + # referencing +ruamel-yaml==0.18.6 + # via prance +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml ruff==0.3.7 - # via -r requirements-dev.in six==1.16.0 # via - # -c requirements.txt + # prance # python-dateutil + # rfc3339-validator sortedcontainers==2.4.0 - # via - # -c requirements.txt - # fakeredis + # via fakeredis sqlparse==0.4.4 - # via - # -c requirements.txt - # django + # via django syrupy==4.6.0 - # via -r requirements-dev.in toml==0.10.1 # via # coverage # datamodel-code-generator -tomli==1.2.3 +tomli==2.0.1 # via # black - # build # mypy - # pip-tools - # pyproject-hooks # pytest +types-awscrt==0.20.9 + # via botocore-stubs types-freezegun==1.1.10 - # via -r requirements-dev.in types-markdown==3.3.9 - # via -r requirements-dev.in types-python-dateutil==2.8.3 - # via -r requirements-dev.in types-pytz==2023.3.0.0 - # via - # -r requirements-dev.in - # types-tzlocal + # via types-tzlocal types-pyyaml==6.0.1 - # via - # -r requirements-dev.in - # responses + # via responses types-redis==4.3.20 - # via -r requirements-dev.in types-requests==2.26.1 - # via -r requirements-dev.in types-retry==0.9.9.4 - # via -r requirements-dev.in +types-s3transfer==0.10.1 + # via boto3-stubs types-tzlocal==5.1.0.1 - # via -r requirements-dev.in typing-extensions==4.7.1 # via - # -c requirements.txt # asgiref + # black + # boto3-stubs # django-stubs # django-stubs-ext # djangorestframework-stubs # mypy + # mypy-boto3-s3 # pydantic # pydantic-core uritemplate==4.1.1 - # via - # -c requirements.txt - # coreapi + # via coreapi urllib3==1.26.18 # via - # -c requirements.txt # requests # responses watchdog==2.1.8 # via pytest-watch -wheel==0.42.0 - # via - # -c requirements.txt - # pip-tools yarl==1.7.2 - # via - # -c requirements.txt - # aiohttp - -# The following packages are considered to be unsafe in a requirements file: -# pip -# setuptools + # via aiohttp diff --git a/requirements.in b/requirements.in index 68660085de0d1..d15b56d2d6520 100644 --- a/requirements.in +++ b/requirements.in @@ -1,23 +1,20 @@ # In order to add, delete or modify a dependency, please update # the reference here and then run: # -# - `pip-compile --rebuild requirements.in` -# - `pip-compile --rebuild requirements-dev.in` +# - `uv pip compile requirements.in -o requirements.txt` +# - `uv pip compile requirements-dev.in -o requirements-dev.txt` # aiohttp>=3.9.0 aioboto3==12.0.0 aiokafka>=0.8 antlr4-python3-runtime==4.13.1 -amqp==5.1.1 boto3==1.28.16 -boto3-stubs[s3] brotli==1.1.0 celery==5.3.4 celery-redbeat==2.1.1 -clickhouse-driver==0.2.4 +clickhouse-driver==0.2.6 clickhouse-pool==0.5.3 cryptography==37.0.2 -defusedxml==0.6.0 dj-database-url==0.5.0 Django~=4.2.11 django-axes==5.9.0 @@ -36,36 +33,27 @@ djangorestframework==3.14.0 djangorestframework-csv==2.1.1 djangorestframework-dataclasses==1.2.0 django-fernet-encrypted-fields==0.1.3 -dlt==0.4.7 
+dlt==0.4.9a2 dnspython==2.2.1 drf-exceptions-hog==0.4.0 drf-extensions==0.7.0 drf-spectacular==0.27.1 -gevent==23.9.1 geoip2==4.6.0 google-cloud-bigquery==3.11.4 gunicorn==20.1.0 -idna==2.8 -importlib-metadata==6.8.0 infi-clickhouse-orm@ git+https://github.com/PostHog/infi.clickhouse_orm@9578c79f29635ee2c1d01b7979e89adab8383de2 kafka-python==2.0.2 -kafka-helper==0.2 kombu==5.3.2 lzstring==1.0.4 natsort==8.4.0 numpy==1.23.3 -openapi-spec-validator==0.7.1 openpyxl==3.1.2 orjson==3.9.10 pandas==2.2.0 -parso==0.8.1 -pexpect==4.7.0 -pickleshare==0.7.5 Pillow==10.2.0 posthoganalytics==3.5.0 -prance==23.06.21.0 psycopg2-binary==2.9.7 -psycopg[binary]==3.1.13 +psycopg[binary]==3.1.18 pyarrow==15.0.0 pydantic==2.5.3 pyjwt==2.4.0 @@ -75,11 +63,10 @@ pytz==2023.3 redis==4.5.4 retry==0.9.2 requests~=2.31.0 -requests-oauthlib==1.3.0 s3fs==2023.10.0 stripe==7.4.0 selenium==4.1.5 -sentry-sdk==1.14.0 +sentry-sdk[clickhouse-driver,celery,openai,django]~=1.44.1 semantic_version==2.8.5 scikit-learn==1.4.0 slack_sdk==3.17.1 @@ -103,4 +90,3 @@ openai==1.10.0 tiktoken==0.6.0 nh3==0.2.14 hogql-parser==1.0.7 -urllib3[secure,socks]==1.26.18 diff --git a/requirements.txt b/requirements.txt index 78255c273aef0..0f8edaf5652b9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,36 +1,25 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile requirements.in -# +# This file was autogenerated by uv via the following command: +# uv pip compile requirements.in -o requirements.txt aioboto3==12.0.0 - # via -r requirements.in -aiobotocore[boto3]==2.7.0 +aiobotocore==2.7.0 # via # aioboto3 - # aiobotocore # s3fs aiohttp==3.9.3 # via - # -r requirements.in # aiobotocore # geoip2 # s3fs aioitertools==0.11.0 # via aiobotocore aiokafka==0.8.1 - # via -r requirements.in aiosignal==1.2.0 # via aiohttp amqp==5.1.1 - # via - # -r requirements.in - # kombu + # via kombu annotated-types==0.5.0 # via pydantic antlr4-python3-runtime==4.13.1 - # via -r requirements.in anyio==4.2.0 # via # httpx @@ -61,29 +50,23 @@ backoff==2.2.1 # via posthoganalytics billiard==4.1.0 # via celery +black==23.9.1 + # via dlt boto3==1.28.16 - # via - # -r requirements.in - # aiobotocore -boto3-stubs[s3]==1.26.138 - # via -r requirements.in + # via aiobotocore botocore==1.31.64 # via # aiobotocore # boto3 # s3transfer -botocore-stubs==1.29.130 - # via boto3-stubs brotli==1.1.0 - # via -r requirements.in cachetools==5.3.1 # via google-auth celery==5.3.4 # via - # -r requirements.in # celery-redbeat + # sentry-sdk celery-redbeat==2.1.1 - # via -r requirements.in certifi==2019.11.28 # via # httpcore @@ -96,14 +79,13 @@ cffi==1.14.5 # via # cryptography # snowflake-connector-python -chardet==5.1.0 - # via prance charset-normalizer==2.1.0 # via # requests # snowflake-connector-python click==8.1.7 # via + # black # celery # click-didyoumean # click-plugins @@ -115,17 +97,14 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery -clickhouse-driver==0.2.4 +clickhouse-driver==0.2.6 # via - # -r requirements.in # clickhouse-pool + # sentry-sdk clickhouse-pool==0.5.3 - # via -r requirements.in cryptography==37.0.2 # via - # -r requirements.in # django-fernet-encrypted-fields - # kafka-helper # pyopenssl # snowflake-connector-python # social-auth-core @@ -138,16 +117,13 @@ decorator==5.1.1 # via retry defusedxml==0.6.0 # via - # -r requirements.in # python3-openid # social-auth-core distro==1.9.0 # via openai dj-database-url==0.5.0 - # via -r requirements.in django==4.2.11 # via - # -r 
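The sentry-sdk pin above switches to the extras form, which installs the optional dependencies for the Django, Celery, ClickHouse and OpenAI integrations. A hedged sketch of explicit initialisation with two of those integrations; the DSN and sample rate are placeholders:

```python
import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[DjangoIntegration(), CeleryIntegration()],
    traces_sample_rate=0.1,
)
```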
requirements.in # django-axes # django-cors-headers # django-deprecate-fields @@ -165,18 +141,13 @@ django==4.2.11 # djangorestframework # djangorestframework-dataclasses # drf-spectacular + # sentry-sdk django-axes==5.9.0 - # via -r requirements.in django-cors-headers==3.5.0 - # via -r requirements.in django-deprecate-fields==0.1.1 - # via -r requirements.in django-extensions==3.1.2 - # via -r requirements.in django-fernet-encrypted-fields==0.1.3 - # via -r requirements.in django-filter==2.4.0 - # via -r requirements.in django-formtools==2.4 # via django-two-factor-auth django-ipware==3.0.2 @@ -184,50 +155,34 @@ django-ipware==3.0.2 # django-axes # django-structlog django-loginas==0.3.11 - # via -r requirements.in django-otp==1.1.4 # via django-two-factor-auth django-phonenumber-field==6.4.0 # via django-two-factor-auth django-picklefield==3.0.1 - # via -r requirements.in django-prometheus==2.2.0 - # via -r requirements.in django-redis==5.2.0 - # via -r requirements.in django-revproxy==0.12.0 - # via -r requirements.in django-statsd==2.5.2 - # via -r requirements.in django-structlog==2.1.3 - # via -r requirements.in django-two-factor-auth==1.14.0 - # via -r requirements.in djangorestframework==3.14.0 # via - # -r requirements.in # djangorestframework-csv # djangorestframework-dataclasses # drf-exceptions-hog # drf-extensions # drf-spectacular djangorestframework-csv==2.1.1 - # via -r requirements.in djangorestframework-dataclasses==1.2.0 - # via -r requirements.in -dlt==0.4.7 - # via -r requirements.in +dlt==0.4.9a2 dnspython==2.2.1 - # via -r requirements.in drf-exceptions-hog==0.4.0 - # via -r requirements.in drf-extensions==0.7.0 - # via -r requirements.in drf-spectacular==0.27.1 - # via -r requirements.in et-xmlfile==1.1.0 # via openpyxl -exceptiongroup==1.2.0 +exceptiongroup==1.2.1 # via anyio filelock==3.12.0 # via snowflake-connector-python @@ -242,18 +197,14 @@ fsspec==2023.10.0 future==0.18.3 # via lzstring geoip2==4.6.0 - # via -r requirements.in -gevent==23.9.1 - # via -r requirements.in gitdb==4.0.11 # via gitpython gitpython==3.1.40 # via dlt giturlparse==0.12.0 # via dlt -google-api-core[grpc]==2.11.1 +google-api-core==2.11.1 # via - # google-api-core # google-cloud-bigquery # google-cloud-core google-auth==2.22.0 @@ -261,7 +212,6 @@ google-auth==2.22.0 # google-api-core # google-cloud-core google-cloud-bigquery==3.11.4 - # via -r requirements.in google-cloud-core==2.3.3 # via google-cloud-bigquery google-crc32c==1.5.0 @@ -272,8 +222,6 @@ googleapis-common-protos==1.60.0 # via # google-api-core # grpcio-status -greenlet==3.0.3 - # via gevent grpcio==1.57.0 # via # google-api-core @@ -282,7 +230,6 @@ grpcio==1.57.0 grpcio-status==1.57.0 # via google-api-core gunicorn==20.1.0 - # via -r requirements.in h11==0.13.0 # via # httpcore @@ -290,7 +237,6 @@ h11==0.13.0 hexbytes==1.0.0 # via dlt hogql-parser==1.0.7 - # via -r requirements.in httpcore==1.0.2 # via httpx httpx==0.26.0 @@ -299,7 +245,6 @@ humanize==4.9.0 # via dlt idna==2.8 # via - # -r requirements.in # anyio # httpx # requests @@ -307,10 +252,7 @@ idna==2.8 # trio # urllib3 # yarl -importlib-metadata==6.8.0 - # via -r requirements.in infi-clickhouse-orm @ git+https://github.com/PostHog/infi.clickhouse_orm@9578c79f29635ee2c1d01b7979e89adab8383de2 - # via -r requirements.in inflection==0.5.1 # via drf-spectacular iso8601==0.1.12 @@ -326,58 +268,37 @@ joblib==1.3.2 jsonpath-ng==1.6.0 # via dlt jsonschema==4.20.0 - # via - # drf-spectacular - # openapi-schema-validator - # openapi-spec-validator -jsonschema-path==0.3.2 - # 
via openapi-spec-validator + # via drf-spectacular jsonschema-specifications==2023.12.1 - # via - # jsonschema - # openapi-schema-validator -kafka-helper==0.2 - # via -r requirements.in + # via jsonschema kafka-python==2.0.2 - # via - # -r requirements.in - # aiokafka + # via aiokafka kombu==5.3.2 - # via - # -r requirements.in - # celery -lazy-object-proxy==1.10.0 - # via openapi-spec-validator + # via celery lxml==4.9.4 # via # python3-saml # toronado # xmlsec lzstring==1.0.4 - # via -r requirements.in makefun==1.15.2 # via dlt maxminddb==2.2.0 # via geoip2 mimesis==5.2.1 - # via -r requirements.in monotonic==1.5 # via posthoganalytics more-itertools==9.0.0 - # via -r requirements.in multidict==6.0.2 # via # aiohttp # yarl -mypy-boto3-s3==1.26.127 - # via boto3-stubs +mypy-extensions==1.0.0 + # via black natsort==8.4.0 - # via -r requirements.in nh3==0.2.14 - # via -r requirements.in numpy==1.23.3 # via - # -r requirements.in # pandas # pyarrow # scikit-learn @@ -387,53 +308,36 @@ oauthlib==3.1.0 # requests-oauthlib # social-auth-core openai==1.10.0 - # via -r requirements.in -openapi-schema-validator==0.6.2 - # via openapi-spec-validator -openapi-spec-validator==0.7.1 - # via -r requirements.in + # via sentry-sdk openpyxl==3.1.2 - # via -r requirements.in orjson==3.9.10 - # via - # -r requirements.in - # dlt + # via dlt outcome==1.1.0 # via trio packaging==23.1 # via # aiokafka + # black # dlt # google-cloud-bigquery - # prance # snowflake-connector-python # webdriver-manager pandas==2.2.0 - # via -r requirements.in -parso==0.8.1 - # via -r requirements.in -pathable==0.4.3 - # via jsonschema-path +pathspec==0.12.1 + # via black pathvalidate==3.2.0 # via dlt pendulum==2.1.2 # via dlt -pexpect==4.7.0 - # via -r requirements.in phonenumberslite==8.13.6 - # via -r requirements.in -pickleshare==0.7.5 - # via -r requirements.in pillow==10.2.0 - # via -r requirements.in platformdirs==3.11.0 - # via snowflake-connector-python + # via + # black + # snowflake-connector-python ply==3.11 # via jsonpath-ng posthoganalytics==3.5.0 - # via -r requirements.in -prance==23.6.21.0 - # via -r requirements.in prometheus-client==0.14.1 # via django-prometheus prompt-toolkit==3.0.39 @@ -448,20 +352,13 @@ protobuf==4.22.1 # grpcio-status # proto-plus # temporalio -psycopg[binary]==3.1.13 - # via - # -r requirements.in - # psycopg -psycopg-binary==3.1.13 +psycopg==3.1.18 +psycopg-binary==3.1.18 # via psycopg psycopg2-binary==2.9.7 - # via -r requirements.in -ptyprocess==0.6.0 - # via pexpect py==1.11.0 # via retry pyarrow==15.0.0 - # via -r requirements.in pyasn1==0.5.0 # via # pyasn1-modules @@ -471,14 +368,11 @@ pyasn1-modules==0.3.0 pycparser==2.20 # via cffi pydantic==2.5.3 - # via - # -r requirements.in - # openai + # via openai pydantic-core==2.14.6 # via pydantic pyjwt==2.4.0 # via - # -r requirements.in # snowflake-connector-python # social-auth-core pyopenssl==22.0.0 @@ -491,7 +385,6 @@ pysocks==1.7.1 # via urllib3 python-dateutil==2.8.2 # via - # -r requirements.in # botocore # celery # celery-redbeat @@ -507,10 +400,8 @@ python-statsd==2.1.0 python3-openid==3.1.0 # via social-auth-core python3-saml==1.12.0 - # via -r requirements.in pytz==2023.3 # via - # -r requirements.in # clickhouse-driver # djangorestframework # dlt @@ -524,32 +415,26 @@ pyyaml==6.0.1 # via # dlt # drf-spectacular - # jsonschema-path qrcode==7.4.2 # via django-two-factor-auth redis==4.5.4 # via - # -r requirements.in # celery-redbeat # django-redis referencing==0.31.1 # via # jsonschema - # jsonschema-path # 
jsonschema-specifications regex==2023.12.25 # via tiktoken requests==2.31.0 # via - # -r requirements.in # dlt # geoip2 # google-api-core # google-cloud-bigquery # infi-clickhouse-orm - # jsonschema-path # posthoganalytics - # prance # requests-oauthlib # snowflake-connector-python # social-auth-core @@ -557,41 +442,32 @@ requests==2.31.0 # tiktoken # webdriver-manager requests-oauthlib==1.3.0 - # via - # -r requirements.in - # social-auth-core + # via social-auth-core requirements-parser==0.5.0 # via dlt retry==0.9.2 - # via -r requirements.in -rfc3339-validator==0.1.4 - # via openapi-schema-validator rpds-py==0.16.2 # via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.21 - # via prance -ruamel-yaml-clib==0.2.7 - # via ruamel-yaml s3fs==2023.10.0 - # via -r requirements.in s3transfer==0.6.0 # via boto3 scikit-learn==1.4.0 - # via -r requirements.in scipy==1.12.0 # via scikit-learn selenium==4.1.5 - # via -r requirements.in semantic-version==2.8.5 - # via -r requirements.in semver==3.0.2 # via dlt -sentry-sdk==1.14.0 - # via -r requirements.in +sentry-sdk==1.44.1 +setuptools==69.5.1 + # via + # dlt + # gunicorn + # infi-clickhouse-orm simplejson==3.19.2 # via dlt six==1.16.0 @@ -601,11 +477,8 @@ six==1.16.0 # google-auth # isodate # posthoganalytics - # prance # python-dateutil - # rfc3339-validator slack-sdk==3.17.1 - # via -r requirements.in smmap==5.0.1 # via gitdb sniffio==1.2.0 @@ -615,33 +488,21 @@ sniffio==1.2.0 # openai # trio snowflake-connector-python==3.6.0 - # via -r requirements.in social-auth-app-django==5.0.0 - # via -r requirements.in social-auth-core==4.3.0 - # via - # -r requirements.in - # social-auth-app-django + # via social-auth-app-django sortedcontainers==2.4.0 # via # snowflake-connector-python # trio sqlalchemy==2.0.23 - # via -r requirements.in sqlparse==0.4.4 - # via - # -r requirements.in - # django + # via django statshog==1.0.6 - # via -r requirements.in stripe==7.4.0 - # via -r requirements.in structlog==23.2.0 - # via - # -r requirements.in - # django-structlog + # via django-structlog temporalio==1.4.0 - # via -r requirements.in tenacity==8.2.3 # via # celery-redbeat @@ -649,15 +510,15 @@ tenacity==8.2.3 threadpoolctl==3.3.0 # via scikit-learn tiktoken==0.6.0 - # via -r requirements.in + # via sentry-sdk token-bucket==0.3.0 - # via -r requirements.in +tomli==2.0.1 + # via black tomlkit==0.12.3 # via # dlt # snowflake-connector-python toronado==0.1.0 - # via -r requirements.in tqdm==4.64.1 # via openai trio==0.20.0 @@ -666,20 +527,15 @@ trio==0.20.0 # trio-websocket trio-websocket==0.9.2 # via selenium -types-awscrt==0.16.17 - # via - # botocore-stubs - # types-s3transfer types-protobuf==4.22.0.0 # via temporalio -types-s3transfer==0.6.1 - # via boto3-stubs types-setuptools==69.0.0.0 # via requirements-parser typing-extensions==4.7.1 # via # anyio # asgiref + # black # dlt # openai # psycopg @@ -701,9 +557,8 @@ unicodecsv==0.14.1 # via djangorestframework-csv uritemplate==4.1.1 # via drf-spectacular -urllib3[secure,socks]==1.26.18 +urllib3==1.26.18 # via - # -r requirements.in # botocore # django-revproxy # geoip2 @@ -711,7 +566,6 @@ urllib3[secure,socks]==1.26.18 # requests # selenium # sentry-sdk - # urllib3 urllib3-secure-extra==0.1.0 # via urllib3 vine==5.0.0 @@ -722,11 +576,9 @@ vine==5.0.0 wcwidth==0.2.6 # via prompt-toolkit webdriver-manager==4.0.1 - # via -r requirements.in wheel==0.42.0 # via astunparse whitenoise==6.5.0 - # via -r requirements.in wrapt==1.15.0 # via aiobotocore wsproto==1.1.0 @@ -735,12 +587,3 @@ 
xmlsec==1.3.13 # via python3-saml yarl==1.7.2 # via aiohttp -zipp==3.17.0 - # via importlib-metadata -zope-event==5.0 - # via gevent -zope-interface==6.1 - # via gevent - -# The following packages are considered to be unsafe in a requirements file: -# setuptools