diff --git a/.eslintrc.js b/.eslintrc.js index 8847173b0c79f..91fe2aed3b1e8 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -12,7 +12,7 @@ const globals = { } module.exports = { - ignorePatterns: ['node_modules', 'plugin-server'], + ignorePatterns: ['node_modules', 'plugin-server', 'cypress'], env, settings: { react: { diff --git a/.github/workflows/ci-frontend.yml b/.github/workflows/ci-frontend.yml index d290a0594bbf0..2ea70218bd608 100644 --- a/.github/workflows/ci-frontend.yml +++ b/.github/workflows/ci-frontend.yml @@ -2,10 +2,6 @@ name: Frontend CI on: pull_request: - # NOTE: by running on master, aside from highlight issues on master it also - # ensures we have e.g. node modules cached for master, which can then be - # used for branches. See https://github.com/actions/cache#cache-scopes for - # scope details. push: branches: - master @@ -15,28 +11,71 @@ concurrency: cancel-in-progress: true jobs: + # Job to decide if we should run frontend ci + # See https://github.com/dorny/paths-filter#conditional-execution for more details + # we skip each step individually, so they are still reported as success + # because many of them are required for CI checks to be green + changes: + runs-on: ubuntu-latest + timeout-minutes: 5 + name: Determine need to run frontend checks + outputs: + frontend: ${{ steps.filter.outputs.frontend }} + steps: + # For pull requests it's not necessary to check out the code, but we + # also want this to run on master, so we need to check out + - uses: actions/checkout@v3 + + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: | + frontend: + # Avoid running frontend tests for irrelevant changes + # NOTE: we are at risk of missing a dependency here. 
+ - 'bin/**' + - 'frontend/**' + # Make sure we run if someone is explicitly change the workflow + - .github/workflows/ci-frontend.yml + # various JS config files + - .eslintrc.js + - .prettier* + - babel.config.js + - jest.*.ts + - tsconfig.json + - tsconfig.*.json + - webpack.config.js + - postcss.config.js + - stylelint* + frontend-code-quality: name: Code quality checks + needs: changes # kea typegen and typescript:check need some more oomph runs-on: ubuntu-latest steps: + # we need at least one thing to run to make sure we include everything for required jobs - uses: actions/checkout@v3 - name: Install pnpm + if: needs.changes.outputs.frontend == 'true' uses: pnpm/action-setup@v2 with: version: 8.x.x - name: Set up Node.js - uses: buildjet/setup-node@v3 + if: needs.changes.outputs.frontend == 'true' + uses: actions/setup-node@v3 with: node-version: 18 - name: Get pnpm cache directory path + if: needs.changes.outputs.frontend == 'true' id: pnpm-cache-dir run: echo "PNPM_STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT - uses: actions/cache@v3 + if: needs.changes.outputs.frontend == 'true' id: pnpm-cache with: path: ${{ steps.pnpm-cache-dir.outputs.PNPM_STORE_PATH }} @@ -44,24 +83,31 @@ jobs: restore-keys: ${{ runner.os }}-pnpm-cypress- - name: Install package.json dependencies with pnpm + if: needs.changes.outputs.frontend == 'true' run: pnpm install --frozen-lockfile - name: Check formatting with prettier + if: needs.changes.outputs.frontend == 'true' run: pnpm prettier:check - name: Lint with Stylelint + if: needs.changes.outputs.frontend == 'true' run: pnpm lint:css - name: Generate logic types and run typescript with strict + if: needs.changes.outputs.frontend == 'true' run: pnpm typegen:write && pnpm typescript:check - name: Lint with ESLint + if: needs.changes.outputs.frontend == 'true' run: pnpm lint:js - name: Check if "schema.json" is up to date + if: needs.changes.outputs.frontend == 'true' run: pnpm schema:build:json && git diff --exit-code - name: 
Check toolbar bundle size + if: needs.changes.outputs.frontend == 'true' uses: preactjs/compressed-size-action@v2 with: build-script: 'build' @@ -70,6 +116,7 @@ jobs: jest: runs-on: ubuntu-latest + needs: changes name: Jest test (${{ matrix.chunk }}) strategy: @@ -79,24 +126,29 @@ jobs: chunk: [1, 2, 3] steps: + # we need at least one thing to run to make sure we include everything for required jobs - uses: actions/checkout@v3 - name: Install pnpm + if: needs.changes.outputs.frontend == 'true' uses: pnpm/action-setup@v2 with: version: 8.x.x - name: Set up Node.js - uses: buildjet/setup-node@v3 + if: needs.changes.outputs.frontend == 'true' + uses: actions/setup-node@v3 with: node-version: 18 cache: pnpm - name: Install package.json dependencies with pnpm + if: needs.changes.outputs.frontend == 'true' run: pnpm install --frozen-lockfile - name: Test with Jest # set maxWorkers or Jest only uses 1 CPU in GitHub Actions run: pnpm test:unit --maxWorkers=2 --shard=${{ matrix.chunk }}/3 + if: needs.changes.outputs.frontend == 'true' env: NODE_OPTIONS: --max-old-space-size=6144 diff --git a/.run/Dev.run.xml b/.run/Dev.run.xml new file mode 100644 index 0000000000000..8e0efc8b0e7b3 --- /dev/null +++ b/.run/Dev.run.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index a9ed14cbd01cc..4047d14a6106b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,3 @@ # Changelog -Updates to the PostHog project can be found on [https://posthog.com/changelog](our changelog). \ No newline at end of file +Updates to the PostHog project can be found on [our changelog](https://posthog.com/changelog). 
diff --git a/cypress/e2e/early-access-management.cy.ts b/cypress/e2e/early-access-management.cy.ts index 9a594d8d1c34c..8736a39ab945a 100644 --- a/cypress/e2e/early-access-management.cy.ts +++ b/cypress/e2e/early-access-management.cy.ts @@ -6,7 +6,7 @@ describe('Early Access Management', () => { it('Early access feature new and list', () => { // load an empty early access feature page cy.get('h1').should('contain', 'Early Access Management') - cy.title().should('equal', 'Early Access Management • PostHog') + cy.title().should('equal', 'Early access features • PostHog') cy.get('h2').should('contain', 'Create your first feature') cy.get('[data-attr="product-introduction-docs-link"]').should( 'contain', diff --git a/cypress/e2e/insights.cy.ts b/cypress/e2e/insights.cy.ts index 0e449825b2194..5157d21429ba9 100644 --- a/cypress/e2e/insights.cy.ts +++ b/cypress/e2e/insights.cy.ts @@ -24,7 +24,7 @@ describe('Insights', () => { cy.get('[data-attr=breadcrumb-0]').should('contain', 'Hogflix') cy.get('[data-attr=breadcrumb-1]').should('contain', 'Hogflix Demo App') - cy.get('[data-attr=breadcrumb-2]').should('have.text', 'Insights') + cy.get('[data-attr=breadcrumb-2]').should('have.text', 'Product analytics') cy.get('[data-attr=breadcrumb-3]').should('have.text', 'insight name') }) diff --git a/docker/clickhouse/config.xml b/docker/clickhouse/config.xml index f3f858be7d117..7047c93e5c5d8 100644 --- a/docker/clickhouse/config.xml +++ b/docker/clickhouse/config.xml @@ -20,17 +20,20 @@ - trace - test (not for production usage) - [1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114 + [1]: + https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114 --> trace /var/log/clickhouse-server/clickhouse-server.log /var/log/clickhouse-server/clickhouse-server.err.log 1000M 10 - + - + @@ -217,7 +225,8 @@ /path/to/ssl_ca_cert_file - none @@ -232,10 +241,12 @@ false - + - + 
/etc/clickhouse-server/server.crt /etc/clickhouse-server/server.key + true true sslv2,sslv3 @@ -264,24 +276,30 @@ - + 100 0 @@ -302,21 +320,25 @@ --> 0.9 - 4194304 - 0 - @@ -341,14 +363,18 @@ - - - + true @@ -644,14 +698,16 @@ - + localhost 9000 - + @@ -666,22 +722,28 @@ Example: "yandex.ru", "yandex.ru." and "www.yandex.ru" are different hosts. If port is explicitly specified in URL, the host:port is checked as a whole. If host specified here without port, any port with this host allowed. - "yandex.ru" -> "yandex.ru:443", "yandex.ru:80" etc. is allowed, but "yandex.ru:80" -> only "yandex.ru:80" is allowed. - If the host is specified as IP address, it is checked as specified in URL. Example: "[2a02:6b8:a::a]". - If there are redirects and support for redirects is enabled, every redirect (the Location field) is checked. + "yandex.ru" -> "yandex.ru:443", "yandex.ru:80" etc. is allowed, but "yandex.ru:80" -> only + "yandex.ru:80" is allowed. + If the host is specified as IP address, it is checked as specified in URL. Example: + "[2a02:6b8:a::a]". + If there are redirects and support for redirects is enabled, every redirect (the Location field) is + checked. Host should be specified using the host xml tag: yandex.ru --> .* - @@ -701,7 +763,8 @@ @@ -710,7 +773,6 @@ - 3600 @@ -788,7 +850,8 @@ system query_log
toYYYYMM(event_date) - @@ -843,7 +909,8 @@ + Part log contains information about all actions with parts in MergeTree tables (creation, deletion, + merges, downloads).--> system part_log
@@ -852,8 +919,10 @@
- + system metric_log
@@ -933,7 +1003,8 @@ --> - + @@ -965,12 +1036,14 @@ --> - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + @@ -1032,7 +1107,8 @@ - + /var/lib/clickhouse/format_schemas/ - false - + false - + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 @@ -1183,4 +1267,4 @@ --> - + \ No newline at end of file diff --git a/docker/clickhouse/users-dev.xml b/docker/clickhouse/users-dev.xml index dd6e54d7c5de3..704e99ef9e961 100644 --- a/docker/clickhouse/users-dev.xml +++ b/docker/clickhouse/users-dev.xml @@ -15,7 +15,8 @@ with minimum number of different symbols between replica's hostname and local hostname (Hamming distance). in_order - first live replica is chosen in specified order. - first_or_random - if first replica one has higher number of errors, pick a random one from replicas with minimum number of errors. + first_or_random - if first replica one has higher number of errors, pick a random one from replicas + with minimum number of errors. --> random @@ -45,30 +46,39 @@ Password could be empty. If you want to specify SHA256, place it in 'password_sha256_hex' element. - Example: 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 - Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July 2019). + Example: + 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 + Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July + 2019). If you want to specify double SHA1, place it in 'password_double_sha1_hex' element. - Example: e395796d6546b1b65db9d665cd43f0e858dd4303 + Example: + e395796d6546b1b65db9d665cd43f0e858dd4303 - If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for authentication, + If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for + authentication, place its name in 'server' element inside 'ldap' element. 
Example: my_ldap_server - If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in the main config), + If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in + the main config), place 'kerberos' element instead of 'password' (and similar) elements. - The name part of the canonical principal name of the initiator must match the user name for authentication to succeed. - You can also place 'realm' element inside 'kerberos' element to further restrict authentication to only those requests + The name part of the canonical principal name of the initiator must match the user name for + authentication to succeed. + You can also place 'realm' element inside 'kerberos' element to further restrict authentication to + only those requests whose initiator's realm matches it. Example: Example: EXAMPLE.COM How to generate decent password: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha256sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha256sum | tr -d '-' In first line will be password and in second - corresponding SHA256. How to generate double SHA1: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' In first line will be password and in second - corresponding double SHA1. --> @@ -89,7 +99,8 @@ To check access, DNS query is performed, and all received addresses compared to peer address. Regular expression for host names. Example, ^server\d\d-\d\d-\d\.yandex\.ru$ To check access, DNS PTR query is performed for peer address and then regexp is applied. 
- Then, for result of PTR query, another DNS query is performed and all received addresses compared to peer address. + Then, for result of PTR query, another DNS query is performed and all received addresses compared + to peer address. Strongly recommended that regexp is ends with $ All results of DNS requests are cached till server restart. --> @@ -126,4 +137,4 @@ - + \ No newline at end of file diff --git a/docker/clickhouse/users.xml b/docker/clickhouse/users.xml index 49ac9f73e0de5..ece3df0f09fbe 100644 --- a/docker/clickhouse/users.xml +++ b/docker/clickhouse/users.xml @@ -15,7 +15,8 @@ with minimum number of different symbols between replica's hostname and local hostname (Hamming distance). in_order - first live replica is chosen in specified order. - first_or_random - if first replica one has higher number of errors, pick a random one from replicas with minimum number of errors. + first_or_random - if first replica one has higher number of errors, pick a random one from replicas + with minimum number of errors. --> random @@ -43,30 +44,39 @@ Password could be empty. If you want to specify SHA256, place it in 'password_sha256_hex' element. - Example: 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 - Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July 2019). + Example: + 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 + Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July + 2019). If you want to specify double SHA1, place it in 'password_double_sha1_hex' element. 
- Example: e395796d6546b1b65db9d665cd43f0e858dd4303 + Example: + e395796d6546b1b65db9d665cd43f0e858dd4303 - If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for authentication, + If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for + authentication, place its name in 'server' element inside 'ldap' element. Example: my_ldap_server - If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in the main config), + If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in + the main config), place 'kerberos' element instead of 'password' (and similar) elements. - The name part of the canonical principal name of the initiator must match the user name for authentication to succeed. - You can also place 'realm' element inside 'kerberos' element to further restrict authentication to only those requests + The name part of the canonical principal name of the initiator must match the user name for + authentication to succeed. + You can also place 'realm' element inside 'kerberos' element to further restrict authentication to + only those requests whose initiator's realm matches it. Example: Example: EXAMPLE.COM How to generate decent password: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha256sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha256sum | tr -d '-' In first line will be password and in second - corresponding SHA256. 
How to generate double SHA1: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' In first line will be password and in second - corresponding double SHA1. --> @@ -87,7 +97,8 @@ To check access, DNS query is performed, and all received addresses compared to peer address. Regular expression for host names. Example, ^server\d\d-\d\d-\d\.yandex\.ru$ To check access, DNS PTR query is performed for peer address and then regexp is applied. - Then, for result of PTR query, another DNS query is performed and all received addresses compared to peer address. + Then, for result of PTR query, another DNS query is performed and all received addresses compared + to peer address. Strongly recommended that regexp is ends with $ All results of DNS requests are cached till server restart. 
--> @@ -124,4 +135,4 @@ - + \ No newline at end of file diff --git a/ee/api/feature_flag_role_access.py b/ee/api/feature_flag_role_access.py index d3ca7a68c1a32..3ce77dca89599 100644 --- a/ee/api/feature_flag_role_access.py +++ b/ee/api/feature_flag_role_access.py @@ -35,7 +35,11 @@ def has_permission(self, request, view): return True try: feature_flag: FeatureFlag = FeatureFlag.objects.get(id=view.parents_query_dict["feature_flag_id"]) - if feature_flag.created_by.uuid == request.user.uuid: + if ( + hasattr(feature_flag, "created_by") + and feature_flag.created_by + and feature_flag.created_by.uuid == request.user.uuid + ): return True except FeatureFlag.DoesNotExist: raise exceptions.NotFound("Feature flag not found.") diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index 88addd2d7f416..c37c3ee9d6482 100644 --- a/ee/api/test/test_billing.py +++ b/ee/api/test/test_billing.py @@ -2,9 +2,9 @@ from typing import Any, Dict, List from unittest.mock import MagicMock, patch from uuid import uuid4 +from zoneinfo import ZoneInfo import jwt -from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta from django.utils.timezone import now from freezegun import freeze_time diff --git a/ee/api/test/test_feature_flag_role_access.py b/ee/api/test/test_feature_flag_role_access.py index f143f10505f0f..3cd4e947d90c9 100644 --- a/ee/api/test/test_feature_flag_role_access.py +++ b/ee/api/test/test_feature_flag_role_access.py @@ -37,6 +37,26 @@ def test_can_always_add_role_access_if_creator_of_feature_flag(self): self.assertEqual(flag_role.role.name, self.eng_role.name) self.assertEqual(flag_role.feature_flag.id, self.feature_flag.id) + def test_role_access_with_deleted_creator_of_feature_flag(self): + OrganizationResourceAccess.objects.create( + resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, + access_level=OrganizationResourceAccess.AccessLevel.CAN_ONLY_VIEW, + organization=self.organization, + ) + + flag = 
FeatureFlag.objects.create( + created_by=None, + team=self.team, + key="flag_role_access_none", + name="Flag role access", + ) + self.assertEqual(self.user.role_memberships.count(), 0) + flag_role_access_create_res = self.client.post( + f"/api/projects/@current/feature_flags/{flag.id}/role_access", + {"role_id": self.eng_role.id}, + ) + self.assertEqual(flag_role_access_create_res.status_code, status.HTTP_403_FORBIDDEN) + def test_cannot_add_role_access_if_feature_flags_access_level_too_low_and_not_creator(self): OrganizationResourceAccess.objects.create( resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, diff --git a/ee/api/test/test_organization.py b/ee/api/test/test_organization.py index 2f1b11bb95256..a77361dc579e8 100644 --- a/ee/api/test/test_organization.py +++ b/ee/api/test/test_organization.py @@ -1,6 +1,7 @@ import datetime as dt import random -from unittest.mock import ANY, patch +from unittest import mock +from unittest.mock import ANY, call, patch from freezegun.api import freeze_time from rest_framework import status @@ -104,11 +105,21 @@ def test_delete_last_organization(self, mock_capture): "Did not return a 404 on trying to delete a nonexistent org", ) - mock_capture.assert_called_once_with( - self.user.distinct_id, - "organization deleted", - organization_props, - groups={"instance": ANY, "organization": str(org_id)}, + mock_capture.assert_has_calls( + [ + call( + self.user.distinct_id, + "membership level changed", + properties={"new_level": 15, "previous_level": 1}, + groups=mock.ANY, + ), + call( + self.user.distinct_id, + "organization deleted", + organization_props, + groups={"instance": mock.ANY, "organization": str(org_id)}, + ), + ] ) def test_no_delete_organization_not_owning(self): diff --git a/ee/billing/billing_manager.py b/ee/billing/billing_manager.py index 5a8119c57df9b..324b158fe071d 100644 --- a/ee/billing/billing_manager.py +++ b/ee/billing/billing_manager.py @@ -6,6 +6,7 @@ import structlog from django.utils import 
timezone from rest_framework.exceptions import NotAuthenticated +from sentry_sdk import capture_exception from ee.billing.billing_types import BillingStatus from ee.billing.quota_limiting import set_org_usage_summary, sync_org_quota_limits @@ -13,7 +14,7 @@ from ee.settings import BILLING_SERVICE_URL from posthog.cloud_utils import get_cached_instance_license from posthog.models import Organization -from posthog.models.organization import OrganizationUsageInfo +from posthog.models.organization import OrganizationMembership, OrganizationUsageInfo logger = structlog.get_logger(__name__) @@ -114,6 +115,14 @@ def update_billing_distinct_ids(self, organization: Organization) -> None: distinct_ids = list(organization.members.values_list("distinct_id", flat=True)) self.update_billing(organization, {"distinct_ids": distinct_ids}) + def update_billing_customer_email(self, organization: Organization) -> None: + try: + owner_membership = OrganizationMembership.objects.get(organization=organization, level=15) + user = owner_membership.user + self.update_billing(organization, {"org_customer_email": user.email}) + except Exception as e: + capture_exception(e) + def deactivate_products(self, organization: Organization, products: str) -> None: res = requests.get( f"{BILLING_SERVICE_URL}/api/billing/deactivate?products={products}", diff --git a/ee/billing/test/test_billing_manager.py b/ee/billing/test/test_billing_manager.py index e0c09e0d071fb..1dbbcb464f068 100644 --- a/ee/billing/test/test_billing_manager.py +++ b/ee/billing/test/test_billing_manager.py @@ -33,3 +33,26 @@ def test_update_billing_distinct_ids(self, billing_patch_request_mock: MagicMock BillingManager(license).update_billing_distinct_ids(organization) assert billing_patch_request_mock.call_count == 1 assert len(billing_patch_request_mock.call_args[1]["json"]["distinct_ids"]) == 2 + + @patch( + "ee.billing.billing_manager.requests.patch", + return_value=MagicMock(status_code=200, 
json=MagicMock(return_value={"text": "ok"})), + ) + def test_update_billing_customer_email(self, billing_patch_request_mock: MagicMock): + organization = self.organization + license = super(LicenseManager, cast(LicenseManager, License.objects)).create( + key="key123::key123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), + ) + User.objects.create_and_join( + organization=organization, + email="y@x.com", + password=None, + level=OrganizationMembership.Level.OWNER, + ) + organization.refresh_from_db() + assert len(organization.members.values_list("distinct_id", flat=True)) == 2 # one exists in the test base + BillingManager(license).update_billing_customer_email(organization) + assert billing_patch_request_mock.call_count == 1 + assert billing_patch_request_mock.call_args[1]["json"]["org_customer_email"] == "y@x.com" diff --git a/ee/clickhouse/test/test_client.py b/ee/clickhouse/test/test_client.py deleted file mode 100644 index ab5ba1b4a53e0..0000000000000 --- a/ee/clickhouse/test/test_client.py +++ /dev/null @@ -1,129 +0,0 @@ -from unittest.mock import patch - -import fakeredis -from clickhouse_driver.errors import ServerException -from django.test import TestCase - -from posthog.clickhouse.client import execute_async as client -from posthog.client import sync_execute -from posthog.test.base import ClickhouseTestMixin - - -class ClickhouseClientTestCase(TestCase, ClickhouseTestMixin): - def setUp(self): - self.redis_client = fakeredis.FakeStrictRedis() - - def test_async_query_client(self): - query = "SELECT 1+1" - team_id = 2 - query_id = client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - result = client.get_status_or_results(team_id, query_id) - self.assertFalse(result.error) - self.assertTrue(result.complete) - self.assertEqual(result.results, [[2]]) - - def test_async_query_client_errors(self): - query = "SELECT WOW SUCH DATA FROM NOWHERE THIS WILL CERTAINLY WORK" - team_id = 2 - self.assertRaises( - 
ServerException, - client.enqueue_execute_with_progress, - **{"team_id": team_id, "query": query, "bypass_celery": True}, - ) - try: - query_id = client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - except Exception: - pass - - result = client.get_status_or_results(team_id, query_id) - self.assertTrue(result.error) - self.assertRegex(result.error_message, "Code: 62.\nDB::Exception: Syntax error:") - - def test_async_query_client_does_not_leak(self): - query = "SELECT 1+1" - team_id = 2 - wrong_team = 5 - query_id = client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - result = client.get_status_or_results(wrong_team, query_id) - self.assertTrue(result.error) - self.assertEqual(result.error_message, "Requesting team is not executing team") - - @patch("posthog.clickhouse.client.execute_async.enqueue_clickhouse_execute_with_progress") - def test_async_query_client_is_lazy(self, execute_sync_mock): - query = "SELECT 4 + 4" - team_id = 2 - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Try the same query again - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Try the same query again (for good measure!) - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Assert that we only called clickhouse once - execute_sync_mock.assert_called_once() - - @patch("posthog.clickhouse.client.execute_async.enqueue_clickhouse_execute_with_progress") - def test_async_query_client_is_lazy_but_not_too_lazy(self, execute_sync_mock): - query = "SELECT 8 + 8" - team_id = 2 - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Try the same query again, but with force - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True, force=True) - - # Try the same query again (for good measure!) 
- client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Assert that we called clickhouse twice - self.assertEqual(execute_sync_mock.call_count, 2) - - @patch("posthog.clickhouse.client.execute_async.enqueue_clickhouse_execute_with_progress") - def test_async_query_client_manual_query_uuid(self, execute_sync_mock): - # This is a unique test because technically in the test pattern `SELECT 8 + 8` is already - # in redis. This tests to make sure it is treated as a unique run of that query - query = "SELECT 8 + 8" - team_id = 2 - query_id = "I'm so unique" - client.enqueue_execute_with_progress(team_id, query, query_id=query_id, bypass_celery=True) - - # Try the same query again, but with force - client.enqueue_execute_with_progress(team_id, query, query_id=query_id, bypass_celery=True, force=True) - - # Try the same query again (for good measure!) - client.enqueue_execute_with_progress(team_id, query, query_id=query_id, bypass_celery=True) - - # Assert that we called clickhouse twice - self.assertEqual(execute_sync_mock.call_count, 2) - - def test_client_strips_comments_from_request(self): - """ - To ensure we can easily copy queries from `system.query_log` in e.g. - Metabase, we strip comments from the query we send. Metabase doesn't - display multilined output. - - See https://github.com/metabase/metabase/issues/14253 - - Note I'm not really testing much complexity, I trust that those will - come out as failures in other tests. - """ - from posthog.clickhouse.query_tagging import tag_queries - - # First add in the request information that should be added to the sql. 
- # We check this to make sure it is not removed by the comment stripping - with self.capture_select_queries() as sqls: - tag_queries(kind="request", id="1") - sync_execute( - query=""" - -- this request returns 1 - SELECT 1 - """ - ) - self.assertEqual(len(sqls), 1) - first_query = sqls[0] - self.assertIn(f"SELECT 1", first_query) - self.assertNotIn("this request returns", first_query) - - # Make sure it still includes the "annotation" comment that includes - # request routing information for debugging purposes - self.assertIn("/* request:1 */", first_query) diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr index f039a2994204e..3474ae77b858f 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr @@ -1,6 +1,6 @@ # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results ' - /* user_id:126 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:131 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events diff --git a/ee/clickhouse/views/test/test_clickhouse_trends.py b/ee/clickhouse/views/test/test_clickhouse_trends.py index 75ab015e39a15..8bf86c1524006 100644 --- a/ee/clickhouse/views/test/test_clickhouse_trends.py +++ b/ee/clickhouse/views/test/test_clickhouse_trends.py @@ -118,7 +118,7 @@ def test_includes_only_intervals_within_range(client: Client): { "action": ANY, "breakdown_value": cohort["id"], - "label": "$pageview - test cohort", + "label": "test cohort", "count": 3.0, "data": [1.0, 1.0, 1.0], # Prior to the fix this would also include '29-Aug-2021' @@ -827,14 +827,12 @@ def test_insight_trends_cumulative(self): ], ) data_response = get_trends_time_series_ok(self.client, 
request, self.team) - person_response = get_people_from_url_ok( - self.client, data_response["$pageview - val"]["2012-01-14"].person_url - ) + person_response = get_people_from_url_ok(self.client, data_response["val"]["2012-01-14"].person_url) - assert data_response["$pageview - val"]["2012-01-13"].value == 1 - assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val" - assert data_response["$pageview - val"]["2012-01-14"].value == 3 - assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012" + assert data_response["val"]["2012-01-13"].value == 1 + assert data_response["val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-14"].value == 3 + assert data_response["val"]["2012-01-14"].label == "14-Jan-2012" assert sorted([p["id"] for p in person_response]) == sorted( [str(created_people["p1"].uuid), str(created_people["p3"].uuid)] @@ -862,12 +860,12 @@ def test_insight_trends_cumulative(self): properties=[{"type": "person", "key": "key", "value": "some_val"}], ) data_response = get_trends_time_series_ok(self.client, request, self.team) - people = get_people_from_url_ok(self.client, data_response["$pageview - val"]["2012-01-14"].person_url) + people = get_people_from_url_ok(self.client, data_response["val"]["2012-01-14"].person_url) - assert data_response["$pageview - val"]["2012-01-13"].value == 1 - assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val" - assert data_response["$pageview - val"]["2012-01-14"].value == 3 - assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012" + assert data_response["val"]["2012-01-13"].value == 1 + assert data_response["val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-14"].value == 3 + assert data_response["val"]["2012-01-14"].label == "14-Jan-2012" assert sorted([p["id"] for p in people]) == sorted( [str(created_people["p1"].uuid), str(created_people["p3"].uuid)] @@ -894,12 +892,12 @@ 
def test_insight_trends_cumulative(self): ], ) data_response = get_trends_time_series_ok(self.client, request, self.team) - people = get_people_from_url_ok(self.client, data_response["$pageview - val"]["2012-01-14"].person_url) + people = get_people_from_url_ok(self.client, data_response["val"]["2012-01-14"].person_url) - assert data_response["$pageview - val"]["2012-01-13"].value == 1 - assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val" - assert data_response["$pageview - val"]["2012-01-14"].value == 2 - assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012" + assert data_response["val"]["2012-01-13"].value == 1 + assert data_response["val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-14"].value == 2 + assert data_response["val"]["2012-01-14"].label == "14-Jan-2012" assert sorted([p["id"] for p in people]) == sorted( [str(created_people["p1"].uuid), str(created_people["p3"].uuid)] @@ -933,12 +931,10 @@ def test_breakdown_with_filter(self): properties=[{"key": "key", "value": "oh", "operator": "not_icontains"}], ) data_response = get_trends_time_series_ok(self.client, params, self.team) - person_response = get_people_from_url_ok( - self.client, data_response["sign up - val"]["2012-01-13"].person_url - ) + person_response = get_people_from_url_ok(self.client, data_response["val"]["2012-01-13"].person_url) - assert data_response["sign up - val"]["2012-01-13"].value == 1 - assert data_response["sign up - val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-13"].value == 1 + assert data_response["val"]["2012-01-13"].breakdown_value == "val" assert sorted([p["id"] for p in person_response]) == sorted([str(created_people["person1"].uuid)]) @@ -950,11 +946,9 @@ def test_breakdown_with_filter(self): events=[{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], ) aggregate_response = get_trends_aggregate_ok(self.client, params, self.team) - 
aggregate_person_response = get_people_from_url_ok( - self.client, aggregate_response["sign up - val"].person_url - ) + aggregate_person_response = get_people_from_url_ok(self.client, aggregate_response["val"].person_url) - assert aggregate_response["sign up - val"].value == 1 + assert aggregate_response["val"].value == 1 assert sorted([p["id"] for p in aggregate_person_response]) == sorted([str(created_people["person1"].uuid)]) def test_insight_trends_compare(self): diff --git a/frontend/__snapshots__/components-command-bar--actions.png b/frontend/__snapshots__/components-command-bar--actions.png new file mode 100644 index 0000000000000..302fb86f50402 Binary files /dev/null and b/frontend/__snapshots__/components-command-bar--actions.png differ diff --git a/frontend/__snapshots__/components-command-bar--search.png b/frontend/__snapshots__/components-command-bar--search.png new file mode 100644 index 0000000000000..54dff6f4974bf Binary files /dev/null and b/frontend/__snapshots__/components-command-bar--search.png differ diff --git a/frontend/__snapshots__/components-editable-field--default.png b/frontend/__snapshots__/components-editable-field--default.png index 2d16114431388..f68ba65618170 100644 Binary files a/frontend/__snapshots__/components-editable-field--default.png and b/frontend/__snapshots__/components-editable-field--default.png differ diff --git a/frontend/__snapshots__/components-networkrequesttiming--basic.png b/frontend/__snapshots__/components-networkrequesttiming--basic.png index 8247b433f71c8..effc91c21a0a6 100644 Binary files a/frontend/__snapshots__/components-networkrequesttiming--basic.png and b/frontend/__snapshots__/components-networkrequesttiming--basic.png differ diff --git a/frontend/__snapshots__/components-product-empty-state--empty-with-action.png b/frontend/__snapshots__/components-product-empty-state--empty-with-action.png index dd10594e21d1c..4c6bc2766b5e4 100644 Binary files 
a/frontend/__snapshots__/components-product-empty-state--empty-with-action.png and b/frontend/__snapshots__/components-product-empty-state--empty-with-action.png differ diff --git a/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png b/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png index d9ed865218733..a93edc4abb8e1 100644 Binary files a/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png and b/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png differ diff --git a/frontend/__snapshots__/components-product-empty-state--product-introduction.png b/frontend/__snapshots__/components-product-empty-state--product-introduction.png index dd10594e21d1c..4c6bc2766b5e4 100644 Binary files a/frontend/__snapshots__/components-product-empty-state--product-introduction.png and b/frontend/__snapshots__/components-product-empty-state--product-introduction.png differ diff --git a/frontend/__snapshots__/components-properties-table--properties-table.png b/frontend/__snapshots__/components-properties-table--properties-table.png new file mode 100644 index 0000000000000..0ebb3a71ccb83 Binary files /dev/null and b/frontend/__snapshots__/components-properties-table--properties-table.png differ diff --git a/frontend/__snapshots__/layout-navigation--app-page-with-side-bar-shown.png b/frontend/__snapshots__/layout-navigation--app-page-with-side-bar-shown.png index b49b6fc4bd341..b2ae49cd91f5b 100644 Binary files a/frontend/__snapshots__/layout-navigation--app-page-with-side-bar-shown.png and b/frontend/__snapshots__/layout-navigation--app-page-with-side-bar-shown.png differ diff --git a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png index 2038fbe5c8bb2..02abe5eaa23a3 100644 Binary files a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png 
and b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-banner--closable.png b/frontend/__snapshots__/lemon-ui-lemon-banner--closable.png index a05dd78b3e3e7..a7a8ac55c5061 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-banner--closable.png and b/frontend/__snapshots__/lemon-ui-lemon-banner--closable.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-banner--dismissable.png b/frontend/__snapshots__/lemon-ui-lemon-banner--dismissable.png index be2ef2e5a884b..540a8a3ef2c39 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-banner--dismissable.png and b/frontend/__snapshots__/lemon-ui-lemon-banner--dismissable.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-banner--error.png b/frontend/__snapshots__/lemon-ui-lemon-banner--error.png index 7db8c557495b9..9389cfa4ea1b2 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-banner--error.png and b/frontend/__snapshots__/lemon-ui-lemon-banner--error.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-banner--info.png b/frontend/__snapshots__/lemon-ui-lemon-banner--info.png index 7c6e78d57caf2..6848c05f89a32 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-banner--info.png and b/frontend/__snapshots__/lemon-ui-lemon-banner--info.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-banner--success.png b/frontend/__snapshots__/lemon-ui-lemon-banner--success.png index 2053ce5ccc6de..f3b58cb98363a 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-banner--success.png and b/frontend/__snapshots__/lemon-ui-lemon-banner--success.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-banner--warning.png b/frontend/__snapshots__/lemon-ui-lemon-banner--warning.png index bf8c975d7385b..3c41933fb5078 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-banner--warning.png and b/frontend/__snapshots__/lemon-ui-lemon-banner--warning.png differ diff 
--git a/frontend/__snapshots__/lemon-ui-lemon-button--as-links.png b/frontend/__snapshots__/lemon-ui-lemon-button--as-links.png index 24ae6fe59d181..292f9ce7d0a99 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-button--as-links.png and b/frontend/__snapshots__/lemon-ui-lemon-button--as-links.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png b/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png index b6109c6884322..90ae77d5ca04a 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png and b/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-table--loading.png b/frontend/__snapshots__/lemon-ui-lemon-table--loading.png index e5852c23bda01..f3f2287fcdb4c 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-table--loading.png and b/frontend/__snapshots__/lemon-ui-lemon-table--loading.png differ diff --git a/frontend/__snapshots__/posthog-3000-keyboard-shortcut--default.png b/frontend/__snapshots__/posthog-3000-keyboard-shortcut--default.png new file mode 100644 index 0000000000000..1f8990c3a32c8 Binary files /dev/null and b/frontend/__snapshots__/posthog-3000-keyboard-shortcut--default.png differ diff --git a/frontend/__snapshots__/posthog-3000-keyboard-shortcut--muted.png b/frontend/__snapshots__/posthog-3000-keyboard-shortcut--muted.png new file mode 100644 index 0000000000000..091bda66799ec Binary files /dev/null and b/frontend/__snapshots__/posthog-3000-keyboard-shortcut--muted.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png index 9329598d4f0f9..db71a2726588c 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png differ diff --git 
a/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png index a63ee9697bb12..3c1f2a921338a 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png differ diff --git a/frontend/__snapshots__/scenes-app-batchexports--create-export.png b/frontend/__snapshots__/scenes-app-batchexports--create-export.png index 5812443d7cc01..51889a6cdcc34 100644 Binary files a/frontend/__snapshots__/scenes-app-batchexports--create-export.png and b/frontend/__snapshots__/scenes-app-batchexports--create-export.png differ diff --git a/frontend/__snapshots__/scenes-app-dashboards--edit.png b/frontend/__snapshots__/scenes-app-dashboards--edit.png index ca634e775f0d7..0bd4f10c2b233 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--edit.png and b/frontend/__snapshots__/scenes-app-dashboards--edit.png differ diff --git a/frontend/__snapshots__/scenes-app-dashboards--show.png b/frontend/__snapshots__/scenes-app-dashboards--show.png index 0bac6702636e7..9f1dac8d8c809 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--show.png and b/frontend/__snapshots__/scenes-app-dashboards--show.png differ diff --git a/frontend/__snapshots__/scenes-app-events--event-explorer.png b/frontend/__snapshots__/scenes-app-events--event-explorer.png index 4ed82b3bdbbdd..7d3287e05481b 100644 Binary files a/frontend/__snapshots__/scenes-app-events--event-explorer.png and b/frontend/__snapshots__/scenes-app-events--event-explorer.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png index b954a89c3fe4f..0c9084824591a 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png and 
b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png b/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png index fd9d704276d4c..683e286506729 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png and b/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--experiments-list.png b/frontend/__snapshots__/scenes-app-experiments--experiments-list.png index f6760a46e6b69..4072657487cb8 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--experiments-list.png and b/frontend/__snapshots__/scenes-app-experiments--experiments-list.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png index 34fc0266e6ee9..766de1662f8ae 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png index c640e778e8505..2d6a0dd22fbb2 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png index ff8d20c392180..a36d7365e7344 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png differ 
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png index f46520b2f73dc..613e89cdbc0d8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png index d3727abe59c66..d2c639e108166 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png index 7f8fbe65c33f4..bcd656dcc26a3 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png index 7f37b4f88ea9e..93a2510edbd3e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png index 52b192f0436ff..88fce2b75ec39 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png index 9aa218739300b..234167ed34ceb 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png index f7301be8fcc9b..7ef2484ddde67 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png index 2426224527e72..8eaedc5c1d3fa 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png index 3f938630e0cee..87320f1ad5b19 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png index 97c300aec44da..92811307a20d4 100644 Binary 
files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png index 451a3e13a2d4f..ee0abcee40c6b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png index 2cd74f758f4f0..cd77f1bbd8447 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png index 5e437308fefd9..b0c052e1b37ff 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png index 3ac105cb27e6c..62ab313160740 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png index 0c68ed20b354c..0f65f00462135 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png index b93219c3d0204..20f8540e7af66 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png index a03a837b14ca3..5ba4dd941fcbf 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png index 57c8868d90d40..64b2955db1cfe 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png index afc88b3502e60..7fde8e692b299 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png 
b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png index 0c51f972a0c9e..67d55e396a376 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png index 1bc80b159fcf0..a41c083a673a9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png index 1fceceed5b2ba..10a781cafc5d0 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png index 7f8d3f622e01d..8760764ed3a04 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png index 91f6f6e560edd..c4212fc4ab28d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png index 
bd131be5920a7..4f11382fa8bb4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png index 38cb27172dece..a4c4a82e5d207 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle.png b/frontend/__snapshots__/scenes-app-insights--lifecycle.png index 89b738cf06472..b5af8e303507d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention--webkit.png index 38102732ee359..3f6259a8a2428 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png index cb52e0f1495f4..8e6390a225b40 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png index ee03a7556c872..998fedb12e86e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png 
differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png index 1e71a2fc11b12..6262a5918f1f1 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png index 3e2ab6d65836f..62dd6f95bbd41 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png index 9d791a3259380..0a8e1c20fdb87 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit.png b/frontend/__snapshots__/scenes-app-insights--retention-edit.png index 45c2836c18811..cd3d9f866c7f4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention.png b/frontend/__snapshots__/scenes-app-insights--retention.png index 9a7251ee3698d..5df60b1e00204 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention.png and b/frontend/__snapshots__/scenes-app-insights--retention.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png b/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png index b1f44c1341ec3..d226c57fe43df 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png index fc56f416a79ff..c458ce60c87b2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png index 3cc58acbcacee..80e20f0a84a3e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness.png b/frontend/__snapshots__/scenes-app-insights--stickiness.png index e3a0289b2bde0..5f6daca8e6c78 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness.png and b/frontend/__snapshots__/scenes-app-insights--stickiness.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png index d3ad0d0416ed0..451decec4637d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png index 8f14499429e91..77b2e0087b84b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png index a2d3654a7b019..39cb74dee61dd 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png index eacd277f8c466..85107049f853f 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png index d2c34da43a597..41ba6d59e1550 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png index bccdd29949efa..f70af9d2e782c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png index 689dcd9839e8d..f57645211428a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area.png 
b/frontend/__snapshots__/scenes-app-insights--trends-area.png index bc2055bb61140..fdd0315060948 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area.png and b/frontend/__snapshots__/scenes-app-insights--trends-area.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png index 9348697847367..5d60dee4f60b7 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png index 6542e66ded261..8df383d36fc1b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png index 9bfb5ce522765..3a7274819f516 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png index c0c5a2092f525..e32072ec0780d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png index 1f0039036ffa4..03c62e056870b 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png index b854c985c0849..c9bd130f6cdab 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png index 932b2d3d43abc..ffaa1c4bf1988 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar.png b/frontend/__snapshots__/scenes-app-insights--trends-bar.png index e6b4d9722076b..74a5209277252 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png index b3a5b20f6394c..81275e0a9df95 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png index 5a1d695b843d6..ab2904ba751e9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png index 9d275c34efbf3..966b934574728 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png index e308b6b409e0d..ca3343a84046c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png index e2d82fc57c0e0..8f97eb8c1a01c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png index 066c2201792a3..48a045de1d451 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png index 0ac5093e23ca3..d4873d617014e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png index 1990ae3d264c0..a158e442de8ff 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png index b3d789e752710..5e1fb2d08feb2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png index 42561f322039c..225acef8aacb3 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png index 8a47ca33b0fd2..c036f5a792f9e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png index 2788c12f3b2c7..ca3654b2fd11d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png 
b/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png index 939ae1dea2f56..d2c5a5a81dcc9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line.png b/frontend/__snapshots__/scenes-app-insights--trends-line.png index 975132fef1b19..49bbad9f70249 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line.png and b/frontend/__snapshots__/scenes-app-insights--trends-line.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png index f3161e700a738..a4feb23317dd1 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png index 20c2074825f8e..a34e6e1d5e272 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png index 80c580574079c..d9cf620c1a161 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number.png b/frontend/__snapshots__/scenes-app-insights--trends-number.png index baf47a94ff5db..03b2456ee4782 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number.png and 
b/frontend/__snapshots__/scenes-app-insights--trends-number.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png index 93b7e9b656117..660bb356237ca 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png index d33d21c2387f1..cadbd8a861f31 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png index e11265b7a23c5..646050c69e70b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png index f83b718feeb79..bd1f3ed00e233 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png index 6e89f0b8e8683..082bca2e23f47 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png index 6177698ddf6dc..7a1c58bf02c1d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png index dcb273e64439b..bc2f9c129b6f9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png index a5b457de916b1..3e784f8c855d5 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png index c80f95a6464a1..886c9d33a0add 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie.png b/frontend/__snapshots__/scenes-app-insights--trends-pie.png index bea0f178cf14c..c6000ed1654c6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png 
b/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png index 106f7b70d1f54..cee544d679cc5 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png index ff41c7540bfcf..4a32c1bf8df36 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png index 2d1533eb70717..dc9333bb1780d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png index 2f708f79c912c..f9e6a6f710268 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png index 18487bb88d559..ed70303f61142 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png 
b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png index 38c6892a0c112..d71a4ba3b777d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png index 4a6fe4f1a4daf..43417b1c2f0ac 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table.png b/frontend/__snapshots__/scenes-app-insights--trends-table.png index bab2405d9ac61..2507ecadf9e8f 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table.png and b/frontend/__snapshots__/scenes-app-insights--trends-table.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png index f6af074e96923..94e1c6b5604ab 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png index 979febee96067..066eb2c1ac02a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png index d3a3dec473f0b..63e0e28282346 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png index fb29412159cd5..f3599c707dd15 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png index 7f099baaf9eab..a0f6e839e7e60 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png index 362964c3bcf89..140d24237944a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png index 91225e794f2b4..c908b3c0a5d9e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value.png b/frontend/__snapshots__/scenes-app-insights--trends-value.png index 94177c7f28870..ea8fd3d8fd413 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value.png and b/frontend/__snapshots__/scenes-app-insights--trends-value.png differ 
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png index 25eaf83180416..24462204e7738 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png index 5ecbf86975a4f..e11671b67ea0a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png index 4afd8b0955fbd..9fbed136e738d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map.png index 0050ed308e178..cbb62ce7deb40 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png index c2354f8a1112d..bd9ba9dbf8a97 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png 
index a09292dd4f0cb..ea0ad2f69c6ae 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png index 7199f4a937fbc..af38b73f25636 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths.png b/frontend/__snapshots__/scenes-app-insights--user-paths.png index 2d4d7040cd6f6..deec9d37f5b61 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths.png and b/frontend/__snapshots__/scenes-app-insights--user-paths.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--bullet-list.png b/frontend/__snapshots__/scenes-app-notebooks--bullet-list.png index d196998a7e56f..2b56395785ef6 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--bullet-list.png and b/frontend/__snapshots__/scenes-app-notebooks--bullet-list.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--empty-notebook.png b/frontend/__snapshots__/scenes-app-notebooks--empty-notebook.png index 2086ed0aa90aa..c6df20187e100 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--empty-notebook.png and b/frontend/__snapshots__/scenes-app-notebooks--empty-notebook.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--headings.png b/frontend/__snapshots__/scenes-app-notebooks--headings.png index 3186fed28fd49..1d202bf688da1 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--headings.png and b/frontend/__snapshots__/scenes-app-notebooks--headings.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found.png 
b/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found.png index 0df1f64e9ec3c..6286e7ae27078 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found.png and b/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--notebooks-list.png b/frontend/__snapshots__/scenes-app-notebooks--notebooks-list.png index c9f29c566c2c6..d6e7ef6b4a71f 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--notebooks-list.png and b/frontend/__snapshots__/scenes-app-notebooks--notebooks-list.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--numbered-list.png b/frontend/__snapshots__/scenes-app-notebooks--numbered-list.png index cbe5aefd199ed..de7c9e016a3d0 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--numbered-list.png and b/frontend/__snapshots__/scenes-app-notebooks--numbered-list.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png index e18ecc8d6a5a5..ae267af0034d9 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--text-formats.png b/frontend/__snapshots__/scenes-app-notebooks--text-formats.png index aa872f0a5cfe9..6f7c0b4c36de0 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--text-formats.png and b/frontend/__snapshots__/scenes-app-notebooks--text-formats.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png b/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png index f0ea3f9550997..c475638688418 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png and b/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png 
differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration.png index 94b53793cc3fe..4268b9820e627 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs.png index f7ea3ec3cff02..fc3f054686e53 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics.png index 16186b1e522ff..6e4f613841bdf 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png index 2922f7f7736ff..a000f5709360c 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png index 72113b0438d81..d5df660c362ee 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png 
b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png index b9810d5bf2186..5655e24e12a0c 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png index 80fdf0c1d8632..06bdfb7c4f880 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png differ diff --git a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png index 55081c3b37420..5850c12954b74 100644 Binary files a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png and b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png differ diff --git a/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png b/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png index c4c27f4db817a..7bb41e1837cc3 100644 Binary files a/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png and b/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png differ diff --git a/frontend/__snapshots__/scenes-app-saved-insights--card-view.png b/frontend/__snapshots__/scenes-app-saved-insights--card-view.png index fd1b5084ad4bd..3349db3182ce2 100644 Binary files a/frontend/__snapshots__/scenes-app-saved-insights--card-view.png and b/frontend/__snapshots__/scenes-app-saved-insights--card-view.png differ diff --git a/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png b/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png index 7757d243d548c..7952158fdb025 
100644 Binary files a/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png and b/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png differ diff --git a/frontend/__snapshots__/scenes-app-saved-insights--list-view.png b/frontend/__snapshots__/scenes-app-saved-insights--list-view.png index 520f2c041c0a8..ae911ef6d0319 100644 Binary files a/frontend/__snapshots__/scenes-app-saved-insights--list-view.png and b/frontend/__snapshots__/scenes-app-saved-insights--list-view.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--survey-templates.png b/frontend/__snapshots__/scenes-app-surveys--survey-templates.png index 069a66dbfbb5b..d888557c99407 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--survey-templates.png and b/frontend/__snapshots__/scenes-app-surveys--survey-templates.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-list.png b/frontend/__snapshots__/scenes-app-surveys--surveys-list.png index 012692ee2758d..80ccaa5e006fd 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--surveys-list.png and b/frontend/__snapshots__/scenes-app-surveys--surveys-list.png differ diff --git a/frontend/__snapshots__/scenes-other-login--second-factor.png b/frontend/__snapshots__/scenes-other-login--second-factor.png index 0eda9f6a6d221..d770df97b4345 100644 Binary files a/frontend/__snapshots__/scenes-other-login--second-factor.png and b/frontend/__snapshots__/scenes-other-login--second-factor.png differ diff --git a/frontend/__snapshots__/scenes-other-login--sso-error.png b/frontend/__snapshots__/scenes-other-login--sso-error.png index 4bac52c291407..37309681cfb75 100644 Binary files a/frontend/__snapshots__/scenes-other-login--sso-error.png and b/frontend/__snapshots__/scenes-other-login--sso-error.png differ diff --git a/frontend/__snapshots__/scenes-other-password-reset-complete--default.png b/frontend/__snapshots__/scenes-other-password-reset-complete--default.png index 
b08362e5ab1ab..8671219107f0c 100644 Binary files a/frontend/__snapshots__/scenes-other-password-reset-complete--default.png and b/frontend/__snapshots__/scenes-other-password-reset-complete--default.png differ diff --git a/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link.png b/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link.png index d5f6503dbfa2b..6e928b3ee74ac 100644 Binary files a/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link.png and b/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization.png b/frontend/__snapshots__/scenes-other-settings--settings-organization.png index 05fee3ba22511..0e5752e576d8b 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project.png b/frontend/__snapshots__/scenes-other-settings--settings-project.png index 71514ddb64f12..7e0fee45a376d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project.png and b/frontend/__snapshots__/scenes-other-settings--settings-project.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user.png b/frontend/__snapshots__/scenes-other-settings--settings-user.png index 5574601d11794..7d18a6db46dd5 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user.png and b/frontend/__snapshots__/scenes-other-settings--settings-user.png differ diff --git a/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene.png b/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene.png index 4c1e72c47cc86..e0e34f6fbc61b 100644 Binary files a/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene.png and 
b/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene.png differ diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid.png index ea95c8af90bc0..0291a167335d6 100644 Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid.png differ diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-success.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-success.png index 45b7e05851b7a..71aad6c351f8b 100644 Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-success.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-success.png differ diff --git a/frontend/__snapshots__/scenes-other-verify-email--verifying-email.png b/frontend/__snapshots__/scenes-other-verify-email--verifying-email.png index a6efa516c1cda..ecfc94858c28a 100644 Binary files a/frontend/__snapshots__/scenes-other-verify-email--verifying-email.png and b/frontend/__snapshots__/scenes-other-verify-email--verifying-email.png differ diff --git a/frontend/src/layout/navigation-3000/Navigation.scss b/frontend/src/layout/navigation-3000/Navigation.scss index e727859e8c19f..cccca30bc0e4b 100644 --- a/frontend/src/layout/navigation-3000/Navigation.scss +++ b/frontend/src/layout/navigation-3000/Navigation.scss @@ -15,6 +15,12 @@ min-width: 0; overflow: auto; } + + .BridgePage { + background: none; + height: 100%; + overflow: visible; + } } .Navigation3000__scene { diff --git a/frontend/src/layout/navigation-3000/Navigation.tsx b/frontend/src/layout/navigation-3000/Navigation.tsx index 522dfc2bef618..3dab2a0c34052 100644 --- a/frontend/src/layout/navigation-3000/Navigation.tsx +++ b/frontend/src/layout/navigation-3000/Navigation.tsx @@ -12,6 +12,7 @@ import { SceneConfig } from 'scenes/sceneTypes' import { 
FlaggedFeature } from 'lib/components/FlaggedFeature' import { FEATURE_FLAGS } from 'lib/constants' import { SidePanel } from './sidepanel/SidePanel' +import { MinimalNavigation } from './components/MinimalNavigation' export function Navigation({ children, @@ -21,16 +22,22 @@ export function Navigation({ sceneConfig: SceneConfig | null }): JSX.Element { useMountedLogic(themeLogic) - const { activeNavbarItem } = useValues(navigation3000Logic) + const { activeNavbarItem, mode } = useValues(navigation3000Logic) useEffect(() => { // FIXME: Include debug notice in a non-obstructing way document.getElementById('bottom-notice')?.remove() }, []) - if (sceneConfig?.layout === 'plain') { - return <>{children} + if (mode !== 'full') { + return ( +
+ {mode === 'minimal' ? : null} +
{children}
+
+ ) } + return (
@@ -42,6 +49,7 @@ export function Navigation({
diff --git a/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss b/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss index 75b3dae2df67d..0cf4e5a260384 100644 --- a/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss +++ b/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss @@ -26,33 +26,40 @@ font-size: calc(0.75rem + 0.0625rem * var(--breadcrumbs-compaction-rate)); line-height: 1rem; font-weight: 600; - user-select: none; pointer-events: auto; } -.Breadcrumbs3000__crumbs { - display: flex; - align-items: center; -} - .Breadcrumbs3000__trail { flex-grow: 1; flex-shrink: 1; - overflow-x: auto; + min-width: 0; +} + +.Breadcrumbs3000__crumbs { + height: 1rem; + margin-top: 0.25rem; + display: flex; + align-items: center; + overflow: visible; } .Breadcrumbs3000__here { + visibility: var(--breadcrumbs-title-large-visibility); position: relative; line-height: 1.2; - margin: calc(0.25rem * (1 - var(--breadcrumbs-compaction-rate))) 0 0; + margin: 0; + padding: calc(0.5rem * (1 - var(--breadcrumbs-compaction-rate))) 0 0; font-size: 1rem; font-weight: 700; overflow: hidden; - height: calc(1em * 1.2 * (1 - var(--breadcrumbs-compaction-rate))); + height: calc(1.2em * (1 - var(--breadcrumbs-compaction-rate))); + box-sizing: content-box; + font-family: var(--font-sans) !important; - > span { + > * { position: absolute; - bottom: 0; + bottom: 0.25rem; + height: 1.2em; } } @@ -70,9 +77,10 @@ } &.Breadcrumbs3000__breadcrumb--here { + visibility: var(--breadcrumbs-title-small-visibility); cursor: default; - > span { + > * { opacity: 1; transform: translateY(calc(100% * (1 - var(--breadcrumbs-compaction-rate)))); } diff --git a/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx b/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx index 344d36f55f47c..cbbcc6403ae48 100644 --- a/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx +++ b/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx @@ 
-1,13 +1,14 @@ -import React, { useEffect, useState } from 'react' +import React, { useLayoutEffect, useState } from 'react' import { useActions, useValues } from 'kea' import { IconArrowDropDown } from 'lib/lemon-ui/icons' import { Link } from 'lib/lemon-ui/Link' import './Breadcrumbs.scss' -import { Breadcrumb as IBreadcrumb } from '~/types' +import { FinalizedBreadcrumb } from '~/types' import clsx from 'clsx' import { Popover } from 'lib/lemon-ui/Popover/Popover' import { breadcrumbsLogic } from '~/layout/navigation/Breadcrumbs/breadcrumbsLogic' import { LemonSkeleton } from '@posthog/lemon-ui' +import { EditableField } from 'lib/components/EditableField/EditableField' const COMPACTION_DISTANCE = 44 @@ -17,27 +18,47 @@ const COMPACTION_DISTANCE = 44 * - The "Quick scene actions" buttons (zero or more buttons on the right) */ export function Breadcrumbs(): JSX.Element | null { - const { breadcrumbs } = useValues(breadcrumbsLogic) + const { breadcrumbs, renameState } = useValues(breadcrumbsLogic) const { setActionsContainer } = useActions(breadcrumbsLogic) const [compactionRate, setCompactionRate] = useState(0) - useEffect(() => { + useLayoutEffect(() => { function handleScroll(): void { const scrollTop = document.getElementsByTagName('main')[0].scrollTop - setCompactionRate(Math.min(scrollTop / COMPACTION_DISTANCE, 1)) + const newCompactionRate = Math.min(scrollTop / COMPACTION_DISTANCE, 1) + setCompactionRate(newCompactionRate) + if ( + renameState && + ((newCompactionRate > 0.5 && compactionRate <= 0.5) || + (newCompactionRate <= 0.5 && compactionRate > 0.5)) + ) { + // Transfer selection from the outgoing input to the incoming one + const [source, target] = newCompactionRate > 0.5 ? 
['large', 'small'] : ['small', 'large'] + const sourceEl = document.querySelector(`input[name="item-name-${source}"]`) + const targetEl = document.querySelector(`input[name="item-name-${target}"]`) + if (sourceEl && targetEl) { + targetEl.focus() + targetEl.setSelectionRange(sourceEl.selectionStart || 0, sourceEl.selectionEnd || 0) + } + } } const main = document.getElementsByTagName('main')[0] main.addEventListener('scroll', handleScroll) return () => main.removeEventListener('scroll', handleScroll) - }, []) + }, [compactionRate]) return breadcrumbs.length ? (
@@ -65,14 +86,43 @@ export function Breadcrumbs(): JSX.Element | null { } interface BreadcrumbProps { - breadcrumb: IBreadcrumb + breadcrumb: FinalizedBreadcrumb index: number here?: boolean } function Breadcrumb({ breadcrumb, index, here }: BreadcrumbProps): JSX.Element { + const { renameState } = useValues(breadcrumbsLogic) + const { tentativelyRename, finishRenaming } = useActions(breadcrumbsLogic) const [popoverShown, setPopoverShown] = useState(false) + let nameElement: JSX.Element + if (breadcrumb.name != null && breadcrumb.onRename) { + nameElement = ( + tentativelyRename(breadcrumb.globalKey, newName)} + onSave={(newName) => { + void breadcrumb.onRename?.(newName) + }} + mode={renameState && renameState[0] === breadcrumb.globalKey ? 'edit' : 'view'} + onModeToggle={(newMode) => { + if (newMode === 'edit') { + tentativelyRename(breadcrumb.globalKey, breadcrumb.name as string) + } else { + finishRenaming() + } + setPopoverShown(false) + }} + compactButtons="xsmall" + editingIndication="underlined" + /> + ) + } else { + nameElement = {breadcrumb.name} + } + const Component = breadcrumb.path ? Link : 'div' const breadcrumbContent = ( - {breadcrumb.name} + {nameElement} {breadcrumb.popover && } ) @@ -118,13 +168,39 @@ function Breadcrumb({ breadcrumb, index, here }: BreadcrumbProps): JSX.Element { } interface HereProps { - breadcrumb: IBreadcrumb + breadcrumb: FinalizedBreadcrumb } function Here({ breadcrumb }: HereProps): JSX.Element { + const { renameState } = useValues(breadcrumbsLogic) + const { tentativelyRename, finishRenaming } = useActions(breadcrumbsLogic) + return (

- {breadcrumb.name || } + {breadcrumb.name == null ? ( + + ) : breadcrumb.onRename ? ( + tentativelyRename(breadcrumb.globalKey, newName)} + onSave={(newName) => { + void breadcrumb.onRename?.(newName) + }} + mode={renameState && renameState[0] === breadcrumb.globalKey ? 'edit' : 'view'} + onModeToggle={(newMode) => { + if (newMode === 'edit') { + tentativelyRename(breadcrumb.globalKey, breadcrumb.name as string) + } else { + finishRenaming() + } + }} + compactButtons="xsmall" + editingIndication="underlined" + /> + ) : ( + {breadcrumb.name} + )}

) } diff --git a/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss b/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss index 6a1645dbcfd0e..29d1076ffe7aa 100644 --- a/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss +++ b/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss @@ -15,6 +15,18 @@ color: var(--default); text-transform: capitalize; + .posthog-3000 & { + text-transform: uppercase; + border-radius: 0.25rem; + border-bottom-width: 2px; + font-size: 0.625rem; + padding: 0.125rem 0.25rem; + } + + .posthog-3000[theme='dark'] & { + border-color: var(--muted-3000); + } + .KeyboardShortcut--muted > & { background: none; color: var(--muted); diff --git a/frontend/src/layout/navigation-3000/components/KeyboardShortcut.stories.tsx b/frontend/src/layout/navigation-3000/components/KeyboardShortcut.stories.tsx new file mode 100644 index 0000000000000..435e87ebfbdce --- /dev/null +++ b/frontend/src/layout/navigation-3000/components/KeyboardShortcut.stories.tsx @@ -0,0 +1,27 @@ +import { Meta } from '@storybook/react' + +import { KeyboardShortcut } from './KeyboardShortcut' + +const meta: Meta = { + title: 'PostHog 3000/Keyboard Shortcut', + component: KeyboardShortcut, + tags: ['autodocs'], +} +export default meta + +export const Default = { + args: { + cmd: true, + shift: true, + k: true, + }, +} + +export const Muted = { + args: { + muted: true, + cmd: true, + shift: true, + k: true, + }, +} diff --git a/frontend/src/layout/navigation-3000/components/MinimalNavigation.tsx b/frontend/src/layout/navigation-3000/components/MinimalNavigation.tsx new file mode 100644 index 0000000000000..2bd7486738b47 --- /dev/null +++ b/frontend/src/layout/navigation-3000/components/MinimalNavigation.tsx @@ -0,0 +1,61 @@ +import { LemonButton, Lettermark, Popover, ProfilePicture } from '@posthog/lemon-ui' +import { ProjectSwitcherOverlay } from '~/layout/navigation/ProjectSwitcher' +import { SitePopoverOverlay } 
from '~/layout/navigation/TopBar/SitePopover' +import { useValues, useActions } from 'kea' +import { teamLogic } from 'scenes/teamLogic' +import { navigationLogic } from '~/layout/navigation/navigationLogic' +import { userLogic } from 'scenes/userLogic' +import { IconLogomark } from '@posthog/icons' +import { urls } from 'scenes/urls' +import { organizationLogic } from 'scenes/organizationLogic' + +export function MinimalNavigation(): JSX.Element { + const { user } = useValues(userLogic) + + const { currentTeam } = useValues(teamLogic) + const { currentOrganization } = useValues(organizationLogic) + + const { isSitePopoverOpen, isProjectSwitcherShown } = useValues(navigationLogic) + const { closeSitePopover, toggleSitePopover, toggleProjectSwitcher, hideProjectSwitcher } = + useActions(navigationLogic) + + return ( + + ) +} diff --git a/frontend/src/layout/navigation-3000/components/Navbar.tsx b/frontend/src/layout/navigation-3000/components/Navbar.tsx index 3ddc730007a56..ac9898f6063a9 100644 --- a/frontend/src/layout/navigation-3000/components/Navbar.tsx +++ b/frontend/src/layout/navigation-3000/components/Navbar.tsx @@ -96,6 +96,7 @@ export function Navbar(): JSX.Element { title="Project settings" to={urls.settings('project')} /> + } visible={isSitePopoverOpen} diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx index 5e5c1237c1b8f..0020dea10d30c 100644 --- a/frontend/src/layout/navigation-3000/navigationLogic.tsx +++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx @@ -1,4 +1,4 @@ -import { actions, events, kea, listeners, path, props, reducers, selectors } from 'kea' +import { actions, connect, events, kea, listeners, path, props, reducers, selectors } from 'kea' import { subscriptions } from 'kea-subscriptions' import { BasicListItem, ExtendedListItem, NavbarItem, SidebarNavbarItem } from './types' @@ -42,6 +42,8 @@ import { isNotNil } from 'lib/utils' /** Multi-segment item 
keys are joined using this separator for easy comparisons. */ export const ITEM_KEY_PART_SEPARATOR = '::' +export type Navigation3000Mode = 'none' | 'minimal' | 'full' + const MINIMUM_SIDEBAR_WIDTH_PX: number = 192 const DEFAULT_SIDEBAR_WIDTH_PX: number = 288 const MAXIMUM_SIDEBAR_WIDTH_PX: number = 1024 @@ -50,6 +52,9 @@ const MAXIMUM_SIDEBAR_WIDTH_PERCENTAGE: number = 50 export const navigation3000Logic = kea([ path(['layout', 'navigation-3000', 'navigationLogic']), props({} as { inputElement?: HTMLInputElement | null }), + connect(() => ({ + values: [sceneLogic, ['sceneConfig']], + })), actions({ hideSidebar: true, showSidebar: (newNavbarItemId?: string) => ({ newNavbarItemId }), @@ -278,6 +283,16 @@ export const navigation3000Logic = kea([ }, })), selectors({ + mode: [ + (s) => [s.sceneConfig], + (sceneConfig): Navigation3000Mode => { + return sceneConfig?.layout === 'plain' && !sceneConfig.allowUnauthenticated + ? 'minimal' + : sceneConfig?.layout !== 'plain' + ? 'full' + : 'none' + }, + ], navbarItems: [ () => [featureFlagLogic.selectors.featureFlags], (featureFlags): NavbarItem[][] => { diff --git a/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx b/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx index 056a0ccb06cdc..cd05460eb2669 100644 --- a/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx +++ b/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx @@ -10,7 +10,8 @@ import type { featureFlagsSidebarLogicType } from './featureFlagsType' import Fuse from 'fuse.js' import { FeatureFlagType } from '~/types' import { subscriptions } from 'kea-subscriptions' -import { copyToClipboard, deleteWithUndo } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { teamLogic } from 'scenes/teamLogic' import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic' import { navigation3000Logic } from '../navigationLogic' diff 
--git a/frontend/src/layout/navigation-3000/sidebars/insights.ts b/frontend/src/layout/navigation-3000/sidebars/insights.ts index efc9766d0a7c6..71021b95246b6 100644 --- a/frontend/src/layout/navigation-3000/sidebars/insights.ts +++ b/frontend/src/layout/navigation-3000/sidebars/insights.ts @@ -9,7 +9,7 @@ import { navigation3000Logic } from '~/layout/navigation-3000/navigationLogic' import { INSIGHTS_PER_PAGE, savedInsightsLogic } from 'scenes/saved-insights/savedInsightsLogic' import type { insightsSidebarLogicType } from './insightsType' import { findSearchTermInItemName } from './utils' -import { deleteWithUndo } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { teamLogic } from 'scenes/teamLogic' import { api } from '@posthog/apps-common' import { insightsModel } from '~/models/insightsModel' diff --git a/frontend/src/layout/navigation-3000/sidebars/personsAndGroups.ts b/frontend/src/layout/navigation-3000/sidebars/personsAndGroups.ts index 2cefac31cf5ba..d5f7fc5e2eeae 100644 --- a/frontend/src/layout/navigation-3000/sidebars/personsAndGroups.ts +++ b/frontend/src/layout/navigation-3000/sidebars/personsAndGroups.ts @@ -98,16 +98,17 @@ export const personsAndGroupsSidebarLogic = kea { - const { searchTerm } = values - const displayId = groupDisplayId(group.group_key, group.group_properties) - return { - key: group.group_key, - name: displayId, - url: urls.group(groupType.group_type_index, group.group_key), - searchMatch: findSearchTermInItemName(displayId, searchTerm), - } as BasicListItem - }), + items: + groups[groupType.group_type_index]?.results.map((group) => { + const { searchTerm } = values + const displayId = groupDisplayId(group.group_key, group.group_properties) + return { + key: group.group_key, + name: displayId, + url: urls.group(groupType.group_type_index, group.group_key), + searchMatch: findSearchTermInItemName(displayId, searchTerm), + } as BasicListItem + }) || [], loading: 
groupsLoading[groupType.group_type_index], // FIXME: Add remote } as SidebarCategory) diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSettings.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSettings.tsx index 4ed5684861719..42de4417d1aba 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSettings.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSettings.tsx @@ -3,10 +3,11 @@ import { sidePanelSettingsLogic } from './sidePanelSettingsLogic' import { Settings } from 'scenes/settings/Settings' import { LemonButton } from '@posthog/lemon-ui' import { urls } from 'scenes/urls' -import { SettingsLogicProps, settingsLogic } from 'scenes/settings/settingsLogic' +import { settingsLogic } from 'scenes/settings/settingsLogic' import { useEffect } from 'react' import { SidePanelPaneHeader } from '../components/SidePanelPane' import { IconExternal } from '@posthog/icons' +import { SettingsLogicProps } from 'scenes/settings/types' export const SidePanelSettings = (): JSX.Element => { const { settings } = useValues(sidePanelSettingsLogic) diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic.tsx index da07a199f139d..b39077ca39523 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic.tsx @@ -7,7 +7,7 @@ import { LemonDialog } from '@posthog/lemon-ui' import type { sidePanelSettingsLogicType } from './sidePanelSettingsLogicType' import { sidePanelStateLogic } from '../sidePanelStateLogic' import { SidePanelTab } from '~/types' -import { SettingsLogicProps } from 'scenes/settings/settingsLogic' +import { SettingsLogicProps } from 'scenes/settings/types' export const sidePanelSettingsLogic = kea([ path(['scenes', 'navigation', 'sidepanel', 
'sidePanelSettingsLogic']), diff --git a/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss b/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss index 625535c49575e..6141582fea852 100644 --- a/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss +++ b/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss @@ -22,7 +22,7 @@ &--actionable { cursor: pointer; - color: var(--primary); + color: var(--primary-3000); } } diff --git a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts index dd49842bc5fa7..ce74771427690 100644 --- a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts +++ b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts @@ -25,8 +25,8 @@ describe('breadcrumbsLogic', () => { // test with .delay because subscriptions happen async router.actions.push(urls.savedInsights()) - await expectLogic(logic).delay(1).toMatchValues({ documentTitle: 'Insights • PostHog' }) - expect(global.document.title).toEqual('Insights • PostHog') + await expectLogic(logic).delay(1).toMatchValues({ documentTitle: 'Product analytics • PostHog' }) + expect(global.document.title).toEqual('Product analytics • PostHog') router.actions.push(urls.dashboards()) await expectLogic(logic).delay(1).toMatchValues({ documentTitle: 'Dashboards • PostHog' }) diff --git a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx index 1d1739651cb09..d15aefbb51a12 100644 --- a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx +++ b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx @@ -1,4 +1,4 @@ -import { actions, connect, kea, path, props, reducers, selectors } from 'kea' +import { actions, connect, kea, listeners, path, props, reducers, selectors } from 'kea' import { organizationLogic } from 'scenes/organizationLogic' import { teamLogic } from 
'scenes/teamLogic' import './Breadcrumbs.scss' @@ -11,7 +11,7 @@ import { Lettermark } from 'lib/lemon-ui/Lettermark' import { ProfilePicture } from 'lib/lemon-ui/ProfilePicture' import { ProjectSwitcherOverlay } from '~/layout/navigation/ProjectSwitcher' import { OrganizationSwitcherOverlay } from '~/layout/navigation/OrganizationSwitcher' -import { Breadcrumb } from '~/types' +import { Breadcrumb, FinalizedBreadcrumb } from '~/types' import { subscriptions } from 'kea-subscriptions' export const breadcrumbsLogic = kea([ @@ -37,6 +37,11 @@ export const breadcrumbsLogic = kea([ })), actions({ setActionsContainer: (element: HTMLElement | null) => ({ element }), + tentativelyRename: (breadcrumbGlobalKey: string, tentativeName: string) => ({ + breadcrumbGlobalKey, + tentativeName, + }), + finishRenaming: true, }), reducers({ actionsContainer: [ @@ -45,7 +50,17 @@ export const breadcrumbsLogic = kea([ setActionsContainer: (_, { element }) => element, }, ], + renameState: [ + null as [breadcrumbGlobalKey: string, tentativeName: string] | null, + { + tentativelyRename: (_, { breadcrumbGlobalKey, tentativeName }) => [breadcrumbGlobalKey, tentativeName], + finishRenaming: () => null, + }, + ], }), + listeners(({ actions }) => ({ + [sceneLogic.actionTypes.loadScene]: () => actions.finishRenaming(), // Cancel renaming on navigation away + })), selectors(() => ({ sceneBreadcrumbs: [ (s) => [ @@ -94,6 +109,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 'me', name: user.first_name, symbol: , }) @@ -104,6 +120,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 'instance', name: stripHTTP(preflight.site_url), symbol: , }) @@ -114,6 +131,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 'organization', name: currentOrganization.name, symbol: , popover: @@ -131,6 +149,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 
'project', name: currentTeam.name, popover: { overlay: , @@ -144,8 +163,24 @@ export const breadcrumbsLogic = kea([ ], breadcrumbs: [ (s) => [s.appBreadcrumbs, s.sceneBreadcrumbs], - (appBreadcrumbs, sceneBreadcrumbs) => { - return [...appBreadcrumbs, ...sceneBreadcrumbs] + (appBreadcrumbs, sceneBreadcrumbs): FinalizedBreadcrumb[] => { + const breadcrumbs = Array(appBreadcrumbs.length + sceneBreadcrumbs.length) + const globalPathSoFar: string[] = [] + for (let i = 0; i < appBreadcrumbs.length; i++) { + globalPathSoFar.push(String(appBreadcrumbs[i].key)) + breadcrumbs[i] = { + ...appBreadcrumbs[i], + globalKey: globalPathSoFar.join('.'), + } + } + for (let i = 0; i < sceneBreadcrumbs.length; i++) { + globalPathSoFar.push(String(sceneBreadcrumbs[i].key)) + breadcrumbs[i + appBreadcrumbs.length] = { + ...sceneBreadcrumbs[i], + globalKey: globalPathSoFar.join('.'), + } + } + return breadcrumbs }, ], firstBreadcrumb: [(s) => [s.breadcrumbs], (breadcrumbs) => breadcrumbs[0]], diff --git a/frontend/src/layout/navigation/SideBar/SideBar.tsx b/frontend/src/layout/navigation/SideBar/SideBar.tsx index b946d96f6de83..b5f8c1e3f8ef3 100644 --- a/frontend/src/layout/navigation/SideBar/SideBar.tsx +++ b/frontend/src/layout/navigation/SideBar/SideBar.tsx @@ -201,7 +201,7 @@ function Pages(): JSX.Element { } identifier={Scene.EarlyAccessFeatures} - title={'Early Access Management'} + title={'Early access features'} to={urls.earlyAccessFeatures()} />
Data
@@ -210,7 +210,7 @@ function Pages(): JSX.Element { icon={} identifier={Scene.Events} to={urls.events()} - title={'Event Explorer'} + title={'Event explorer'} /> } @@ -230,7 +230,7 @@ function Pages(): JSX.Element { } identifier={Scene.DataWarehouse} - title={'Data Warehouse'} + title={'Data warehouse'} to={urls.dataWarehouse()} highlight="beta" /> @@ -240,7 +240,7 @@ function Pages(): JSX.Element {
Apps
{canViewPlugins(currentOrganization) && ( } identifier={Scene.Apps} to={urls.projectApps()} diff --git a/frontend/src/layout/navigation/TopBar/TopBar.scss b/frontend/src/layout/navigation/TopBar/TopBar.scss index 8e1732a7d2d34..79815df7875f5 100644 --- a/frontend/src/layout/navigation/TopBar/TopBar.scss +++ b/frontend/src/layout/navigation/TopBar/TopBar.scss @@ -130,7 +130,7 @@ } .SitePopover__side-link { - color: var(--primary); + color: var(--primary-3000); margin-left: 0.5rem; font-weight: 600; font-size: 0.8125rem; diff --git a/frontend/src/layout/navigation/navigationLogic.ts b/frontend/src/layout/navigation/navigationLogic.ts index f819384e973e3..31ccb9b6a4d0f 100644 --- a/frontend/src/layout/navigation/navigationLogic.ts +++ b/frontend/src/layout/navigation/navigationLogic.ts @@ -10,7 +10,6 @@ import { userLogic } from 'scenes/userLogic' import type { navigationLogicType } from './navigationLogicType' import { membersLogic } from 'scenes/organization/membersLogic' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { Scene } from 'scenes/sceneTypes' export type ProjectNoticeVariant = | 'demo_project' @@ -22,7 +21,7 @@ export type ProjectNoticeVariant = export const navigationLogic = kea([ path(['layout', 'navigation', 'navigationLogic']), connect(() => ({ - values: [sceneLogic, ['sceneConfig', 'activeScene'], membersLogic, ['members', 'membersLoading']], + values: [sceneLogic, ['sceneConfig'], membersLogic, ['members', 'membersLoading']], actions: [eventUsageLogic, ['reportProjectNoticeDismissed']], })), actions({ @@ -121,10 +120,9 @@ export const navigationLogic = kea([ (fullscreen, sceneConfig) => fullscreen || sceneConfig?.layout === 'plain', ], minimalTopBar: [ - (s) => [s.activeScene], - (activeScene) => { - const minimalTopBarScenes = [Scene.Products, Scene.Onboarding] - return activeScene && minimalTopBarScenes.includes(activeScene) + (s) => [s.sceneConfig], + (sceneConfig) => { + return sceneConfig?.layout === 'plain' && 
!sceneConfig.allowUnauthenticated }, ], isSideBarShown: [ diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index d1de3a313acb2..d51c4226ab69b 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -1,4 +1,7 @@ import posthog from 'posthog-js' +import { decompressSync, strFromU8 } from 'fflate' +import { encodeParams } from 'kea-router' + import { ActionType, BatchExportLogEntry, @@ -55,22 +58,28 @@ import { ExternalDataStripeSourceCreatePayload, ExternalDataStripeSource, } from '~/types' -import { getCurrentOrganizationId, getCurrentTeamId } from './utils/logics' -import { CheckboxValueType } from 'antd/lib/checkbox/Group' -import { LOGS_PORTION_LIMIT } from 'scenes/plugins/plugin/pluginLogsLogic' +import { + ACTIVITY_PAGE_SIZE, + DashboardPrivilegeLevel, + EVENT_DEFINITIONS_PER_PAGE, + EVENT_PROPERTY_DEFINITIONS_PER_PAGE, + LOGS_PORTION_LIMIT, +} from './constants' import { toParams } from 'lib/utils' -import { DashboardPrivilegeLevel } from './constants' -import { EVENT_DEFINITIONS_PER_PAGE } from 'scenes/data-management/events/eventDefinitionsTableLogic' -import { EVENT_PROPERTY_DEFINITIONS_PER_PAGE } from 'scenes/data-management/properties/propertyDefinitionsTableLogic' import { ActivityLogItem, ActivityScope } from 'lib/components/ActivityLog/humanizeActivity' import { ActivityLogProps } from 'lib/components/ActivityLog/ActivityLog' import { SavedSessionRecordingPlaylistsResult } from 'scenes/session-recordings/saved-playlists/savedSessionRecordingPlaylistsLogic' -import { QuerySchema } from '~/queries/schema' -import { decompressSync, strFromU8 } from 'fflate' +import { QuerySchema, QueryStatus } from '~/queries/schema' import { getCurrentExporterData } from '~/exporter/exporterViewLogic' -import { encodeParams } from 'kea-router' -export const ACTIVITY_PAGE_SIZE = 20 +/** + * WARNING: Be very careful importing things here. 
This file is heavily used and can trigger a lot of cyclic imports + * Preferably create a dedicated file in utils/.. + */ + +type CheckboxValueType = string | number | boolean + +const PAGINATION_DEFAULT_MAX_PAGES = 10 export interface PaginatedResponse { results: T[] @@ -115,6 +124,33 @@ export async function getJSONOrThrow(response: Response): Promise { } } +export class ApiConfig { + private static _currentOrganizationId: OrganizationType['id'] | null = null + private static _currentTeamId: TeamType['id'] | null = null + + static getCurrentOrganizationId(): OrganizationType['id'] { + if (!this._currentOrganizationId) { + throw new Error('Organization ID is not known.') + } + return this._currentOrganizationId + } + + static setCurrentOrganizationId(id: OrganizationType['id']): void { + this._currentOrganizationId = id + } + + static getCurrentTeamId(): TeamType['id'] { + if (!this._currentTeamId) { + throw new Error('Team ID is not known.') + } + return this._currentTeamId + } + + static setCurrentTeamId(id: TeamType['id']): void { + this._currentTeamId = id + } +} + class ApiRequest { private pathComponents: string[] private queryString: string | undefined @@ -168,7 +204,7 @@ class ApiRequest { return this.addPathComponent('organizations') } - public organizationsDetail(id: OrganizationType['id'] = getCurrentOrganizationId()): ApiRequest { + public organizationsDetail(id: OrganizationType['id'] = ApiConfig.getCurrentOrganizationId()): ApiRequest { return this.organizations().addPathComponent(id) } @@ -199,7 +235,7 @@ class ApiRequest { return this.addPathComponent('projects') } - public projectsDetail(id: TeamType['id'] = getCurrentTeamId()): ApiRequest { + public projectsDetail(id: TeamType['id'] = ApiConfig.getCurrentTeamId()): ApiRequest { return this.projects().addPathComponent(id) } @@ -527,7 +563,7 @@ class ApiRequest { // Resource Access Permissions public featureFlagAccessPermissions(flagId: FeatureFlagType['id']): ApiRequest { - return 
this.featureFlag(flagId, getCurrentTeamId()).addPathComponent('role_access') + return this.featureFlag(flagId, ApiConfig.getCurrentTeamId()).addPathComponent('role_access') } public featureFlagAccessPermissionsDetail( @@ -542,6 +578,10 @@ class ApiRequest { return this.projectsDetail(teamId).addPathComponent('query') } + public queryStatus(queryId: string, teamId?: TeamType['id']): ApiRequest { + return this.query(teamId).addPathComponent(queryId) + } + // Notebooks public notebooks(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('notebooks') @@ -692,13 +732,13 @@ const api = { organizationFeatureFlags: { async get( - orgId: OrganizationType['id'] = getCurrentOrganizationId(), + orgId: OrganizationType['id'] = ApiConfig.getCurrentOrganizationId(), featureFlagKey: FeatureFlagType['key'] ): Promise { return await new ApiRequest().organizationFeatureFlags(orgId, featureFlagKey).get() }, async copy( - orgId: OrganizationType['id'] = getCurrentOrganizationId(), + orgId: OrganizationType['id'] = ApiConfig.getCurrentOrganizationId(), data: OrganizationFeatureFlagsCopyBody ): Promise<{ success: FeatureFlagType[]; failed: any }> { return await new ApiRequest().copyOrganizationFeatureFlags(orgId).create({ data }) @@ -740,7 +780,7 @@ const api = { list( activityLogProps: ActivityLogProps, page: number = 1, - teamId: TeamType['id'] = getCurrentTeamId() + teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() ): Promise> { const requestForScope: Record ApiRequest | null> = { [ActivityScope.FEATURE_FLAG]: (props) => { @@ -785,7 +825,7 @@ const api = { }, exports: { - determineExportUrl(exportId: number, teamId: TeamType['id'] = getCurrentTeamId()): string { + determineExportUrl(exportId: number, teamId: TeamType['id'] = ApiConfig.getCurrentTeamId()): string { return new ApiRequest() .export(exportId, teamId) .withAction('content') @@ -796,12 +836,12 @@ const api = { async create( data: Partial, params: Record = {}, - teamId: 
TeamType['id'] = getCurrentTeamId() + teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() ): Promise { return new ApiRequest().exports(teamId).withQueryString(toParams(params)).create({ data }) }, - async get(id: number, teamId: TeamType['id'] = getCurrentTeamId()): Promise { + async get(id: number, teamId: TeamType['id'] = ApiConfig.getCurrentTeamId()): Promise { return new ApiRequest().export(id, teamId).get() }, }, @@ -810,7 +850,7 @@ const api = { async get( id: EventType['id'], includePerson: boolean = false, - teamId: TeamType['id'] = getCurrentTeamId() + teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() ): Promise { let apiRequest = new ApiRequest().event(id, teamId) if (includePerson) { @@ -821,7 +861,7 @@ const api = { async list( filters: EventsListQueryParams, limit: number = 100, - teamId: TeamType['id'] = getCurrentTeamId() + teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() ): Promise> { const params: EventsListQueryParams = { ...filters, limit, orderBy: filters.orderBy ?? 
['-timestamp'] } return new ApiRequest().events(teamId).withQueryString(toParams(params)).get() @@ -829,7 +869,7 @@ const api = { determineListEndpoint( filters: EventsListQueryParams, limit: number = 100, - teamId: TeamType['id'] = getCurrentTeamId() + teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() ): string { const params: EventsListQueryParams = { ...filters, limit } return new ApiRequest().events(teamId).withQueryString(toParams(params)).assembleFullUrl() @@ -837,7 +877,7 @@ const api = { }, tags: { - async list(teamId: TeamType['id'] = getCurrentTeamId()): Promise { + async list(teamId: TeamType['id'] = ApiConfig.getCurrentTeamId()): Promise { return new ApiRequest().tags(teamId).get() }, }, @@ -860,7 +900,7 @@ const api = { }, async list({ limit = EVENT_DEFINITIONS_PER_PAGE, - teamId = getCurrentTeamId(), + teamId = ApiConfig.getCurrentTeamId(), ...params }: { limit?: number @@ -876,7 +916,7 @@ const api = { }, determineListEndpoint({ limit = EVENT_DEFINITIONS_PER_PAGE, - teamId = getCurrentTeamId(), + teamId = ApiConfig.getCurrentTeamId(), ...params }: { limit?: number @@ -925,7 +965,7 @@ const api = { }, async list({ limit = EVENT_PROPERTY_DEFINITIONS_PER_PAGE, - teamId = getCurrentTeamId(), + teamId = ApiConfig.getCurrentTeamId(), ...params }: { event_names?: string[] @@ -951,7 +991,7 @@ const api = { }, determineListEndpoint({ limit = EVENT_PROPERTY_DEFINITIONS_PER_PAGE, - teamId = getCurrentTeamId(), + teamId = ApiConfig.getCurrentTeamId(), ...params }: { event_names?: string[] @@ -1447,7 +1487,7 @@ const api = { }, async update( notebookId: NotebookType['short_id'], - data: Pick + data: Partial> ): Promise { return await new ApiRequest().notebook(notebookId).update({ data }) }, @@ -1722,6 +1762,12 @@ const api = { }, }, + queryStatus: { + async get(queryId: string): Promise { + return await new ApiRequest().queryStatus(queryId).get() + }, + }, + queryURL: (): string => { return new ApiRequest().query().assembleFullUrl(true) }, @@ -1730,7 +1776,8 
@@ const api = { query: T, options?: ApiMethodOptions, queryId?: string, - refresh?: boolean + refresh?: boolean, + async?: boolean ): Promise< T extends { [response: string]: any } ? T['response'] extends infer P | undefined @@ -1740,7 +1787,7 @@ const api = { > { return await new ApiRequest() .query() - .create({ ...options, data: { query, client_query_id: queryId, refresh: refresh } }) + .create({ ...options, data: { query, client_query_id: queryId, refresh: refresh, async } }) }, /** Fetch data from specified URL. The result already is JSON-parsed. */ @@ -1852,6 +1899,23 @@ const api = { } return response }, + + async loadPaginatedResults( + url: string | null, + maxIterations: number = PAGINATION_DEFAULT_MAX_PAGES + ): Promise { + let results: any[] = [] + for (let i = 0; i <= maxIterations; ++i) { + if (!url) { + break + } + + const { results: partialResults, next } = await api.get(url) + results = results.concat(partialResults) + url = next + } + return results + }, } function reportError(method: string, url: string, response: Response, startTime: number): void { diff --git a/frontend/src/lib/colors.ts b/frontend/src/lib/colors.ts index c378ce0e0032c..6c95c2443a214 100644 --- a/frontend/src/lib/colors.ts +++ b/frontend/src/lib/colors.ts @@ -5,20 +5,21 @@ export const BRAND_BLUE_HSL: [number, number, number] = [228, 100, 56] /* Insight series colors. 
*/ const dataColorVars = [ - 'brand-blue', - 'purple', - 'viridian', - 'magenta', - 'vermilion', - 'brown', - 'green', - 'blue', - 'pink', - 'navy', - 'turquoise', - 'brick', - 'yellow', - 'lilac', + 'color-1', + 'color-2', + 'color-3', + 'color-4', + 'color-5', + 'color-6', + 'color-7', + 'color-8', + 'color-9', + 'color-10', + 'color-11', + 'color-12', + 'color-13', + 'color-14', + 'color-15', ] export const tagColors = [ @@ -80,8 +81,8 @@ export function getBarColorFromStatus(status: LifecycleToggle, hover?: boolean): export function getGraphColors(isDarkModeOn: boolean): Record { return { axisLabel: isDarkModeOn ? '#fff' : '#2d2d2d', // --text-3000 - axisLine: isDarkModeOn ? '#888' : '#ddd', // --funnel-grid - axis: isDarkModeOn ? '#aaa' : '#999', + axisLine: isDarkModeOn ? '#4b4d58' : '#ddd', // --funnel-grid + axis: isDarkModeOn ? '#4b4d58' : '#999', crosshair: isDarkModeOn ? 'rgba(255,255,255,0.2)' : 'rgba(0,0,0,0.2)', tooltipBackground: '#1dc9b7', tooltipTitle: '#fff', diff --git a/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx b/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx index fcc633bb7dfe0..4e19411f067bb 100644 --- a/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx +++ b/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx @@ -1,6 +1,6 @@ import { loaders } from 'kea-loaders' import { kea, props, key, path, actions, reducers, selectors, listeners, events } from 'kea' -import api, { ACTIVITY_PAGE_SIZE, ActivityLogPaginatedResponse } from 'lib/api' +import api, { ActivityLogPaginatedResponse } from 'lib/api' import { ActivityLogItem, ActivityScope, @@ -19,6 +19,7 @@ import { insightActivityDescriber } from 'scenes/saved-insights/activityDescript import { personActivityDescriber } from 'scenes/persons/activityDescriptions' import { dataManagementActivityDescriber } from 'scenes/data-management/dataManagementDescribers' import { notebookActivityDescriber } from 
'scenes/notebooks/Notebook/notebookActivityDescriber' +import { ACTIVITY_PAGE_SIZE } from 'lib/constants' /** * Having this function inside the `humanizeActivity module was causing very weird test errors in other modules diff --git a/frontend/src/lib/components/BillingAlertsV2.tsx b/frontend/src/lib/components/BillingAlertsV2.tsx index 21a4023a6262a..6b3a0a7b4c3bb 100644 --- a/frontend/src/lib/components/BillingAlertsV2.tsx +++ b/frontend/src/lib/components/BillingAlertsV2.tsx @@ -12,18 +12,26 @@ export function BillingAlertsV2(): JSX.Element | null { const [alertHidden, setAlertHidden] = useState(false) useEffect(() => { + if (billingAlert?.pathName && currentLocation.pathname !== billingAlert?.pathName) { + setAlertHidden(true) + } else { + setAlertHidden(false) + } if (billingAlert) { reportBillingAlertShown(billingAlert) } - }, [billingAlert]) + }, [billingAlert, currentLocation]) if (!billingAlert || alertHidden) { return null } - const showButton = billingAlert.contactSupport || currentLocation.pathname !== urls.organizationBilling() + const showButton = + billingAlert.action || billingAlert.contactSupport || currentLocation.pathname !== urls.organizationBilling() - const buttonProps = billingAlert.contactSupport + const buttonProps = billingAlert.action + ? billingAlert.action + : billingAlert.contactSupport ? 
{ to: 'mailto:sales@posthog.com', children: billingAlert.buttonCTA || 'Contact support', diff --git a/frontend/src/lib/components/BridgePage/BridgePage.scss b/frontend/src/lib/components/BridgePage/BridgePage.scss index fca0fc8ef9599..fb780e9cfe9c4 100644 --- a/frontend/src/lib/components/BridgePage/BridgePage.scss +++ b/frontend/src/lib/components/BridgePage/BridgePage.scss @@ -6,7 +6,9 @@ display: flex; flex-direction: column; flex: 1; - overflow: scroll; + overflow: hidden; + min-height: 100vh; + height: 100%; &::-webkit-scrollbar { width: 0 !important; diff --git a/frontend/src/lib/components/BridgePage/BridgePage.tsx b/frontend/src/lib/components/BridgePage/BridgePage.tsx index d7c270d0f5fe1..e131434765fe5 100644 --- a/frontend/src/lib/components/BridgePage/BridgePage.tsx +++ b/frontend/src/lib/components/BridgePage/BridgePage.tsx @@ -9,7 +9,6 @@ import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { Region } from '~/types' export type BridgePageCommonProps = { - className?: string children?: React.ReactNode footer?: React.ReactNode header?: React.ReactNode @@ -18,7 +17,6 @@ export type BridgePageCommonProps = { sideLogo?: boolean fixedWidth?: boolean leftContainerContent?: JSX.Element - fullScreen?: boolean } interface NoHedgehogProps extends BridgePageCommonProps { @@ -36,7 +34,6 @@ type BridgePageProps = NoHedgehogProps | YesHedgehogProps export function BridgePage({ children, - className, header, footer, view, @@ -46,7 +43,6 @@ export function BridgePage({ fixedWidth = true, leftContainerContent, hedgehog = false, - fullScreen = true, }: BridgePageProps): JSX.Element { const [messageShowing, setMessageShowing] = useState(false) const { preflight } = useValues(preflightLogic) @@ -59,14 +55,7 @@ export function BridgePage({ }, []) return ( -
+
{leftContainerContent || hedgehog ? (
@@ -108,7 +97,7 @@ export function BridgePage({
{children}
-
{footer}
+ {footer &&
{footer}
}
) } diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss index 2dbb9042ea037..b0f5f5a471e2c 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss @@ -12,8 +12,8 @@ flex-direction: column; &--highlighted { - border-color: var(--primary); - outline: 1px solid var(--primary); + border-color: var(--primary-3000); + outline: 1px solid var(--primary-3000); } .ant-alert { @@ -160,7 +160,7 @@ width: 1rem; border-radius: 0.25rem; margin-right: 0.25rem; - background: var(--primary); + background: var(--primary-3000); color: var(--bg-light); line-height: 1rem; font-size: 0.625rem; diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx index f594637987c79..35198f96ed277 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx @@ -1,5 +1,5 @@ import { useValues } from 'kea' -import { allOperatorsMapping, capitalizeFirstLetter, formatPropertyLabel } from 'lib/utils' +import { allOperatorsMapping, capitalizeFirstLetter } from 'lib/utils' import { LocalFilter, toLocalFilters } from 'scenes/insights/filters/ActionFilter/entityFilterLogic' import { humanizePathsEventTypes } from 'scenes/insights/utils' import { apiValueToMathType, MathCategory, MathDefinition, mathsLogic } from 'scenes/trends/mathsLogic' @@ -26,6 +26,7 @@ import { cohortsModel } from '~/models/cohortsModel' import React from 'react' import { isPathsFilter, isTrendsFilter } from 'scenes/insights/sharedUtils' import { + formatPropertyLabel, isAnyPropertyfilter, isCohortPropertyFilter, isPropertyFilterWithOperator, diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx index 
129a365f6d84c..2b85413419f02 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx @@ -107,7 +107,7 @@ export function InsightMeta({ samplingNotice={ insight.filters.sampling_factor && insight.filters.sampling_factor < 1 ? ( - + ) : null } diff --git a/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx b/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx index 9c9ca574f8194..4b101df7bfa1a 100644 --- a/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx +++ b/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx @@ -18,7 +18,7 @@ import yaml from 'react-syntax-highlighter/dist/esm/languages/prism/yaml' import markup from 'react-syntax-highlighter/dist/esm/languages/prism/markup' import http from 'react-syntax-highlighter/dist/esm/languages/prism/http' import sql from 'react-syntax-highlighter/dist/esm/languages/prism/sql' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { Popconfirm } from 'antd' import { PopconfirmProps } from 'antd/lib/popconfirm' import './CodeSnippet.scss' diff --git a/frontend/src/lib/components/CommandBar/ActionInput.tsx b/frontend/src/lib/components/CommandBar/ActionInput.tsx index 879b699ad1930..7ed1dd64d1392 100644 --- a/frontend/src/lib/components/CommandBar/ActionInput.tsx +++ b/frontend/src/lib/components/CommandBar/ActionInput.tsx @@ -30,7 +30,7 @@ export const ActionInput = (): JSX.Element => { className="CommandBar__input" fullWidth prefix={} - suffix={} + suffix={} placeholder={activeFlow?.instruction ?? 'What would you like to do? 
Try some suggestions…'} autoFocus value={input} diff --git a/frontend/src/lib/components/CommandBar/ActionResult.tsx b/frontend/src/lib/components/CommandBar/ActionResult.tsx index a67082f3dfc98..3ae32bd484a14 100644 --- a/frontend/src/lib/components/CommandBar/ActionResult.tsx +++ b/frontend/src/lib/components/CommandBar/ActionResult.tsx @@ -2,7 +2,6 @@ import { useEffect, useRef } from 'react' import { useActions } from 'kea' import { actionBarLogic } from './actionBarLogic' -import { getNameFromActionScope } from './utils' import { CommandResultDisplayable } from '../CommandPalette/commandPaletteLogic' type SearchResultProps = { @@ -25,9 +24,7 @@ export const ActionResult = ({ result, focused }: SearchResultProps): JSX.Elemen return (
{ onMouseEnterResult(result.index) }} @@ -41,11 +38,9 @@ export const ActionResult = ({ result, focused }: SearchResultProps): JSX.Elemen }} ref={ref} > -
- {result.source.scope && ( - {getNameFromActionScope(result.source.scope)} - )} - {result.display} +
+ + {result.display}
diff --git a/frontend/src/lib/components/CommandBar/ActionResults.tsx b/frontend/src/lib/components/CommandBar/ActionResults.tsx index 059954357506e..ed44ab499d03e 100644 --- a/frontend/src/lib/components/CommandBar/ActionResults.tsx +++ b/frontend/src/lib/components/CommandBar/ActionResults.tsx @@ -15,7 +15,9 @@ type ResultsGroupProps = { const ResultsGroup = ({ scope, results, activeResultIndex }: ResultsGroupProps): JSX.Element => { return ( <> -
{getNameFromActionScope(scope)}
+
+ {getNameFromActionScope(scope)} +
{results.map((result) => ( = { + title: 'Components/Command Bar', + component: CommandBar, + decorators: [ + mswDecorator({ + get: { + '/api/projects/:team_id/search': SEARCH_RESULT, + }, + }), + ], + parameters: { + layout: 'fullscreen', + testOptions: { + snapshotTargetSelector: '[data-attr="command-bar"]', + }, + viewMode: 'story', + }, +} +export default meta + +export function Search(): JSX.Element { + const { setCommandBar } = useActions(commandBarLogic) + + useEffect(() => { + setCommandBar(BarStatus.SHOW_SEARCH) + }, []) + + return +} + +export function Actions(): JSX.Element { + const { setCommandBar } = useActions(commandBarLogic) + + useEffect(() => { + setCommandBar(BarStatus.SHOW_ACTIONS) + }, []) + + return +} diff --git a/frontend/src/lib/components/CommandBar/CommandBar.tsx b/frontend/src/lib/components/CommandBar/CommandBar.tsx index 3850051e60551..37ce7466d31d6 100644 --- a/frontend/src/lib/components/CommandBar/CommandBar.tsx +++ b/frontend/src/lib/components/CommandBar/CommandBar.tsx @@ -1,4 +1,4 @@ -import { useRef } from 'react' +import { forwardRef, useRef } from 'react' import { useActions, useValues } from 'kea' import { useOutsideClickHandler } from 'lib/hooks/useOutsideClickHandler' @@ -10,20 +10,38 @@ import './index.scss' import { SearchBar } from './SearchBar' import { ActionBar } from './ActionBar' -const CommandBarOverlay = ({ children }: { children?: React.ReactNode }): JSX.Element => ( -
- {children} -
-) +interface CommandBarOverlayProps { + barStatus: BarStatus + children?: React.ReactNode +} + +const CommandBarOverlay = forwardRef(function CommandBarOverlayInternal( + { barStatus, children }, + ref +): JSX.Element { + return ( +
+
+ {children} +
+
+ ) +}) export function CommandBar(): JSX.Element | null { const containerRef = useRef(null) @@ -37,13 +55,8 @@ export function CommandBar(): JSX.Element | null { } return ( - -
- {barStatus === BarStatus.SHOW_SEARCH ? : } -
+ + {barStatus === BarStatus.SHOW_SEARCH ? : } ) } diff --git a/frontend/src/lib/components/CommandBar/SearchBar.tsx b/frontend/src/lib/components/CommandBar/SearchBar.tsx index fad41fbfa7978..7a4163e487a91 100644 --- a/frontend/src/lib/components/CommandBar/SearchBar.tsx +++ b/frontend/src/lib/components/CommandBar/SearchBar.tsx @@ -1,4 +1,5 @@ import { useMountedLogic } from 'kea' +import { useRef } from 'react' import { searchBarLogic } from './searchBarLogic' @@ -9,11 +10,13 @@ import { SearchTabs } from './SearchTabs' export const SearchBar = (): JSX.Element => { useMountedLogic(searchBarLogic) // load initial results + const inputRef = useRef(null) + return (
- + - +
) } diff --git a/frontend/src/lib/components/CommandBar/SearchBarTab.tsx b/frontend/src/lib/components/CommandBar/SearchBarTab.tsx index f218824187a26..c2dcb75f0917b 100644 --- a/frontend/src/lib/components/CommandBar/SearchBarTab.tsx +++ b/frontend/src/lib/components/CommandBar/SearchBarTab.tsx @@ -1,24 +1,50 @@ -import { useActions } from 'kea' +import { RefObject } from 'react' +import { useActions, useValues } from 'kea' import { resultTypeToName } from './constants' import { searchBarLogic } from './searchBarLogic' import { ResultTypeWithAll } from './types' +import { Spinner } from 'lib/lemon-ui/Spinner' type SearchBarTabProps = { type: ResultTypeWithAll active: boolean count?: number | null + inputRef: RefObject } -export const SearchBarTab = ({ type, active, count }: SearchBarTabProps): JSX.Element => { +export const SearchBarTab = ({ type, active, count, inputRef }: SearchBarTabProps): JSX.Element => { const { setActiveTab } = useActions(searchBarLogic) return (
setActiveTab(type)} + onClick={() => { + setActiveTab(type) + inputRef.current?.focus() + }} > {resultTypeToName[type]} - {count != null && {count}} +
) } + +type CountProps = { + type: ResultTypeWithAll + active: boolean + count?: number | null +} + +const Count = ({ type, active, count }: CountProps): JSX.Element | null => { + const { searchResponseLoading } = useValues(searchBarLogic) + + if (type === 'all') { + return null + } else if (active && searchResponseLoading) { + return + } else if (count != null) { + return {count} + } else { + return + } +} diff --git a/frontend/src/lib/components/CommandBar/SearchInput.tsx b/frontend/src/lib/components/CommandBar/SearchInput.tsx index 40c08c942c5bb..d73baa2dd4cc7 100644 --- a/frontend/src/lib/components/CommandBar/SearchInput.tsx +++ b/frontend/src/lib/components/CommandBar/SearchInput.tsx @@ -4,23 +4,28 @@ import { LemonInput } from '@posthog/lemon-ui' import { KeyboardShortcut } from '~/layout/navigation-3000/components/KeyboardShortcut' import { searchBarLogic } from './searchBarLogic' +import { forwardRef, Ref } from 'react' +import { teamLogic } from 'scenes/teamLogic' -export const SearchInput = (): JSX.Element => { +export const SearchInput = forwardRef(function _SearchInput(_, ref: Ref): JSX.Element { + const { currentTeam } = useValues(teamLogic) const { searchQuery } = useValues(searchBarLogic) const { setSearchQuery } = useActions(searchBarLogic) return (
} + suffix={} autoFocus value={searchQuery} onChange={setSearchQuery} + placeholder={currentTeam ? `Search the ${currentTeam.name} project…` : 'Search…'} />
) -} +}) diff --git a/frontend/src/lib/components/CommandBar/SearchResult.tsx b/frontend/src/lib/components/CommandBar/SearchResult.tsx index af88481772ff5..8c5364ca87101 100644 --- a/frontend/src/lib/components/CommandBar/SearchResult.tsx +++ b/frontend/src/lib/components/CommandBar/SearchResult.tsx @@ -42,8 +42,8 @@ export const SearchResult = ({ result, resultIndex, focused, keyboardFocused }: return (
{ if (isAutoScrolling) { return @@ -63,7 +63,9 @@ export const SearchResult = ({ result, resultIndex, focused, keyboardFocused }: >
{resultTypeToName[result.type]} - {result.name} + + + {location.host} {urlForResult(result)} @@ -74,11 +76,41 @@ export const SearchResult = ({ result, resultIndex, focused, keyboardFocused }: } export const SearchResultSkeleton = (): JSX.Element => ( -
-
- - - -
+
+ + +
) + +type ResultNameProps = { + result: SearchResultType +} + +export const ResultName = ({ result }: ResultNameProps): JSX.Element | null => { + const { type, extra_fields } = result + if (type === 'insight') { + return extra_fields.name ? {extra_fields.name} : {extra_fields.derived_name} + } else if (type === 'feature_flag') { + return {extra_fields.key} + } else if (type === 'notebook') { + return {extra_fields.title} + } else { + return {extra_fields.name} + } +} + +export const ResultDescription = ({ result }: ResultNameProps): JSX.Element | null => { + const { type, extra_fields } = result + if (type === 'feature_flag') { + return extra_fields.name && extra_fields.name !== extra_fields.key ? ( + {extra_fields.name} + ) : ( + No description. + ) + } else if (type === 'notebook') { + return {extra_fields.text_content} + } else { + return extra_fields.description ? {extra_fields.description} : No description. + } +} diff --git a/frontend/src/lib/components/CommandBar/SearchResultPreview.tsx b/frontend/src/lib/components/CommandBar/SearchResultPreview.tsx new file mode 100644 index 0000000000000..f91b09a865dd3 --- /dev/null +++ b/frontend/src/lib/components/CommandBar/SearchResultPreview.tsx @@ -0,0 +1,28 @@ +import { useValues } from 'kea' + +import { resultTypeToName } from './constants' +import { searchBarLogic } from './searchBarLogic' + +import { ResultDescription, ResultName } from 'lib/components/CommandBar/SearchResult' + +export const SearchResultPreview = (): JSX.Element | null => { + const { activeResultIndex, filterSearchResults } = useValues(searchBarLogic) + + if (!filterSearchResults || filterSearchResults.length === 0) { + return null + } + + const result = filterSearchResults[activeResultIndex] + + return ( +
+
{resultTypeToName[result.type]}
+
+ +
+
+ +
+
+ ) +} diff --git a/frontend/src/lib/components/CommandBar/SearchResults.tsx b/frontend/src/lib/components/CommandBar/SearchResults.tsx index 1b2ab8ee00bd7..9a1e217d69b4d 100644 --- a/frontend/src/lib/components/CommandBar/SearchResults.tsx +++ b/frontend/src/lib/components/CommandBar/SearchResults.tsx @@ -4,37 +4,43 @@ import { DetectiveHog } from '../hedgehogs' import { searchBarLogic } from './searchBarLogic' import { SearchResult, SearchResultSkeleton } from './SearchResult' +import { SearchResultPreview } from './SearchResultPreview' export const SearchResults = (): JSX.Element => { const { filterSearchResults, searchResponseLoading, activeResultIndex, keyboardResultIndex } = useValues(searchBarLogic) return ( -
- {searchResponseLoading && ( - <> - - - - - )} - {!searchResponseLoading && filterSearchResults?.length === 0 && ( -
-

No results

-

This doesn't happen often, but we're stumped!

- -
- )} - {!searchResponseLoading && - filterSearchResults?.map((result, index) => ( - - ))} +
+
+ {searchResponseLoading && ( + <> + + + + + )} + {!searchResponseLoading && filterSearchResults?.length === 0 && ( +
+

No results

+

This doesn't happen often, but we're stumped!

+ +
+ )} + {!searchResponseLoading && + filterSearchResults?.map((result, index) => ( + + ))} +
+
+ +
) } diff --git a/frontend/src/lib/components/CommandBar/SearchTabs.tsx b/frontend/src/lib/components/CommandBar/SearchTabs.tsx index bc83ed1110a76..fe6e9a9edb2ad 100644 --- a/frontend/src/lib/components/CommandBar/SearchTabs.tsx +++ b/frontend/src/lib/components/CommandBar/SearchTabs.tsx @@ -1,10 +1,15 @@ import { useValues } from 'kea' +import { RefObject } from 'react' import { searchBarLogic } from './searchBarLogic' import { SearchBarTab } from './SearchBarTab' import { ResultType } from './types' -export const SearchTabs = (): JSX.Element | null => { +type SearchTabsProps = { + inputRef: RefObject +} + +export const SearchTabs = ({ inputRef }: SearchTabsProps): JSX.Element | null => { const { searchResponse, activeTab } = useValues(searchBarLogic) if (!searchResponse) { @@ -13,9 +18,15 @@ export const SearchTabs = (): JSX.Element | null => { return (
- + {Object.entries(searchResponse.counts).map(([type, count]) => ( - + ))}
) diff --git a/frontend/src/lib/components/CommandBar/constants.ts b/frontend/src/lib/components/CommandBar/constants.ts index 4b0d973cdb95a..14396bb019f20 100644 --- a/frontend/src/lib/components/CommandBar/constants.ts +++ b/frontend/src/lib/components/CommandBar/constants.ts @@ -8,6 +8,7 @@ export const resultTypeToName: Record = { experiment: 'Experiments', feature_flag: 'Feature flags', insight: 'Insights', + notebook: 'Notebooks', } export const actionScopeToName: Record = { diff --git a/frontend/src/lib/components/CommandBar/searchBarLogic.ts b/frontend/src/lib/components/CommandBar/searchBarLogic.ts index f8a40bb93b6d5..91c649fc9eb89 100644 --- a/frontend/src/lib/components/CommandBar/searchBarLogic.ts +++ b/frontend/src/lib/components/CommandBar/searchBarLogic.ts @@ -25,17 +25,23 @@ export const searchBarLogic = kea([ setIsAutoScrolling: (scrolling: boolean) => ({ scrolling }), openResult: (index: number) => ({ index }), }), - loaders({ + loaders(({ values }) => ({ searchResponse: [ null as SearchResponse | null, { - setSearchQuery: async ({ query }, breakpoint) => { + loadSearchResponse: async (_, breakpoint) => { await breakpoint(300) - return await api.get(`api/projects/@current/search?q=${query}`) + if (values.activeTab === 'all') { + return await api.get(`api/projects/@current/search?q=${values.searchQuery}`) + } else { + return await api.get( + `api/projects/@current/search?q=${values.searchQuery}&entities=${values.activeTab}` + ) + } }, }, ], - }), + })), reducers({ searchQuery: ['', { setSearchQuery: (_, { query }) => query }], keyboardResultIndex: [ @@ -91,6 +97,8 @@ export const searchBarLogic = kea([ router.actions.push(urlForResult(result)) actions.hideCommandBar() }, + setSearchQuery: actions.loadSearchResponse, + setActiveTab: actions.loadSearchResponse, })), afterMount(({ actions, values, cache }) => { // load initial results @@ -149,6 +157,8 @@ export const urlForResult = (result: SearchResult): string => { return 
urls.featureFlag(result.result_id) case 'insight': return urls.insightView(result.result_id as InsightShortId) + case 'notebook': + return urls.notebook(result.result_id) default: throw new Error(`No action for type '${result.type}' defined.`) } diff --git a/frontend/src/lib/components/CommandBar/types.ts b/frontend/src/lib/components/CommandBar/types.ts index 4afc7e3ff66d2..1f3278f3727f6 100644 --- a/frontend/src/lib/components/CommandBar/types.ts +++ b/frontend/src/lib/components/CommandBar/types.ts @@ -4,11 +4,16 @@ export enum BarStatus { SHOW_ACTIONS = 'show_actions', } -export type ResultType = 'action' | 'cohort' | 'insight' | 'dashboard' | 'experiment' | 'feature_flag' +export type ResultType = 'action' | 'cohort' | 'insight' | 'dashboard' | 'experiment' | 'feature_flag' | 'notebook' export type ResultTypeWithAll = ResultType | 'all' -export type SearchResult = { result_id: string; type: ResultType; name: string | null } +export type SearchResult = { + result_id: string + type: ResultType + name: string | null + extra_fields: Record +} export type SearchResponse = { results: SearchResult[] diff --git a/frontend/src/lib/components/CommandPalette/CommandPalette.scss b/frontend/src/lib/components/CommandPalette/CommandPalette.scss index 0972ea918e81e..55079ad3ac496 100644 --- a/frontend/src/lib/components/CommandPalette/CommandPalette.scss +++ b/frontend/src/lib/components/CommandPalette/CommandPalette.scss @@ -109,7 +109,7 @@ cursor: pointer; &::after { - background: var(--primary); + background: var(--primary-3000); } } @@ -121,5 +121,5 @@ .palette__icon { display: flex; align-items: center; - font-size: 1rem; + font-size: 1.25rem; } diff --git a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx index 1555276502b3a..2109d2ddd47fd 100644 --- a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx +++ 
b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx @@ -6,7 +6,8 @@ import { dashboardsModel } from '~/models/dashboardsModel' import { Parser } from 'expr-eval' import { DashboardType, InsightType } from '~/types' import api from 'lib/api' -import { copyToClipboard, isMobile, isURL, sample, uniqueBy } from 'lib/utils' +import { isMobile, isURL, uniqueBy } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { userLogic } from 'scenes/userLogic' import { personalAPIKeysLogic } from '../../../scenes/settings/user/personalAPIKeysLogic' import { teamLogic } from 'scenes/teamLogic' @@ -16,38 +17,47 @@ import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { urls } from 'scenes/urls' import { newDashboardLogic } from 'scenes/dashboard/newDashboardLogic' import { - IconAction, IconApps, - IconBarChart, - IconCalculate, - IconCheckmark, - IconCohort, - IconComment, - IconCorporate, - IconCottage, - IconEmojiPeople, - IconFlag, - IconFunnelHorizontal, - IconGauge, + IconCalculator, + IconChat, + IconCheck, + IconCursor, + IconDashboard, + IconDatabase, + IconExternal, + IconFunnels, + IconGear, IconGithub, + IconGraph, + IconHogQL, + IconHome, IconKeyboard, + IconLeave, + IconLifecycle, + IconList, IconLive, - IconLockOpen, - IconLogout, - IconOpenInNew, - IconPerson, - IconPersonFilled, - IconRecording, + IconNotebook, + IconPageChart, + IconPeople, + IconPeopleFilled, + IconPieChart, + IconRetention, + IconRewindPlay, + IconRocket, IconServer, - IconSettings, - IconTableChart, - IconTools, - IconTrendingFlat, - IconTrendingUp, -} from 'lib/lemon-ui/icons' + IconStickiness, + IconTestTube, + IconThoughtBubble, + IconToggle, + IconToolbar, + IconTrends, + IconUnlock, + IconUserPaths, +} from '@posthog/icons' import { ProfilePicture } from 'lib/lemon-ui/ProfilePicture' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { insightTypeURL } 
from 'scenes/insights/utils' // If CommandExecutor returns CommandFlow, flow will be entered export type CommandExecutor = () => CommandFlow | void @@ -239,7 +249,7 @@ export const commandPaletteLogic = kea([ key: 'custom_dashboards', resolver: dashboards.map((dashboard: DashboardType) => ({ key: `dashboard_${dashboard.id}`, - icon: IconTableChart, + icon: IconPageChart, display: `Go to dashboard: ${dashboard.name}`, executor: () => { const { push } = router.actions @@ -316,7 +326,7 @@ export const commandPaletteLogic = kea([ .search(argument) .slice(0, RESULTS_MAX) .map((result) => result.item) - : sample(fusableResults, RESULTS_MAX - guaranteedResults.length) + : fusableResults.slice(0, RESULTS_MAX) return guaranteedResults.concat(fusedResults) }, ], @@ -394,7 +404,7 @@ export const commandPaletteLogic = kea([ key: `person-${person.distinct_ids[0]}`, resolver: [ { - icon: IconPersonFilled, + icon: IconPeopleFilled, display: `View person ${input}`, executor: () => { const { push } = router.actions @@ -418,67 +428,128 @@ export const commandPaletteLogic = kea([ prefixes: ['open', 'visit'], resolver: [ { - icon: IconGauge, + icon: IconDashboard, display: 'Go to Dashboards', executor: () => { push(urls.dashboards()) }, }, { - icon: IconBarChart, + icon: IconHome, + display: 'Go to Project homepage', + executor: () => { + push(urls.projectHomepage()) + }, + }, + { + icon: IconGraph, display: 'Go to Insights', executor: () => { push(urls.savedInsights()) }, }, { - icon: IconTrendingUp, - display: 'Go to Trends', + icon: IconTrends, + display: 'Create a new Trend insight', executor: () => { // TODO: Don't reset insight on change push(urls.insightNew({ insight: InsightType.TRENDS })) }, }, { - icon: IconFunnelHorizontal, - display: 'Go to Funnels', + icon: IconFunnels, + display: 'Create a new Funnel insight', executor: () => { // TODO: Don't reset insight on change push(urls.insightNew({ insight: InsightType.FUNNELS })) }, }, { - icon: IconTrendingFlat, - display: 'Go 
to Retention', + icon: IconRetention, + display: 'Create a new Retention insight', executor: () => { // TODO: Don't reset insight on change push(urls.insightNew({ insight: InsightType.RETENTION })) }, }, { - icon: IconEmojiPeople, - display: 'Go to Paths', + icon: IconUserPaths, + display: 'Create a new Paths insight', executor: () => { // TODO: Don't reset insight on change push(urls.insightNew({ insight: InsightType.PATHS })) }, }, + { + icon: IconStickiness, + display: 'Create a new Stickiness insight', + executor: () => { + // TODO: Don't reset insight on change + push(urls.insightNew({ insight: InsightType.STICKINESS })) + }, + }, + { + icon: IconLifecycle, + display: 'Create a new Lifecycle insight', + executor: () => { + // TODO: Don't reset insight on change + push(urls.insightNew({ insight: InsightType.LIFECYCLE })) + }, + }, + { + icon: IconHogQL, + display: 'Create a new HogQL insight', + synonyms: ['hogql', 'sql'], + executor: () => { + // TODO: Don't reset insight on change + push(insightTypeURL[InsightType.SQL]) + }, + }, + { + icon: IconNotebook, + display: 'Go to Notebooks', + executor: () => { + push(urls.notebooks()) + }, + }, { icon: IconLive, - display: 'Go to Events', + display: 'Go to Events explorer', executor: () => { push(urls.events()) }, }, { - icon: IconAction, + icon: IconDatabase, + display: 'Go to Data management', + synonyms: ['events'], + executor: () => { + push(urls.eventDefinitions()) + }, + }, + { + icon: IconCursor, display: 'Go to Actions', executor: () => { push(urls.actions()) }, }, { - icon: IconPerson, + icon: IconList, + display: 'Go to Properties', + executor: () => { + push(urls.propertyDefinitions()) + }, + }, + { + icon: IconThoughtBubble, + display: 'Go to Annotations', + executor: () => { + push(urls.annotations()) + }, + }, + { + icon: IconPeople, display: 'Go to Persons', synonyms: ['people'], executor: () => { @@ -486,77 +557,110 @@ export const commandPaletteLogic = kea([ }, }, { - icon: IconCohort, + icon: 
IconPeople, display: 'Go to Cohorts', executor: () => { push(urls.cohorts()) }, }, + ...(values.featureFlags[FEATURE_FLAGS.WEB_ANALYTICS] + ? [ + { + icon: IconPieChart, + display: 'Go to Web analytics', + executor: () => { + push(urls.webAnalytics()) + }, + }, + ] + : []), + ...(values.featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE] + ? [ + { + icon: IconServer, + display: 'Go to Data warehouse', + executor: () => { + push(urls.dataWarehouse()) + }, + }, + ] + : []), + { + display: 'Go to Session replay', + icon: IconRewindPlay, + executor: () => { + push(urls.replay()) + }, + }, { - icon: IconFlag, - display: 'Go to Feature Flags', - synonyms: ['feature flags', 'a/b tests'], + display: 'Go to Surveys', + icon: IconChat, + executor: () => { + push(urls.surveys()) + }, + }, + { + icon: IconToggle, + display: 'Go to Feature flags', executor: () => { push(urls.featureFlags()) }, }, { - icon: IconComment, - display: 'Go to Annotations', + icon: IconTestTube, + display: 'Go to A/B testing', executor: () => { - push(urls.annotations()) + push(urls.experiments()) }, }, { - icon: IconCorporate, - display: 'Go to Team members', - synonyms: ['organization', 'members', 'invites', 'teammates'], + icon: IconRocket, + display: 'Go to Early access features', executor: () => { - push(urls.settings('organization')) + push(urls.earlyAccessFeatures()) }, }, { - icon: IconCottage, - display: 'Go to project homepage', + icon: IconApps, + display: 'Go to Apps', + synonyms: ['integrations'], executor: () => { - push(urls.projectHomepage()) + push(urls.projectApps()) }, }, { - icon: IconSettings, - display: 'Go to Project settings', + icon: IconToolbar, + display: 'Go to Toolbar', executor: () => { - push(urls.settings('project')) + push(urls.toolbarLaunch()) }, }, { - icon: () => ( - - ), - display: 'Go to My settings', - synonyms: ['account'], + icon: IconGear, + display: 'Go to Project settings', executor: () => { - push(urls.settings('user')) + push(urls.settings('project')) }, }, { - 
icon: IconApps, - display: 'Go to Apps', - synonyms: ['integrations'], + icon: IconGear, + display: 'Go to Organization settings', executor: () => { - push(urls.projectApps()) + push(urls.settings('organization')) }, }, { - icon: IconServer, - display: 'Go to Instance status & settings', - synonyms: ['redis', 'celery', 'django', 'postgres', 'backend', 'service', 'online'], + icon: () => ( + + ), + display: 'Go to User settings', + synonyms: ['account', 'profile'], executor: () => { - push(urls.instanceStatus()) + push(urls.settings('user')) }, }, { - icon: IconLogout, + icon: IconLeave, display: 'Log out', executor: () => { userLogic.actions.logout() @@ -574,7 +678,7 @@ export const commandPaletteLogic = kea([ preflightLogic.values.preflight?.is_debug || preflightLogic.values.preflight?.instance_preferences?.debug_queries ? { - icon: IconTools, + icon: IconDatabase, display: 'Debug ClickHouse Queries', executor: () => openCHQueriesDebugModal(), } @@ -585,7 +689,7 @@ export const commandPaletteLogic = kea([ key: 'debug-copy-session-recording-url', scope: GLOBAL_COMMAND_SCOPE, resolver: { - icon: IconRecording, + icon: IconRewindPlay, display: 'Debug: Copy the session recording link to clipboard', executor: () => { const url = posthog.get_session_replay_url({ withTimestamp: true, timestampLookBack: 30 }) @@ -607,7 +711,7 @@ export const commandPaletteLogic = kea([ return isNaN(result) ? null : { - icon: IconCalculate, + icon: IconCalculator, display: `= ${result}`, guarantee: true, executor: () => { @@ -627,7 +731,7 @@ export const commandPaletteLogic = kea([ resolver: (argument) => { const results: CommandResultTemplate[] = (teamLogic.values.currentTeam?.app_urls ?? 
[]).map( (url: string) => ({ - icon: IconOpenInNew, + icon: IconExternal, display: `Open ${url}`, synonyms: [`Visit ${url}`], executor: () => { @@ -637,7 +741,7 @@ export const commandPaletteLogic = kea([ ) if (argument && isURL(argument)) { results.push({ - icon: IconOpenInNew, + icon: IconExternal, display: `Open ${argument}`, synonyms: [`Visit ${argument}`], executor: () => { @@ -646,7 +750,7 @@ export const commandPaletteLogic = kea([ }) } results.push({ - icon: IconOpenInNew, + icon: IconExternal, display: 'Open PostHog Docs', synonyms: ['technical documentation'], executor: () => { @@ -661,7 +765,7 @@ export const commandPaletteLogic = kea([ key: 'create-personal-api-key', scope: GLOBAL_COMMAND_SCOPE, resolver: { - icon: IconLockOpen, + icon: IconUnlock, display: 'Create Personal API Key', executor: () => ({ instruction: 'Give your key a label', @@ -670,7 +774,7 @@ export const commandPaletteLogic = kea([ resolver: (argument) => { if (argument?.length) { return { - icon: IconLockOpen, + icon: IconUnlock, display: `Create Key "${argument}"`, executor: () => { personalAPIKeysLogic.actions.createKey(argument) @@ -688,7 +792,7 @@ export const commandPaletteLogic = kea([ key: 'create-dashboard', scope: GLOBAL_COMMAND_SCOPE, resolver: { - icon: IconGauge, + icon: IconDashboard, display: 'Create Dashboard', executor: () => ({ instruction: 'Name your new dashboard', @@ -697,7 +801,7 @@ export const commandPaletteLogic = kea([ resolver: (argument) => { if (argument?.length) { return { - icon: IconGauge, + icon: IconDashboard, display: `Create Dashboard "${argument}"`, executor: () => { newDashboardLogic.actions.addDashboard({ name: argument }) @@ -714,7 +818,7 @@ export const commandPaletteLogic = kea([ key: 'share-feedback', scope: GLOBAL_COMMAND_SCOPE, resolver: { - icon: IconComment, + icon: IconThoughtBubble, display: 'Share Feedback', synonyms: ['send opinion', 'ask question', 'message posthog', 'github issue'], executor: () => ({ @@ -722,12 +826,12 @@ export 
const commandPaletteLogic = kea([ resolver: [ { display: 'Send Message Directly to PostHog', - icon: IconComment, + icon: IconThoughtBubble, executor: () => ({ instruction: "What's on your mind?", - icon: IconComment, + icon: IconThoughtBubble, resolver: (argument) => ({ - icon: IconComment, + icon: IconThoughtBubble, display: 'Send', executor: !argument?.length ? undefined @@ -735,7 +839,7 @@ export const commandPaletteLogic = kea([ posthog.capture('palette feedback', { message: argument }) return { resolver: { - icon: IconCheckmark, + icon: IconCheck, display: 'Message Sent!', executor: true, }, diff --git a/frontend/src/lib/components/CompactList/CompactList.scss b/frontend/src/lib/components/CompactList/CompactList.scss index cd329a1d8e4f7..930ea6f17b1b1 100644 --- a/frontend/src/lib/components/CompactList/CompactList.scss +++ b/frontend/src/lib/components/CompactList/CompactList.scss @@ -31,4 +31,14 @@ overflow: auto auto; padding: 0 0.5rem 0.5rem; } + + .LemonButton { + font-family: var(--font-sans) !important; + } + + .secondary-text { + .posthog-3000 & { + color: var(--text-secondary); + } + } } diff --git a/frontend/src/lib/components/CopyToClipboard.tsx b/frontend/src/lib/components/CopyToClipboard.tsx index bf19ae2d3f782..0e85ee60317d8 100644 --- a/frontend/src/lib/components/CopyToClipboard.tsx +++ b/frontend/src/lib/components/CopyToClipboard.tsx @@ -1,12 +1,10 @@ import { HTMLProps } from 'react' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { IconCopy } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' -interface InlineProps extends HTMLProps { - children?: JSX.Element | string - explicitValue?: string +interface InlinePropsBase extends HTMLProps { description?: string /** Makes text selectable instead of copying on click anywhere */ selectable?: boolean @@ -16,6 +14,15 @@ interface InlineProps extends 
HTMLProps { iconPosition?: 'end' | 'start' style?: React.CSSProperties } +interface InlinePropsWithStringInside extends InlinePropsBase { + children: string + explicitValue?: string +} +interface InlinePropsWithJSXInside extends InlinePropsBase { + children?: JSX.Element + explicitValue: string +} +type InlineProps = InlinePropsWithStringInside | InlinePropsWithJSXInside export function CopyToClipboardInline({ children, @@ -29,10 +36,6 @@ export function CopyToClipboardInline({ style, ...props }: InlineProps): JSX.Element { - if (typeof children !== 'string' && !explicitValue) { - throw new Error('CopyToClipboardInline must have a string child or explicitValue prop') - } - const copy = async (): Promise => await copyToClipboard((explicitValue ?? children) as string, description) const content = ( @@ -54,7 +57,7 @@ export function CopyToClipboardInline({ onClick={!selectable ? copy : undefined} {...props} > - {children} + {children && {children}} } diff --git a/frontend/src/lib/components/DateFilter/DateFilter.tsx b/frontend/src/lib/components/DateFilter/DateFilter.tsx index 01bb247117747..254493add06ed 100644 --- a/frontend/src/lib/components/DateFilter/DateFilter.tsx +++ b/frontend/src/lib/components/DateFilter/DateFilter.tsx @@ -3,14 +3,20 @@ import { dateMapping, dateFilterToText, uuid } from 'lib/utils' import { DateMappingOption } from '~/types' import { dayjs } from 'lib/dayjs' import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { CUSTOM_OPTION_DESCRIPTION, CUSTOM_OPTION_KEY, CUSTOM_OPTION_VALUE, dateFilterLogic } from './dateFilterLogic' +import { dateFilterLogic } from './dateFilterLogic' import { RollingDateRangeFilter } from './RollingDateRangeFilter' import { useActions, useValues } from 'kea' import { LemonButtonWithDropdown, LemonDivider, LemonButton, LemonButtonProps } from '@posthog/lemon-ui' import { IconCalendar } from 'lib/lemon-ui/icons' import { LemonCalendarSelect } from 'lib/lemon-ui/LemonCalendar/LemonCalendarSelect' import { 
LemonCalendarRange } from 'lib/lemon-ui/LemonCalendarRange/LemonCalendarRange' -import { DateFilterLogicProps, DateFilterView } from 'lib/components/DateFilter/types' +import { + CUSTOM_OPTION_DESCRIPTION, + CUSTOM_OPTION_KEY, + CUSTOM_OPTION_VALUE, + DateFilterLogicProps, + DateFilterView, +} from 'lib/components/DateFilter/types' import { Placement } from '@floating-ui/react' export interface DateFilterProps { diff --git a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss index 8f99bb64c0bde..3d18b2e5b2d96 100644 --- a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss +++ b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss @@ -45,9 +45,15 @@ line-height: 1.25rem; align-items: center; - input { + .LemonInput { width: 3rem; - text-align: center; + min-height: 0; + padding: 0; + border: none; + + input { + text-align: center; + } } .RollingDateRangeFilter__counter__step { diff --git a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx index c1371a66414fe..6b207fb92c48a 100644 --- a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx +++ b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx @@ -1,7 +1,6 @@ -import { Input } from 'antd' import { DateOption, rollingDateRangeFilterLogic } from './rollingDateRangeFilterLogic' import { useActions, useValues } from 'kea' -import { LemonButton, LemonSelect, LemonSelectOptions } from '@posthog/lemon-ui' +import { LemonButton, LemonInput, LemonSelect, LemonSelectOptions } from '@posthog/lemon-ui' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { dayjs } from 'lib/dayjs' import clsx from 'clsx' @@ -38,11 +37,6 @@ export function RollingDateRangeFilter({ useActions(rollingDateRangeFilterLogic(logicProps)) const { counter, dateOption, formattedDate } = 
useValues(rollingDateRangeFilterLogic(logicProps)) - const onInputChange = (event: React.ChangeEvent): void => { - const newValue = event.target.value ? parseFloat(event.target.value) : undefined - setCounter(newValue) - } - return ( - - setCounter(value)} /> ([ path(['lib', 'components', 'DateFilter', 'DateFilterLogic']), diff --git a/frontend/src/lib/components/DateFilter/types.ts b/frontend/src/lib/components/DateFilter/types.ts index 5d65aed3d4cfb..63bbd2c29303a 100644 --- a/frontend/src/lib/components/DateFilter/types.ts +++ b/frontend/src/lib/components/DateFilter/types.ts @@ -15,3 +15,7 @@ export type DateFilterLogicProps = { dateOptions?: DateMappingOption[] isDateFormatted?: boolean } + +export const CUSTOM_OPTION_KEY = 'Custom' +export const CUSTOM_OPTION_VALUE = 'No date range override' +export const CUSTOM_OPTION_DESCRIPTION = 'Use the original date ranges of insights' diff --git a/frontend/src/lib/components/DebugNotice.tsx b/frontend/src/lib/components/DebugNotice.tsx index 11d70ba34054d..4ceb631d56d77 100644 --- a/frontend/src/lib/components/DebugNotice.tsx +++ b/frontend/src/lib/components/DebugNotice.tsx @@ -29,7 +29,7 @@ export function DebugNotice(): JSX.Element | null { return (
setNoticeHidden(true)}> -
+
DEBUG mode -
{propertyType}
-
- ) : ( - <> - ) -} - -function OwnerDropdown(): JSX.Element { - const { members } = useValues(membersLogic) - const { localDefinition } = useValues(definitionPopoverLogic) - const { setLocalDefinition } = useActions(definitionPopoverLogic) - - return ( - - ) -} - export const DefinitionPopover = { Wrapper, Header, @@ -289,6 +246,4 @@ export const DefinitionPopover = { Grid, Section, Card, - OwnerDropdown, - Type, } diff --git a/frontend/src/lib/components/DropdownSelector/DropdownSelector.scss b/frontend/src/lib/components/DropdownSelector/DropdownSelector.scss deleted file mode 100644 index b8ffa8d145235..0000000000000 --- a/frontend/src/lib/components/DropdownSelector/DropdownSelector.scss +++ /dev/null @@ -1,25 +0,0 @@ -.dropdown-selector { - padding: 0.5rem; - border: 1px solid var(--border-light); - border-radius: var(--radius); - display: flex; - align-items: center; - cursor: pointer; - - &.disabled { - color: var(--muted); - cursor: not-allowed; - } - - &.compact { - padding: 0.333rem 0.5rem; - } - - .dropdown-arrow { - display: flex; - align-items: center; - padding-left: 4px; - font-size: 1.2em; - color: var(--muted-alt); - } -} diff --git a/frontend/src/lib/components/DropdownSelector/DropdownSelector.tsx b/frontend/src/lib/components/DropdownSelector/DropdownSelector.tsx deleted file mode 100644 index abe0e061c8827..0000000000000 --- a/frontend/src/lib/components/DropdownSelector/DropdownSelector.tsx +++ /dev/null @@ -1,94 +0,0 @@ -/* Custom dropdown selector with an icon a help caption */ -import { Dropdown, Menu } from 'antd' -import clsx from 'clsx' -import { IconArrowDropDown } from 'lib/lemon-ui/icons' -import './DropdownSelector.scss' - -interface DropdownSelectorProps { - label?: string - value: string | null - onValueChange: (value: string) => void - options: DropdownOption[] - hideDescriptionOnDisplay?: boolean // Hides the description support text on the main display component (i.e. 
only shown in the dropdown menu) - disabled?: boolean - compact?: boolean -} - -interface DropdownOption { - key: string - label: string - description?: string - icon: JSX.Element - hidden?: boolean -} - -interface SelectItemInterface { - icon: JSX.Element - label: string - description?: string - onClick: () => void -} - -function SelectItem({ icon, label, description, onClick }: SelectItemInterface): JSX.Element { - return ( -
-
- {icon} -
{label}
-
- {description &&
{description}
} -
- ) -} - -export function DropdownSelector({ - label, - value, - onValueChange, - options, - hideDescriptionOnDisplay, - disabled, - compact, -}: DropdownSelectorProps): JSX.Element { - const selectedOption = options.find((opt) => opt.key === value) - - const menu = ( - - {options.map(({ key, hidden, ...props }) => { - if (hidden) { - return null - } - return ( - - onValueChange(key)} /> - - ) - })} - - ) - - return ( - <> - {label && } - -
e.preventDefault()} - > -
- {selectedOption && ( - {}} - description={hideDescriptionOnDisplay ? undefined : selectedOption.description} - /> - )} -
-
- -
-
-
- - ) -} diff --git a/frontend/src/lib/components/EditableField/EditableField.scss b/frontend/src/lib/components/EditableField/EditableField.scss index c58f63e9d2114..3bd0610a9ff90 100644 --- a/frontend/src/lib/components/EditableField/EditableField.scss +++ b/frontend/src/lib/components/EditableField/EditableField.scss @@ -4,7 +4,7 @@ max-width: 100%; &:not(.EditableField--multiline) { - line-height: 2rem; + line-height: 1.15em; } i { @@ -23,14 +23,13 @@ align-items: center; width: fit-content; max-width: calc(100% + 0.5rem); - min-height: 2rem; padding: 0.25rem; // Some padding to give the focus outline more breathing space margin: -0.25rem; white-space: pre-wrap; overflow: auto; } - &--editing .EditableField__highlight { + &.EditableField--editing .EditableField__highlight { flex-grow: 1; align-items: flex-end; width: auto; @@ -39,6 +38,23 @@ border-radius: var(--radius); } + &.EditableField--underlined { + .EditableField__highlight { + padding: 0; + margin: 0; + } + + &.EditableField--editing .EditableField__highlight { + outline: none; + + input { + text-decoration: underline; + text-decoration-color: var(--muted); + text-underline-offset: 0.5em; + } + } + } + .EditableField__autosize { align-self: center; min-width: 0; diff --git a/frontend/src/lib/components/EditableField/EditableField.tsx b/frontend/src/lib/components/EditableField/EditableField.tsx index 40e61b3e57d08..2070ea7f1cbe3 100644 --- a/frontend/src/lib/components/EditableField/EditableField.tsx +++ b/frontend/src/lib/components/EditableField/EditableField.tsx @@ -22,11 +22,14 @@ export interface EditableFieldProps { multiline?: boolean /** Whether to render the content as Markdown in view mode. */ markdown?: boolean - compactButtons?: boolean + compactButtons?: boolean | 'xsmall' // The 'xsmall' is somewhat hacky, but necessary for 3000 breadcrumbs /** Whether this field should be gated behind a "paywall". */ paywall?: boolean /** Controlled mode. 
*/ mode?: 'view' | 'edit' + onModeToggle?: (newMode: 'view' | 'edit') => void + /** @default 'outlined' */ + editingIndication?: 'outlined' | 'underlined' className?: string style?: React.CSSProperties 'data-attr'?: string @@ -53,6 +56,8 @@ export function EditableField({ compactButtons = false, paywall = false, mode, + onModeToggle, + editingIndication = 'outlined', className, style, 'data-attr': dataAttr, @@ -60,13 +65,16 @@ export function EditableField({ notice, }: EditableFieldProps): JSX.Element { const [localIsEditing, setLocalIsEditing] = useState(false) - const [tentativeValue, setTentativeValue] = useState(value) + const [localTentativeValue, setLocalTentativeValue] = useState(value) useEffect(() => { - setTentativeValue(value) + setLocalTentativeValue(value) }, [value]) + useEffect(() => { + setLocalIsEditing(mode === 'edit') + }, [mode]) - const isSaveable = !minLength || tentativeValue.length >= minLength + const isSaveable = !minLength || localTentativeValue.length >= minLength const mouseDownOnCancelButton = (e: React.MouseEvent): void => { // if saveOnBlur is set the onBlur handler of the input fires before the onClick event of the button @@ -76,12 +84,14 @@ export function EditableField({ const cancel = (): void => { setLocalIsEditing(false) - setTentativeValue(value) + setLocalTentativeValue(value) + onModeToggle?.('view') } const save = (): void => { - onSave?.(tentativeValue) + onSave?.(localTentativeValue) setLocalIsEditing(false) + onModeToggle?.('view') } const isEditing = !paywall && (mode === 'edit' || localIsEditing) @@ -107,6 +117,7 @@ export function EditableField({ 'EditableField', multiline && 'EditableField--multiline', isEditing && 'EditableField--editing', + editingIndication === 'underlined' && 'EditableField--underlined', className )} data-attr={dataAttr} @@ -127,12 +138,12 @@ export function EditableField({ {multiline ? 
( { onChange?.(e.target.value) - setTentativeValue(e.target.value) + setLocalTentativeValue(e.target.value) }} - onBlur={saveOnBlur ? (tentativeValue !== value ? save : cancel) : undefined} + onBlur={saveOnBlur ? (localTentativeValue !== value ? save : cancel) : undefined} onKeyDown={handleKeyDown} placeholder={placeholder} minLength={minLength} @@ -142,12 +153,12 @@ export function EditableField({ ) : ( { onChange?.(e.target.value) - setTentativeValue(e.target.value) + setLocalTentativeValue(e.target.value) }} - onBlur={saveOnBlur ? (tentativeValue !== value ? save : cancel) : undefined} + onBlur={saveOnBlur ? (localTentativeValue !== value ? save : cancel) : undefined} onKeyDown={handleKeyDown} placeholder={placeholder} minLength={minLength} @@ -155,7 +166,7 @@ export function EditableField({ autoFocus={autoFocus} /> )} - {!mode && ( + {(!mode || !!onModeToggle) && (
{markdown && ( @@ -164,7 +175,7 @@ export function EditableField({ )} ) : ( <> - {tentativeValue && markdown ? ( - {tentativeValue} + {localTentativeValue && markdown ? ( + {localTentativeValue} ) : ( - tentativeValue || {placeholder} + localTentativeValue || {placeholder} )} - {!mode && ( + {(!mode || !!onModeToggle) && (
} - size={compactButtons ? 'small' : undefined} - onClick={() => setLocalIsEditing(true)} + size={ + typeof compactButtons === 'string' + ? compactButtons + : compactButtons + ? 'small' + : undefined + } + onClick={() => { + setLocalIsEditing(true) + onModeToggle?.('edit') + }} data-attr={`edit-prop-${name}`} disabled={paywall} noPadding diff --git a/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss b/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss index 17b0dded01c1d..c0264a96288f6 100644 --- a/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss +++ b/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss @@ -4,14 +4,14 @@ border-radius: 4px; &.SelectableElement--selected { - background: var(--primary); + background: var(--primary-3000); } &:hover { - background: var(--primary-light); + background: var(--primary-3000-hover); } &:active { - background: var(--primary-dark); + background: var(--primary-3000-active); } } diff --git a/frontend/src/lib/components/Map/Maplibre.scss b/frontend/src/lib/components/Map/Maplibre.scss index f4c4cdeb2fda7..2a7688a3f23af 100644 --- a/frontend/src/lib/components/Map/Maplibre.scss +++ b/frontend/src/lib/components/Map/Maplibre.scss @@ -1,6 +1,6 @@ .maplibregl-ctrl-attrib-button:focus, .maplibregl-ctrl-group button:focus { - box-shadow: 0 0 2px 2px var(--primary); + box-shadow: 0 0 2px 2px var(--primary-3000); } @media screen { diff --git a/frontend/src/lib/components/NotFound/index.tsx b/frontend/src/lib/components/NotFound/index.tsx index 1c1a20c595925..9e18b27d6ecc9 100644 --- a/frontend/src/lib/components/NotFound/index.tsx +++ b/frontend/src/lib/components/NotFound/index.tsx @@ -4,7 +4,7 @@ import './NotFound.scss' import { useActions, useValues } from 'kea' import { supportLogic } from '../Support/supportLogic' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' -import { useNotebookNode } from 
'scenes/notebooks/Nodes/notebookNodeLogic' +import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' import { LemonButton } from '@posthog/lemon-ui' interface NotFoundProps { diff --git a/frontend/src/lib/components/PageHeader.tsx b/frontend/src/lib/components/PageHeader.tsx index 5aee5b5d0bc01..9b8a9e2611c7a 100644 --- a/frontend/src/lib/components/PageHeader.tsx +++ b/frontend/src/lib/components/PageHeader.tsx @@ -5,6 +5,8 @@ import { LemonDivider } from 'lib/lemon-ui/LemonDivider' import { createPortal } from 'react-dom' import { DraggableToNotebook, DraggableToNotebookProps } from 'scenes/notebooks/AddToNotebook/DraggableToNotebook' import { breadcrumbsLogic } from '~/layout/navigation/Breadcrumbs/breadcrumbsLogic' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { FEATURE_FLAGS } from 'lib/constants' interface PageHeaderProps { title: string | JSX.Element @@ -29,6 +31,9 @@ export function PageHeader({ }: PageHeaderProps): JSX.Element | null { const is3000 = useFeatureFlag('POSTHOG_3000') const { actionsContainer } = useValues(breadcrumbsLogic) + const { featureFlags } = useValues(featureFlagLogic) + + const has3000 = featureFlags[FEATURE_FLAGS.POSTHOG_3000] return ( <> @@ -52,7 +57,7 @@ export function PageHeader({ {is3000 && buttons && actionsContainer && createPortal(buttons, actionsContainer)} {caption &&
{caption}
} - {delimited && } + {delimited && } ) } diff --git a/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx b/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx index 125c31d54eb0d..0a0917a29280e 100644 --- a/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx +++ b/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx @@ -85,7 +85,7 @@ export const ProductIntroduction = ({ {action ? ( } + icon={} onClick={() => { updateHasSeenProductIntroFor(productKey, true) action && action() diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss b/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss index 2ec1fd268b87f..f530132d670a6 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss @@ -5,21 +5,31 @@ } .property-value-type { - display: flex; align-items: center; - width: fit-content; - height: 1.25rem; - padding: 0.125rem 0.25rem; - letter-spacing: 0.25px; + background: var(--mid); border-radius: var(--radius); border: 1px solid var(--border-light); - background: var(--mid); color: var(--muted-alt); + cursor: default; + display: flex; font-size: 0.625rem; font-weight: 500; + height: 1.25rem; + letter-spacing: 0.25px; + padding: 0.125rem 0.25rem; text-transform: uppercase; white-space: nowrap; - cursor: default; + width: fit-content; + + .posthog-3000 & { + background: none; + border-radius: calc(var(--radius) * 0.75); + border-style: solid; + border-width: 1px; + font-family: var(--font-mono); + font-size: 0.688rem; + padding: 0.075rem 0.25rem; + } &:not(:first-child) { margin-left: 0.25rem; @@ -27,10 +37,10 @@ } .properties-table-value { - min-width: 12rem; - max-width: fit-content; - display: flex; align-items: center; + display: flex; + max-width: fit-content; + min-width: 12rem; .value-link { > * { @@ -45,7 +55,20 @@ .editable { text-decoration: underline dotted; - 
text-decoration-color: var(--primary); + text-decoration-color: var(--primary-3000); cursor: pointer; + + .posthog-3000 & { + border: 1px solid transparent; + border-radius: calc(var(--radius) * 0.75); + margin-left: -0.25rem; + padding: 0.125rem 0.25rem; + text-decoration: none; + + &:hover { + background: var(--bg-light); + border: 1px solid var(--border-light); + } + } } } diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx new file mode 100644 index 0000000000000..4a577e8c10741 --- /dev/null +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx @@ -0,0 +1,25 @@ +import { Meta, StoryFn } from '@storybook/react' +import { PropertiesTable as PropertiesTableComponent } from '.' +import { PropertyDefinitionType } from '~/types' + +const meta: Meta = { + title: 'Components/Properties Table', + component: PropertiesTableComponent, +} +export default meta + +export const PropertiesTable: StoryFn = () => { + const properties = { + name: 'John Doe', + age: 30, + url: 'https://www.google.com', + is_good: true, + evil_level: null, + tags: ['best', 'cool', 'awesome'], + location: { + city: 'Prague', + country: 'Czechia', + }, + } + return +} diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx index 6729d55ee42b4..f96fb045c022f 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx @@ -15,6 +15,7 @@ import { NewProperty } from 'scenes/persons/NewProperty' import { LemonCheckbox, LemonInput, Link } from '@posthog/lemon-ui' import clsx from 'clsx' import { PropertyDefinitionType } from '~/types' +import { IconPencil } from '@posthog/icons' type HandledType = 'string' | 'number' | 'bigint' | 'boolean' | 'undefined' | 'null' type Type = HandledType | 'symbol' | 'object' 
| 'function' @@ -85,18 +86,19 @@ function ValueDisplay({ const valueComponent = ( canEdit && textBasedTypes.includes(valueType) && setEditing(true)} > {!isURL(value) ? ( - valueString + {valueString} ) : ( {valueString} )} + {canEdit && } ) @@ -283,13 +285,10 @@ export function PropertiesTable({ title: '', width: 0, render: function Copy(_, item: any): JSX.Element | false { - if (Array.isArray(item[1]) || item[1] instanceof Object || item[1] === null) { - return false - } return ( void diff --git a/frontend/src/lib/components/PropertyFilters/utils.test.ts b/frontend/src/lib/components/PropertyFilters/utils.test.ts index f8ecda127588b..56ff189f94e0e 100644 --- a/frontend/src/lib/components/PropertyFilters/utils.test.ts +++ b/frontend/src/lib/components/PropertyFilters/utils.test.ts @@ -3,7 +3,9 @@ import { CohortPropertyFilter, ElementPropertyFilter, EmptyPropertyFilter, + FilterLogicalOperator, PropertyFilterType, + PropertyGroupFilter, PropertyOperator, SessionPropertyFilter, } from '../../../types' @@ -11,6 +13,8 @@ import { isValidPropertyFilter, propertyFilterTypeToTaxonomicFilterType, breakdownFilterToTaxonomicFilterType, + convertPropertiesToPropertyGroup, + convertPropertyGroupToProperties, } from 'lib/components/PropertyFilters/utils' import { TaxonomicFilterGroupType } from '../TaxonomicFilter/types' import { BreakdownFilter } from '~/queries/schema' @@ -123,3 +127,67 @@ describe('breakdownFilterToTaxonomicFilterType()', () => { ) }) }) + +describe('convertPropertyGroupToProperties()', () => { + it('converts a single layer property group into an array of properties', () => { + const propertyGroup = { + type: FilterLogicalOperator.And, + values: [ + { + type: FilterLogicalOperator.And, + values: [ + { key: '$browser', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, + { key: '$current_url', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, + ] as AnyPropertyFilter[], + }, + { + type: FilterLogicalOperator.And, + values: 
[ + { key: '$lib', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, + ] as AnyPropertyFilter[], + }, + ], + } + expect(convertPropertyGroupToProperties(propertyGroup)).toEqual([ + { key: '$browser', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, + { key: '$current_url', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, + { key: '$lib', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, + ]) + }) + + it('converts a deeply nested property group into an array of properties', () => { + const propertyGroup: PropertyGroupFilter = { + type: FilterLogicalOperator.And, + values: [ + { + type: FilterLogicalOperator.And, + values: [{ type: FilterLogicalOperator.And, values: [{ key: '$lib' } as any] }], + }, + { type: FilterLogicalOperator.And, values: [{ key: '$browser' } as any] }, + ], + } + expect(convertPropertyGroupToProperties(propertyGroup)).toEqual([{ key: '$lib' }, { key: '$browser' }]) + }) +}) + +describe('convertPropertiesToPropertyGroup', () => { + it('converts properties to one AND operator property group', () => { + const properties: any[] = [{ key: '$lib' }, { key: '$browser' }, { key: '$current_url' }] + expect(convertPropertiesToPropertyGroup(properties)).toEqual({ + type: FilterLogicalOperator.And, + values: [ + { + type: FilterLogicalOperator.And, + values: [{ key: '$lib' }, { key: '$browser' }, { key: '$current_url' }], + }, + ], + }) + }) + + it('converts properties to one AND operator property group', () => { + expect(convertPropertiesToPropertyGroup(undefined)).toEqual({ + type: FilterLogicalOperator.And, + values: [], + }) + }) +}) diff --git a/frontend/src/lib/components/PropertyFilters/utils.ts b/frontend/src/lib/components/PropertyFilters/utils.ts index 48a181c2acdbb..6ce5fe2dd174f 100644 --- a/frontend/src/lib/components/PropertyFilters/utils.ts +++ b/frontend/src/lib/components/PropertyFilters/utils.ts @@ -2,16 +2,20 @@ import { AnyFilterLike, AnyPropertyFilter, 
CohortPropertyFilter, + CohortType, ElementPropertyFilter, + EmptyPropertyFilter, EventDefinition, EventPropertyFilter, FeaturePropertyFilter, FilterLogicalOperator, GroupPropertyFilter, HogQLPropertyFilter, + KeyMappingInterface, PersonPropertyFilter, PropertyDefinitionType, PropertyFilterType, + PropertyFilterValue, PropertyGroupFilter, PropertyGroupFilterValue, PropertyOperator, @@ -19,8 +23,87 @@ import { SessionPropertyFilter, } from '~/types' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import { flattenPropertyGroup, isPropertyGroup } from 'lib/utils' +import { allOperatorsMapping, isOperatorFlag } from 'lib/utils' import { BreakdownFilter } from '~/queries/schema' +import { extractExpressionComment } from '~/queries/nodes/DataTable/utils' + +export function isPropertyGroup( + properties: + | PropertyGroupFilter + | PropertyGroupFilterValue + | AnyPropertyFilter[] + | AnyPropertyFilter + | Record + | null + | undefined +): properties is PropertyGroupFilter { + return ( + (properties as PropertyGroupFilter)?.type !== undefined && + (properties as PropertyGroupFilter)?.values !== undefined + ) +} + +function flattenPropertyGroup( + flattenedProperties: AnyPropertyFilter[], + propertyGroup: PropertyGroupFilter | PropertyGroupFilterValue | AnyPropertyFilter +): AnyPropertyFilter[] { + const obj: AnyPropertyFilter = {} as EmptyPropertyFilter + Object.keys(propertyGroup).forEach(function (k) { + obj[k] = propertyGroup[k] + }) + if (isValidPropertyFilter(obj)) { + flattenedProperties.push(obj) + } + if (isPropertyGroup(propertyGroup)) { + return propertyGroup.values.reduce(flattenPropertyGroup, flattenedProperties) + } + return flattenedProperties +} + +export function convertPropertiesToPropertyGroup( + properties: PropertyGroupFilter | AnyPropertyFilter[] | undefined +): PropertyGroupFilter { + if (isPropertyGroup(properties)) { + return properties + } + if (properties && properties.length > 0) { + return { type: 
FilterLogicalOperator.And, values: [{ type: FilterLogicalOperator.And, values: properties }] } + } + return { type: FilterLogicalOperator.And, values: [] } +} + +/** Flatten a filter group into an array of filters. NB: Logical operators (AND/OR) are lost in the process. */ +export function convertPropertyGroupToProperties( + properties?: PropertyGroupFilter | AnyPropertyFilter[] +): AnyPropertyFilter[] | undefined { + if (isPropertyGroup(properties)) { + return flattenPropertyGroup([], properties).filter(isValidPropertyFilter) + } + if (properties) { + return properties.filter(isValidPropertyFilter) + } + return properties +} + +export function formatPropertyLabel( + item: Record, + cohortsById: Partial>, + keyMapping: KeyMappingInterface, + valueFormatter: (value: PropertyFilterValue | undefined) => string | string[] | null = (s) => [String(s)] +): string { + if (isHogQLPropertyFilter(item as AnyFilterLike)) { + return extractExpressionComment(item.key) + } + const { value, key, operator, type } = item + return type === 'cohort' + ? cohortsById[value]?.name || `ID ${value}` + : (keyMapping[type === 'element' ? 'element' : 'event'][key]?.label || key) + + (isOperatorFlag(operator) + ? ` ${allOperatorsMapping[operator]}` + : ` ${(allOperatorsMapping[operator || 'exact'] || '?').split(' ')[0]} ${ + value && value.length === 1 && value[0] === '' ? 
'(empty string)' : valueFormatter(value) || '' + } `) +} /** Make sure unverified user property filter input has at least a "type" */ export function sanitizePropertyFilter(propertyFilter: AnyPropertyFilter): AnyPropertyFilter { diff --git a/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss b/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss index b30e128738345..fa5bdc421a424 100644 --- a/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss +++ b/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss @@ -63,7 +63,7 @@ font-size: 12px; &.selected { - background-color: var(--primary); + background-color: var(--primary-3000); color: #fff; } } diff --git a/frontend/src/lib/components/PropertyGroupFilters/propertyGroupFilterLogic.ts b/frontend/src/lib/components/PropertyGroupFilters/propertyGroupFilterLogic.ts index 88d228ea88be8..a592cf444e94e 100644 --- a/frontend/src/lib/components/PropertyGroupFilters/propertyGroupFilterLogic.ts +++ b/frontend/src/lib/components/PropertyGroupFilters/propertyGroupFilterLogic.ts @@ -4,8 +4,9 @@ import { PropertyGroupFilter, FilterLogicalOperator, EmptyPropertyFilter } from import { PropertyGroupFilterLogicProps } from 'lib/components/PropertyFilters/types' import type { propertyGroupFilterLogicType } from './propertyGroupFilterLogicType' -import { convertPropertiesToPropertyGroup, objectsEqual } from 'lib/utils' +import { objectsEqual } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' +import { convertPropertiesToPropertyGroup } from '../PropertyFilters/utils' export const propertyGroupFilterLogic = kea([ path(['lib', 'components', 'PropertyGroupFilters', 'propertyGroupFilterLogic']), diff --git a/frontend/src/lib/components/RestrictedArea.tsx b/frontend/src/lib/components/RestrictedArea.tsx index c334a3a2235e6..a1ec4a419a06a 100644 --- a/frontend/src/lib/components/RestrictedArea.tsx +++ 
b/frontend/src/lib/components/RestrictedArea.tsx @@ -1,9 +1,9 @@ import { useValues } from 'kea' import { useMemo } from 'react' import { organizationLogic } from '../../scenes/organizationLogic' -import { OrganizationMembershipLevel } from '../constants' +import { EitherMembershipLevel, OrganizationMembershipLevel } from '../constants' import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { EitherMembershipLevel, membershipLevelToName } from '../utils/permissioning' +import { membershipLevelToName } from '../utils/permissioning' import { isAuthenticatedTeam, teamLogic } from '../../scenes/teamLogic' export interface RestrictedComponentProps { diff --git a/frontend/src/lib/components/SeriesGlyph.tsx b/frontend/src/lib/components/SeriesGlyph.tsx index 156ebcf5f367b..024e3bde41b70 100644 --- a/frontend/src/lib/components/SeriesGlyph.tsx +++ b/frontend/src/lib/components/SeriesGlyph.tsx @@ -1,5 +1,7 @@ +import { useValues } from 'kea' import { getSeriesColor } from 'lib/colors' -import { alphabet, hexToRGBA } from 'lib/utils' +import { alphabet, hexToRGBA, lightenDarkenColor, RGBToRGBA } from 'lib/utils' +import { themeLogic } from '~/layout/navigation-3000/themeLogic' interface SeriesGlyphProps { className?: string @@ -26,6 +28,7 @@ interface SeriesLetterProps { export function SeriesLetter({ className, hasBreakdown, seriesIndex, seriesColor }: SeriesLetterProps): JSX.Element { const color = seriesColor || getSeriesColor(seriesIndex) + const { isDarkModeOn } = useValues(themeLogic) return ( Log in with {SSO_PROVIDER_NAMES[provider]} diff --git a/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts b/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts index b1fc22ef02e39..fbfde31c7e69f 100644 --- a/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts +++ b/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts @@ -4,7 +4,7 @@ import { SubscriptionType } from '~/types' import api from 'lib/api' import { loaders } from 
'kea-loaders' -import { deleteWithUndo } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import type { subscriptionsLogicType } from './subscriptionsLogicType' import { getInsightId } from 'scenes/insights/utils' diff --git a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss index 4861992b41aff..97c2e739162ac 100644 --- a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss +++ b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss @@ -48,7 +48,7 @@ } &.taxonomy-icon-built-in { - color: var(--primary); + color: var(--primary-3000); } } } @@ -86,7 +86,7 @@ } &.expand-row { - color: var(--primary); + color: var(--primary-3000); } } } diff --git a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss index d4716286d5906..108cbcb552950 100644 --- a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss +++ b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss @@ -46,6 +46,19 @@ background: var(--side); border-color: var(--side); + .posthog-3000 & { + color: var(--default); + font-weight: 500; + + &:not(.taxonomic-pill-active) { + opacity: 0.7; + } + + &:hover { + opacity: 1; + } + } + &.taxonomic-count-zero { color: var(--muted); cursor: not-allowed; diff --git a/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss b/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss index d0ac9a6fe4220..80a1630463217 100644 --- a/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss +++ b/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss @@ -23,14 +23,14 @@ width: fit-content; padding: 0 0.25rem; border-radius: var(--radius); - background: var(--primary); + background: var(--primary-3000); color: var(--bg-light); line-height: 1.25rem; font-size: 0.75rem; font-weight: 500; &::selection { - background: 
var(--primary-light); // Default selection background is invisible on primary + background: var(--primary-3000-hover); // Default selection background is invisible on primary } .Spinner { @@ -77,7 +77,7 @@ left: 0; height: var(--timeline-seekbar-thickness); width: calc(100% - var(--timeline-seekbar-arrow-width)); - background: var(--primary); + background: var(--primary-3000); } .TimelineSeekbar__line-start, @@ -91,7 +91,7 @@ display: block; margin: calc(var(--timeline-seekbar-thickness) + 0.125rem) 0; height: var(--timeline-seekbar-arrow-height); - background: var(--primary); + background: var(--primary-3000); } } diff --git a/frontend/src/lib/components/UUIDShortener.tsx b/frontend/src/lib/components/UUIDShortener.tsx index c943725ba270b..133c0eedaf6f3 100644 --- a/frontend/src/lib/components/UUIDShortener.tsx +++ b/frontend/src/lib/components/UUIDShortener.tsx @@ -1,5 +1,5 @@ import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' export function truncateUuid(uuid: string): string { // Simple function to truncate a UUID. Useful for more simple displaying but should always be made clear it is truncated. 
diff --git a/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx b/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx index dcbd1ca4255d4..b10938af5e89b 100644 --- a/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx +++ b/frontend/src/lib/components/VersionChecker/VersionCheckerBanner.tsx @@ -2,11 +2,15 @@ import { useValues } from 'kea' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { versionCheckerLogic } from './versionCheckerLogic' -export function VersionCheckerBanner(): JSX.Element { +export function VersionCheckerBanner({ minVersionAccepted }: { minVersionAccepted?: string }): JSX.Element { const { versionWarning } = useValues(versionCheckerLogic) - // We don't want to show a message if the diff is too small (we might be still deploying the changes out) - if (!versionWarning || versionWarning.diff < 5) { + if ( + !versionWarning || + (minVersionAccepted && versionWarning.currentVersion + ? versionWarning.currentVersion.localeCompare(minVersionAccepted) >= 0 + : versionWarning.diff < 5) + ) { return <> } diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 63ef80121bccc..712c7c77fa957 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -1,4 +1,3 @@ -import { urls } from 'scenes/urls' import { AvailableFeature, ChartDisplayType, LicensePlan, Region, SSOProvider } from '../types' /** Display types which don't allow grouping by unit of time. Sync with backend NON_TIME_SERIES_DISPLAY_TYPES. */ @@ -39,6 +38,8 @@ export enum TeamMembershipLevel { Admin = 8, } +export type EitherMembershipLevel = OrganizationMembershipLevel | TeamMembershipLevel + /** See posthog/api/organization.py for details. 
*/ export enum PluginsAccessLevel { None = 0, @@ -136,6 +137,7 @@ export const FEATURE_FLAGS = { ROLE_BASED_ACCESS: 'role-based-access', // owner: #team-experiments, @liyiy QUERY_RUNNING_TIME: 'query_running_time', // owner: @mariusandra QUERY_TIMINGS: 'query-timings', // owner: @mariusandra + QUERY_ASYNC: 'query-async', // owner: @webjunkie POSTHOG_3000: 'posthog-3000', // owner: @Twixes POSTHOG_3000_NAV: 'posthog-3000-nav', // owner: @Twixes ENABLE_PROMPTS: 'enable-prompts', // owner: @lharries @@ -238,10 +240,6 @@ export const SSO_PROVIDER_NAMES: Record = { saml: 'Single sign-on (SAML)', } -// TODO: Remove UPGRADE_LINK, as the billing page is now universal -export const UPGRADE_LINK = (cloud?: boolean): { url: string; target?: '_blank' } => - cloud ? { url: urls.organizationBilling() } : { url: 'https://posthog.com/pricing', target: '_blank' } - export const DOMAIN_REGEX = /^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$/ export const SECURE_URL_REGEX = /^(?:http(s)?:\/\/)[\w.-]+(?:\.[\w.-]+)+[\w\-._~:/?#[\]@!$&'()*+,;=]+$/gi @@ -256,3 +254,9 @@ export const SESSION_RECORDINGS_PLAYLIST_FREE_COUNT = 5 export const AUTO_REFRESH_DASHBOARD_THRESHOLD_HOURS = 20 export const GENERATED_DASHBOARD_PREFIX = 'Generated Dashboard' + +export const ACTIVITY_PAGE_SIZE = 20 +export const EVENT_DEFINITIONS_PER_PAGE = 50 +export const PROPERTY_DEFINITIONS_PER_EVENT = 5 +export const EVENT_PROPERTY_DEFINITIONS_PER_PAGE = 50 +export const LOGS_PORTION_LIMIT = 50 diff --git a/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss b/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss index 7a5d1c1db4b74..b01a0c24d3f18 100644 --- a/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss +++ b/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss @@ -22,7 +22,7 @@ } .LemonActionableTooltip__icon { - color: var(--primary); + color: var(--primary-3000); display: flex; align-items: center; width: 1.5rem; 
diff --git a/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss b/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss index 186df996abe84..adc91cce935f8 100644 --- a/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss +++ b/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss @@ -1,5 +1,5 @@ .LemonBadge { - --lemon-badge-color: var(--primary); + --lemon-badge-color: var(--primary-3000); --lemon-badge-size: 1.5rem; --lemon-badge-font-size: 0.75rem; --lemon-badge-position-offset: 0.5rem; diff --git a/frontend/src/lib/lemon-ui/LemonBanner/LemonBanner.scss b/frontend/src/lib/lemon-ui/LemonBanner/LemonBanner.scss index e165aaa435d4f..9a948c4f24dd3 100644 --- a/frontend/src/lib/lemon-ui/LemonBanner/LemonBanner.scss +++ b/frontend/src/lib/lemon-ui/LemonBanner/LemonBanner.scss @@ -1,13 +1,14 @@ .LemonBanner { + align-items: center; border-radius: var(--radius); - padding: 0.5rem 0.75rem; + border: solid 1px var(--border-3000); color: var(--primary-alt); - font-weight: 500; display: flex; - align-items: center; - text-align: left; + font-weight: 500; gap: 0.5rem; min-height: 3rem; + padding: 0.5rem 0.75rem; + text-align: left; &.LemonBanner--info { background-color: var(--primary-alt-highlight); diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss index 5ce621e0bb385..5ed9970665fda 100644 --- a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss @@ -1,24 +1,33 @@ .LemonButton { - position: relative; - transition: background-color 200ms ease, color 200ms ease, border 200ms ease, opacity 200ms ease, - transform 100ms ease; - display: flex; - flex-direction: row; - flex-shrink: 0; align-items: center; - justify-content: flex-start; - padding: 0.25rem 0.75rem; - gap: 0.5rem; + appearance: none !important; // Important as this gets overridden by Ant styles... 
background: none; border-radius: var(--radius); border: none; + cursor: pointer; + display: flex; + flex-direction: row; + + .posthog-3000 & { + font-family: var(--font-title); + } + + flex-shrink: 0; font-size: 0.875rem; - text-align: left; - line-height: 1.5rem; font-weight: 500; - cursor: pointer; + gap: 0.5rem; + justify-content: flex-start; + line-height: 1.5rem; + padding: 0.25rem 0.75rem; + position: relative; + text-align: left; + transition: background-color 200ms ease, color 200ms ease, border 200ms ease, opacity 200ms ease, + transform 100ms ease; user-select: none; - appearance: none !important; // Important as this gets overridden by Ant styles... + + .font-normal { + font-family: var(--font-sans); + } > span { display: flex; @@ -52,6 +61,7 @@ padding-left: 0.5rem; padding-right: 0.5rem; + > span, .LemonButton__content { overflow: hidden; } @@ -136,12 +146,6 @@ @each $status in ('primary', 'danger', 'primary-alt', 'muted') { &.LemonButton--status-#{$status} { - color: var(--#{$status}-3000, var(--#{$status}, var(--primary))); - - .LemonButton__icon { - color: var(--#{$status}-3000, var(--#{$status})); - } - // Primary - blocked color style &.LemonButton--primary { color: #fff; diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButton3000.scss b/frontend/src/lib/lemon-ui/LemonButton/LemonButton3000.scss index d7a53bb238fbb..9e7f178d442bf 100644 --- a/frontend/src/lib/lemon-ui/LemonButton/LemonButton3000.scss +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButton3000.scss @@ -1,6 +1,5 @@ .posthog-3000 { - --transition: background-color 200ms ease, color 200ms ease, border 200ms ease, opacity 200ms ease, - transform 200ms ease; + --transition: opacity 200ms ease, transform 200ms ease; .LemonButton { border-width: 0; @@ -36,9 +35,7 @@ padding: 0.25rem 0.75rem; width: 100%; - .LemonButton__icon:first-child { - transition: var(--transition); - color: var(--default); + .LemonButton__icon { opacity: 0.5; } } diff --git 
a/frontend/src/lib/lemon-ui/LemonButton/LemonButtonLegacy.scss b/frontend/src/lib/lemon-ui/LemonButton/LemonButtonLegacy.scss index fc10519414fee..abe0e6cc3e802 100644 --- a/frontend/src/lib/lemon-ui/LemonButton/LemonButtonLegacy.scss +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButtonLegacy.scss @@ -124,6 +124,12 @@ body:not(.posthog-3000) { background: var(--#{$status}-highlight, var(--primary-highlight)); } + color: var(--#{$status}-3000, var(--#{$status}, var(--primary))); + + .LemonButton__icon { + color: var(--#{$status}-3000, var(--#{$status})); + } + &:not([aria-disabled='true']):active { color: var(--#{$status}-dark, var(--primary-dark)); @@ -134,6 +140,8 @@ body:not(.posthog-3000) { // Primary - blocked color style &.LemonButton--primary { + color: #fff; + .LemonButton__icon { color: #fff; } diff --git a/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss b/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss index 8ffed36ab0d40..135c3c2a43e24 100644 --- a/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss +++ b/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss @@ -15,6 +15,10 @@ --tick-length: 12.73; // Approximation of tick length, which is (3 + 6) * sqrt(2) --box-color: var(--primary); + .posthog-3000 & { + --box-color: var(--primary-3000); + } + display: flex; align-items: center; cursor: pointer; @@ -59,7 +63,7 @@ &.LemonCheckbox:not(.LemonCheckbox--disabled):hover, &.LemonCheckbox:not(.LemonCheckbox--disabled):active { label { - --box-color: var(--primary-light); + --box-color: var(--primary-3000-hover); .LemonCheckbox__box { border-color: var(--box-color); @@ -68,7 +72,7 @@ } &.LemonCheckbox:not(.LemonCheckbox--disabled):active label { - --box-color: var(--primary-dark); + --box-color: var(--primary-3000-active); } &.LemonCheckbox--checked { diff --git a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss index ecf859a354eec..6dd509e146f97 100644 
--- a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss +++ b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss @@ -30,11 +30,15 @@ } .LemonCollapsePanel__body { - transition: height 200ms ease; - height: 0; - overflow: hidden; border-top-width: 1px; box-sizing: content-box; + height: 0; + overflow: hidden; + transition: height 200ms ease; + + .posthog-3000 & { + background: var(--bg-light); + } } .LemonCollapsePanel__content { diff --git a/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss b/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss index 1a266d6ba5b08..731f84ed06b90 100644 --- a/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss +++ b/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss @@ -11,6 +11,6 @@ left: calc(-1 * var(--file-drop-target-padding)); height: calc(100% + var(--file-drop-target-padding) * 2); width: calc(100% + var(--file-drop-target-padding) * 2); - border: 3px dashed var(--primary); + border: 3px dashed var(--primary-3000); border-radius: var(--radius); } diff --git a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss index 03943ad9f9bf1..a5f81f7376b58 100644 --- a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss +++ b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss @@ -1,27 +1,34 @@ .LemonInput { - transition: background-color 200ms ease, color 200ms ease, border-color 200ms ease, opacity 200ms ease; - display: flex; - min-height: 2.5rem; - padding: 0.25rem 0.5rem; + align-items: center; background: none; border-radius: var(--radius); + border: 1px solid var(--border); + color: var(--default); + cursor: text; + display: flex; font-size: 0.875rem; - text-align: left; + gap: 0.25rem; + justify-content: center; line-height: 1.25rem; - cursor: text; - color: var(--default); - border: 1px solid var(--border); + min-height: 2.5rem; + padding: 0.25rem 0.5rem; + text-align: left; background-color: var(--bg-light); - 
align-items: center; - justify-content: center; - gap: 0.25rem; &:hover:not([aria-disabled='true']) { - border-color: var(--primary-light); + border-color: var(--primary-3000-hover); + + .posthog-3000 & { + border-color: var(--border-bold); + } } &.LemonInput--focused:not([aria-disabled='true']) { - border-color: var(--primary); + border-color: var(--primary-3000); + + .posthog-3000 & { + border-color: var(--border-bold); + } } &.LemonInput--transparent-background { @@ -67,7 +74,7 @@ &.LemonInput--has-content { > .LemonIcon { - color: var(--primary); + color: var(--primary-3000); } } diff --git a/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss b/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss index f847cc3127765..cdec3b871b333 100644 --- a/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss +++ b/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss @@ -20,7 +20,7 @@ font-weight: 600; .LemonRow__icon { - color: var(--primary); + color: var(--primary-3000); } } diff --git a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.scss b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.scss index 1e9684ab3cc16..fdcd0a1577f6c 100644 --- a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.scss +++ b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.scss @@ -1,8 +1,15 @@ -.LemonSelect--button--clearable { - padding-left: 0.5rem !important; - margin-left: auto; -} - .LemonSelect--clearable { padding-right: 0 !important; + + > span { + padding-right: 0 !important; + } + + .LemonButton__content { + gap: 0.5rem; + + .LemonSelect--button--clearable { + margin-left: auto; + } + } } diff --git a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss index a382503f15b58..59b8e3bd4d44a 100644 --- a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss +++ b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss @@ -23,7 +23,7 @@ &:not(.ant-select-disabled):active { 
.ant-select-selector { background: var(--bg-light); - border-color: var(--primary); + border-color: var(--primary-3000); box-shadow: none; } } @@ -65,10 +65,14 @@ .LemonSelectMultipleDropdown { background: var(--bg-light); - padding: 0.5rem; border-radius: var(--radius); border: 1px solid var(--primary); margin: -4px 0; // Counteract antd wrapper + padding: 0.5rem; + + .posthog-3000 & { + border: 1px solid var(--primary-3000); + } .ant-select-item { padding: 0; diff --git a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss index 6ce695c71a252..c785fbc3c53b6 100644 --- a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss +++ b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss @@ -1,4 +1,7 @@ .LemonSwitch { + --lemon-switch-height: 1.25rem; + --lemon-switch-width: 2.25rem; + width: fit-content; font-weight: 500; line-height: 1.5rem; @@ -46,6 +49,11 @@ cursor: not-allowed; // A label with for=* also toggles the switch, so it shouldn't have the text select cursor } } + + .posthog-3000 & { + --lemon-switch-height: 1.125rem; + --lemon-switch-width: calc(11 / 6 * var(--lemon-switch-height)); // Same proportion as in IconToggle + } } .LemonSwitch__button { @@ -53,8 +61,8 @@ display: inline-block; flex-shrink: 0; padding: 0; - width: 2.25rem; - height: 1.25rem; + width: var(--lemon-switch-width); + height: var(--lemon-switch-height); background: none; border: none; cursor: pointer; @@ -75,8 +83,21 @@ background-color: var(--border); transition: background-color 100ms ease; + .posthog-3000 & { + border-radius: var(--lemon-switch-height); + height: 100%; + width: 100%; + top: 0; + pointer-events: none; + background-color: var(--border-bold); + } + .LemonSwitch--checked & { background-color: var(--primary-highlight); + + .posthog-3000 & { + background-color: var(--primary-3000); + } } } @@ -89,23 +110,55 @@ border-radius: 0.625rem; background-color: #fff; border: 2px solid var(--border); - transition: 
background-color 100ms ease, transform 100ms ease, border-color 100ms ease; + transition: background-color 100ms ease, transform 100ms ease, width 100ms ease, border-color 100ms ease; cursor: inherit; display: flex; align-items: center; justify-content: center; + .posthog-3000 & { + --lemon-switch-handle-ratio: calc(3 / 4); // Same proportion as in IconToggle + --lemon-switch-handle-gutter: calc(var(--lemon-switch-height) * calc(1 - var(--lemon-switch-handle-ratio)) / 2); + --lemon-switch-handle-width: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio)); + --lemon-switch-active-translate: translateX( + calc(var(--lemon-switch-width) - var(--lemon-switch-handle-width) - var(--lemon-switch-handle-gutter) * 2) + ); + + top: var(--lemon-switch-handle-gutter); + left: var(--lemon-switch-handle-gutter); + width: var(--lemon-switch-handle-width); + height: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio)); + border: none; + pointer-events: none; + background-color: #fff; + } + .LemonSwitch--checked & { - transform: translateX(1rem); background-color: var(--primary); border-color: var(--primary); + transform: translateX(1rem); + + .posthog-3000 & { + transform: var(--lemon-switch-active-translate); + background-color: #fff; + } } .LemonSwitch--active & { transform: scale(1.1); + + .posthog-3000 & { + --lemon-switch-handle-width: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio) * 1.2); + + transform: none; + } } .LemonSwitch--active.LemonSwitch--checked & { transform: translateX(1rem) scale(1.1); + + .posthog-3000 & { + transform: var(--lemon-switch-active-translate); + } } } diff --git a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.tsx b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.tsx index 945a59523535b..8a83e046da54a 100644 --- a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.tsx +++ b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.tsx @@ -79,7 +79,7 @@ export function LemonSwitch({ buttonComponent = ( 
{/* wrap it in a div so that the tooltip works even when disabled */} -
{buttonComponent}
+
{buttonComponent}
) } diff --git a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss index 2a911f8ef6bb0..6e37c6cd5eecc 100644 --- a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss +++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss @@ -1,13 +1,20 @@ .LemonTable { position: relative; width: 100%; - background: var(--bg-light); + background: var(--bg-table); border-radius: var(--radius); border: 1px solid var(--border); overflow: hidden; flex: 1; --row-base-height: 3rem; + + .posthog-3000 & { + --row-base-height: auto; + + font-size: 13px; + } + --row-horizontal-padding: 1rem; &.LemonTable--with-ribbon { @@ -90,6 +97,16 @@ max-width: 30rem; font-size: 0.75rem; } + + a.Link { + .posthog-3000 & { + color: var(--default); + + &:not(:disabled):hover { + color: var(--primary-3000-hover); + } + } + } } .LemonTable__content > table { @@ -105,10 +122,25 @@ letter-spacing: 0.03125rem; text-transform: uppercase; + .posthog-3000 & { + background: none; + } + > tr { > th { font-weight: 700; text-align: left; + + .posthog-3000 & { + padding-top: 0.5rem; + padding-bottom: 0.5rem; + } + + .LemonButton { + .posthog-3000 & { + margin: -0.5rem 0; + } + } } &.LemonTable__row--grouping { @@ -136,8 +168,21 @@ } > td { + color: var(--text-secondary); padding-top: 0.5rem; padding-bottom: 0.5rem; + + .posthog-3000 & { + padding-top: 0.3rem; + padding-bottom: 0.3rem; + } + + .LemonButton { + .posthog-3000 & { + margin-top: -0.2rem; + margin-bottom: -0.2rem; + } + } } } } @@ -212,8 +257,22 @@ .LemonTable__header { cursor: default; + .posthog-3000 & { + opacity: 0.4; + } + &.LemonTable__header--actionable { cursor: pointer; + + .posthog-3000 & { + &:hover { + opacity: 0.7; + } + + &:active { + opacity: 0.9; + } + } } } @@ -222,6 +281,12 @@ align-items: center; justify-content: space-between; line-height: 1.5; + + div { + .posthog-3000 & { + white-space: nowrap; + } + } } .LemonTable__footer { diff --git 
a/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss b/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss index 314f0e2806528..4e30b5ddec332 100644 --- a/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss +++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss @@ -1,15 +1,15 @@ .LemonTableLoader { - transition: height 200ms ease, top 200ms ease; - z-index: 10; - position: absolute; - left: 0; - padding: 0; - bottom: -1px; - width: 100%; - height: 0; background: var(--primary-bg-active); border: none !important; + bottom: -1px; + height: 0; + left: 0; overflow: hidden; + padding: 0.05rem !important; + position: absolute; + transition: height 200ms ease, top 200ms ease; + width: 100%; + z-index: 10; &::after { content: ''; @@ -20,11 +20,16 @@ height: 100%; animation: LemonTableLoader__swooping 1.5s linear infinite; background: var(--primary); + + .posthog-3000 & { + animation: loading-bar 1.5s linear infinite; + background: var(--primary-3000); + } } &.LemonTableLoader--enter-active, &.LemonTableLoader--enter-done { - height: 0.25rem; + height: 0.125rem; } } diff --git a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss index 4ff742148785c..967a0add69417 100644 --- a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss +++ b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss @@ -62,7 +62,7 @@ } &:active { - color: var(--primary-dark); + color: var(--primary-3000-active); } &.LemonTabs__tab--active { @@ -79,8 +79,9 @@ } .LemonTabs__tab-content { - display: flex; align-items: center; - padding: 0.75rem 0; cursor: pointer; + display: flex; + padding: 0.75rem 0; + white-space: nowrap; } diff --git a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss index 2e664294f0b4b..348d596a1cec3 100644 --- a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss +++ b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss @@ -1,48 +1,99 @@ .LemonTag { - 
font-size: 0.75rem; - font-weight: var(--font-medium); + align-items: center; background: var(--border); - padding: 0.125rem 0.25rem; border-radius: var(--radius); - display: inline-flex; - align-items: center; color: var(--default); + display: inline-flex; + font-size: 0.75rem; + font-weight: var(--font-medium); line-height: 1rem; + padding: 0.125rem 0.25rem; white-space: nowrap; + .posthog-3000 & { + background: none; + border-radius: calc(var(--radius) * 0.75); + border-style: solid; + border-width: 1px; + font-size: 0.688rem; + padding: 0.075rem 0.25rem; + } + &.primary { - background-color: var(--primary); + background-color: var(--primary-3000); color: #fff; + + .posthog-3000 & { + background: none; + border-color: var(--primary-3000); + color: var(--primary-3000); + } } &.highlight { background-color: var(--mark); color: var(--bg-charcoal); + + .posthog-3000 & { + background: none; + border-color: var(--mark); + color: var(--mark); + } } &.warning { background-color: var(--warning); color: var(--bg-charcoal); + + .posthog-3000 & { + background: none; + border-color: var(--warning); + color: var(--warning); + } } &.danger { background-color: var(--danger); color: #fff; + + .posthog-3000 & { + background: none; + border-color: var(--danger); + color: var(--danger); + } } &.success { background-color: var(--success); color: #fff; + + .posthog-3000 & { + background: none; + border-color: var(--success); + color: var(--success); + } } &.completion { background-color: var(--purple-light); color: var(--bg-charcoal); + + .posthog-3000 & { + background: none; + border-color: var(--purple-light); + color: var(--purple-light); + } } &.caution { background-color: var(--danger-lighter); color: var(--bg-charcoal); + + .posthog-3000 & { + background: none; + border-color: var(--danger-lighter); + color: var(--danger-lighter); + } } &.none { diff --git a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss 
index 389975e57915a..ccc45dc5f36a2 100644 --- a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss +++ b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss @@ -15,7 +15,7 @@ display: block; &:not(:disabled):hover { - border: 1px solid var(--primary-light); + border: 1px solid var(--primary-3000-hover); } width: 100%; @@ -27,7 +27,7 @@ } &:focus:not(:disabled) { - border: 1px solid var(--primary); + border: 1px solid var(--primary-3000); } .Field--error & { diff --git a/frontend/src/lib/lemon-ui/Link/Link.scss b/frontend/src/lib/lemon-ui/Link/Link.scss index 7b4702499c601..2500c9b62debe 100644 --- a/frontend/src/lib/lemon-ui/Link/Link.scss +++ b/frontend/src/lib/lemon-ui/Link/Link.scss @@ -1,20 +1,20 @@ .Link { - transition: color 200ms ease, opacity 200ms ease; background: none; - color: var(--link); border: none; + color: var(--link); + cursor: pointer; + line-height: inherit; outline: none; padding: 0; - line-height: inherit; - cursor: pointer; + transition: none; &:not(:disabled) { &:hover { - color: var(--primary-light); + color: var(--primary-3000-hover); } &:active { - color: var(--primary-dark); + color: var(--primary-3000-active); } } diff --git a/frontend/src/lib/lemon-ui/Popover/Popover.scss b/frontend/src/lib/lemon-ui/Popover/Popover.scss index 55c5f5986f204..5dbb894d99043 100644 --- a/frontend/src/lib/lemon-ui/Popover/Popover.scss +++ b/frontend/src/lib/lemon-ui/Popover/Popover.scss @@ -38,7 +38,7 @@ opacity: 0; .Popover--actionable & { - border-color: var(--primary); + border-color: var(--primary-3000); } // We set the offset below instead of using floating-ui's offset(), because we need there to be no gap between @@ -136,7 +136,7 @@ } .Popover--actionable & { - border-color: var(--primary); + border-color: var(--primary-3000); } } diff --git a/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss b/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss index 9e5a327867146..d3f5896baebca 100644 --- 
a/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss +++ b/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss @@ -93,7 +93,7 @@ height: 1.5rem; width: 1.5rem; border-radius: 50%; - background: var(--primary); + background: var(--primary-3000); color: #fff; font-size: 0.625rem; font-weight: 600; diff --git a/frontend/src/lib/taxonomy.tsx b/frontend/src/lib/taxonomy.tsx index b66cbb3358bcb..a8f5774ab5b24 100644 --- a/frontend/src/lib/taxonomy.tsx +++ b/frontend/src/lib/taxonomy.tsx @@ -1,11 +1,6 @@ -import { KeyMapping, PropertyFilterValue } from '~/types' +import { KeyMapping, KeyMappingInterface, PropertyFilterValue } from '~/types' import { Link } from './lemon-ui/Link' -export interface KeyMappingInterface { - event: Record - element: Record -} - // If adding event properties with labels, check whether they should be added to // PROPERTY_NAME_ALIASES in posthog/api/property_definition.py // see code to output JSON below this diff --git a/frontend/src/lib/utils.test.ts b/frontend/src/lib/utils.test.ts index 49764e4fb3201..b9a49d899dc69 100644 --- a/frontend/src/lib/utils.test.ts +++ b/frontend/src/lib/utils.test.ts @@ -9,8 +9,6 @@ import { chooseOperatorMap, colonDelimitedDuration, compactNumber, - convertPropertiesToPropertyGroup, - convertPropertyGroupToProperties, dateFilterToText, dateMapping, dateStringToDayJs, @@ -20,7 +18,6 @@ import { ensureStringIsNotBlank, eventToDescription, floorMsToClosestSecond, - formatLabel, genericOperatorMap, getFormattedLastWeekDate, hexToRGBA, @@ -44,18 +41,7 @@ import { shortTimeZone, humanFriendlyLargeNumber, } from './utils' -import { - ActionFilter, - AnyPropertyFilter, - ElementType, - EventType, - FilterLogicalOperator, - PropertyFilterType, - PropertyGroupFilter, - PropertyOperator, - PropertyType, - TimeUnitType, -} from '~/types' +import { ElementType, EventType, PropertyType, TimeUnitType } from '~/types' import { dayjs } from 'lib/dayjs' describe('toParams', () => { @@ -111,46 +97,6 @@ 
describe('identifierToHuman()', () => { }) }) -describe('formatLabel()', () => { - const action: ActionFilter = { - id: 123, - name: 'Test Action', - properties: [], - type: 'actions', - } - - it('formats the label', () => { - expect(formatLabel('some_event', action)).toEqual('some_event') - }) - - it('DAU queries', () => { - expect(formatLabel('some_event', { ...action, math: 'dau' })).toEqual('some_event (Unique users)') - }) - - it('summing by property', () => { - expect(formatLabel('some_event', { ...action, math: 'sum', math_property: 'event_property' })).toEqual( - 'some_event (sum of event_property)' - ) - }) - - it('action with properties', () => { - expect( - formatLabel('some_event', { - ...action, - properties: [ - { - value: 'hello', - key: 'greeting', - operator: PropertyOperator.Exact, - type: PropertyFilterType.Person, - }, - { operator: PropertyOperator.GreaterThan, value: 5, key: '', type: PropertyFilterType.Person }, - ], - }) - ).toEqual('some_event (greeting = hello, > 5)') - }) -}) - describe('midEllipsis()', () => { it('returns same string if short', () => { expect(midEllipsis('12', 10)).toEqual('12') @@ -754,70 +700,6 @@ describe('{floor|ceil}MsToClosestSecond()', () => { }) }) -describe('convertPropertyGroupToProperties()', () => { - it('converts a single layer property group into an array of properties', () => { - const propertyGroup = { - type: FilterLogicalOperator.And, - values: [ - { - type: FilterLogicalOperator.And, - values: [ - { key: '$browser', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, - { key: '$current_url', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, - ] as AnyPropertyFilter[], - }, - { - type: FilterLogicalOperator.And, - values: [ - { key: '$lib', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, - ] as AnyPropertyFilter[], - }, - ], - } - expect(convertPropertyGroupToProperties(propertyGroup)).toEqual([ - { key: '$browser', type: PropertyFilterType.Event, 
operator: PropertyOperator.IsSet }, - { key: '$current_url', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, - { key: '$lib', type: PropertyFilterType.Event, operator: PropertyOperator.IsSet }, - ]) - }) - - it('converts a deeply nested property group into an array of properties', () => { - const propertyGroup: PropertyGroupFilter = { - type: FilterLogicalOperator.And, - values: [ - { - type: FilterLogicalOperator.And, - values: [{ type: FilterLogicalOperator.And, values: [{ key: '$lib' } as any] }], - }, - { type: FilterLogicalOperator.And, values: [{ key: '$browser' } as any] }, - ], - } - expect(convertPropertyGroupToProperties(propertyGroup)).toEqual([{ key: '$lib' }, { key: '$browser' }]) - }) -}) - -describe('convertPropertiesToPropertyGroup', () => { - it('converts properties to one AND operator property group', () => { - const properties: any[] = [{ key: '$lib' }, { key: '$browser' }, { key: '$current_url' }] - expect(convertPropertiesToPropertyGroup(properties)).toEqual({ - type: FilterLogicalOperator.And, - values: [ - { - type: FilterLogicalOperator.And, - values: [{ key: '$lib' }, { key: '$browser' }, { key: '$current_url' }], - }, - ], - }) - }) - - it('converts properties to one AND operator property group', () => { - expect(convertPropertiesToPropertyGroup(undefined)).toEqual({ - type: FilterLogicalOperator.And, - values: [], - }) - }) -}) - describe('calculateDays', () => { it('1 day to 1 day', () => { expect(calculateDays(1, TimeUnitType.Day)).toEqual(1) diff --git a/frontend/src/lib/utils.tsx b/frontend/src/lib/utils.tsx index 4daea66d2c7dc..dd91147876a95 100644 --- a/frontend/src/lib/utils.tsx +++ b/frontend/src/lib/utils.tsx @@ -1,54 +1,27 @@ import { CSSProperties } from 'react' -import api from './api' import { - ActionFilter, ActionType, ActorType, - AnyCohortCriteriaType, - AnyFilterLike, - AnyFilterType, - AnyPropertyFilter, - BehavioralCohortType, - BehavioralEventType, - ChartDisplayType, - CohortCriteriaGroupFilter, - 
CohortType, DateMappingOption, - EmptyPropertyFilter, EventType, - FilterLogicalOperator, - FunnelVizType, GroupActorType, - InsightType, - IntervalType, - PropertyFilterValue, - PropertyGroupFilter, - PropertyGroupFilterValue, PropertyOperator, PropertyType, TimeUnitType, - TrendsFilterType, } from '~/types' import * as Sentry from '@sentry/react' import equal from 'fast-deep-equal' import { tagColors } from 'lib/colors' -import { NON_TIME_SERIES_DISPLAY_TYPES, WEBHOOK_SERVICES } from 'lib/constants' -import { KeyMappingInterface } from 'lib/taxonomy' +import { WEBHOOK_SERVICES } from 'lib/constants' import { AlignType } from 'rc-trigger/lib/interface' import { dayjs } from 'lib/dayjs' import { getAppContext } from './utils/getAppContext' -import { - isHogQLPropertyFilter, - isPropertyFilterWithOperator, - isValidPropertyFilter, -} from './components/PropertyFilters/utils' -import { IconCopy } from 'lib/lemon-ui/icons' -import { lemonToast } from 'lib/lemon-ui/lemonToast' -import { BehavioralFilterKey } from 'scenes/cohorts/CohortFilters/types' -import { extractExpressionComment } from '~/queries/nodes/DataTable/utils' -import { urls } from 'scenes/urls' -import { isFunnelsFilter } from 'scenes/insights/sharedUtils' -import { CUSTOM_OPTION_KEY } from './components/DateFilter/dateFilterLogic' +import { CUSTOM_OPTION_KEY } from './components/DateFilter/types' + +/** + * WARNING: Be very careful importing things here. This file is heavily used and can trigger a lot of cyclic imports + * Preferably create a dedicated file in utils/.. 
+ */ export const ANTD_TOOLTIP_PLACEMENTS: Record = { // `@yiminghe/dom-align` objects @@ -182,38 +155,6 @@ export function percentage( }) } -export async function deleteWithUndo>({ - undo = false, - ...props -}: { - undo?: boolean - endpoint: string - object: T - idField?: keyof T - callback?: (undo: boolean, object: T) => void -}): Promise { - await api.update(`api/${props.endpoint}/${props.object[props.idField || 'id']}`, { - ...props.object, - deleted: !undo, - }) - props.callback?.(undo, props.object) - lemonToast[undo ? 'success' : 'info']( - <> - {props.object.name || {props.object.derived_name || 'Unnamed'}} has been{' '} - {undo ? 'restored' : 'deleted'} - , - { - toastId: `delete-item-${props.object.id}-${undo}`, - button: undo - ? undefined - : { - label: 'Undo', - action: () => deleteWithUndo({ undo: true, ...props }), - }, - } - ) -} - export const selectStyle: Record) => Partial> = { control: (base) => ({ ...base, @@ -365,50 +306,6 @@ export function isOperatorDate(operator: PropertyOperator): boolean { ) } -export function formatPropertyLabel( - item: Record, - cohortsById: Partial>, - keyMapping: KeyMappingInterface, - valueFormatter: (value: PropertyFilterValue | undefined) => string | string[] | null = (s) => [String(s)] -): string { - if (isHogQLPropertyFilter(item as AnyFilterLike)) { - return extractExpressionComment(item.key) - } - const { value, key, operator, type } = item - return type === 'cohort' - ? cohortsById[value]?.name || `ID ${value}` - : (keyMapping[type === 'element' ? 'element' : 'event'][key]?.label || key) + - (isOperatorFlag(operator) - ? ` ${allOperatorsMapping[operator]}` - : ` ${(allOperatorsMapping[operator || 'exact'] || '?').split(' ')[0]} ${ - value && value.length === 1 && value[0] === '' ? 
'(empty string)' : valueFormatter(value) || '' - } `) -} - -/** Format a label that gets returned from the /insights api */ -export function formatLabel(label: string, action: ActionFilter): string { - if (action.math === 'dau') { - label += ` (Unique users) ` - } else if (action.math === 'hogql') { - label += ` (${action.math_hogql})` - } else if (['sum', 'avg', 'min', 'max', 'median', 'p90', 'p95', 'p99'].includes(action.math || '')) { - label += ` (${action.math} of ${action.math_property}) ` - } - if (action.properties?.length) { - label += ` (${action.properties - .map( - (property) => - `${property.key ? `${property.key} ` : ''}${ - allOperatorsMapping[ - (isPropertyFilterWithOperator(property) && property.operator) || 'exact' - ].split(' ')[0] - } ${property.value}` - ) - .join(', ')})` - } - return label.trim() -} - /** Compare objects deeply. */ export function objectsEqual(obj1: any, obj2: any): boolean { return equal(obj1, obj2) @@ -1068,38 +965,6 @@ export function dateStringToDayJs(date: string | null): dayjs.Dayjs | null { return response } -export async function copyToClipboard(value: string, description: string = 'text'): Promise { - if (!navigator.clipboard) { - lemonToast.warning('Oops! 
Clipboard capabilities are only available over HTTPS or on localhost') - return false - } - - try { - await navigator.clipboard.writeText(value) - lemonToast.info(`Copied ${description} to clipboard`, { - icon: , - }) - return true - } catch (e) { - // If the Clipboard API fails, fallback to textarea method - try { - const textArea = document.createElement('textarea') - textArea.value = value - document.body.appendChild(textArea) - textArea.select() - document.execCommand('copy') - document.body.removeChild(textArea) - lemonToast.info(`Copied ${description} to clipboard`, { - icon: , - }) - return true - } catch (err) { - lemonToast.error(`Could not copy ${description} to clipboard: ${err}`) - return false - } - } -} - export function clamp(value: number, min: number, max: number): number { return value > max ? max : value < min ? min : value } @@ -1258,46 +1123,6 @@ export function midEllipsis(input: string, maxLength: number): string { return `${input.slice(0, middle - excessLeft)}…${input.slice(middle + excessRight)}` } -export const disableHourFor: Record = { - dStart: false, - '-1d': false, - '-7d': false, - '-14d': false, - '-30d': false, - '-90d': true, - mStart: false, - '-1mStart': false, - yStart: true, - all: true, - other: false, -} - -export function autocorrectInterval(filters: Partial): IntervalType | undefined { - if ('display' in filters && filters.display && NON_TIME_SERIES_DISPLAY_TYPES.includes(filters.display)) { - // Non-time-series insights should not have an interval - return undefined - } - if (isFunnelsFilter(filters) && filters.funnel_viz_type !== FunnelVizType.Trends) { - // Only trend funnels support intervals - return undefined - } - if (!filters.interval) { - return 'day' - } - - // @ts-expect-error - Old legacy interval support - const minute_disabled = filters.interval === 'minute' - const hour_disabled = disableHourFor[filters.date_from || 'other'] && filters.interval === 'hour' - - if (minute_disabled) { - return 'hour' - } else 
if (hour_disabled) { - return 'day' - } else { - return filters.interval - } -} - export function pluralize(count: number, singular: string, plural?: string, includeNumber: boolean = true): string { if (!plural) { plural = singular + 's' @@ -1426,6 +1251,11 @@ export function hexToRGBA(hex: string, alpha = 1): string { return `rgba(${[r, g, b, a].join(',')})` } +export function RGBToRGBA(rgb: string, a: number): string { + const [r, g, b] = rgb.slice(4, rgb.length - 1).split(',') + return `rgba(${[r, g, b, a].join(',')})` +} + export function lightenDarkenColor(hex: string, pct: number): string { /** * Returns a lightened or darkened color, similar to SCSS darken() @@ -1544,64 +1374,6 @@ export function getEventNamesForAction(actionId: string | number, allActions: Ac .flatMap((a) => a.steps?.filter((step) => step.event).map((step) => String(step.event)) as string[]) } -export function isPropertyGroup( - properties: - | PropertyGroupFilter - | PropertyGroupFilterValue - | AnyPropertyFilter[] - | AnyPropertyFilter - | Record - | null - | undefined -): properties is PropertyGroupFilter { - return ( - (properties as PropertyGroupFilter)?.type !== undefined && - (properties as PropertyGroupFilter)?.values !== undefined - ) -} - -export function flattenPropertyGroup( - flattenedProperties: AnyPropertyFilter[], - propertyGroup: PropertyGroupFilter | PropertyGroupFilterValue | AnyPropertyFilter -): AnyPropertyFilter[] { - const obj: AnyPropertyFilter = {} as EmptyPropertyFilter - Object.keys(propertyGroup).forEach(function (k) { - obj[k] = propertyGroup[k] - }) - if (isValidPropertyFilter(obj)) { - flattenedProperties.push(obj) - } - if (isPropertyGroup(propertyGroup)) { - return propertyGroup.values.reduce(flattenPropertyGroup, flattenedProperties) - } - return flattenedProperties -} - -export function convertPropertiesToPropertyGroup( - properties: PropertyGroupFilter | AnyPropertyFilter[] | undefined -): PropertyGroupFilter { - if (isPropertyGroup(properties)) { - 
return properties - } - if (properties && properties.length > 0) { - return { type: FilterLogicalOperator.And, values: [{ type: FilterLogicalOperator.And, values: properties }] } - } - return { type: FilterLogicalOperator.And, values: [] } -} - -/** Flatten a filter group into an array of filters. NB: Logical operators (AND/OR) are lost in the process. */ -export function convertPropertyGroupToProperties( - properties?: PropertyGroupFilter | AnyPropertyFilter[] -): AnyPropertyFilter[] | undefined { - if (isPropertyGroup(properties)) { - return flattenPropertyGroup([], properties).filter(isValidPropertyFilter) - } - if (properties) { - return properties.filter(isValidPropertyFilter) - } - return properties -} - export const isUserLoggedIn = (): boolean => !getAppContext()?.anonymous /** Sorting function for Array.prototype.sort that works for numbers and strings automatically. */ @@ -1711,41 +1483,6 @@ export function range(startOrEnd: number, end?: number): number[] { return Array.from({ length }, (_, i) => i + start) } -export function processCohort(cohort: CohortType): CohortType { - return { - ...cohort, - ...{ - /* Populate value_property with value and overwrite value with corresponding behavioral filter type */ - filters: { - properties: { - ...cohort.filters.properties, - values: (cohort.filters.properties?.values?.map((group) => - 'values' in group - ? { - ...group, - values: (group.values as AnyCohortCriteriaType[]).map((c) => - c.type && - [BehavioralFilterKey.Cohort, BehavioralFilterKey.Person].includes(c.type) && - !('value_property' in c) - ? { - ...c, - value_property: c.value, - value: - c.type === BehavioralFilterKey.Cohort - ? BehavioralCohortType.InCohort - : BehavioralEventType.HaveProperty, - } - : c - ), - } - : group - ) ?? 
[]) as CohortCriteriaGroupFilter[] | AnyCohortCriteriaType[], - }, - }, - }, - } -} - export function interleave(arr: any[], delimiter: any): any[] { return arr.flatMap((item, index, _arr) => _arr.length - 1 !== index // check for the last item @@ -1773,51 +1510,6 @@ export function downloadFile(file: File): void { }, 0) } -export function insightUrlForEvent(event: Pick): string | undefined { - let insightParams: Partial | undefined - if (event.event === '$pageview') { - insightParams = { - insight: InsightType.TRENDS, - interval: 'day', - display: ChartDisplayType.ActionsLineGraph, - actions: [], - events: [ - { - id: '$pageview', - name: '$pageview', - type: 'events', - order: 0, - properties: [ - { - key: '$current_url', - value: event.properties.$current_url, - type: 'event', - }, - ], - }, - ], - } - } else if (event.event !== '$autocapture') { - insightParams = { - insight: InsightType.TRENDS, - interval: 'day', - display: ChartDisplayType.ActionsLineGraph, - actions: [], - events: [ - { - id: event.event, - name: event.event, - type: 'events', - order: 0, - properties: [], - }, - ], - } - } - - return insightParams ? urls.insightNew(insightParams) : undefined -} - export function inStorybookTestRunner(): boolean { return navigator.userAgent.includes('StorybookTestRunner') } diff --git a/frontend/src/lib/utils/copyToClipboard.tsx b/frontend/src/lib/utils/copyToClipboard.tsx new file mode 100644 index 0000000000000..b29ec0dbbe14b --- /dev/null +++ b/frontend/src/lib/utils/copyToClipboard.tsx @@ -0,0 +1,34 @@ +import { IconCopy } from '@posthog/icons' +import { lemonToast } from '@posthog/lemon-ui' + +export async function copyToClipboard(value: string, description: string = 'text'): Promise { + if (!navigator.clipboard) { + lemonToast.warning('Oops! 
Clipboard capabilities are only available over HTTPS or on localhost') + return false + } + + try { + await navigator.clipboard.writeText(value) + lemonToast.info(`Copied ${description} to clipboard`, { + icon: , + }) + return true + } catch (e) { + // If the Clipboard API fails, fallback to textarea method + try { + const textArea = document.createElement('textarea') + textArea.value = value + document.body.appendChild(textArea) + textArea.select() + document.execCommand('copy') + document.body.removeChild(textArea) + lemonToast.info(`Copied ${description} to clipboard`, { + icon: , + }) + return true + } catch (err) { + lemonToast.error(`Could not copy ${description} to clipboard: ${err}`) + return false + } + } +} diff --git a/frontend/src/lib/utils/deleteWithUndo.tsx b/frontend/src/lib/utils/deleteWithUndo.tsx new file mode 100644 index 0000000000000..023a0c1bc4ad2 --- /dev/null +++ b/frontend/src/lib/utils/deleteWithUndo.tsx @@ -0,0 +1,34 @@ +import { lemonToast } from '@posthog/lemon-ui' +import api from 'lib/api' + +export async function deleteWithUndo>({ + undo = false, + ...props +}: { + undo?: boolean + endpoint: string + object: T + idField?: keyof T + callback?: (undo: boolean, object: T) => void +}): Promise { + await api.update(`api/${props.endpoint}/${props.object[props.idField || 'id']}`, { + ...props.object, + deleted: !undo, + }) + props.callback?.(undo, props.object) + lemonToast[undo ? 'success' : 'info']( + <> + {props.object.name || {props.object.derived_name || 'Unnamed'}} has been{' '} + {undo ? 'restored' : 'deleted'} + , + { + toastId: `delete-item-${props.object.id}-${undo}`, + button: undo + ? 
undefined + : { + label: 'Undo', + action: () => deleteWithUndo({ undo: true, ...props }), + }, + } + ) +} diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index 5ce1faefa6d13..2410a62146d2c 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -31,7 +31,6 @@ import { } from '~/types' import type { Dayjs } from 'lib/dayjs' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' -import { convertPropertyGroupToProperties } from 'lib/utils' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { now } from 'lib/dayjs' import { @@ -42,7 +41,7 @@ import { isStickinessFilter, isTrendsFilter, } from 'scenes/insights/sharedUtils' -import { isGroupPropertyFilter } from 'lib/components/PropertyFilters/utils' +import { convertPropertyGroupToProperties, isGroupPropertyFilter } from 'lib/components/PropertyFilters/utils' import { EventIndex } from 'scenes/session-recordings/player/eventIndex' import { SurveyTemplateType } from 'scenes/surveys/constants' @@ -1102,6 +1101,7 @@ export const eventUsageLogic = kea([ posthog.capture('survey created', { name: survey.name, id: survey.id, + survey_type: survey.type, questions_length: survey.questions.length, question_types: survey.questions.map((question) => question.type), }) @@ -1110,6 +1110,7 @@ export const eventUsageLogic = kea([ posthog.capture('survey launched', { name: survey.name, id: survey.id, + survey_type: survey.type, question_types: survey.questions.map((question) => question.type), created_at: survey.created_at, start_date: survey.start_date, diff --git a/frontend/src/lib/utils/permissioning.ts b/frontend/src/lib/utils/permissioning.ts index bbd0ca3d0f999..0ec496aaa4da4 100644 --- a/frontend/src/lib/utils/permissioning.ts +++ b/frontend/src/lib/utils/permissioning.ts @@ -1,8 +1,5 @@ -import { ExplicitTeamMemberType, OrganizationMemberType, UserType } from '../../types' 
-import { OrganizationMembershipLevel, TeamMembershipLevel } from '../constants' - -export type EitherMembershipLevel = OrganizationMembershipLevel | TeamMembershipLevel -export type EitherMemberType = OrganizationMemberType | ExplicitTeamMemberType +import { EitherMemberType, ExplicitTeamMemberType, OrganizationMemberType, UserType } from '../../types' +import { EitherMembershipLevel, OrganizationMembershipLevel, TeamMembershipLevel } from '../constants' /** If access level change is disallowed given the circumstances, returns a reason why so. Otherwise returns null. */ export function getReasonForAccessLevelChangeProhibition( diff --git a/frontend/src/loadPostHogJS.tsx b/frontend/src/loadPostHogJS.tsx index 2acd266241f82..bcd03f327260a 100644 --- a/frontend/src/loadPostHogJS.tsx +++ b/frontend/src/loadPostHogJS.tsx @@ -27,8 +27,8 @@ export function loadPostHogJS(): void { bootstrap: window.POSTHOG_USER_IDENTITY_WITH_FLAGS ? window.POSTHOG_USER_IDENTITY_WITH_FLAGS : {}, opt_in_site_apps: true, loaded: (posthog) => { - if (posthog.webPerformance) { - posthog.webPerformance._forceAllowLocalhost = true + if (posthog.sessionRecording) { + posthog.sessionRecording._forceAllowLocalhostNetworkCapture = true } if (window.IMPERSONATED_SESSION) { diff --git a/frontend/src/models/annotationsModel.ts b/frontend/src/models/annotationsModel.ts index d193e56cee6b3..c22abb4754971 100644 --- a/frontend/src/models/annotationsModel.ts +++ b/frontend/src/models/annotationsModel.ts @@ -1,6 +1,6 @@ import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea' import api from 'lib/api' -import { deleteWithUndo } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import type { annotationsModelType } from './annotationsModelType' import { RawAnnotationType, AnnotationType } from '~/types' import { loaders } from 'kea-loaders' diff --git a/frontend/src/models/cohortsModel.ts b/frontend/src/models/cohortsModel.ts index 
970458bb44be9..13f994ab31b95 100644 --- a/frontend/src/models/cohortsModel.ts +++ b/frontend/src/models/cohortsModel.ts @@ -2,16 +2,59 @@ import { loaders } from 'kea-loaders' import { kea, path, connect, actions, reducers, selectors, listeners, beforeUnmount, afterMount } from 'kea' import api from 'lib/api' import type { cohortsModelType } from './cohortsModelType' -import { CohortType, ExporterFormat } from '~/types' +import { + AnyCohortCriteriaType, + BehavioralCohortType, + BehavioralEventType, + CohortCriteriaGroupFilter, + CohortType, + ExporterFormat, +} from '~/types' import { personsLogic } from 'scenes/persons/personsLogic' -import { deleteWithUndo, processCohort } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { triggerExport } from 'lib/components/ExportButton/exporter' import { isAuthenticatedTeam, teamLogic } from 'scenes/teamLogic' import Fuse from 'fuse.js' import { permanentlyMount } from 'lib/utils/kea-logic-builders' +import { BehavioralFilterKey } from 'scenes/cohorts/CohortFilters/types' const POLL_TIMEOUT = 5000 +export function processCohort(cohort: CohortType): CohortType { + return { + ...cohort, + ...{ + /* Populate value_property with value and overwrite value with corresponding behavioral filter type */ + filters: { + properties: { + ...cohort.filters.properties, + values: (cohort.filters.properties?.values?.map((group) => + 'values' in group + ? { + ...group, + values: (group.values as AnyCohortCriteriaType[]).map((c) => + c.type && + [BehavioralFilterKey.Cohort, BehavioralFilterKey.Person].includes(c.type) && + !('value_property' in c) + ? { + ...c, + value_property: c.value, + value: + c.type === BehavioralFilterKey.Cohort + ? BehavioralCohortType.InCohort + : BehavioralEventType.HaveProperty, + } + : c + ), + } + : group + ) ?? 
[]) as CohortCriteriaGroupFilter[] | AnyCohortCriteriaType[], + }, + }, + }, + } +} + export const cohortsModel = kea([ path(['models', 'cohortsModel']), connect({ diff --git a/frontend/src/models/notebooksModel.ts b/frontend/src/models/notebooksModel.ts index f59f697b991d2..70936e1c6c8bd 100644 --- a/frontend/src/models/notebooksModel.ts +++ b/frontend/src/models/notebooksModel.ts @@ -6,7 +6,7 @@ import { DashboardType, NotebookListItemType, NotebookNodeType, NotebookTarget } import api from 'lib/api' import posthog from 'posthog-js' import { LOCAL_NOTEBOOK_TEMPLATES } from 'scenes/notebooks/NotebookTemplates/notebookTemplates' -import { deleteWithUndo } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { teamLogic } from 'scenes/teamLogic' import { defaultNotebookContent, EditorFocusPosition, JSONContent } from 'scenes/notebooks/Notebook/utils' diff --git a/frontend/src/queries/nodes/DataNode/DateRange.tsx b/frontend/src/queries/nodes/DataNode/DateRange.tsx index ce48a8ec9d892..f28699cb34146 100644 --- a/frontend/src/queries/nodes/DataNode/DateRange.tsx +++ b/frontend/src/queries/nodes/DataNode/DateRange.tsx @@ -10,7 +10,6 @@ export function DateRange({ query, setQuery }: DateRangeProps): JSX.Element | nu if (isEventsQuery(query)) { return ( { diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index a83d0398a49d4..94c2651343b31 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -477,7 +477,7 @@ export const dataNodeLogic = kea([ abortQuery: async ({ queryId }) => { try { const { currentTeamId } = values - await api.create(`api/projects/${currentTeamId}/insights/cancel`, { client_query_id: queryId }) + await api.delete(`api/projects/${currentTeamId}/query/${queryId}/`) } catch (e) { console.warn('Failed cancelling query', e) } diff --git 
a/frontend/src/queries/nodes/DataTable/DataTableExport.tsx b/frontend/src/queries/nodes/DataTable/DataTableExport.tsx index 9c9cead143b70..6965548e4afac 100644 --- a/frontend/src/queries/nodes/DataTable/DataTableExport.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTableExport.tsx @@ -17,7 +17,7 @@ import { useValues } from 'kea' import { LemonDivider, lemonToast } from '@posthog/lemon-ui' import { asDisplay } from 'scenes/persons/person-utils' import { urls } from 'scenes/urls' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' const EXPORT_MAX_LIMIT = 10000 diff --git a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx index 4b74e14e09d6a..bcf63f7cb7eff 100644 --- a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx +++ b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx @@ -8,8 +8,9 @@ import { teamLogic } from 'scenes/teamLogic' import { IconLink, IconPlayCircle } from 'lib/lemon-ui/icons' import { useActions } from 'kea' import { sessionPlayerModalLogic } from 'scenes/session-recordings/player/modal/sessionPlayerModalLogic' -import { copyToClipboard, insightUrlForEvent } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { dayjs } from 'lib/dayjs' +import { insightUrlForEvent } from 'scenes/insights/utils' interface EventActionProps { event: EventType diff --git a/frontend/src/queries/nodes/DataTable/SavedQueries.tsx b/frontend/src/queries/nodes/DataTable/SavedQueries.tsx index fc088e7edc040..15dc608ebf9ad 100644 --- a/frontend/src/queries/nodes/DataTable/SavedQueries.tsx +++ b/frontend/src/queries/nodes/DataTable/SavedQueries.tsx @@ -1,5 +1,4 @@ import { LemonButton, LemonButtonWithDropdown } from 'lib/lemon-ui/LemonButton' -import { IconBookmarkBorder } from 'lib/lemon-ui/icons' import { DataTableNode } from '~/queries/schema' import equal from 'fast-deep-equal' import { useValues } from 
'kea' @@ -48,7 +47,6 @@ export function SavedQueries({ query, setQuery }: SavedQueriesProps): JSX.Elemen }} type="secondary" status="primary-alt" - icon={} > {selectedTitle} diff --git a/frontend/src/queries/nodes/InsightViz/InsightViz.scss b/frontend/src/queries/nodes/InsightViz/InsightViz.scss index a6197196fe264..01315b0d6877b 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightViz.scss +++ b/frontend/src/queries/nodes/InsightViz/InsightViz.scss @@ -68,6 +68,12 @@ } } +.WebAnalyticsDashboard { + .InsightVizDisplay { + --insight-viz-min-height: 25rem; + } +} + .RetentionContainer { width: 100%; display: flex; diff --git a/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss b/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss index 0c2b080492e42..6637f4f265e04 100644 --- a/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss +++ b/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss @@ -1,6 +1,11 @@ .PropertyGroupFilters { .property-group { background-color: var(--side); + + .posthog-3000 & { + border-width: 1px; + } + padding: 0.5rem; border-radius: 4px; } diff --git a/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/propertyGroupFilterLogic.ts b/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/propertyGroupFilterLogic.ts index 772c97ddd7f75..aa99e6a2b2a22 100644 --- a/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/propertyGroupFilterLogic.ts +++ b/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/propertyGroupFilterLogic.ts @@ -3,9 +3,10 @@ import { actions, kea, key, listeners, path, props, propsChanged, reducers, sele import { PropertyGroupFilter, FilterLogicalOperator, EmptyPropertyFilter } from '~/types' import type { propertyGroupFilterLogicType } from './propertyGroupFilterLogicType' -import { convertPropertiesToPropertyGroup, objectsEqual } from 'lib/utils' +import { objectsEqual 
} from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { StickinessQuery, TrendsQuery } from '~/queries/schema' +import { convertPropertiesToPropertyGroup } from 'lib/components/PropertyFilters/utils' export type PropertyGroupFilterLogicProps = { pageKey: string diff --git a/frontend/src/queries/nodes/InsightViz/utils.ts b/frontend/src/queries/nodes/InsightViz/utils.ts index b21948c769209..1b2c403fdb12b 100644 --- a/frontend/src/queries/nodes/InsightViz/utils.ts +++ b/frontend/src/queries/nodes/InsightViz/utils.ts @@ -103,6 +103,14 @@ export const getShowValueOnSeries = (query: InsightQueryNode): boolean | undefin } } +export const getShowLabelsOnSeries = (query: InsightQueryNode): boolean | undefined => { + if (isTrendsQuery(query)) { + return query.trendsFilter?.show_labels_on_series + } else { + return undefined + } +} + export const getShowPercentStackView = (query: InsightQueryNode): boolean | undefined => { if (isTrendsQuery(query)) { return query.trendsFilter?.show_percent_stack_view diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index df71c0b1cef4b..01c218a290e6c 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -26,13 +26,16 @@ import { isStickinessFilter, isTrendsFilter, } from 'scenes/insights/sharedUtils' -import { flattenObject, toParams } from 'lib/utils' +import { flattenObject, delay, toParams } from 'lib/utils' import { queryNodeToFilter } from './nodes/InsightQuery/utils/queryNodeToFilter' import { now } from 'lib/dayjs' import { currentSessionId } from 'lib/internalMetrics' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' +const QUERY_ASYNC_MAX_INTERVAL_SECONDS = 10 +const QUERY_ASYNC_TOTAL_POLL_SECONDS = 300 + //get export context for a given query export function queryExportContext( query: N, @@ -91,6 +94,43 @@ export function queryExportContext( throw new Error(`Unsupported query: ${query.kind}`) 
} +async function executeQuery( + queryNode: N, + methodOptions?: ApiMethodOptions, + refresh?: boolean, + queryId?: string +): Promise> { + const queryAsyncEnabled = Boolean(featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.QUERY_ASYNC]) + const excludedKinds = ['HogQLMetadata'] + const queryAsync = queryAsyncEnabled && !excludedKinds.includes(queryNode.kind) + const response = await api.query(queryNode, methodOptions, queryId, refresh, queryAsync) + + if (!queryAsync || !response.query_async) { + return response + } + + const pollStart = performance.now() + let currentDelay = 300 // start low, because all queries will take at minimum this + + while (performance.now() - pollStart < QUERY_ASYNC_TOTAL_POLL_SECONDS * 1000) { + await delay(currentDelay) + currentDelay = Math.min(currentDelay * 2, QUERY_ASYNC_MAX_INTERVAL_SECONDS * 1000) + + if (methodOptions?.signal?.aborted) { + const customAbortError = new Error('Query aborted') + customAbortError.name = 'AbortError' + throw customAbortError + } + + const statusResponse = await api.queryStatus.get(response.id) + + if (statusResponse.complete || statusResponse.error) { + return statusResponse.results + } + } + throw new Error('Query timed out') +} + // Return data for a given query export async function query( queryNode: N, @@ -216,7 +256,7 @@ export async function query( response = await fetchLegacyInsights() } } else { - response = await api.query(queryNode, methodOptions, queryId, refresh) + response = await executeQuery(queryNode, methodOptions, refresh, queryId) if (isHogQLQuery(queryNode) && response && typeof response === 'object') { logParams.clickhouse_sql = (response as HogQLQueryResponse)?.clickhouse } diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 019230809f749..7d5f3cb358ffc 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -2413,6 +2413,51 @@ } ] }, + "QueryStatus": { + "additionalProperties": false, + 
"properties": { + "complete": { + "default": false, + "type": "boolean" + }, + "end_time": { + "format": "date-time", + "type": "string" + }, + "error": { + "default": false, + "type": "boolean" + }, + "error_message": { + "default": "", + "type": "string" + }, + "expiration_time": { + "format": "date-time", + "type": "string" + }, + "id": { + "type": "string" + }, + "query_async": { + "default": true, + "type": "boolean" + }, + "results": {}, + "start_time": { + "format": "date-time", + "type": "string" + }, + "task_id": { + "type": "string" + }, + "team_id": { + "type": "integer" + } + }, + "required": ["id", "query_async", "team_id", "error", "complete", "error_message"], + "type": "object" + }, "QueryTiming": { "additionalProperties": false, "properties": { @@ -2975,6 +3020,9 @@ }, "type": "array" }, + "show_labels_on_series": { + "type": "boolean" + }, "show_legend": { "type": "boolean" }, @@ -3095,6 +3143,18 @@ "additionalProperties": false, "description": "Chart specific rendering options. Use ChartRenderingMetadata for non-serializable values, e.g. 
onClick handlers", "properties": { + "ActionsPie": { + "additionalProperties": false, + "properties": { + "disableHoverOffset": { + "type": "boolean" + }, + "hideAggregation": { + "type": "boolean" + } + }, + "type": "object" + }, "RETENTION": { "additionalProperties": false, "properties": { diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index b0ad656cea099..1f035b9d1acba 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -4,6 +4,7 @@ import { Breakdown, BreakdownKeyType, BreakdownType, + ChartDisplayType, CountPerActorMathType, EventPropertyFilter, EventType, @@ -403,6 +404,10 @@ export interface VizSpecificOptions { hideSizeColumn?: boolean useSmallLayout?: boolean } + [ChartDisplayType.ActionsPie]?: { + disableHoverOffset?: boolean + hideAggregation?: boolean + } } export interface InsightVizNode extends Node, InsightVizNodeViewProps { @@ -424,7 +429,6 @@ interface InsightVizNodeViewProps { embedded?: boolean suppressSessionAnalysisWarning?: boolean hidePersonsModal?: boolean - vizSpecificOptions?: VizSpecificOptions } @@ -542,6 +546,28 @@ export interface QueryResponse { next_allowed_client_refresh?: string } +export type QueryStatus = { + id: string + /** @default true */ + query_async: boolean + /** @asType integer */ + team_id: number + /** @default false */ + error: boolean + /** @default false */ + complete: boolean + /** @default "" */ + error_message: string + results?: any + /** @format date-time */ + start_time?: string + /** @format date-time */ + end_time?: string + /** @format date-time */ + expiration_time?: string + task_id?: string +} + export interface LifecycleQueryResponse extends QueryResponse { results: Record[] } diff --git a/frontend/src/queries/types.ts b/frontend/src/queries/types.ts index f1e63d8f54549..804a1a64719ae 100644 --- a/frontend/src/queries/types.ts +++ b/frontend/src/queries/types.ts @@ -1,4 +1,4 @@ -import { ChartDisplayType, InsightLogicProps, TrendResult } from 
'~/types' +import { ChartDisplayType, GraphPointPayload, InsightLogicProps, TrendResult } from '~/types' import { ComponentType, HTMLProps } from 'react' import { DataTableNode } from '~/queries/schema' @@ -24,6 +24,9 @@ export interface ChartRenderingMetadata { [ChartDisplayType.WorldMap]?: { countryProps?: (countryCode: string, countryData: TrendResult | undefined) => Omit, 'key'> } + [ChartDisplayType.ActionsPie]?: { + onSegmentClick?: (payload: GraphPointPayload) => void + } } export type QueryContextColumnTitleComponent = ComponentType<{ diff --git a/frontend/src/scenes/PreflightCheck/PreflightCheck.scss b/frontend/src/scenes/PreflightCheck/PreflightCheck.scss index 2b83263873c2e..0628c2ac5d99f 100644 --- a/frontend/src/scenes/PreflightCheck/PreflightCheck.scss +++ b/frontend/src/scenes/PreflightCheck/PreflightCheck.scss @@ -81,7 +81,7 @@ svg, .Preflight__status-text { - color: var(--primary); + color: var(--primary-3000); } } diff --git a/frontend/src/scenes/actions/EventName.tsx b/frontend/src/scenes/actions/EventName.tsx index aadaf08a4cee1..b467e2d044028 100644 --- a/frontend/src/scenes/actions/EventName.tsx +++ b/frontend/src/scenes/actions/EventName.tsx @@ -38,6 +38,7 @@ export function LemonEventName({ renderValue={(v) => (v !== null ? : null)} allowClear={allEventsOption === 'clear'} excludedProperties={allEventsOption !== 'explicit' ? 
{ events: [null] } : undefined} + size="small" /> ) } diff --git a/frontend/src/scenes/actions/actionEditLogic.tsx b/frontend/src/scenes/actions/actionEditLogic.tsx index e1763cdaa0d29..980461d56e80c 100644 --- a/frontend/src/scenes/actions/actionEditLogic.tsx +++ b/frontend/src/scenes/actions/actionEditLogic.tsx @@ -1,6 +1,7 @@ import { actions, afterMount, connect, kea, key, listeners, path, props, reducers } from 'kea' import api from 'lib/api' -import { deleteWithUndo, uuid } from 'lib/utils' +import { uuid } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { actionsModel } from '~/models/actionsModel' import type { actionEditLogicType } from './actionEditLogicType' import { ActionStepType, ActionType } from '~/types' diff --git a/frontend/src/scenes/actions/actionLogic.ts b/frontend/src/scenes/actions/actionLogic.ts index e3781d37902f0..6b47df87c05a7 100644 --- a/frontend/src/scenes/actions/actionLogic.ts +++ b/frontend/src/scenes/actions/actionLogic.ts @@ -4,6 +4,8 @@ import api from 'lib/api' import type { actionLogicType } from './actionLogicType' import { ActionType, Breadcrumb } from '~/types' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' +import { DataManagementTab } from 'scenes/data-management/DataManagementScene' export interface ActionLogicProps { id?: ActionType['id'] @@ -50,14 +52,17 @@ export const actionLogic = kea([ (s) => [s.action], (action): Breadcrumb[] => [ { + key: Scene.DataManagement, name: `Data Management`, path: urls.eventDefinitions(), }, { + key: DataManagementTab.Actions, name: 'Actions', path: urls.actions(), }, { + key: action?.id || 'new', name: action?.name || 'Unnamed', path: action ? 
urls.action(action.id) : undefined, }, diff --git a/frontend/src/scenes/actions/actionsLogic.ts b/frontend/src/scenes/actions/actionsLogic.ts index c281814856611..f150c85111a64 100644 --- a/frontend/src/scenes/actions/actionsLogic.ts +++ b/frontend/src/scenes/actions/actionsLogic.ts @@ -9,6 +9,8 @@ import { userLogic } from 'scenes/userLogic' import { subscriptions } from 'kea-subscriptions' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' +import { Scene } from 'scenes/sceneTypes' +import { DataManagementTab } from 'scenes/data-management/DataManagementScene' export type ActionsFilterType = 'all' | 'me' @@ -68,10 +70,12 @@ export const actionsLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: `Data Management`, + key: Scene.DataManagement, + name: `Data management`, path: urls.eventDefinitions(), }, { + key: DataManagementTab.Actions, name: 'Actions', path: urls.actions(), }, diff --git a/frontend/src/scenes/apps/AppMetricsGraph.tsx b/frontend/src/scenes/apps/AppMetricsGraph.tsx index b8bb316a2bf3f..957e95499f3bb 100644 --- a/frontend/src/scenes/apps/AppMetricsGraph.tsx +++ b/frontend/src/scenes/apps/AppMetricsGraph.tsx @@ -6,7 +6,8 @@ import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' import './AppMetricsGraph.scss' import { inStorybookTestRunner, lightenDarkenColor } from 'lib/utils' -import { AppMetrics, AppMetricsTab } from './appMetricsSceneLogic' +import { AppMetrics } from './appMetricsSceneLogic' +import { AppMetricsTab } from '~/types' export interface AppMetricsGraphProps { tab: AppMetricsTab @@ -31,21 +32,21 @@ export function AppMetricsGraph({ tab, metrics, metricsLoading }: AppMetricsGrap label: descriptions.successes, data: metrics.successes, borderColor: '', - ...colorConfig('data-brand-blue'), + ...colorConfig('data-color-1'), }, ...(descriptions.successes_on_retry ? 
[ { label: descriptions.successes_on_retry, data: metrics.successes_on_retry, - ...colorConfig('data-yellow'), + ...colorConfig('data-color-13'), }, ] : []), { label: descriptions.failures, data: metrics.failures, - ...colorConfig('data-vermilion'), + ...colorConfig('data-color-5'), }, ], }, diff --git a/frontend/src/scenes/apps/AppMetricsScene.tsx b/frontend/src/scenes/apps/AppMetricsScene.tsx index 74b847872f39e..7b4c62f3d1b83 100644 --- a/frontend/src/scenes/apps/AppMetricsScene.tsx +++ b/frontend/src/scenes/apps/AppMetricsScene.tsx @@ -1,5 +1,5 @@ import { SceneExport } from 'scenes/sceneTypes' -import { appMetricsSceneLogic, AppMetricsTab } from 'scenes/apps/appMetricsSceneLogic' +import { appMetricsSceneLogic } from 'scenes/apps/appMetricsSceneLogic' import { PageHeader } from 'lib/components/PageHeader' import { useValues, useActions } from 'kea' import { MetricsTab } from './MetricsTab' @@ -15,6 +15,7 @@ import { AppLogsTab } from './AppLogsTab' import { LemonButton } from '@posthog/lemon-ui' import { IconSettings } from 'lib/lemon-ui/icons' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' +import { AppMetricsTab } from '~/types' export const scene: SceneExport = { component: AppMetrics, diff --git a/frontend/src/scenes/apps/HistoricalExport.tsx b/frontend/src/scenes/apps/HistoricalExport.tsx index e32cadc276244..0642110b3b3c9 100644 --- a/frontend/src/scenes/apps/HistoricalExport.tsx +++ b/frontend/src/scenes/apps/HistoricalExport.tsx @@ -1,9 +1,9 @@ import { Card } from 'antd' import { useValues } from 'kea' import { AppMetricsGraph } from './AppMetricsGraph' -import { AppMetricsTab } from './appMetricsSceneLogic' import { historicalExportLogic, HistoricalExportLogicProps } from './historicalExportLogic' import { ErrorsOverview, MetricsOverview } from './MetricsTab' +import { AppMetricsTab } from '~/types' export function HistoricalExport(props: HistoricalExportLogicProps): JSX.Element { const { data, dataLoading } = 
useValues(historicalExportLogic(props)) diff --git a/frontend/src/scenes/apps/MetricsTab.tsx b/frontend/src/scenes/apps/MetricsTab.tsx index 0af2e7984c814..425219384c912 100644 --- a/frontend/src/scenes/apps/MetricsTab.tsx +++ b/frontend/src/scenes/apps/MetricsTab.tsx @@ -1,4 +1,4 @@ -import { AppErrorSummary, AppMetrics, appMetricsSceneLogic, AppMetricsTab } from './appMetricsSceneLogic' +import { AppErrorSummary, AppMetrics, appMetricsSceneLogic } from './appMetricsSceneLogic' import { DescriptionColumns } from './constants' import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' import { humanFriendlyDuration, humanFriendlyNumber } from 'lib/utils' @@ -10,6 +10,7 @@ import { TZLabel } from 'lib/components/TZLabel' import { Link } from 'lib/lemon-ui/Link' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { IconInfo } from 'lib/lemon-ui/icons' +import { AppMetricsTab } from '~/types' export interface MetricsTabProps { tab: AppMetricsTab diff --git a/frontend/src/scenes/apps/appMetricsSceneLogic.ts b/frontend/src/scenes/apps/appMetricsSceneLogic.ts index 8df9733173449..577b3624e6287 100644 --- a/frontend/src/scenes/apps/appMetricsSceneLogic.ts +++ b/frontend/src/scenes/apps/appMetricsSceneLogic.ts @@ -3,7 +3,7 @@ import { loaders } from 'kea-loaders' import type { appMetricsSceneLogicType } from './appMetricsSceneLogicType' import { urls } from 'scenes/urls' -import { Breadcrumb, PluginConfigWithPluginInfo, UserBasicType } from '~/types' +import { AppMetricsTab, AppMetricsUrlParams, Breadcrumb, PluginConfigWithPluginInfo, UserBasicType } from '~/types' import api, { PaginatedResponse } from 'lib/api' import { teamLogic } from 'scenes/teamLogic' import { actionToUrl, urlToAction } from 'kea-router' @@ -12,28 +12,13 @@ import { HISTORICAL_EXPORT_JOB_NAME_V2 } from 'scenes/plugins/edit/interface-job import { interfaceJobsLogic, InterfaceJobsProps } from '../plugins/edit/interface-jobs/interfaceJobsLogic' import { dayjs } from 'lib/dayjs' import { router } from 
'kea-router' +import { Scene } from 'scenes/sceneTypes' export interface AppMetricsLogicProps { /** Used as the logic's key */ pluginConfigId: number } -export interface AppMetricsUrlParams { - tab?: AppMetricsTab - from?: string - error?: [string, string] -} - -export enum AppMetricsTab { - Logs = 'logs', - ProcessEvent = 'processEvent', - OnEvent = 'onEvent', - ComposeWebhook = 'composeWebhook', - ExportEvents = 'exportEvents', - ScheduledTask = 'scheduledTask', - HistoricalExports = 'historical_exports', - History = 'history', -} export const TabsWithMetrics = [ AppMetricsTab.ProcessEvent, AppMetricsTab.OnEvent, @@ -197,10 +182,12 @@ export const appMetricsSceneLogic = kea([ (s, p) => [s.pluginConfig, p.pluginConfigId], (pluginConfig, pluginConfigId: number): Breadcrumb[] => [ { + key: Scene.Apps, name: 'Apps', path: urls.projectApps(), }, { + key: pluginConfigId, name: pluginConfig?.plugin_info?.name, path: urls.appMetrics(pluginConfigId), }, diff --git a/frontend/src/scenes/apps/constants.tsx b/frontend/src/scenes/apps/constants.tsx index 9870936b5cfc5..fb5077b3df336 100644 --- a/frontend/src/scenes/apps/constants.tsx +++ b/frontend/src/scenes/apps/constants.tsx @@ -1,4 +1,4 @@ -import { AppMetricsTab } from './appMetricsSceneLogic' +import { AppMetricsTab } from '~/types' interface Description { successes: string diff --git a/frontend/src/scenes/authentication/Login.tsx b/frontend/src/scenes/authentication/Login.tsx index 61a13b54feb53..82552cbc2399f 100644 --- a/frontend/src/scenes/authentication/Login.tsx +++ b/frontend/src/scenes/authentication/Login.tsx @@ -16,6 +16,7 @@ import RegionSelect from './RegionSelect' import { redirectIfLoggedInOtherInstance } from './redirectToLoggedInInstance' import { captureException } from '@sentry/react' import { SupportModalButton } from './SupportModalButton' +import { useButtonStyle } from './useButtonStyles' export const ERROR_MESSAGES: Record = { no_new_organizations: @@ -55,6 +56,7 @@ export function Login(): 
JSX.Element { const passwordInputRef = useRef(null) const isPasswordHidden = precheckResponse.status === 'pending' || precheckResponse.sso_enforcement + const buttonStyles = useButtonStyle() useEffect(() => { if (preflight?.cloud) { @@ -146,6 +148,7 @@ export function Login(): JSX.Element { type="primary" center loading={isLoginSubmitting || precheckResponseLoading} + {...buttonStyles} > Log in
diff --git a/frontend/src/scenes/authentication/Login2FA.tsx b/frontend/src/scenes/authentication/Login2FA.tsx index 6bd87ca93a34b..7dfa004e16b5d 100644 --- a/frontend/src/scenes/authentication/Login2FA.tsx +++ b/frontend/src/scenes/authentication/Login2FA.tsx @@ -6,10 +6,13 @@ import { Field } from 'lib/forms/Field' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonButton, LemonInput } from '@posthog/lemon-ui' import { BridgePage } from 'lib/components/BridgePage/BridgePage' +import { useButtonStyle } from './useButtonStyles' export function Login2FA(): JSX.Element { const { isTwofactortokenSubmitting, generalError } = useValues(login2FALogic) const { preflight } = useValues(preflightLogic) + const buttonStyles = useButtonStyle() + return ( Login
diff --git a/frontend/src/scenes/authentication/PasswordReset.tsx b/frontend/src/scenes/authentication/PasswordReset.tsx index d1c2b3a037314..9a8a4c9e276c7 100644 --- a/frontend/src/scenes/authentication/PasswordReset.tsx +++ b/frontend/src/scenes/authentication/PasswordReset.tsx @@ -14,6 +14,7 @@ import { Field } from 'lib/forms/Field' import { BridgePage } from 'lib/components/BridgePage/BridgePage' import { IconCheckCircleOutline, IconErrorOutline } from 'lib/lemon-ui/icons' import { SupportModalButton } from './SupportModalButton' +import { useButtonStyle } from './useButtonStyles' export const scene: SceneExport = { component: PasswordReset, @@ -85,6 +86,7 @@ function EmailUnavailable(): JSX.Element { function ResetForm(): JSX.Element { const { isRequestPasswordResetSubmitting } = useValues(passwordResetLogic) + const buttonStyles = useButtonStyle() return (
@@ -108,6 +110,7 @@ function ResetForm(): JSX.Element { htmlType="submit" data-attr="password-reset" loading={isRequestPasswordResetSubmitting} + {...buttonStyles} > Continue @@ -118,13 +121,21 @@ function ResetForm(): JSX.Element { function ResetSuccess(): JSX.Element { const { requestPasswordReset } = useValues(passwordResetLogic) const { push } = useActions(router) + const buttonStyles = useButtonStyle() return (
Request received successfully! If the email {requestPasswordReset?.email || 'you typed'} exists, you’ll receive an email with a reset link soon.
- push('/login')}> + push('/login')} + {...buttonStyles} + > Back to login
@@ -135,6 +146,7 @@ function ResetSuccess(): JSX.Element { function ResetThrottled(): JSX.Element { const { requestPasswordReset } = useValues(passwordResetLogic) const { push } = useActions(router) + const buttonStyles = useButtonStyle() return (
@@ -145,7 +157,14 @@ function ResetThrottled(): JSX.Element { {' '} if you think this has been a mistake.
- push('/login')}> + push('/login')} + {...buttonStyles} + > Back to login
diff --git a/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel1.tsx b/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel1.tsx index f950fb7ae2a39..b9eca413dcf47 100644 --- a/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel1.tsx +++ b/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel1.tsx @@ -8,11 +8,13 @@ import { SocialLoginButtons } from 'lib/components/SocialLoginButton/SocialLogin import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { signupLogic } from '../signupLogic' import { Link } from 'lib/lemon-ui/Link' +import { useButtonStyle } from 'scenes/authentication/useButtonStyles' export function SignupPanel1(): JSX.Element | null { const { preflight } = useValues(preflightLogic) const { isSignupPanel1Submitting, signupPanel1 } = useValues(signupLogic) const emailInputRef = useRef(null) + const buttonStyles = useButtonStyle() useEffect(() => { // There's no password in the demo environment @@ -71,6 +73,7 @@ export function SignupPanel1(): JSX.Element | null { data-attr="signup-start" loading={isSignupPanel1Submitting} disabled={isSignupPanel1Submitting} + {...buttonStyles} > Continue diff --git a/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx b/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx index 07ea86b6bd31a..801f02e626464 100644 --- a/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx +++ b/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx @@ -6,12 +6,14 @@ import { Field } from 'lib/forms/Field' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { signupLogic } from '../signupLogic' import SignupReferralSource from 'lib/components/SignupReferralSource' +import { useButtonStyle } from 'scenes/authentication/useButtonStyles' const UTM_TAGS = 'utm_campaign=in-product&utm_tag=signup-header' export function 
SignupPanel2(): JSX.Element | null { const { preflight } = useValues(preflightLogic) const { isSignupPanel2Submitting } = useValues(signupLogic) + const buttonStyles = useButtonStyle() return (
@@ -44,6 +46,7 @@ export function SignupPanel2(): JSX.Element | null { data-attr="signup-submit" loading={isSignupPanel2Submitting} disabled={isSignupPanel2Submitting} + {...buttonStyles} > {!preflight?.demo ? 'Create account' diff --git a/frontend/src/scenes/authentication/signup/verify-email/VerifyEmail.tsx b/frontend/src/scenes/authentication/signup/verify-email/VerifyEmail.tsx index e646a86352de5..f770d53674b39 100644 --- a/frontend/src/scenes/authentication/signup/verify-email/VerifyEmail.tsx +++ b/frontend/src/scenes/authentication/signup/verify-email/VerifyEmail.tsx @@ -51,7 +51,7 @@ export function VerifyEmail(): JSX.Element { return (
- +
{view === 'pending' ? ( <> diff --git a/frontend/src/scenes/authentication/useButtonStyles.ts b/frontend/src/scenes/authentication/useButtonStyles.ts new file mode 100644 index 0000000000000..9f678edaa2018 --- /dev/null +++ b/frontend/src/scenes/authentication/useButtonStyles.ts @@ -0,0 +1,13 @@ +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' + +export function useButtonStyle(): Record { + const is3000 = useFeatureFlag('POSTHOG_3000') + + return is3000 + ? { + size: 'large', + } + : { + size: 'medium', + } +} diff --git a/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx b/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx index 7f5a66fd4798d..3f6dfcf661597 100644 --- a/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx +++ b/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx @@ -19,7 +19,7 @@ export function BatchExportsEditScene(): JSX.Element { return ( <> - +
diff --git a/frontend/src/scenes/batch_exports/BatchExports.scss b/frontend/src/scenes/batch_exports/BatchExports.scss index 835ed19de7ec9..507ce76135072 100644 --- a/frontend/src/scenes/batch_exports/BatchExports.scss +++ b/frontend/src/scenes/batch_exports/BatchExports.scss @@ -7,7 +7,7 @@ @keyframes BatchExportRunIcon__pulse { 0% { outline-offset: 0; - outline-color: var(--primary-light); + outline-color: var(--primary-3000-hover); } 80% { diff --git a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts index 0fe402a2ae9f9..0fa15c3651b99 100644 --- a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts @@ -19,6 +19,7 @@ import { beforeUnload, router } from 'kea-router' import type { batchExportsEditLogicType } from './batchExportEditLogicType' import { dayjs, Dayjs } from 'lib/dayjs' import { batchExportLogic } from './batchExportLogic' +import { Scene } from 'scenes/sceneTypes' export type BatchExportsEditLogicProps = { id: string @@ -267,22 +268,25 @@ export const batchExportsEditLogic = kea([ (s) => [s.batchExportConfig, s.isNew], (config, isNew): Breadcrumb[] => [ { + key: Scene.BatchExports, name: 'Batch Exports', path: urls.batchExports(), }, ...(isNew ? [ { + key: 'new', name: 'New', }, ] : [ { - name: config?.name ?? 'Loading', + key: config?.id ?? 'loading', + name: config?.name, path: config?.id ? 
urls.batchExport(config.id) : undefined, }, - { + key: 'edit', name: 'Edit', }, ]), diff --git a/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts b/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts index b944ad32f546a..9d9825dea2865 100644 --- a/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts @@ -8,6 +8,7 @@ import { batchExportLogic } from './batchExportLogic' import { BatchExportsEditLogicProps } from './batchExportEditLogic' import type { batchExportsEditSceneLogicType } from './batchExportEditSceneLogicType' +import { Scene } from 'scenes/sceneTypes' export const batchExportsEditSceneLogic = kea([ props({} as BatchExportsEditLogicProps), @@ -23,22 +24,25 @@ export const batchExportsEditSceneLogic = kea([ (s) => [s.batchExportConfig, s.id], (config, id): Breadcrumb[] => [ { + key: Scene.BatchExports, name: 'Batch Exports', path: urls.batchExports(), }, ...(id === 'new' ? [ { + key: 'new', name: 'New', }, ] : [ { - name: config?.name ?? 'Loading', + key: config?.id || 'loading', + name: config?.name, path: config?.id ? 
urls.batchExport(config.id) : undefined, }, - { + key: 'edit', name: 'Edit', }, ]), diff --git a/frontend/src/scenes/batch_exports/batchExportLogic.ts b/frontend/src/scenes/batch_exports/batchExportLogic.ts index 66cf9de3e2bfa..6c0c5ca8fcec1 100644 --- a/frontend/src/scenes/batch_exports/batchExportLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportLogic.ts @@ -11,6 +11,7 @@ import { dayjs, Dayjs } from 'lib/dayjs' import { urls } from 'scenes/urls' import type { batchExportLogicType } from './batchExportLogicType' import { router } from 'kea-router' +import { Scene } from 'scenes/sceneTypes' export type BatchExportLogicProps = { id: string @@ -228,11 +229,13 @@ export const batchExportLogic = kea([ (s) => [s.batchExportConfig], (config): Breadcrumb[] => [ { + key: Scene.BatchExports, name: 'Batch Exports', path: urls.batchExports(), }, { - name: config?.name ?? 'Loading', + key: config?.id || 'loading', + name: config?.name, }, ], ], diff --git a/frontend/src/scenes/billing/BillingLimitInput.tsx b/frontend/src/scenes/billing/BillingLimitInput.tsx index 6f1dd652a4acb..fac3943214ea7 100644 --- a/frontend/src/scenes/billing/BillingLimitInput.tsx +++ b/frontend/src/scenes/billing/BillingLimitInput.tsx @@ -7,12 +7,14 @@ import { billingProductLogic } from './billingProductLogic' import { LemonButton, LemonInput } from '@posthog/lemon-ui' import { Tooltip } from 'lib/lemon-ui/Tooltip' import clsx from 'clsx' +import { useRef } from 'react' export const BillingLimitInput = ({ product }: { product: BillingProductV2Type }): JSX.Element | null => { + const limitInputRef = useRef(null) const { billing, billingLoading } = useValues(billingLogic) const { updateBillingLimits } = useActions(billingLogic) const { isEditingBillingLimit, showBillingLimitInput, billingLimitInput, customLimitUsd } = useValues( - billingProductLogic({ product }) + billingProductLogic({ product, billingLimitInputRef: limitInputRef }) ) const { setIsEditingBillingLimit, setBillingLimitInput } = 
useActions(billingProductLogic({ product })) @@ -78,7 +80,7 @@ export const BillingLimitInput = ({ product }: { product: BillingProductV2Type } return null } return ( -
+
{!isEditingBillingLimit ? ( @@ -104,6 +106,7 @@ export const BillingLimitInput = ({ product }: { product: BillingProductV2Type } <>
): BillingV2Type => { @@ -53,6 +56,8 @@ const parseBillingResponse = (data: Partial): BillingV2Type => { export const billingLogic = kea([ path(['scenes', 'billing', 'billingLogic']), actions({ + setProductSpecificAlert: (productSpecificAlert: BillingAlertConfig | null) => ({ productSpecificAlert }), + setScrollToProductKey: (scrollToProductKey: ProductKey | null) => ({ scrollToProductKey }), setShowLicenseDirectInput: (show: boolean) => ({ show }), reportBillingAlertShown: (alertConfig: BillingAlertConfig) => ({ alertConfig }), reportBillingAlertActionClicked: (alertConfig: BillingAlertConfig) => ({ alertConfig }), @@ -66,6 +71,18 @@ export const billingLogic = kea([ actions: [userLogic, ['loadUser'], eventUsageLogic, ['reportProductUnsubscribed']], }), reducers({ + scrollToProductKey: [ + null as ProductKey | null, + { + setScrollToProductKey: (_, { scrollToProductKey }) => scrollToProductKey, + }, + ], + productSpecificAlert: [ + null as BillingAlertConfig | null, + { + setProductSpecificAlert: (_, { productSpecificAlert }) => productSpecificAlert, + }, + ], showLicenseDirectInput: [ false, { @@ -144,8 +161,12 @@ export const billingLogic = kea([ }, ], billingAlert: [ - (s) => [s.billing, s.preflight, s.projectedTotalAmountUsd], - (billing, preflight, projectedTotalAmountUsd): BillingAlertConfig | undefined => { + (s) => [s.billing, s.preflight, s.projectedTotalAmountUsd, s.productSpecificAlert], + (billing, preflight, projectedTotalAmountUsd, productSpecificAlert): BillingAlertConfig | undefined => { + if (productSpecificAlert) { + return productSpecificAlert + } + if (!billing || !preflight?.cloud) { return } @@ -320,6 +341,10 @@ export const billingLogic = kea([ actions.setActivateLicenseValues({ license: hash.license }) actions.submitActivateLicense() } + if (_search.products) { + const products = _search.products.split(',') + actions.setScrollToProductKey(products[0]) + } actions.setRedirectPath() actions.setIsOnboarding() }, diff --git 
a/frontend/src/scenes/billing/billingProductLogic.ts b/frontend/src/scenes/billing/billingProductLogic.ts index aeb72f177c5be..723e152e9723d 100644 --- a/frontend/src/scenes/billing/billingProductLogic.ts +++ b/frontend/src/scenes/billing/billingProductLogic.ts @@ -1,21 +1,34 @@ -import { actions, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' +import { actions, connect, events, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { BillingProductV2AddonType, BillingProductV2Type, BillingV2PlanType, BillingV2TierType } from '~/types' import { billingLogic } from './billingLogic' import type { billingProductLogicType } from './billingProductLogicType' import { convertAmountToUsage } from './billing-utils' import posthog from 'posthog-js' +import React from 'react' const DEFAULT_BILLING_LIMIT = 500 +export interface BillingProductLogicProps { + product: BillingProductV2Type | BillingProductV2AddonType + billingLimitInputRef?: React.MutableRefObject +} + export const billingProductLogic = kea([ + props({} as BillingProductLogicProps), key((props) => props.product.type), path(['scenes', 'billing', 'billingProductLogic']), connect({ - values: [billingLogic, ['billing', 'isUnlicensedDebug']], - actions: [billingLogic, ['loadBillingSuccess', 'updateBillingLimitsSuccess', 'deactivateProduct']], - }), - props({ - product: {} as BillingProductV2Type | BillingProductV2AddonType, + values: [billingLogic, ['billing', 'isUnlicensedDebug', 'scrollToProductKey']], + actions: [ + billingLogic, + [ + 'loadBillingSuccess', + 'updateBillingLimitsSuccess', + 'deactivateProduct', + 'setProductSpecificAlert', + 'setScrollToProductKey', + ], + ], }), actions({ setIsEditingBillingLimit: (isEditingBillingLimit: boolean) => ({ isEditingBillingLimit }), @@ -215,5 +228,40 @@ export const billingProductLogic = kea([ }) actions.setSurveyID('') }, + setScrollToProductKey: ({ scrollToProductKey }) => { + if (scrollToProductKey && scrollToProductKey 
=== props.product.type) { + const { currentPlan } = values.currentAndUpgradePlans + + if (currentPlan.initial_billing_limit) { + actions.setProductSpecificAlert({ + status: 'warning', + title: 'Billing Limit Automatically Applied', + pathName: '/organization/billing', + dismissKey: `auto-apply-billing-limit-${props.product.type}`, + message: `To protect your costs and ours, we've automatically applied a $${currentPlan?.initial_billing_limit} billing limit for ${props.product.name}.`, + action: { + onClick: () => { + actions.setIsEditingBillingLimit(true) + setTimeout(() => { + if (props.billingLimitInputRef?.current) { + props.billingLimitInputRef?.current.focus() + props.billingLimitInputRef?.current.scrollIntoView({ + behavior: 'smooth', + block: 'nearest', + }) + } + }, 0) + }, + children: 'Update billing limit', + }, + }) + } + } + }, + })), + events(({ actions, values }) => ({ + afterMount: () => { + actions.setScrollToProductKey(values.scrollToProductKey) + }, })), ]) diff --git a/frontend/src/scenes/cohorts/Cohorts.scss b/frontend/src/scenes/cohorts/Cohorts.scss index 8dd6ee18eeee8..ba03ee05269f4 100644 --- a/frontend/src/scenes/cohorts/Cohorts.scss +++ b/frontend/src/scenes/cohorts/Cohorts.scss @@ -22,7 +22,7 @@ border-radius: 4px !important; &:hover { - border-color: var(--primary-light) !important; + border-color: var(--primary-3000-hover) !important; } .ant-upload-drag-container { diff --git a/frontend/src/scenes/cohorts/cohortEditLogic.ts b/frontend/src/scenes/cohorts/cohortEditLogic.ts index a2e28bd868112..58b7d6af4c111 100644 --- a/frontend/src/scenes/cohorts/cohortEditLogic.ts +++ b/frontend/src/scenes/cohorts/cohortEditLogic.ts @@ -1,6 +1,6 @@ import { actions, afterMount, beforeUnmount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' import api from 'lib/api' -import { cohortsModel } from '~/models/cohortsModel' +import { cohortsModel, processCohort } from '~/models/cohortsModel' import { ENTITY_MATCH_TYPE, FEATURE_FLAGS 
} from 'lib/constants' import { AnyCohortCriteriaType, @@ -27,7 +27,6 @@ import { } from 'scenes/cohorts/cohortUtils' import { NEW_COHORT, NEW_CRITERIA, NEW_CRITERIA_GROUP } from 'scenes/cohorts/CohortFilters/constants' import type { cohortEditLogicType } from './cohortEditLogicType' -import { processCohort } from 'lib/utils' import { DataTableNode, Node, NodeKind } from '~/queries/schema' import { isDataTableNode } from '~/queries/utils' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' @@ -316,6 +315,11 @@ export const cohortEditLogic = kea([ cohortsModel.findMounted()?.actions.deleteCohort({ id: values.cohort.id, name: values.cohort.name }) router.actions.push(urls.cohorts()) }, + submitCohort: () => { + if (values.cohortHasErrors) { + lemonToast.error('There was an error submitting this cohort. Make sure the cohort filters are correct.') + } + }, checkIfFinishedCalculating: async ({ cohort }, breakpoint) => { if (cohort.is_calculating) { actions.setPollTimeout( diff --git a/frontend/src/scenes/cohorts/cohortSceneLogic.ts b/frontend/src/scenes/cohorts/cohortSceneLogic.ts index 52ea69b500b72..1af9d5cf7adfc 100644 --- a/frontend/src/scenes/cohorts/cohortSceneLogic.ts +++ b/frontend/src/scenes/cohorts/cohortSceneLogic.ts @@ -5,6 +5,7 @@ import { cohortsModel } from '~/models/cohortsModel' import { CohortLogicProps } from './cohortEditLogic' import type { cohortSceneLogicType } from './cohortSceneLogicType' +import { Scene } from 'scenes/sceneTypes' export const cohortSceneLogic = kea([ props({} as CohortLogicProps), @@ -13,15 +14,22 @@ selectors({ breadcrumbs: [ - () => [cohortsModel.selectors.cohortsById, (_, props) => props.id], + () => [cohortsModel.selectors.cohortsById, (_, props) => props.id as CohortLogicProps['id']], (cohortsById, cohortId): Breadcrumb[] => { return [ { + key: Scene.PersonsManagement, + name: 'People', + path: urls.persons(), + }, + { + key: 'cohorts', name: 'Cohorts', path: urls.cohorts(), 
}, { - name: cohortId !== 'new' ? cohortsById[cohortId]?.name || 'Untitled' : 'Untitled', + key: cohortId || 'loading', + name: cohortId && cohortId !== 'new' ? cohortsById[cohortId]?.name || 'Untitled' : 'Untitled', }, ] }, diff --git a/frontend/src/scenes/dashboard/DashboardHeader.tsx b/frontend/src/scenes/dashboard/DashboardHeader.tsx index 806b6a3ada25d..113479b97d1ee 100644 --- a/frontend/src/scenes/dashboard/DashboardHeader.tsx +++ b/frontend/src/scenes/dashboard/DashboardHeader.tsx @@ -364,14 +364,14 @@ export function DashboardHeader(): JSX.Element | null { onChange={(_, tags) => triggerDashboardUpdate({ tags })} saving={dashboardLoading} tagsAvailable={tags.filter((tag) => !dashboard.tags?.includes(tag))} - className="insight-metadata-tags" + className="mt-2" /> ) : dashboard.tags.length ? ( ) : null} diff --git a/frontend/src/scenes/dashboard/DashboardItems.scss b/frontend/src/scenes/dashboard/DashboardItems.scss index 7cf51ba05a33f..8bd848c3b2705 100644 --- a/frontend/src/scenes/dashboard/DashboardItems.scss +++ b/frontend/src/scenes/dashboard/DashboardItems.scss @@ -41,8 +41,8 @@ transition: 100ms ease; max-width: 100%; position: relative; - border: 1px solid var(--primary); - outline: 1px solid var(--primary); + border: 1px solid var(--primary-3000); + outline: 1px solid var(--primary-3000); border-radius: var(--radius); z-index: 2; user-select: none; diff --git a/frontend/src/scenes/dashboard/dashboardLogic.tsx b/frontend/src/scenes/dashboard/dashboardLogic.tsx index 42690d01d6141..0ceb6c33ee07a 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.tsx +++ b/frontend/src/scenes/dashboard/dashboardLogic.tsx @@ -53,6 +53,7 @@ import { getResponseBytes, sortDates } from '../insights/utils' import { loaders } from 'kea-loaders' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { calculateLayouts } from 'scenes/dashboard/tileLayouts' +import { Scene } from 'scenes/sceneTypes' export const BREAKPOINTS: Record = { sm: 1024, @@ -734,11 
+735,22 @@ export const dashboardLogic = kea([ (s) => [s.dashboard], (dashboard): Breadcrumb[] => [ { + key: Scene.Dashboards, name: 'Dashboards', path: urls.dashboards(), }, { + key: dashboard?.id || 'new', name: dashboard?.id ? dashboard.name || 'Unnamed' : null, + onRename: async (name) => { + if (dashboard) { + await dashboardsModel.asyncActions.updateDashboard({ + id: dashboard.id, + name, + allowUndo: true, + }) + } + }, }, ], ], diff --git a/frontend/src/scenes/dashboard/dashboards/DashboardsTable.tsx b/frontend/src/scenes/dashboard/dashboards/DashboardsTable.tsx index 2f0fa2f4aa926..44143d77c9c22 100644 --- a/frontend/src/scenes/dashboard/dashboards/DashboardsTable.tsx +++ b/frontend/src/scenes/dashboard/dashboards/DashboardsTable.tsx @@ -13,7 +13,8 @@ import { DashboardPrivilegeLevel } from 'lib/constants' import { Link } from 'lib/lemon-ui/Link' import { urls } from 'scenes/urls' import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { IconCottage, IconLock, IconPinOutline, IconPinFilled, IconShare } from 'lib/lemon-ui/icons' +import { IconCottage, IconLock } from 'lib/lemon-ui/icons' +import { IconPin, IconPinFilled, IconShare } from '@posthog/icons' import { ObjectTags } from 'lib/components/ObjectTags/ObjectTags' import { createdAtColumn, createdByColumn } from 'lib/lemon-ui/LemonTable/columnUtils' import { More } from 'lib/lemon-ui/LemonButton/More' @@ -70,7 +71,7 @@ export function DashboardsTable({ : () => pinDashboard(id, DashboardEventSource.DashboardsList) } tooltip={pinned ? 'Unpin dashboard' : 'Pin dashboard'} - icon={pinned ? : } + icon={pinned ? : } /> ) }, @@ -215,28 +216,31 @@ export function DashboardsTable({ />
- setFilters({ pinned: !filters.pinned })} - icon={} - > - Pinned - -
-
- setFilters({ shared: !filters.shared })} - icon={} - > - Shared - + Filter to: +
+ setFilters({ pinned: !filters.pinned })} + icon={} + > + Pinned + +
+
+ setFilters({ shared: !filters.shared })} + icon={} + > + Shared + +
Created by: diff --git a/frontend/src/scenes/data-management/DataManagementScene.tsx b/frontend/src/scenes/data-management/DataManagementScene.tsx index 0513de6e11e5e..5a3bf879ea225 100644 --- a/frontend/src/scenes/data-management/DataManagementScene.tsx +++ b/frontend/src/scenes/data-management/DataManagementScene.tsx @@ -9,7 +9,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { LemonTag } from 'lib/lemon-ui/LemonTag/LemonTag' import { LemonTab, LemonTabs } from 'lib/lemon-ui/LemonTabs' import React from 'react' -import { SceneExport } from 'scenes/sceneTypes' +import { Scene, SceneExport } from 'scenes/sceneTypes' import { PageHeader } from 'lib/components/PageHeader' import { NewActionButton } from 'scenes/actions/NewActionButton' import { Annotations } from 'scenes/annotations' @@ -96,7 +96,7 @@ const tabs: Record< }, [DataManagementTab.IngestionWarnings]: { url: urls.ingestionWarnings(), - label: 'Ingestion Warnings', + label: 'Ingestion warnings', content: , }, [DataManagementTab.Database]: { @@ -135,10 +135,12 @@ const dataManagementSceneLogic = kea([ (tab): Breadcrumb[] => { return [ { + key: Scene.DataManagement, name: `Data Management`, path: tabs.events.url, }, { + key: tab, name: capitalizeFirstLetter(tab), path: tabs[tab].url, }, diff --git a/frontend/src/scenes/data-management/actions/ActionsTable.tsx b/frontend/src/scenes/data-management/actions/ActionsTable.tsx index d7255f347ba9f..3ab88c29bb0be 100644 --- a/frontend/src/scenes/data-management/actions/ActionsTable.tsx +++ b/frontend/src/scenes/data-management/actions/ActionsTable.tsx @@ -1,5 +1,6 @@ import { Link } from 'lib/lemon-ui/Link' -import { deleteWithUndo, stripHTTP } from 'lib/utils' +import { stripHTTP } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { useActions, useValues } from 'kea' import { actionsModel } from '~/models/actionsModel' import { NewActionButton } from '../../actions/NewActionButton' diff --git 
a/frontend/src/scenes/data-management/definition/definitionLogic.ts b/frontend/src/scenes/data-management/definition/definitionLogic.ts index 80fae17926149..a861464bb8746 100644 --- a/frontend/src/scenes/data-management/definition/definitionLogic.ts +++ b/frontend/src/scenes/data-management/definition/definitionLogic.ts @@ -10,6 +10,8 @@ import { getPropertyLabel } from 'lib/taxonomy' import { userLogic } from 'scenes/userLogic' import { eventDefinitionsTableLogic } from '../events/eventDefinitionsTableLogic' import { propertyDefinitionsTableLogic } from '../properties/propertyDefinitionsTableLogic' +import { Scene } from 'scenes/sceneTypes' +import { DataManagementTab } from '../DataManagementScene' export enum DefinitionPageMode { View = 'view', @@ -119,14 +121,17 @@ export const definitionLogic = kea([ (definition, isEvent): Breadcrumb[] => { return [ { + key: Scene.DataManagement, name: `Data Management`, path: isEvent ? urls.eventDefinitions() : urls.propertyDefinitions(), }, { + key: isEvent ? DataManagementTab.EventDefinitions : DataManagementTab.PropertyDefinitions, name: isEvent ? 'Events' : 'Properties', path: isEvent ? urls.eventDefinitions() : urls.propertyDefinitions(), }, { + key: definition?.id || 'new', name: definition?.id !== 'new' ? 
getPropertyLabel(definition?.name) || 'Untitled' : 'Untitled', }, ] diff --git a/frontend/src/scenes/data-management/events/DefinitionHeader.tsx b/frontend/src/scenes/data-management/events/DefinitionHeader.tsx index cc34e2f364f9e..8edb6f208faab 100644 --- a/frontend/src/scenes/data-management/events/DefinitionHeader.tsx +++ b/frontend/src/scenes/data-management/events/DefinitionHeader.tsx @@ -1,19 +1,10 @@ import { EventDefinition, PropertyDefinition } from '~/types' -import { - IconAutocapture, - IconPageleave, - IconPreview, - PropertyIcon, - IconUnverifiedEvent, - IconVerifiedEvent, - VerifiedPropertyIcon, - IconSelectAll, -} from 'lib/lemon-ui/icons' +import { IconSelectAll } from 'lib/lemon-ui/icons' +import { IconBadge, IconBolt, IconCursor, IconEye, IconLeave, IconList, IconLogomark } from '@posthog/icons' import { getKeyMapping, KEY_MAPPING } from 'lib/taxonomy' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { TaxonomicFilterGroup, TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import clsx from 'clsx' import { Link } from 'lib/lemon-ui/Link' import { urls } from 'scenes/urls' import { @@ -25,20 +16,20 @@ export function getPropertyDefinitionIcon(definition: PropertyDefinition): JSX.E if (KEY_MAPPING.event[definition.name]) { return ( - + ) } if (definition.verified) { return ( - + ) } return ( - + ) } @@ -47,29 +38,32 @@ export function getEventDefinitionIcon(definition: EventDefinition & { value: st // Rest are events if (definition.name === '$pageview' || definition.name === '$screen') { return ( - - + + ) } if (definition.name === '$pageleave') { return ( - + ) } if (definition.name === '$autocapture') { + return + } + if (definition.name && definition.verified) { return ( - - + + ) } - if (definition.name && (definition.verified || !!KEY_MAPPING.event[definition.name])) { + if (definition.name && !!KEY_MAPPING.event[definition.name]) { return ( - - + + ) } 
@@ -81,8 +75,8 @@ export function getEventDefinitionIcon(definition: EventDefinition & { value: st ) } return ( - - + + ) } @@ -108,7 +102,7 @@ function RawDefinitionHeader({ const isLink = asLink && fullDetailUrl const innerContent = ( - + ) @@ -127,7 +121,27 @@ function RawDefinitionHeader({ {!hideIcon && icon &&
{icon}
} {!hideText && (
-
{linkedInnerContent}
+
+ {linkedInnerContent} + {definition.verified && ( + <> + + + + + )} + {!!KEY_MAPPING.event[definition.name] && ( + + + + )} +
{description ?
{description}
: null}
)} diff --git a/frontend/src/scenes/data-management/events/EventDefinitionProperties.tsx b/frontend/src/scenes/data-management/events/EventDefinitionProperties.tsx index 70a5b6a88a8e8..4d1134eb60133 100644 --- a/frontend/src/scenes/data-management/events/EventDefinitionProperties.tsx +++ b/frontend/src/scenes/data-management/events/EventDefinitionProperties.tsx @@ -1,14 +1,12 @@ import { useActions, useValues } from 'kea' import { useEffect } from 'react' -import { - eventDefinitionsTableLogic, - PROPERTY_DEFINITIONS_PER_EVENT, -} from 'scenes/data-management/events/eventDefinitionsTableLogic' +import { eventDefinitionsTableLogic } from 'scenes/data-management/events/eventDefinitionsTableLogic' import { EventDefinition, PropertyDefinition } from '~/types' import { LemonTable, LemonTableColumn, LemonTableColumns } from 'lib/lemon-ui/LemonTable' import { ObjectTags } from 'lib/components/ObjectTags/ObjectTags' import { organizationLogic } from 'scenes/organizationLogic' import { PropertyDefinitionHeader } from 'scenes/data-management/events/DefinitionHeader' +import { PROPERTY_DEFINITIONS_PER_EVENT } from 'lib/constants' export function EventDefinitionProperties({ definition }: { definition: EventDefinition }): JSX.Element { const { loadPropertiesForEvent } = useActions(eventDefinitionsTableLogic) diff --git a/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss b/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss index 6f510e589a9c7..9cc22ad4acc2a 100644 --- a/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss +++ b/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss @@ -1,6 +1,7 @@ .events-definition-table { .LemonTable__content > table > tbody { td.definition-column-icon { + padding-right: 0.5rem; width: 36px; .definition-column-name-icon { @@ -8,7 +9,7 @@ align-items: center; justify-content: center; width: 30px; - font-size: 1.5rem; + font-size: 1.2rem; svg.taxonomy-icon { flex-shrink: 0; 
@@ -37,16 +38,15 @@ justify-content: center; .definition-column-name-content-title { + align-items: center; + display: flex; font-weight: 600; - cursor: pointer; - position: relative; + gap: 0.25rem; overflow: visible; + position: relative; - &::before { - content: ''; - position: absolute; - inset: -5px -50px -5px -10px; - height: 22px; + svg { + color: var(--success); } } } diff --git a/frontend/src/scenes/data-management/events/EventDefinitionsTable.tsx b/frontend/src/scenes/data-management/events/EventDefinitionsTable.tsx index 7571d91bf5731..54a3bd16b3086 100644 --- a/frontend/src/scenes/data-management/events/EventDefinitionsTable.tsx +++ b/frontend/src/scenes/data-management/events/EventDefinitionsTable.tsx @@ -2,10 +2,7 @@ import './EventDefinitionsTable.scss' import { useActions, useValues } from 'kea' import { LemonTable, LemonTableColumn, LemonTableColumns } from 'lib/lemon-ui/LemonTable' import { EventDefinition, EventDefinitionType } from '~/types' -import { - EVENT_DEFINITIONS_PER_PAGE, - eventDefinitionsTableLogic, -} from 'scenes/data-management/events/eventDefinitionsTableLogic' +import { eventDefinitionsTableLogic } from 'scenes/data-management/events/eventDefinitionsTableLogic' import { ObjectTags } from 'lib/components/ObjectTags/ObjectTags' import { organizationLogic } from 'scenes/organizationLogic' import { EventDefinitionHeader } from 'scenes/data-management/events/DefinitionHeader' @@ -17,6 +14,7 @@ import { combineUrl } from 'kea-router' import { IconPlayCircle } from 'lib/lemon-ui/icons' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { TZLabel } from 'lib/components/TZLabel' +import { EVENT_DEFINITIONS_PER_PAGE } from 'lib/constants' const eventTypeOptions: LemonSelectOptions = [ { value: EventDefinitionType.Event, label: 'All events', 'data-attr': 'event-type-option-event' }, diff --git a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts 
b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts index 991d4c0b7d5ff..d4b4e66cfe05b 100644 --- a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts +++ b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts @@ -1,9 +1,5 @@ import { initKeaTests } from '~/test/init' -import { - EVENT_DEFINITIONS_PER_PAGE, - eventDefinitionsTableLogic, - PROPERTY_DEFINITIONS_PER_EVENT, -} from 'scenes/data-management/events/eventDefinitionsTableLogic' +import { eventDefinitionsTableLogic } from 'scenes/data-management/events/eventDefinitionsTableLogic' import { api, MOCK_TEAM_ID } from 'lib/api.mock' import { expectLogic, partial } from 'kea-test-utils' import { mockEvent, mockEventDefinitions, mockEventPropertyDefinitions } from '~/test/mocks' @@ -13,6 +9,7 @@ import { combineUrl, router } from 'kea-router' import { keyMappingKeys } from 'lib/taxonomy' import { urls } from 'scenes/urls' import { EventDefinitionType } from '~/types' +import { EVENT_DEFINITIONS_PER_PAGE, PROPERTY_DEFINITIONS_PER_EVENT } from 'lib/constants' describe('eventDefinitionsTableLogic', () => { let logic: ReturnType diff --git a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts index af7c766e1889f..ea35ce72e4dfe 100644 --- a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts +++ b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts @@ -4,9 +4,11 @@ import type { eventDefinitionsTableLogicType } from './eventDefinitionsTableLogi import api, { PaginatedResponse } from 'lib/api' import { keyMappingKeys } from 'lib/taxonomy' import { actionToUrl, combineUrl, router, urlToAction } from 'kea-router' -import { convertPropertyGroupToProperties, objectsEqual } from 'lib/utils' +import { objectsEqual } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { loaders } 
from 'kea-loaders' +import { EVENT_DEFINITIONS_PER_PAGE, PROPERTY_DEFINITIONS_PER_EVENT } from 'lib/constants' +import { convertPropertyGroupToProperties } from 'lib/components/PropertyFilters/utils' export interface EventDefinitionsPaginatedResponse extends PaginatedResponse { current?: string @@ -37,9 +39,6 @@ function cleanFilters(filter: Partial): Filters { } } -export const EVENT_DEFINITIONS_PER_PAGE = 50 -export const PROPERTY_DEFINITIONS_PER_EVENT = 5 - export function createDefinitionKey(event?: EventDefinition, property?: PropertyDefinition): string { return `${event?.id ?? 'event'}-${property?.id ?? 'property'}` } diff --git a/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts b/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts index c3509561e1a42..c5ff0132e7a13 100644 --- a/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts +++ b/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts @@ -8,6 +8,8 @@ import type { ingestionWarningsLogicType } from './ingestionWarningsLogicType' import { teamLogic } from '../../teamLogic' import { range } from 'lib/utils' import { dayjs, dayjsUtcToTimezone } from 'lib/dayjs' +import { Scene } from 'scenes/sceneTypes' +import { DataManagementTab } from '../DataManagementScene' export interface IngestionWarningSummary { type: string @@ -47,11 +49,13 @@ export const ingestionWarningsLogic = kea([ (): Breadcrumb[] => { return [ { - name: `Data Management`, + key: Scene.DataManagement, + name: `Data management`, path: urls.eventDefinitions(), }, { - name: 'Ingestion Warnings', + key: DataManagementTab.IngestionWarnings, + name: 'Ingestion warnings', path: urls.ingestionWarnings(), }, ] diff --git a/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss b/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss index 50b3d7bfb9394..68dfef351b3c0 100644 --- 
a/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss +++ b/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss @@ -1,6 +1,7 @@ .event-properties-definition-table { .LemonTable__content > table > tbody { td.definition-column-icon { + padding-right: 0.5rem; width: 36px; .definition-column-name-icon { @@ -8,6 +9,7 @@ align-items: center; justify-content: center; width: 30px; + font-size: 1.2rem; svg.taxonomy-icon { flex-shrink: 0; @@ -32,16 +34,14 @@ justify-content: center; .definition-column-name-content-title { - font-weight: 600; + align-items: center; cursor: pointer; + display: flex; + font-weight: 600; + gap: 0.25rem; - &::before { - content: ''; - position: absolute; - top: -5px; - left: -10px; - right: -50px; - height: 22px; + svg { + color: var(--success); } } } diff --git a/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.tsx b/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.tsx index 19a9d8429a0db..aa9de92bec92d 100644 --- a/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.tsx +++ b/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.tsx @@ -5,13 +5,11 @@ import { PropertyDefinition } from '~/types' import { ObjectTags } from 'lib/components/ObjectTags/ObjectTags' import { organizationLogic } from 'scenes/organizationLogic' import { PropertyDefinitionHeader } from 'scenes/data-management/events/DefinitionHeader' -import { - EVENT_PROPERTY_DEFINITIONS_PER_PAGE, - propertyDefinitionsTableLogic, -} from 'scenes/data-management/properties/propertyDefinitionsTableLogic' +import { propertyDefinitionsTableLogic } from 'scenes/data-management/properties/propertyDefinitionsTableLogic' import { LemonInput, LemonSelect, LemonTag, Link } from '@posthog/lemon-ui' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { urls } from 'scenes/urls' +import { EVENT_PROPERTY_DEFINITIONS_PER_PAGE } from 'lib/constants' export 
function PropertyDefinitionsTable(): JSX.Element { const { propertyDefinitions, propertyDefinitionsLoading, filters, propertyTypeOptions } = diff --git a/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.test.ts b/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.test.ts index 97f6abf271d0b..8d9258bb18820 100644 --- a/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.test.ts +++ b/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.test.ts @@ -5,11 +5,9 @@ import { mockEventPropertyDefinitions } from '~/test/mocks' import { useMocks } from '~/mocks/jest' import { organizationLogic } from 'scenes/organizationLogic' import { combineUrl, router } from 'kea-router' -import { - EVENT_PROPERTY_DEFINITIONS_PER_PAGE, - propertyDefinitionsTableLogic, -} from 'scenes/data-management/properties/propertyDefinitionsTableLogic' +import { propertyDefinitionsTableLogic } from 'scenes/data-management/properties/propertyDefinitionsTableLogic' import { urls } from 'scenes/urls' +import { EVENT_PROPERTY_DEFINITIONS_PER_PAGE } from 'lib/constants' describe('propertyDefinitionsTableLogic', () => { let logic: ReturnType diff --git a/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts b/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts index 3aa493789acb8..ea8b7a199975f 100644 --- a/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts +++ b/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts @@ -14,6 +14,7 @@ import { urls } from 'scenes/urls' import type { propertyDefinitionsTableLogicType } from './propertyDefinitionsTableLogicType' import { groupsModel } from '../../../models/groupsModel' import { LemonSelectOption } from 'lib/lemon-ui/LemonSelect' +import { EVENT_PROPERTY_DEFINITIONS_PER_PAGE } from 'lib/constants' export interface Filters { property: string @@ -38,8 
+39,6 @@ function removeDefaults(filter: Filters): Partial { } } -export const EVENT_PROPERTY_DEFINITIONS_PER_PAGE = 50 - export interface PropertyDefinitionsTableLogicProps { key: string } diff --git a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx index 168b307e29935..11d16e85e8d3f 100644 --- a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx +++ b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx @@ -4,7 +4,7 @@ import { dataWarehouseSceneLogic } from './dataWarehouseSceneLogic' import { DatabaseTable } from 'scenes/data-management/database/DatabaseTable' import { More } from 'lib/lemon-ui/LemonButton/More' import { LemonButton } from '@posthog/lemon-ui' -import { deleteWithUndo } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { teamLogic } from 'scenes/teamLogic' import { DataWarehouseSceneRow } from '../types' diff --git a/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx b/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx index 14b5710ae431f..7ca0685b35192 100644 --- a/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx +++ b/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx @@ -10,6 +10,7 @@ import { DataTableNode } from '~/queries/schema' import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' import type { dataWarehouseTableLogicType } from './dataWarehouseTableLogicType' import { dataWarehouseSceneLogic } from '../external/dataWarehouseSceneLogic' +import { Scene } from 'scenes/sceneTypes' export interface TableLogicProps { /** A UUID or 'new'. 
*/ @@ -100,10 +101,12 @@ export const dataWarehouseTableLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: `Data Warehouse`, + key: Scene.DataWarehouse, + name: `Data warehouse`, path: urls.dataWarehouseExternal(), }, { + key: 'new', name: 'New', }, ], diff --git a/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx b/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx index c45ec66c3431b..25f506a4df903 100644 --- a/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx +++ b/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx @@ -3,7 +3,7 @@ import { DatabaseTables } from 'scenes/data-management/database/DatabaseTables' import { DatabaseTable } from 'scenes/data-management/database/DatabaseTable' import { More } from 'lib/lemon-ui/LemonButton/More' import { LemonButton, Link } from '@posthog/lemon-ui' -import { deleteWithUndo } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { teamLogic } from 'scenes/teamLogic' import { DataWarehouseSceneRow } from '../types' import { dataWarehouseSavedQueriesLogic } from './dataWarehouseSavedQueriesLogic' diff --git a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts index a7c40b36401b3..2941effa9d151 100644 --- a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts @@ -5,6 +5,7 @@ import { loaders } from 'kea-loaders' import api, { PaginatedResponse } from 'lib/api' import { ExternalDataStripeSource, Breadcrumb } from '~/types' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' export interface DataWarehouseSource {} @@ -49,10 +50,12 @@ export const dataWarehouseSettingsLogic = kea([ () => [], (): Breadcrumb[] => [ { + key: 
Scene.DataWarehouse, name: `Data Warehouse`, path: urls.dataWarehouseExternal(), }, { + key: Scene.DataWarehouseSettings, name: 'Data Warehouse Settings', path: urls.dataWarehouseSettings(), }, diff --git a/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts b/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts index 448c1f4440e67..0ea2885e60c3f 100644 --- a/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts +++ b/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts @@ -15,6 +15,7 @@ import type { earlyAccessFeatureLogicType } from './earlyAccessFeatureLogicType' import { earlyAccessFeaturesLogic } from './earlyAccessFeaturesLogic' import { teamLogic } from 'scenes/teamLogic' import { lemonToast } from '@posthog/lemon-ui' +import { Scene } from 'scenes/sceneTypes' export const NEW_EARLY_ACCESS_FEATURE: NewEarlyAccessFeatureType = { name: '', @@ -121,10 +122,14 @@ export const earlyAccessFeatureLogic = kea([ (s) => [s.earlyAccessFeature], (earlyAccessFeature: EarlyAccessFeatureType): Breadcrumb[] => [ { + key: Scene.EarlyAccessFeatures, name: 'Early Access Management', path: urls.earlyAccessFeatures(), }, - ...(earlyAccessFeature?.name ? 
[{ name: earlyAccessFeature.name }] : []), + { + key: earlyAccessFeature.id || 'new', + name: earlyAccessFeature.name, + }, ], ], }), diff --git a/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts b/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts index a8f0ec464aa8e..b7032217494b4 100644 --- a/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts +++ b/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts @@ -5,6 +5,7 @@ import { Breadcrumb, EarlyAccessFeatureType } from '~/types' import type { earlyAccessFeaturesLogicType } from './earlyAccessFeaturesLogicType' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' export const earlyAccessFeaturesLogic = kea([ path(['scenes', 'features', 'featuresLogic']), @@ -22,7 +23,8 @@ export const earlyAccessFeaturesLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: 'Early Access Management', + key: Scene.EarlyAccessFeatures, + name: 'Early access features', path: urls.earlyAccessFeatures(), }, ], diff --git a/frontend/src/scenes/events/Events.tsx b/frontend/src/scenes/events/Events.tsx index 164363a7b35cc..aa30ee553c381 100644 --- a/frontend/src/scenes/events/Events.tsx +++ b/frontend/src/scenes/events/Events.tsx @@ -14,7 +14,7 @@ export const scene: SceneExport = { export function Events(): JSX.Element { return ( <> - +
diff --git a/frontend/src/scenes/experiments/Experiment.tsx b/frontend/src/scenes/experiments/Experiment.tsx index c3332cf4cea12..30cc2e011e9c6 100644 --- a/frontend/src/scenes/experiments/Experiment.tsx +++ b/frontend/src/scenes/experiments/Experiment.tsx @@ -244,13 +244,13 @@ export function Experiment(): JSX.Element { numbers, hyphens, and underscores.
- {experiment.parameters.feature_flag_variants?.map((variant, index) => ( + {experiment.parameters.feature_flag_variants?.map((_, index) => (
- - {experiment.feature_flag?.key} - + {experiment.feature_flag && ( + + {experiment.feature_flag.key} + + )} diff --git a/frontend/src/scenes/experiments/Experiments.tsx b/frontend/src/scenes/experiments/Experiments.tsx index 5595ba60f5bca..ee702a4b77451 100644 --- a/frontend/src/scenes/experiments/Experiments.tsx +++ b/frontend/src/scenes/experiments/Experiments.tsx @@ -41,7 +41,7 @@ export function Experiments(): JSX.Element { const { hasAvailableFeature } = useValues(userLogic) const EXPERIMENTS_PRODUCT_DESCRIPTION = - 'Experiments help you test changes to your product to see which changes will lead to optimal results. Automatic statistical calculations let you see if the results are valid or if they are likely just a chance occurrence.' + 'A/B testing help you test changes to your product to see which changes will lead to optimal results. Automatic statistical calculations let you see if the results are valid or if they are likely just a chance occurrence.' const getExperimentDuration = (experiment: Experiment): number | undefined => { return experiment.end_date @@ -144,7 +144,7 @@ export function Experiments(): JSX.Element { return (
Experiments
} + title={
A/B testing
} buttons={ hasAvailableFeature(AvailableFeature.EXPERIMENTATION) ? ( @@ -154,14 +154,13 @@ export function Experiments(): JSX.Element { } caption={ <> - Check out our {' '} - Experimentation user guide + Visit the guide {' '} to learn more. @@ -182,7 +181,7 @@ export function Experiments(): JSX.Element { {(shouldShowEmptyState || shouldShowProductIntroduction) && (tab === ExperimentsTabs.Archived ? ( ) : ( diff --git a/frontend/src/scenes/experiments/SecondaryMetrics.tsx b/frontend/src/scenes/experiments/SecondaryMetrics.tsx index 75d22a10d19ec..9c22022c5efa4 100644 --- a/frontend/src/scenes/experiments/SecondaryMetrics.tsx +++ b/frontend/src/scenes/experiments/SecondaryMetrics.tsx @@ -1,4 +1,3 @@ -import { Col, Row } from 'antd' import { useActions, useValues } from 'kea' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { Form } from 'kea-forms' @@ -163,11 +162,11 @@ export function SecondaryMetrics({ {experimentId == 'new' || editingExistingExperiment ? ( - - +
+
{metrics.map((metric, idx) => ( - - +
+
{metric.name}
@@ -185,7 +184,7 @@ export function SecondaryMetrics({ onClick={() => deleteMetric(idx)} />
- +
{metric.filters.insight === InsightType.FUNNELS && ( )} -
+
))} {metrics && !(metrics.length > 2) && ( - -
- - Add metric - -
- +
+ + Add metric + +
)} - - +
+
) : ( <>
Secondary metrics
diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 6f28ea14006b9..7b314e42f979a 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -41,6 +41,7 @@ import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' import { InsightVizNode } from '~/queries/schema' import { groupsModel } from '~/models/groupsModel' +import { Scene } from 'scenes/sceneTypes' export const DEFAULT_DURATION = 14 // days @@ -634,10 +635,12 @@ export const experimentLogic = kea([ (s) => [s.experiment, s.experimentId], (experiment, experimentId): Breadcrumb[] => [ { + key: Scene.Experiments, name: 'Experiments', path: urls.experiments(), }, { + key: experimentId, name: experiment?.name || 'New', path: urls.experiment(experimentId || 'new'), }, diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.scss b/frontend/src/scenes/feature-flags/FeatureFlag.scss index 730181e4ad083..319512c5f7670 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.scss +++ b/frontend/src/scenes/feature-flags/FeatureFlag.scss @@ -1,4 +1,4 @@ -.variant-form-list { +.VariantFormList { font-size: 13px; border: 1px solid var(--border); border-radius: var(--radius); @@ -19,6 +19,10 @@ align-items: center; } } + + .VariantFormList__row { + grid-template-columns: repeat(24, minmax(0, 1fr)); + } } .feature-flag-property-display { @@ -60,7 +64,11 @@ } .FeatureConditionCard { + .posthog-3000 & { + background: var(--bg-light); + } + .FeatureConditionCard--border--highlight { - border-color: var(--primary); + border-color: var(--primary-3000); } } diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index 1118c87a2b31d..849913b1303c1 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ 
b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -1,6 +1,6 @@ import { useEffect, useState } from 'react' import { Form, Group } from 'kea-forms' -import { Row, Col, Radio, Popconfirm, Skeleton, Card } from 'antd' +import { Radio, Popconfirm, Skeleton, Card } from 'antd' import { useActions, useValues } from 'kea' import { alphabet, capitalizeFirstLetter } from 'lib/utils' import { featureFlagLogic } from './featureFlagLogic' @@ -56,7 +56,6 @@ import { dashboardLogic } from 'scenes/dashboard/dashboardLogic' import { EmptyDashboardComponent } from 'scenes/dashboard/EmptyDashboardComponent' import { FeatureFlagCodeExample } from './FeatureFlagCodeExample' import { billingLogic } from 'scenes/billing/billingLogic' -import { organizationLogic } from '../organizationLogic' import { AnalysisTab } from './FeatureFlagAnalysisTab' import { NodeKind } from '~/queries/schema' import { Query } from '~/queries/Query/Query' @@ -78,7 +77,7 @@ export const scene: SceneExport = { function focusVariantKeyField(index: number): void { setTimeout( - () => document.querySelector(`.variant-form-list input[data-key-index="${index}"]`)?.focus(), + () => document.querySelector(`.VariantFormList input[data-key-index="${index}"]`)?.focus(), 50 ) } @@ -97,8 +96,6 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { featureFlagPermissionsLogic({ flagId: featureFlag.id }) ) - const { currentOrganization } = useValues(organizationLogic) - const { tags } = useValues(tagsModel) const { hasAvailableFeature } = useValues(userLogic) @@ -155,8 +152,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { }) } - const hasMultipleProjects = (currentOrganization?.teams?.length ?? 
0) > 1 - if (featureFlags[FEATURE_FLAGS.MULTI_PROJECT_FEATURE_FLAGS] && hasMultipleProjects) { + if (featureFlags[FEATURE_FLAGS.MULTI_PROJECT_FEATURE_FLAGS]) { tabs.push({ label: 'Projects', key: FeatureFlagsTab.PROJECTS, @@ -332,7 +328,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { tagsAvailable={tags.filter( (tag) => !featureFlag.tags?.includes(tag) )} - className="insight-metadata-tags" + className="mt-2" /> ) }} @@ -510,14 +506,14 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { tagsAvailable={tags.filter( (tag) => !featureFlag.tags?.includes(tag) )} - className="insight-metadata-tags" + className="mt-2" /> ) : featureFlag.tags.length ? ( ) : null} @@ -732,17 +728,17 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { <>

Variant keys

- - Key - Description - Payload - Rollout - +
+
Key
+
Description
+
Payload
+
Rollout
+
{variants.map((variant, index) => (
- - +
+
{variant.key} - - +
+
{variant.name || 'There is no description for this variant key'} - - +
+
{featureFlag.filters.payloads?.[index] ? ( )} - - {variant.rollout_percentage}% - +
+
{variant.rollout_percentage}%
+
{index !== variants.length - 1 && }
))} @@ -874,24 +870,22 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { No payload associated with this flag ) ) : ( - - -
- Specify a payload to be returned when the served value is{' '} - - true - -
- - - - - - -
+
+
+ Specify a payload to be returned when the served value is{' '} + + true + +
+ + + + + +
)}
)} @@ -899,33 +893,33 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element {

Variant keys

The rollout percentage of feature flag variants must add up to 100% -
- - - Variant key - Description - -
+
+
+
+
Variant key
+
Description
+
+
Payload - + Specify return payload when the variant key matches
- - - Rollout +
+
+ Rollout (Redistribute) - - +
+
{variants.map((variant, index) => ( - - +
+
- - +
+
- - +
+
- - +
+
{({ value, onChange }) => { return ( @@ -966,8 +960,8 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { ) }} - - +
+
{({ value, onChange }) => (
@@ -1012,28 +1006,25 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element {
)}
- - - - {variants.length > 1 && ( - } - status="primary-alt" - data-attr={`delete-prop-filter-${index}`} - noPadding - onClick={() => removeVariant(index)} - disabledReason={ - featureFlag.experiment_set && - featureFlag.experiment_set?.length > 0 - ? 'Cannot delete variants from a feature flag that is part of an experiment' - : undefined - } - tooltipPlacement="topRight" - /> - )} - - - +
+
+ {variants.length > 1 && ( + } + status="primary-alt" + data-attr={`delete-prop-filter-${index}`} + noPadding + onClick={() => removeVariant(index)} + disabledReason={ + featureFlag.experiment_set && featureFlag.experiment_set?.length > 0 + ? 'Cannot delete variants from a feature flag that is part of an experiment' + : undefined + } + tooltipPlacement="topRight" + /> + )} +
+
))} {variants.length > 0 && !areVariantRolloutsValid && ( diff --git a/frontend/src/scenes/feature-flags/FeatureFlagProjects.tsx b/frontend/src/scenes/feature-flags/FeatureFlagProjects.tsx index d22a2fb63ec09..ab282a3ed1d54 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagProjects.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagProjects.tsx @@ -1,4 +1,5 @@ import { OrganizationFeatureFlag } from '~/types' +import { OrganizationMembershipLevel } from 'lib/constants' import { createdAtColumn, createdByColumn } from 'lib/lemon-ui/LemonTable/columnUtils' import { LemonTable, LemonTableColumn, LemonTableColumns } from 'lib/lemon-ui/LemonTable' import { LemonButton, LemonSelect, LemonTag, Link, LemonBanner } from '@posthog/lemon-ui' @@ -77,71 +78,106 @@ const getColumns = (): LemonTableColumns => { ] } -export default function FeatureFlagProjects(): JSX.Element { +function InfoBanner(): JSX.Element { + const { currentOrganization } = useValues(organizationLogic) + const { featureFlag } = useValues(featureFlagLogic) + const hasMultipleProjects = (currentOrganization?.teams?.length ?? 0) > 1 + + const isMember = + !currentOrganization?.membership_level || + currentOrganization.membership_level < OrganizationMembershipLevel.Admin + + let text + + if (isMember && !hasMultipleProjects) { + text = `You currently have access to only one project. If your organization manages multiple projects and you wish to copy this feature flag across them, request project access from your administrator.` + } else if (!hasMultipleProjects) { + text = `This feature enables the copying of a feature flag across different projects. Once additional projects are added within your organization, you'll be able to replicate this flag to them.` + } else if (!featureFlag.can_edit) { + text = `You don't have the necessary permissions to copy this flag to another project. 
Contact your administrator to request editing rights.` + } else { + return <> + } + + return ( + + {text} + + ) +} + +function FeatureFlagCopySection(): JSX.Element { const { featureFlag, copyDestinationProject, projectsWithCurrentFlag, featureFlagCopyLoading } = useValues(featureFlagLogic) - const { setCopyDestinationProject, loadProjectsWithCurrentFlag, copyFlag } = useActions(featureFlagLogic) + const { setCopyDestinationProject, copyFlag } = useActions(featureFlagLogic) const { currentOrganization } = useValues(organizationLogic) const { currentTeam } = useValues(teamLogic) + const hasMultipleProjects = (currentOrganization?.teams?.length ?? 0) > 1 + + return hasMultipleProjects && featureFlag.can_edit ? ( + <> +

Feature flag copy

+
Copy your flag and its configuration to another project.
+
+
+
Key
+
+ {featureFlag.key} +
+
+
+
+ +
+
+
Destination project
+ setCopyDestinationProject(id)} + options={ + currentOrganization?.teams + ?.map((team) => ({ value: team.id, label: team.name })) + .filter((option) => option.value !== currentTeam?.id) || [] + } + className="min-w-40" + /> +
+
+
+ } + onClick={() => copyFlag()} + className="w-28 max-w-28" + > + {projectsWithCurrentFlag.find((p) => Number(p.team_id) === copyDestinationProject) + ? 'Update' + : 'Copy'} + +
+
+ + ) : ( + <> + ) +} + +export default function FeatureFlagProjects(): JSX.Element { + const { projectsWithCurrentFlag } = useValues(featureFlagLogic) + const { loadProjectsWithCurrentFlag } = useActions(featureFlagLogic) + useEffect(() => { loadProjectsWithCurrentFlag() }, []) return (
- {featureFlag.can_edit ? ( - <> -

Feature flag copy

-
Copy your flag and its configuration to another project.
-
-
-
Key
-
- {featureFlag.key} -
-
-
-
- -
-
-
Destination project
- setCopyDestinationProject(id)} - options={ - currentOrganization?.teams - ?.map((team) => ({ value: team.id, label: team.name })) - .filter((option) => option.value !== currentTeam?.id) || [] - } - className="min-w-40" - /> -
-
-
- } - onClick={() => copyFlag()} - className="w-28 max-w-28" - > - {projectsWithCurrentFlag.find((p) => Number(p.team_id) === copyDestinationProject) - ? 'Update' - : 'Copy'} - -
-
- - ) : ( - - You currently cannot copy this flag to another project. Contact your administrator to request - editing rights. - - )} + + { return ( - +
{index > 0 &&
OR
}
@@ -314,7 +314,7 @@ export function FeatureFlagReleaseConditions({ )}
- +
) } @@ -327,7 +327,7 @@ export function FeatureFlagReleaseConditions({ const hasMatchingEarlyAccessFeature = featureFlag.features?.find((f: any) => f.flagKey === featureFlag.key) return ( - +
{index > 0 &&
OR
}
@@ -386,7 +386,7 @@ export function FeatureFlagReleaseConditions({
- +
) } @@ -456,11 +456,11 @@ export function FeatureFlagReleaseConditions({
)}
- +
{filterGroups.map((group, index) => isSuper ? renderSuperReleaseConditionGroup(group, index) : renderReleaseConditionGroup(group, index) )} - +
{!readOnly && ( }> Add condition set diff --git a/frontend/src/scenes/feature-flags/FeatureFlags.tsx b/frontend/src/scenes/feature-flags/FeatureFlags.tsx index 55ed1d2af5636..52e38f6a1c171 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlags.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlags.tsx @@ -1,7 +1,8 @@ import { useActions, useValues } from 'kea' import { featureFlagsLogic, FeatureFlagsTab } from './featureFlagsLogic' import { Link } from 'lib/lemon-ui/Link' -import { copyToClipboard, deleteWithUndo } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { PageHeader } from 'lib/components/PageHeader' import { AnyPropertyFilter, AvailableFeature, FeatureFlagFilters, FeatureFlagType, ProductKey } from '~/types' import { normalizeColumnTitle } from 'lib/components/Table/utils' @@ -257,6 +258,7 @@ export function OverViewTab({
({ operator: 'gt', @@ -836,10 +839,11 @@ export const featureFlagLogic = kea([ (s) => [s.featureFlag], (featureFlag): Breadcrumb[] => [ { + key: Scene.FeatureFlags, name: 'Feature Flags', path: urls.featureFlags(), }, - ...(featureFlag ? [{ name: featureFlag.key || 'Unnamed' }] : []), + { key: featureFlag.id || 'unknown', name: featureFlag.key || 'Unnamed' }, ], ], propertySelectErrors: [ diff --git a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts index e88e418635a2c..da81fca11890e 100644 --- a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts @@ -8,6 +8,7 @@ import { teamLogic } from '../teamLogic' import { urls } from 'scenes/urls' import { router, actionToUrl, urlToAction } from 'kea-router' import { LemonSelectOption } from 'lib/lemon-ui/LemonSelect' +import { Scene } from 'scenes/sceneTypes' export enum FeatureFlagsTab { OVERVIEW = 'overview', @@ -154,7 +155,8 @@ export const featureFlagsLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: 'Feature Flags', + key: Scene.FeatureFlags, + name: 'Feature flags', path: urls.featureFlags(), }, ], diff --git a/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss b/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss index 7f2e0024ca851..ad4a376864a61 100644 --- a/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss +++ b/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss @@ -120,7 +120,7 @@ $glyph_height: 23px; // Based on .funnel-step-glyph .funnel-bar { position: relative; height: 100%; - background: var(--funnel-default); + background: var(--primary-3000); transition: width 0.2s ease, height 0.2s ease; &.first { @@ -148,7 +148,7 @@ $glyph_height: 23px; // Based on .funnel-step-glyph &.outside { left: calc(100% + #{$label_position_offset}); - color: var(--funnel-default); + color: var(--primary-3000); } } } diff --git 
a/frontend/src/scenes/groups/groupLogic.ts b/frontend/src/scenes/groups/groupLogic.ts index dce2cfdc6e04b..bbdc071690c73 100644 --- a/frontend/src/scenes/groups/groupLogic.ts +++ b/frontend/src/scenes/groups/groupLogic.ts @@ -15,6 +15,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' import { loaders } from 'kea-loaders' import { urlToAction } from 'kea-router' +import { Scene } from 'scenes/sceneTypes' function getGroupEventsQuery(groupTypeIndex: number, groupKey: string): DataTableNode { return { @@ -104,10 +105,17 @@ export const groupLogic = kea([ (s, p) => [s.groupTypeName, p.groupTypeIndex, p.groupKey, s.groupData], (groupTypeName, groupTypeIndex, groupKey, groupData): Breadcrumb[] => [ { + key: Scene.DataManagement, + name: 'People', + path: urls.persons(), + }, + { + key: groupTypeIndex, name: capitalizeFirstLetter(groupTypeName), path: urls.groups(String(groupTypeIndex)), }, { + key: `${groupTypeIndex}-${groupKey}`, name: groupDisplayId(groupKey, groupData?.group_properties || {}), path: urls.group(String(groupTypeIndex), groupKey), }, diff --git a/frontend/src/scenes/insights/Insight.scss b/frontend/src/scenes/insights/Insight.scss index 72c588db18842..e0c6b1dd0f13b 100644 --- a/frontend/src/scenes/insights/Insight.scss +++ b/frontend/src/scenes/insights/Insight.scss @@ -10,11 +10,3 @@ } } } - -.insight-metadata-tags { - margin-top: 0.5rem; - - .ant-tag { - margin-top: 0; - } -} diff --git a/frontend/src/scenes/insights/InsightPageHeader.tsx b/frontend/src/scenes/insights/InsightPageHeader.tsx index 947df6c280332..be1c74bb77cc6 100644 --- a/frontend/src/scenes/insights/InsightPageHeader.tsx +++ b/frontend/src/scenes/insights/InsightPageHeader.tsx @@ -16,7 +16,7 @@ import { LemonDivider } from 'lib/lemon-ui/LemonDivider' import { urls } from 'scenes/urls' import { SubscribeButton, SubscriptionsModal } from 'lib/components/Subscriptions/SubscriptionsModal' import { ExportButton } from 
'lib/components/ExportButton/ExportButton' -import { deleteWithUndo } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { AddToDashboard } from 'lib/components/AddToDashboard/AddToDashboard' import { InsightSaveButton } from 'scenes/insights/InsightSaveButton' import { ObjectTags } from 'lib/components/ObjectTags/ObjectTags' @@ -345,14 +345,14 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In saving={insightSaving} onChange={(_, tags) => setInsightMetadata({ tags: tags ?? [] })} tagsAvailable={tags} - className="insight-metadata-tags" + className="mt-2" data-attr="insight-tags" /> ) : insight.tags?.length ? ( diff --git a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts index 6e6cfc856b639..0529a2d311838 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts +++ b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts @@ -2,7 +2,8 @@ import { kea, props, key, path, connect, actions, reducers, selectors, listeners import { EntityTypes, FilterType, Entity, EntityType, ActionFilter, EntityFilter, AnyPropertyFilter } from '~/types' import type { entityFilterLogicType } from './entityFilterLogicType' import { eventUsageLogic, GraphSeriesAddedSource } from 'lib/utils/eventUsageLogic' -import { convertPropertyGroupToProperties, uuid } from 'lib/utils' +import { uuid } from 'lib/utils' +import { convertPropertyGroupToProperties } from 'lib/components/PropertyFilters/utils' export type LocalFilter = ActionFilter & { order: number diff --git a/frontend/src/scenes/insights/insightSceneLogic.tsx b/frontend/src/scenes/insights/insightSceneLogic.tsx index 3ff71b3e53720..e626919fe5c95 100644 --- a/frontend/src/scenes/insights/insightSceneLogic.tsx +++ b/frontend/src/scenes/insights/insightSceneLogic.tsx @@ -85,14 +85,19 @@ export const insightSceneLogic = kea([ 
insightSelector: [(s) => [s.insightLogicRef], (insightLogicRef) => insightLogicRef?.logic.selectors.insight], insight: [(s) => [(state, props) => s.insightSelector?.(state, props)?.(state, props)], (insight) => insight], breadcrumbs: [ - (s) => [s.insight], - (insight): Breadcrumb[] => [ + (s) => [s.insight, s.insightLogicRef], + (insight, insightLogicRef): Breadcrumb[] => [ { - name: 'Insights', + key: Scene.SavedInsights, + name: 'Product analytics', path: urls.savedInsights(), }, { + key: insight?.short_id || 'new', name: insight?.name || insight?.derived_name || 'Unnamed', + onRename: async (name: string) => { + await insightLogicRef?.logic.asyncActions.setInsightMetadata({ name }) + }, }, ], ], diff --git a/frontend/src/scenes/insights/insightVizDataLogic.ts b/frontend/src/scenes/insights/insightVizDataLogic.ts index 4b9c09fb8bc00..5a25439193d6e 100644 --- a/frontend/src/scenes/insights/insightVizDataLogic.ts +++ b/frontend/src/scenes/insights/insightVizDataLogic.ts @@ -50,6 +50,7 @@ import { getFormula, getInterval, getSeries, + getShowLabelsOnSeries, getShowLegend, getShowPercentStackView, getShowValueOnSeries, @@ -167,9 +168,9 @@ export const insightVizDataLogic = kea([ samplingFactor: [(s) => [s.querySource], (q) => (q ? q.samplingFactor : null)], showLegend: [(s) => [s.querySource], (q) => (q ? getShowLegend(q) : null)], showValueOnSeries: [(s) => [s.querySource], (q) => (q ? getShowValueOnSeries(q) : null)], + showLabelOnSeries: [(s) => [s.querySource], (q) => (q ? getShowLabelsOnSeries(q) : null)], showPercentStackView: [(s) => [s.querySource], (q) => (q ? getShowPercentStackView(q) : null)], vizSpecificOptions: [(s) => [s.query], (q: Node) => (isInsightVizNode(q) ? q.vizSpecificOptions : null)], - insightFilter: [(s) => [s.querySource], (q) => (q ? filterForQuery(q) : null)], trendsFilter: [(s) => [s.querySource], (q) => (isTrendsQuery(q) ? q.trendsFilter : null)], funnelsFilter: [(s) => [s.querySource], (q) => (isFunnelsQuery(q) ? 
q.funnelsFilter : null)], diff --git a/frontend/src/scenes/insights/utils.tsx b/frontend/src/scenes/insights/utils.tsx index 272b2bf031ba4..26ad7a3a77a8d 100644 --- a/frontend/src/scenes/insights/utils.tsx +++ b/frontend/src/scenes/insights/utils.tsx @@ -3,14 +3,17 @@ import { AnyPartialFilterType, BreakdownKeyType, BreakdownType, + ChartDisplayType, CohortType, EntityFilter, EntityTypes, + EventType, InsightModel, InsightShortId, InsightType, PathsFilterType, PathType, + TrendsFilterType, } from '~/types' import { ensureStringIsNotBlank, humanFriendlyNumber, objectsEqual } from 'lib/utils' import { dashboardLogic } from 'scenes/dashboard/dashboardLogic' @@ -296,3 +299,48 @@ export function concatWithPunctuation(phrases: string[]): string { return `${phrases.slice(0, phrases.length - 1).join(', ')}, and ${phrases[phrases.length - 1]}` } } + +export function insightUrlForEvent(event: Pick): string | undefined { + let insightParams: Partial | undefined + if (event.event === '$pageview') { + insightParams = { + insight: InsightType.TRENDS, + interval: 'day', + display: ChartDisplayType.ActionsLineGraph, + actions: [], + events: [ + { + id: '$pageview', + name: '$pageview', + type: 'events', + order: 0, + properties: [ + { + key: '$current_url', + value: event.properties.$current_url, + type: 'event', + }, + ], + }, + ], + } + } else if (event.event !== '$autocapture') { + insightParams = { + insight: InsightType.TRENDS, + interval: 'day', + display: ChartDisplayType.ActionsLineGraph, + actions: [], + events: [ + { + id: event.event, + name: event.event, + type: 'events', + order: 0, + properties: [], + }, + ], + } + } + + return insightParams ? 
urls.insightNew(insightParams) : undefined +} diff --git a/frontend/src/scenes/insights/utils/cleanFilters.ts b/frontend/src/scenes/insights/utils/cleanFilters.ts index bd16e3ca1ba79..fced598e698a0 100644 --- a/frontend/src/scenes/insights/utils/cleanFilters.ts +++ b/frontend/src/scenes/insights/utils/cleanFilters.ts @@ -7,6 +7,7 @@ import { FunnelsFilterType, FunnelVizType, InsightType, + IntervalType, LifecycleFilterType, PathsFilterType, PathType, @@ -19,12 +20,12 @@ import { deepCleanFunnelExclusionEvents, getClampedStepRangeFilter, isStepsUndef import { getDefaultEventName } from 'lib/utils/getAppContext' import { BIN_COUNT_AUTO, + NON_TIME_SERIES_DISPLAY_TYPES, NON_VALUES_ON_SERIES_DISPLAY_TYPES, PERCENT_STACK_VIEW_DISPLAY_TYPE, RETENTION_FIRST_TIME, ShownAsValue, } from 'lib/constants' -import { autocorrectInterval } from 'lib/utils' import { DEFAULT_STEP_LIMIT } from 'scenes/paths/pathsDataLogic' import { smoothingOptions } from 'lib/components/SmoothingFilter/smoothings' import { LocalFilter, toLocalFilters } from '../filters/ActionFilter/entityFilterLogic' @@ -165,6 +166,46 @@ export const setTestAccountFilterForNewInsight = ( } } +const disableHourFor: Record = { + dStart: false, + '-1d': false, + '-7d': false, + '-14d': false, + '-30d': false, + '-90d': true, + mStart: false, + '-1mStart': false, + yStart: true, + all: true, + other: false, +} + +export function autocorrectInterval(filters: Partial): IntervalType | undefined { + if ('display' in filters && filters.display && NON_TIME_SERIES_DISPLAY_TYPES.includes(filters.display)) { + // Non-time-series insights should not have an interval + return undefined + } + if (isFunnelsFilter(filters) && filters.funnel_viz_type !== FunnelVizType.Trends) { + // Only trend funnels support intervals + return undefined + } + if (!filters.interval) { + return 'day' + } + + // @ts-expect-error - Old legacy interval support + const minute_disabled = filters.interval === 'minute' + const hour_disabled = 
disableHourFor[filters.date_from || 'other'] && filters.interval === 'hour' + + if (minute_disabled) { + return 'hour' + } else if (hour_disabled) { + return 'day' + } else { + return filters.interval + } +} + export function cleanFilters( filters: Partial, test_account_filters_default_checked?: boolean diff --git a/frontend/src/scenes/insights/views/Histogram/Histogram.scss b/frontend/src/scenes/insights/views/Histogram/Histogram.scss index e6d3138958889..2742347145d09 100644 --- a/frontend/src/scenes/insights/views/Histogram/Histogram.scss +++ b/frontend/src/scenes/insights/views/Histogram/Histogram.scss @@ -61,7 +61,7 @@ * Bars */ g#bars { - fill: var(--funnel-default); + fill: var(--primary-3000); } g#labels { diff --git a/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss b/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss index 1ce975beb6fd1..4158e729c4915 100644 --- a/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss +++ b/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss @@ -3,7 +3,7 @@ align-items: center; .edit-icon { - color: var(--primary); + color: var(--primary-3000); cursor: pointer; font-size: 1rem; } @@ -13,7 +13,7 @@ cursor: pointer; .EntityFilterInfo { - color: var(--primary); + color: var(--primary-3000); font-weight: 500; } } diff --git a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx index 8c87f776db48b..99f5080c825db 100644 --- a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx +++ b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx @@ -182,7 +182,7 @@ export const filterNestedDataset = ( }) } -function createPinstripePattern(color: string): CanvasPattern { +function createPinstripePattern(color: string, isDarkMode: boolean): CanvasPattern { const stripeWidth = 8 // 0.5rem const stripeAngle = -22.5 @@ -197,8 +197,8 @@ function createPinstripePattern(color: string): CanvasPattern { 
ctx.fillStyle = color ctx.fillRect(0, 0, canvas.width, canvas.height) - // overlay half-transparent white stripe - ctx.fillStyle = 'rgba(255, 255, 255, 0.5)' + // overlay half-transparent black / white stripes + ctx.fillStyle = isDarkMode ? 'rgba(35, 36, 41, 0.5)' : 'rgba(255, 255, 255, 0.5)' ctx.fillRect(0, stripeWidth, 1, 2 * stripeWidth) // create a canvas pattern and rotate it @@ -311,7 +311,7 @@ export function LineGraph_({ : getSeriesColor(dataset.id, compare && !isArea) const hoverColor = dataset?.status ? getBarColorFromStatus(dataset.status, true) : mainColor const areaBackgroundColor = hexToRGBA(mainColor, 0.5) - const areaIncompletePattern = createPinstripePattern(areaBackgroundColor) + const areaIncompletePattern = createPinstripePattern(areaBackgroundColor, isDarkModeOn) let backgroundColor: string | undefined = undefined if (isBackgroundBasedGraphType) { backgroundColor = mainColor @@ -386,7 +386,9 @@ export function LineGraph_({ }, } const gridOptions: Partial = { - borderColor: colors.axisLine as string, + color: colors.axisLine as Color, + borderColor: colors.axisLine as Color, + tickColor: colors.axisLine as Color, borderDash: [4, 2], } @@ -698,11 +700,19 @@ export function LineGraph_({ precision, autoSkip: true, callback: function _renderYLabel(_, i) { - const labelDescriptors = [ - datasets?.[0]?.actions?.[i]?.custom_name ?? datasets?.[0]?.actions?.[i]?.name, // action name - datasets?.[0]?.breakdownValues?.[i], // breakdown value - datasets?.[0]?.compareLabels?.[i], // compare value - ].filter((l) => !!l) + const labelDescriptors = ( + datasets?.[0]?.labels?.[i] + ? [ + // prefer to use the label over the action name if it exists + datasets?.[0]?.labels?.[i], + datasets?.[0]?.compareLabels?.[i], + ] + : [ + datasets?.[0]?.actions?.[i]?.custom_name ?? 
datasets?.[0]?.actions?.[i]?.name, // action name + datasets?.[0]?.breakdownValues?.[i], // breakdown value + datasets?.[0]?.compareLabels?.[i], // compare value + ] + ).filter((l) => !!l) return labelDescriptors.join(' - ') }, }, diff --git a/frontend/src/scenes/insights/views/LineGraph/PieChart.tsx b/frontend/src/scenes/insights/views/LineGraph/PieChart.tsx index f58924882f82d..0f98110273c0c 100644 --- a/frontend/src/scenes/insights/views/LineGraph/PieChart.tsx +++ b/frontend/src/scenes/insights/views/LineGraph/PieChart.tsx @@ -50,6 +50,11 @@ function getPercentageForDataPoint(context: Context): number { return ((context.dataset.data[context.dataIndex] as number) / total) * 100 } +export interface PieChartProps extends LineGraphProps { + showLabelOnSeries?: boolean | null + disableHoverOffset?: boolean | null +} + export function PieChart({ datasets: _datasets, hiddenLegendKeys, @@ -60,12 +65,14 @@ export function PieChart({ trendsFilter, formula, showValueOnSeries, + showLabelOnSeries, supportsPercentStackView, showPercentStackView, tooltip: tooltipConfig, showPersonsModal = true, labelGroupType, -}: LineGraphProps): JSX.Element { + disableHoverOffset, +}: PieChartProps): JSX.Element { const isPie = type === GraphType.Pie const isPercentStackView = !!supportsPercentStackView && !!showPercentStackView @@ -114,12 +121,14 @@ export function PieChart({ layout: { padding: { top: 12, // 12 px so that the label isn't cropped + left: 20, + right: 20, bottom: 20, // 12 px so that the label isn't cropped + 8 px of padding against the number below }, }, borderWidth: 0, borderRadius: 0, - hoverOffset: onlyOneValue ? 0 : 16, // don't offset hovered segment if it is 100% + hoverOffset: onlyOneValue || disableHoverOffset ? 
0 : 16, // don't offset hovered segment if it is 100% onHover(event: ChartEvent, _: ActiveElement[], chart: Chart) { onChartHover(event, chart, onClick) }, @@ -135,7 +144,8 @@ export function PieChart({ }, display: (context) => { const percentage = getPercentageForDataPoint(context) - return showValueOnSeries !== false && // show if true or unset + return (showValueOnSeries !== false || // show if true or unset + showLabelOnSeries) && context.dataset.data.length > 1 && percentage > 5 ? 'auto' @@ -149,6 +159,10 @@ export function PieChart({ return { top: paddingY, bottom: paddingY, left: paddingX, right: paddingX } }, formatter: (value: number, context) => { + if (showLabelOnSeries) { + // cast to any as it seems like TypeScript types are wrong + return (context.dataset as any).labels?.[context.dataIndex] + } if (isPercentStackView) { const percentage = getPercentageForDataPoint(context) return `${percentage.toFixed(1)}%` diff --git a/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss b/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss index d8f9bffe605e7..e4ae155aede2f 100644 --- a/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss +++ b/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss @@ -10,7 +10,7 @@ fill-rule: evenodd; &:hover { - color: var(--primary-dark) !important; + color: var(--primary-3000-hover) !important; } } } diff --git a/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss b/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss index 2ee530fe2d87a..8f90c06671cf0 100644 --- a/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss +++ b/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss @@ -5,7 +5,7 @@ // Weird hack - this fixes chrome from not correctly identifying the bounds of the component for the drag preview // https://github.com/react-dnd/react-dnd/issues/832#issuecomment-442071628 transform: translate3d(0, 0, 0); - outline: 1px solid var(--primary); 
+ outline: 1px solid var(--primary-3000); background-color: var(--bg-light); } diff --git a/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.tsx b/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.tsx index 27544da05c561..594cfa583e4bc 100644 --- a/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.tsx +++ b/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.tsx @@ -6,8 +6,8 @@ import clsx from 'clsx' import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { useNotebookNode } from '../Nodes/notebookNodeLogic' import { notebookPanelLogic } from '../NotebookPanel/notebookPanelLogic' +import { useNotebookNode } from '../Nodes/NotebookNodeContext' export type DraggableToNotebookBaseProps = { href?: string diff --git a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx index 34f70d6c85f25..cba90e520677c 100644 --- a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx @@ -25,7 +25,7 @@ import { notebookLogic } from '../Notebook/notebookLogic' import { useInView } from 'react-intersection-observer' import { NotebookNodeResource } from '~/types' import { ErrorBoundary } from '~/layout/ErrorBoundary' -import { NotebookNodeContext, NotebookNodeLogicProps, notebookNodeLogic } from './notebookNodeLogic' +import { NotebookNodeLogicProps, notebookNodeLogic } from './notebookNodeLogic' import { posthogNodePasteRule, useSyncedAttributes } from './utils' import { KNOWN_NODES, @@ -39,6 +39,7 @@ import { NotebookNodeTitle } from './components/NotebookNodeTitle' import { notebookNodeLogicType } from './notebookNodeLogicType' import { SlashCommandsPopover } from '../Notebook/SlashCommands' import posthog from 'posthog-js' +import { NotebookNodeContext } from './NotebookNodeContext' import { IconGear 
} from '@posthog/icons' function NodeWrapper(props: NodeWrapperProps): JSX.Element { diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx index 60fa028e0814a..fd5aad3420b5e 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx @@ -9,7 +9,7 @@ import { useEffect, useMemo } from 'react' import clsx from 'clsx' import { NotFound } from 'lib/components/NotFound' import { cohortEditLogic } from 'scenes/cohorts/cohortEditLogic' -import { IconCohort, IconPerson, InsightsTrendsIcon } from 'lib/lemon-ui/icons' +import { IconPeople, IconPerson, IconTrends } from '@posthog/icons' import { Query } from '~/queries/Query/Query' import { LemonDivider, LemonTag } from '@posthog/lemon-ui' import { DataTableNode, NodeKind } from '~/queries/schema' @@ -71,7 +71,7 @@ const Component = ({ attributes }: NotebookNodeProps, + icon: , onClick: () => { setExpanded(false) insertAfter({ @@ -130,7 +130,7 @@ const Component = ({ attributes }: NotebookNodeProps ) : (
- + {cohort.name} ({cohort.count} persons) {cohort.is_static ? 'Static' : 'Dynamic'} diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeContext.ts b/frontend/src/scenes/notebooks/Nodes/NotebookNodeContext.ts new file mode 100644 index 0000000000000..d5db3c5035793 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeContext.ts @@ -0,0 +1,10 @@ +import { BuiltLogic } from 'kea' +import { createContext, useContext } from 'react' +import type { notebookNodeLogicType } from './notebookNodeLogicType' + +export const NotebookNodeContext = createContext | undefined>(undefined) + +// Currently there is no way to optionally get bound logics so this context allows us to maybe get a logic if it is "bound" via the provider +export const useNotebookNode = (): BuiltLogic | undefined => { + return useContext(NotebookNodeContext) +} diff --git a/frontend/src/scenes/notebooks/Nodes/components/NotebookNodeTitle.tsx b/frontend/src/scenes/notebooks/Nodes/components/NotebookNodeTitle.tsx index 4e7f6e2a2b045..b154f2de5cabb 100644 --- a/frontend/src/scenes/notebooks/Nodes/components/NotebookNodeTitle.tsx +++ b/frontend/src/scenes/notebooks/Nodes/components/NotebookNodeTitle.tsx @@ -4,6 +4,7 @@ import { notebookNodeLogic } from '../notebookNodeLogic' import { useEffect, useState } from 'react' import { LemonInput, Tooltip } from '@posthog/lemon-ui' import { notebookLogic } from 'scenes/notebooks/Notebook/notebookLogic' +import posthog from 'posthog-js' export function NotebookNodeTitle(): JSX.Element { const { isEditable } = useValues(notebookLogic) @@ -21,6 +22,10 @@ export function NotebookNodeTitle(): JSX.Element { title: newValue ?? 
undefined, }) + if (title != newValue) { + posthog.capture('notebook node title updated') + } + setEditing(false) } @@ -42,7 +47,10 @@ export function NotebookNodeTitle(): JSX.Element { setEditing(true)} + onDoubleClick={() => { + setEditing(true) + posthog.capture('notebook editing node title') + }} > {title} diff --git a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts index 9fa2e7013196e..c40cdfbbdb781 100644 --- a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts +++ b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts @@ -13,7 +13,6 @@ import { selectors, } from 'kea' import type { notebookNodeLogicType } from './notebookNodeLogicType' -import { createContext, useContext } from 'react' import { notebookLogicType } from '../Notebook/notebookLogicType' import { CustomNotebookNodeAttributes, @@ -281,10 +280,3 @@ export const notebookNodeLogic = kea([ props.notebookLogic.actions.unregisterNodeLogic(values.nodeId) }), ]) - -export const NotebookNodeContext = createContext | undefined>(undefined) - -// Currently there is no way to optionally get bound logics so this context allows us to maybe get a logic if it is "bound" via the provider -export const useNotebookNode = (): BuiltLogic | undefined => { - return useContext(NotebookNodeContext) -} diff --git a/frontend/src/scenes/notebooks/Nodes/utils.tsx b/frontend/src/scenes/notebooks/Nodes/utils.tsx index ede8239000f4d..d60eb6ac8e827 100644 --- a/frontend/src/scenes/notebooks/Nodes/utils.tsx +++ b/frontend/src/scenes/notebooks/Nodes/utils.tsx @@ -28,13 +28,15 @@ export function posthogNodePasteRule(options: { handler: ({ match, chain, range }) => { if (match.input) { chain().deleteRange(range).run() - const attributes = options.getAttributes(match) - if (attributes) { - options.editor.commands.insertContent({ - type: options.type.name, - attrs: attributes, - }) - } + + void Promise.resolve(options.getAttributes(match)).then((attributes) 
=> { + if (attributes) { + options.editor.commands.insertContent({ + type: options.type.name, + attrs: attributes, + }) + } + }) } }, }) diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.scss b/frontend/src/scenes/notebooks/Notebook/Notebook.scss index 1ba202f5a0348..ed584c90842d8 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.scss +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.scss @@ -262,7 +262,7 @@ // overriding ::selection is necessary here because // antd makes it invisible otherwise span::selection { - color: var(--primary); + color: var(--primary-3000); } // Overrides for insight controls diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx index 8eda1fc7d11da..5658ddbf7e5cb 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx @@ -2,7 +2,7 @@ import { LemonBanner, LemonButton, LemonDivider } from '@posthog/lemon-ui' import { combineUrl } from 'kea-router' import { IconCopy } from 'lib/lemon-ui/icons' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import posthog from 'posthog-js' import { useState } from 'react' import { urls } from 'scenes/urls' diff --git a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx index 56d8c2c19efaf..6052e382d4da4 100644 --- a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx +++ b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx @@ -1,23 +1,21 @@ import { Extension } from '@tiptap/core' import Suggestion from '@tiptap/suggestion' - import { ReactRenderer } from '@tiptap/react' import { LemonButton, LemonDivider, lemonToast } from '@posthog/lemon-ui' +import { IconBold, IconItalic } from 'lib/lemon-ui/icons' import { - IconBold, - IconCohort, - 
IconItalic, - IconRecording, - IconTableChart, - IconUploadFile, - InsightSQLIcon, - InsightsFunnelsIcon, - InsightsLifecycleIcon, - InsightsPathsIcon, - InsightsRetentionIcon, - InsightsStickinessIcon, - InsightsTrendsIcon, -} from 'lib/lemon-ui/icons' + IconCursor, + IconFunnels, + IconHogQL, + IconLifecycle, + IconPeople, + IconRetention, + IconRewindPlay, + IconStickiness, + IconTrends, + IconUpload, + IconUserPaths, +} from '@posthog/icons' import { forwardRef, useCallback, useEffect, useImperativeHandle, useMemo, useState } from 'react' import { EditorCommands, EditorRange } from './utils' import { BaseMathType, ChartDisplayType, FunnelVizType, NotebookNodeType, PathType, RetentionPeriod } from '~/types' @@ -99,8 +97,8 @@ const TEXT_CONTROLS: SlashCommandsItem[] = [ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Trend', - search: 'trend insight', - icon: , + search: 'graph trend insight', + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -125,7 +123,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Funnel', search: 'funnel insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -152,7 +150,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Retention', search: 'retention insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -178,8 +176,8 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ }, { title: 'Paths', - search: 'paths insight', - icon: , + search: 'user paths insight', + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -194,7 +192,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Stickiness', search: 'stickiness insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -215,7 +213,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Lifecycle', search: 'lifecycle insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -235,7 +233,7 @@ const SLASH_COMMANDS: 
SlashCommandsItem[] = [ { title: 'HogQL', search: 'sql', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -267,7 +265,7 @@ order by count() desc { title: 'Events', search: 'data explore', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -284,9 +282,9 @@ order by count() desc ), }, { - title: 'Persons', - search: 'people users', - icon: , + title: 'People', + search: 'persons users', + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -301,15 +299,15 @@ order by count() desc ), }, { - title: 'Session Replays', - search: 'recordings video', - icon: , + title: 'Session recordings', + search: 'video replay', + icon: , command: (chain, pos) => chain.insertContentAt(pos, { type: NotebookNodeType.RecordingPlaylist, attrs: {} }), }, { title: 'Image', - search: 'picture', - icon: , + search: 'picture gif', + icon: , command: async (chain, pos) => { // Trigger upload followed by insert try { diff --git a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts index e87521c26ad87..0ffea49bab2d3 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts @@ -80,6 +80,7 @@ export const notebookLogic = kea([ clearPreviewContent: true, loadNotebook: true, saveNotebook: (notebook: Pick) => ({ notebook }), + renameNotebook: (title: string) => ({ title }), setEditingNodeId: (editingNodeId: string | null) => ({ editingNodeId }), exportJSON: true, showConflictWarning: true, @@ -265,6 +266,13 @@ export const notebookLogic = kea([ } } }, + renameNotebook: async ({ title }) => { + if (!values.notebook) { + return values.notebook + } + const response = await api.notebooks.update(values.notebook.short_id, { title }) + return response + }, }, ], diff --git a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss index 
d4a2460692e24..5cd1a52a8ca02 100644 --- a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss +++ b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss @@ -40,7 +40,7 @@ } &--active { - border-color: var(--primary); + border-color: var(--primary-3000); height: 8rem; .NotebookPanelDropzone__message { diff --git a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss index ba0ab7a664877..4534f030b9bf5 100644 --- a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss +++ b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss @@ -121,7 +121,7 @@ } &--active { - border-color: var(--primary); + border-color: var(--primary-3000); height: 8rem; .NotebookPanelDropzone__message { @@ -131,7 +131,7 @@ &--dropped { padding: 1rem; - border-color: var(--primary); + border-color: var(--primary-3000); background-color: var(--bg-light); height: 100%; justify-content: flex-start; diff --git a/frontend/src/scenes/notebooks/NotebookScene.tsx b/frontend/src/scenes/notebooks/NotebookScene.tsx index a6a4ad229270f..a3ea732fa7d9c 100644 --- a/frontend/src/scenes/notebooks/NotebookScene.tsx +++ b/frontend/src/scenes/notebooks/NotebookScene.tsx @@ -7,7 +7,7 @@ import { NotebookSceneLogicProps, notebookSceneLogic } from './notebookSceneLogi import { LemonButton, LemonTag } from '@posthog/lemon-ui' import { NotebookExpandButton, NotebookSyncInfo } from './Notebook/NotebookMeta' import { UserActivityIndicator } from 'lib/components/UserActivityIndicator/UserActivityIndicator' -import { IconArrowRight, IconHelpOutline } from 'lib/lemon-ui/icons' +import { IconOpenSidebar, IconInfo } from '@posthog/icons' import { LOCAL_NOTEBOOK_TEMPLATES } from './NotebookTemplates/notebookTemplates' import './NotebookScene.scss' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' @@ -48,7 +48,7 @@ export function NotebookScene(): JSX.Element { return (

- This Notebook is open in the side panel + This Notebook is open in the side panel

@@ -84,7 +84,7 @@ export function NotebookScene(): JSX.Element { } + icon={} size={buttonSize} onClick={() => { if (selectedNotebook === LOCAL_NOTEBOOK_TEMPLATES[0].short_id && visibility === 'visible') { @@ -109,11 +109,11 @@ export function NotebookScene(): JSX.Element { tooltip={ <> Opens the notebook in a side panel, that can be accessed from anywhere in the PostHog - app. This is great for dragging and dropping elements like Insights, Recordings or even - Feature Flags into your active Notebook. + app. This is great for dragging and dropping elements like insights, recordings or even + feature flags into your active notebook. } - sideIcon={} + sideIcon={} > Open in side panel diff --git a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx index a22692be85990..7d313a160ec59 100644 --- a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx +++ b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx @@ -9,7 +9,7 @@ import { BuiltLogic, useActions, useValues } from 'kea' import { dayjs } from 'lib/dayjs' import { NotebookListItemType, NotebookTarget } from '~/types' import { notebooksModel, openNotebook } from '~/models/notebooksModel' -import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' +import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' import { Popover, PopoverProps } from 'lib/lemon-ui/Popover' import { LemonInput } from 'lib/lemon-ui/LemonInput/LemonInput' import { notebookLogicType } from '../Notebook/notebookLogicType' diff --git a/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx b/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx index 1a8c2f3159b86..409fff476262c 100644 --- a/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx +++ b/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx @@ -1,6 
+1,7 @@ import { NotebookNodeType } from '~/types' import { LemonSelectMultiple } from 'lib/lemon-ui/LemonSelectMultiple' import { NotebooksListFilters } from 'scenes/notebooks/NotebooksTable/notebooksTableLogic' +import posthog from 'posthog-js' export const fromNodeTypeToLabel: Omit< Record, @@ -48,6 +49,7 @@ export function ContainsTypeFilters({ }, {})} value={filters.contains} onChange={(newValue: string[]) => { + posthog.capture('notebook containing filter applied') setFilters({ contains: newValue.map((x) => x as NotebookNodeType) }) }} data-attr={'notebooks-list-contains-filters'} diff --git a/frontend/src/scenes/notebooks/notebookSceneLogic.ts b/frontend/src/scenes/notebooks/notebookSceneLogic.ts index 2d8656ddd5447..073dabbabe899 100644 --- a/frontend/src/scenes/notebooks/notebookSceneLogic.ts +++ b/frontend/src/scenes/notebooks/notebookSceneLogic.ts @@ -5,6 +5,7 @@ import type { notebookSceneLogicType } from './notebookSceneLogicType' import { notebookLogic } from './Notebook/notebookLogic' import { urls } from 'scenes/urls' import { notebooksModel } from '~/models/notebooksModel' +import { Scene } from 'scenes/sceneTypes' export type NotebookSceneLogicProps = { shortId: string @@ -17,7 +18,7 @@ export const notebookSceneLogic = kea([ values: [notebookLogic(props), ['notebook', 'notebookLoading'], notebooksModel, ['notebooksLoading']], actions: [notebookLogic(props), ['loadNotebook'], notebooksModel, ['createNotebook']], })), - selectors(() => ({ + selectors(({ props }) => ({ notebookId: [() => [(_, props) => props], (props): string => props.shortId], loading: [ @@ -29,11 +30,18 @@ export const notebookSceneLogic = kea([ (s) => [s.notebook, s.loading], (notebook, loading): Breadcrumb[] => [ { + key: Scene.Notebooks, name: 'Notebooks', path: urls.notebooks(), }, { - name: notebook ? notebook?.title || 'Unnamed' : loading ? 'Loading...' : 'Notebook not found', + key: notebook?.short_id || 'new', + name: notebook ? notebook?.title || 'Unnamed' : loading ? 
null : 'Notebook not found', + onRename: !notebook?.is_template + ? async (title: string) => { + await notebookLogic(props).asyncActions.renameNotebook(title) + } + : undefined, }, ], ], diff --git a/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx b/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx index f6ef3ee324f77..70e7852a70e44 100644 --- a/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx +++ b/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx @@ -65,7 +65,7 @@ export const OnboardingBillingStep = ({

- {currentPlan.initial_billing_limit && ( + {currentPlan?.initial_billing_limit && (
To protect your costs and ours, this product has an initial billing limit of $ diff --git a/frontend/src/scenes/organizationLogic.tsx b/frontend/src/scenes/organizationLogic.tsx index c582391f506ef..873903881c6b9 100644 --- a/frontend/src/scenes/organizationLogic.tsx +++ b/frontend/src/scenes/organizationLogic.tsx @@ -1,5 +1,5 @@ import { actions, afterMount, kea, listeners, path, reducers, selectors } from 'kea' -import api from 'lib/api' +import api, { ApiConfig } from 'lib/api' import type { organizationLogicType } from './organizationLogicType' import { AvailableFeature, OrganizationType } from '~/types' import { userLogic } from './userLogic' @@ -92,6 +92,11 @@ export const organizationLogic = kea([ ], }), listeners(({ actions }) => ({ + loadCurrentOrganizationSuccess: ({ currentOrganization }) => { + if (currentOrganization) { + ApiConfig.setCurrentOrganizationId(currentOrganization.id) + } + }, createOrganizationSuccess: () => { window.location.href = '/organization/members' }, diff --git a/frontend/src/scenes/paths/PathNodeCardButton.tsx b/frontend/src/scenes/paths/PathNodeCardButton.tsx index 0bb86e403890e..4e491d5501e81 100644 --- a/frontend/src/scenes/paths/PathNodeCardButton.tsx +++ b/frontend/src/scenes/paths/PathNodeCardButton.tsx @@ -5,7 +5,7 @@ import { userLogic } from 'scenes/userLogic' import { AvailableFeature, PathsFilterType } from '~/types' import { LemonButton, LemonButtonWithDropdown } from '@posthog/lemon-ui' import { IconEllipsis } from 'lib/lemon-ui/icons' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { pageUrl, PathNodeData } from './pathUtils' import { pathsDataLogicType } from './pathsDataLogicType' diff --git a/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx b/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx index 0e1202affc14c..c2e1fac031681 100644 --- 
a/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx +++ b/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx @@ -13,6 +13,7 @@ import { LemonButton } from '@posthog/lemon-ui' import type { personsManagementSceneLogicType } from './personsManagementSceneLogicType' import { Groups } from 'scenes/groups/Groups' +import { Scene } from 'scenes/sceneTypes' export type PersonsManagementTab = { key: string @@ -51,7 +52,7 @@ export const personsManagementSceneLogic = kea( { key: 'persons', url: urls.persons(), - label: 'Persons', + label: 'People', content: , }, { @@ -116,15 +117,18 @@ export const personsManagementSceneLogic = kea( (tabs, activeTab): Breadcrumb[] => { return [ { + key: Scene.PersonsManagement, name: `People`, path: tabs[0].url, }, activeTab ? { + key: activeTab.key, name: activeTab.label, path: activeTab.url, } : { + key: 'loading', name: 'Loading...', }, ] diff --git a/frontend/src/scenes/persons/PersonDisplay.tsx b/frontend/src/scenes/persons/PersonDisplay.tsx index 06b3e4b2158ad..1feadfb8b3733 100644 --- a/frontend/src/scenes/persons/PersonDisplay.tsx +++ b/frontend/src/scenes/persons/PersonDisplay.tsx @@ -7,7 +7,7 @@ import { PersonPreview } from './PersonPreview' import { useMemo, useState } from 'react' import { router } from 'kea-router' import { asDisplay, asLink } from './person-utils' -import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' +import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' import { NotebookNodeType } from '~/types' type PersonPropType = diff --git a/frontend/src/scenes/persons/PersonScene.scss b/frontend/src/scenes/persons/PersonScene.scss index b22eea07f7361..e555d6434aecb 100644 --- a/frontend/src/scenes/persons/PersonScene.scss +++ b/frontend/src/scenes/persons/PersonScene.scss @@ -5,7 +5,7 @@ line-height: 1.125rem; margin: 0 0 0 0.25rem; padding: 0 0.25rem 0 0.375rem; - color: var(--primary); + color: var(--primary-3000); cursor: pointer; 
svg { diff --git a/frontend/src/scenes/persons/personsLogic.tsx b/frontend/src/scenes/persons/personsLogic.tsx index 136d69f317baf..dc3dbfc01ea4b 100644 --- a/frontend/src/scenes/persons/personsLogic.tsx +++ b/frontend/src/scenes/persons/personsLogic.tsx @@ -16,14 +16,15 @@ import { import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { urls } from 'scenes/urls' import { teamLogic } from 'scenes/teamLogic' -import { convertPropertyGroupToProperties, toParams } from 'lib/utils' -import { isValidPropertyFilter } from 'lib/components/PropertyFilters/utils' +import { toParams } from 'lib/utils' +import { convertPropertyGroupToProperties, isValidPropertyFilter } from 'lib/components/PropertyFilters/utils' import { lemonToast } from 'lib/lemon-ui/lemonToast' import { TriggerExportProps } from 'lib/components/ExportButton/exporter' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' import { asDisplay } from './person-utils' import { hogqlQuery } from '~/queries/query' +import { Scene } from 'scenes/sceneTypes' export interface PersonsLogicProps { cohort?: number | 'new' @@ -246,12 +247,14 @@ export const personsLogic = kea([ const showPerson = person && location.pathname.match(/\/person\/.+/) const breadcrumbs: Breadcrumb[] = [ { - name: 'Persons', + key: Scene.PersonsManagement, + name: 'People', path: urls.persons(), }, ] if (showPerson) { breadcrumbs.push({ + key: person.id || 'unknown', name: asDisplay(person), }) } diff --git a/frontend/src/scenes/pipeline/Transformations.tsx b/frontend/src/scenes/pipeline/Transformations.tsx index 1a627f0439bc4..c16333ff78a00 100644 --- a/frontend/src/scenes/pipeline/Transformations.tsx +++ b/frontend/src/scenes/pipeline/Transformations.tsx @@ -20,11 +20,12 @@ import { restrictToParentElement, restrictToVerticalAxis } from '@dnd-kit/modifi import { CSS } from '@dnd-kit/utilities' import { More } from 'lib/lemon-ui/LemonButton/More' import { updatedAtColumn } from 
'lib/lemon-ui/LemonTable/columnUtils' -import { deleteWithUndo, humanFriendlyDetailedTime } from 'lib/utils' +import { humanFriendlyDetailedTime } from 'lib/utils' import { LemonMarkdown } from 'lib/lemon-ui/LemonMarkdown/LemonMarkdown' import { dayjs } from 'lib/dayjs' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' import { NewButton } from './NewButton' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' export function Transformations(): JSX.Element { const { diff --git a/frontend/src/scenes/pipeline/pipelineAppLogic.tsx b/frontend/src/scenes/pipeline/pipelineAppLogic.tsx index 2392124ce2e73..260d214df3785 100644 --- a/frontend/src/scenes/pipeline/pipelineAppLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineAppLogic.tsx @@ -4,6 +4,7 @@ import type { pipelineAppLogicType } from './pipelineAppLogicType' import { Breadcrumb, PipelineAppTabs } from '~/types' import { urls } from 'scenes/urls' import { actionToUrl, urlToAction } from 'kea-router' +import { Scene } from 'scenes/sceneTypes' export interface PipelineAppLogicProps { id: number @@ -29,10 +30,12 @@ export const pipelineAppLogic = kea([ () => [], (): Breadcrumb[] => [ { + key: Scene.Pipeline, name: 'Pipeline', path: urls.pipeline(), }, { + key: 'todo', name: 'App name', }, ], diff --git a/frontend/src/scenes/pipeline/pipelineLogic.tsx b/frontend/src/scenes/pipeline/pipelineLogic.tsx index 017e1966745b7..cf99abf943bfc 100644 --- a/frontend/src/scenes/pipeline/pipelineLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineLogic.tsx @@ -3,6 +3,7 @@ import type { pipelineLogicType } from './pipelineLogicType' import { actionToUrl, urlToAction } from 'kea-router' import { urls } from 'scenes/urls' import { Breadcrumb, PipelineTabs } from '~/types' +import { Scene } from 'scenes/sceneTypes' export const singularName = (tab: PipelineTabs): string => { switch (tab) { @@ -43,12 +44,13 @@ export const pipelineLogic = kea([ breadcrumbs: [ (s) => [s.currentTab], 
(tab): Breadcrumb[] => { - const breadcrumbs: Breadcrumb[] = [{ name: 'Pipeline' }] - breadcrumbs.push({ - name: humanFriendlyTabName(tab), - }) - - return breadcrumbs + return [ + { key: Scene.Pipeline, name: 'Data pipeline' }, + { + key: tab, + name: humanFriendlyTabName(tab), + }, + ] }, ], })), diff --git a/frontend/src/scenes/pipeline/transformationsLogic.tsx b/frontend/src/scenes/pipeline/transformationsLogic.tsx index 40c83cb3c6750..eadb6799ed898 100644 --- a/frontend/src/scenes/pipeline/transformationsLogic.tsx +++ b/frontend/src/scenes/pipeline/transformationsLogic.tsx @@ -36,7 +36,7 @@ export const pipelineTransformationsLogic = kea, { loadPlugins: async () => { - const results: PluginType[] = await loadPaginatedResults( + const results: PluginType[] = await api.loadPaginatedResults( `api/organizations/@current/pipeline_transformations` ) const plugins: Record = {} @@ -60,7 +60,7 @@ export const pipelineTransformationsLogic = kea { const pluginConfigs: Record = {} - const results = await loadPaginatedResults( + const results = await api.loadPaginatedResults( `api/projects/${values.currentTeamId}/pipeline_transformations_configs` ) @@ -185,21 +185,3 @@ export const pipelineTransformationsLogic = kea { - let results: any[] = [] - for (let i = 0; i <= maxIterations; ++i) { - if (!url) { - break - } - - const { results: partialResults, next } = await api.get(url) - results = results.concat(partialResults) - url = next - } - return results -} diff --git a/frontend/src/scenes/plugins/edit/PluginDrawer.tsx b/frontend/src/scenes/plugins/edit/PluginDrawer.tsx index b5a5b7f31f5a0..979f5fa689c90 100644 --- a/frontend/src/scenes/plugins/edit/PluginDrawer.tsx +++ b/frontend/src/scenes/plugins/edit/PluginDrawer.tsx @@ -1,7 +1,7 @@ import React, { useEffect, useState } from 'react' import { useActions, useValues } from 'kea' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' -import { Form, Switch } from 'antd' +import { Form } from 'antd' import { userLogic } 
from 'scenes/userLogic' import { PluginImage } from 'scenes/plugins/plugin/PluginImage' import { Drawer } from 'lib/components/Drawer' @@ -18,7 +18,7 @@ import { MOCK_NODE_PROCESS } from 'lib/constants' import { LemonMarkdown } from 'lib/lemon-ui/LemonMarkdown' import { PluginTags } from '../tabs/apps/components' import { IconLock } from 'lib/lemon-ui/icons' -import { LemonButton, LemonTag, Link } from '@posthog/lemon-ui' +import { LemonButton, LemonSwitch, LemonTag, Link } from '@posthog/lemon-ui' import { IconCode } from '@posthog/icons' window.process = MOCK_NODE_PROCESS @@ -31,10 +31,10 @@ function EnabledDisabledSwitch({ onChange?: (value: boolean) => void }): JSX.Element { return ( - <> - - {value ? 'Enabled' : 'Disabled'} - +
+ + {value ? 'Enabled' : 'Disabled'} +
) } diff --git a/frontend/src/scenes/plugins/plugin/PluginLogs.tsx b/frontend/src/scenes/plugins/plugin/PluginLogs.tsx index 20438edc4a136..87c61298cd182 100644 --- a/frontend/src/scenes/plugins/plugin/PluginLogs.tsx +++ b/frontend/src/scenes/plugins/plugin/PluginLogs.tsx @@ -1,9 +1,10 @@ import { useActions, useValues } from 'kea' import { pluralize } from 'lib/utils' import { PluginLogEntryType } from '../../../types' -import { LOGS_PORTION_LIMIT, pluginLogsLogic, PluginLogsProps } from './pluginLogsLogic' +import { pluginLogsLogic, PluginLogsProps } from './pluginLogsLogic' import { dayjs } from 'lib/dayjs' import { LemonButton, LemonCheckbox, LemonInput, LemonTable, LemonTableColumns } from '@posthog/lemon-ui' +import { LOGS_PORTION_LIMIT } from 'lib/constants' function PluginLogEntryTypeDisplay(type: PluginLogEntryType): JSX.Element { let color: string | undefined diff --git a/frontend/src/scenes/plugins/plugin/pluginLogsLogic.ts b/frontend/src/scenes/plugins/plugin/pluginLogsLogic.ts index 437393efcb4cf..f2e46d6deb683 100644 --- a/frontend/src/scenes/plugins/plugin/pluginLogsLogic.ts +++ b/frontend/src/scenes/plugins/plugin/pluginLogsLogic.ts @@ -5,13 +5,12 @@ import { PluginLogEntry, PluginLogEntryType } from '~/types' import { teamLogic } from '../../teamLogic' import type { pluginLogsLogicType } from './pluginLogsLogicType' import { CheckboxValueType } from 'antd/lib/checkbox/Group' +import { LOGS_PORTION_LIMIT } from 'lib/constants' export interface PluginLogsProps { pluginConfigId: number } -export const LOGS_PORTION_LIMIT = 50 - export const pluginLogsLogic = kea([ props({} as PluginLogsProps), key(({ pluginConfigId }: PluginLogsProps) => pluginConfigId), diff --git a/frontend/src/scenes/plugins/pluginsLogic.ts b/frontend/src/scenes/plugins/pluginsLogic.ts index 3e6954c3cf081..dcb20bcfccda5 100644 --- a/frontend/src/scenes/plugins/pluginsLogic.ts +++ b/frontend/src/scenes/plugins/pluginsLogic.ts @@ -29,8 +29,6 @@ export interface PluginSelectionType { 
url?: string } -const PAGINATION_DEFAULT_MAX_PAGES = 10 - function capturePluginEvent(event: string, plugin: PluginType, type?: PluginInstallationType): void { posthog.capture(event, { plugin_name: plugin.name, @@ -40,23 +38,6 @@ function capturePluginEvent(event: string, plugin: PluginType, type?: PluginInst }) } -async function loadPaginatedResults( - url: string | null, - maxIterations: number = PAGINATION_DEFAULT_MAX_PAGES -): Promise { - let results: any[] = [] - for (let i = 0; i <= maxIterations; ++i) { - if (!url) { - break - } - - const { results: partialResults, next } = await api.get(url) - results = results.concat(partialResults) - url = next - } - return results -} - export const pluginsLogic = kea([ path(['scenes', 'plugins', 'pluginsLogic']), connect(frontendAppsLogic), @@ -102,7 +83,7 @@ export const pluginsLogic = kea([ {} as Record, { loadPlugins: async () => { - const results: PluginType[] = await loadPaginatedResults('api/organizations/@current/plugins') + const results: PluginType[] = await api.loadPaginatedResults('api/organizations/@current/plugins') const plugins: Record = {} for (const plugin of results) { plugins[plugin.id] = plugin @@ -160,7 +141,7 @@ export const pluginsLogic = kea([ { loadPluginConfigs: async () => { const pluginConfigs: Record = {} - const results: PluginConfigType[] = await loadPaginatedResults('api/plugin_config') + const results: PluginConfigType[] = await api.loadPaginatedResults('api/plugin_config') for (const pluginConfig of results) { pluginConfigs[pluginConfig.plugin] = { ...pluginConfig } diff --git a/frontend/src/scenes/plugins/tabs/apps/components.tsx b/frontend/src/scenes/plugins/tabs/apps/components.tsx index 4033a7f86a166..aa0892cf9e0db 100644 --- a/frontend/src/scenes/plugins/tabs/apps/components.tsx +++ b/frontend/src/scenes/plugins/tabs/apps/components.tsx @@ -6,7 +6,7 @@ import { useValues } from 'kea' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' import { organizationLogic } from 
'scenes/organizationLogic' import { PluginsAccessLevel } from 'lib/constants' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { urls } from 'scenes/urls' diff --git a/frontend/src/scenes/products/Products.tsx b/frontend/src/scenes/products/Products.tsx index cba24e76c8a36..5d8bec779ec45 100644 --- a/frontend/src/scenes/products/Products.tsx +++ b/frontend/src/scenes/products/Products.tsx @@ -11,6 +11,7 @@ import { router } from 'kea-router' import { getProductUri } from 'scenes/onboarding/onboardingLogic' import { productsLogic } from './productsLogic' import * as Icons from '@posthog/icons' +import clsx from 'clsx' export const scene: SceneExport = { component: Products, @@ -27,6 +28,7 @@ function OnboardingCompletedButton({ productKey: ProductKey }): JSX.Element { const { onSelectProduct } = useActions(productsLogic) + return ( <> @@ -94,7 +96,7 @@ export function ProductCard({ return (
@@ -102,11 +104,11 @@ export function ProductCard({
{getProductIcon(product.icon_key, 'text-2xl')}
-
+

{product.name}

{product.description}

-
+
{onboardingCompleted ? ( +

Pick your {isFirstProduct ? 'first' : 'next'} product.

diff --git a/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx b/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx index 0aea88331c5e3..64a4181460b29 100644 --- a/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx +++ b/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx @@ -33,14 +33,14 @@ export function NewlySeenPersons(): JSX.Element { return ( } diff --git a/frontend/src/scenes/project-homepage/ProjectHomePageCompactListItem.tsx b/frontend/src/scenes/project-homepage/ProjectHomePageCompactListItem.tsx index c61ac0e2bcc8c..2ea324dfaf8b5 100644 --- a/frontend/src/scenes/project-homepage/ProjectHomePageCompactListItem.tsx +++ b/frontend/src/scenes/project-homepage/ProjectHomePageCompactListItem.tsx @@ -23,7 +23,7 @@ export function ProjectHomePageCompactListItem({

{title}
-
{subtitle}
+
{subtitle}
{suffix ? {suffix} : null} diff --git a/frontend/src/scenes/project-homepage/ProjectHomepage.scss b/frontend/src/scenes/project-homepage/ProjectHomepage.scss index 3066991290387..f905c0241e4a4 100644 --- a/frontend/src/scenes/project-homepage/ProjectHomepage.scss +++ b/frontend/src/scenes/project-homepage/ProjectHomepage.scss @@ -1,11 +1,11 @@ .project-homepage { - .homepage-dashboard-header { + .HomepageDashboardHeader { margin-top: 1rem; display: flex; justify-content: space-between; align-items: center; - .dashboard-title-container { + .HomepageDashboardHeader__title { display: flex; flex-direction: row; align-items: center; @@ -17,6 +17,16 @@ margin: 0; } } + + .posthog-3000 & { + a { + color: var(--default); + + &:hover { + color: var(--primary-3000); + } + } + } } } diff --git a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx index 80cc8e5b02b9f..b26ec70c514d9 100644 --- a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx +++ b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx @@ -8,7 +8,7 @@ import { Scene, SceneExport } from 'scenes/sceneTypes' import { DashboardPlacement } from '~/types' import { inviteLogic } from 'scenes/settings/organization/inviteLogic' import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { IconCottage } from 'lib/lemon-ui/icons' +import { IconHome } from '@posthog/icons' import { projectHomepageLogic } from 'scenes/project-homepage/projectHomepageLogic' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { RecentRecordings } from './RecentRecordings' @@ -81,12 +81,12 @@ export function ProjectHomepage(): JSX.Element {
{currentTeam?.primary_dashboard ? ( <> -
-
+
+
{!dashboard && } {dashboard?.name && ( <> - + = [InsightType.TRENDS]: { name: 'Trends', description: 'Visualize and break down how actions or events vary over time.', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [InsightType.FUNNELS]: { name: 'Funnel', description: 'Discover how many users complete or drop out of a sequence of actions.', - icon: InsightsFunnelsIcon, + icon: IconFunnels, inMenu: true, }, [InsightType.RETENTION]: { name: 'Retention', description: 'See how many users return on subsequent days after an intial action.', - icon: InsightsRetentionIcon, + icon: IconRetention, inMenu: true, }, [InsightType.PATHS]: { name: 'Paths', description: 'Trace the journeys users take within your product and where they drop off.', - icon: InsightsPathsIcon, + icon: IconUserPaths, inMenu: true, }, [InsightType.STICKINESS]: { name: 'Stickiness', description: 'See what keeps users coming back by viewing the interval between repeated actions.', - icon: InsightsStickinessIcon, + icon: IconStickiness, inMenu: true, }, [InsightType.LIFECYCLE]: { name: 'Lifecycle', description: 'Understand growth by breaking down new, resurrected, returning and dormant users.', - icon: InsightsLifecycleIcon, + icon: IconLifecycle, inMenu: true, }, [InsightType.SQL]: { name: 'SQL', description: 'Use HogQL to query your data.', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [InsightType.JSON]: { name: 'Custom', description: 'Save components powered by our JSON query language.', - icon: InsightSQLIcon, + icon: IconBrackets, inMenu: true, }, } @@ -125,37 +128,37 @@ export const QUERY_TYPES_METADATA: Record = { [NodeKind.TrendsQuery]: { name: 'Trends', description: 'Visualize and break down how actions or events vary over time', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.FunnelsQuery]: { name: 'Funnel', description: 'Discover how many users complete or drop out of a sequence of actions', - icon: InsightsFunnelsIcon, + icon: 
IconFunnels, inMenu: true, }, [NodeKind.RetentionQuery]: { name: 'Retention', description: 'See how many users return on subsequent days after an intial action', - icon: InsightsRetentionIcon, + icon: IconRetention, inMenu: true, }, [NodeKind.PathsQuery]: { name: 'Paths', description: 'Trace the journeys users take within your product and where they drop off', - icon: InsightsPathsIcon, + icon: IconUserPaths, inMenu: true, }, [NodeKind.StickinessQuery]: { name: 'Stickiness', description: 'See what keeps users coming back by viewing the interval between repeated actions', - icon: InsightsStickinessIcon, + icon: IconStickiness, inMenu: true, }, [NodeKind.LifecycleQuery]: { name: 'Lifecycle', description: 'Understand growth by breaking down new, resurrected, returning and dormant users', - icon: InsightsLifecycleIcon, + icon: IconLifecycle, inMenu: true, }, [NodeKind.EventsNode]: { @@ -239,43 +242,43 @@ export const QUERY_TYPES_METADATA: Record = { [NodeKind.SessionsTimelineQuery]: { name: 'Sessions', description: 'Sessions timeline query', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.HogQLQuery]: { name: 'HogQL', description: 'Direct HogQL query', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [NodeKind.HogQLMetadata]: { name: 'HogQL Metadata', description: 'Metadata for a HogQL query', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [NodeKind.DatabaseSchemaQuery]: { name: 'Database Schema', description: 'Introspect the PostHog database schema', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [NodeKind.WebOverviewQuery]: { name: 'Overview Stats', description: 'View overview stats for a website', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.WebStatsTableQuery]: { name: 'Web Table', description: 'A table of results from web analytics, with a breakdown', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.WebTopClicksQuery]: { name: 'Top Clicks', 
description: 'View top clicks for a website', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, } @@ -301,7 +304,7 @@ export function InsightIcon({ insight }: { insight: InsightModel }): JSX.Element } const insightMetadata = INSIGHT_TYPES_METADATA[insightType] if (insightMetadata && insightMetadata.icon) { - return + return } return null } @@ -426,10 +429,10 @@ export function SavedInsights(): JSX.Element { insight.favorited ? ( ) : ( - + ) } - tooltip={`${insight.favorited ? 'Add to' : 'Remove from'} favorite insights`} + tooltip={`${insight.favorited ? 'Remove from' : 'Add to'} favorite insights`} /> {hasDashboardCollaboration && insight.description && ( @@ -522,7 +525,10 @@ export function SavedInsights(): JSX.Element { return (
- } /> + } + /> setSavedInsightsFilters({ tab })} diff --git a/frontend/src/scenes/saved-insights/newInsightsMenu.tsx b/frontend/src/scenes/saved-insights/newInsightsMenu.tsx index 3e67827c42907..b4eba27666d1a 100644 --- a/frontend/src/scenes/saved-insights/newInsightsMenu.tsx +++ b/frontend/src/scenes/saved-insights/newInsightsMenu.tsx @@ -33,7 +33,7 @@ export function overlayForNewInsightMenu(dataAttr: string): ReactNode[] { >
{listedInsightTypeMetadata.name} - {listedInsightTypeMetadata.description} + {listedInsightTypeMetadata.description}
) diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index 8a0ec5f699aa2..5df2ac36f289a 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -45,7 +45,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.WebAnalytics]: { projectBased: true, - name: 'Web Analytics', + name: 'Web analytics', layout: 'app-container', }, [Scene.Cohort]: { @@ -54,43 +54,43 @@ export const sceneConfigurations: Partial> = { }, [Scene.Events]: { projectBased: true, - name: 'Event Explorer', + name: 'Event explorer', }, [Scene.BatchExports]: { projectBased: true, - name: 'Batch Exports', + name: 'Batch exports', }, [Scene.BatchExportEdit]: { projectBased: true, - name: 'Edit Batch Export', + name: 'Edit batch export', }, [Scene.BatchExport]: { projectBased: true, - name: 'Batch Export', + name: 'Batch export', }, [Scene.DataManagement]: { projectBased: true, - name: 'Data Management', + name: 'Data management', }, [Scene.EventDefinition]: { projectBased: true, - name: 'Data Management', + name: 'Data management', }, [Scene.PropertyDefinition]: { projectBased: true, - name: 'Data Management', + name: 'Data management', }, [Scene.Replay]: { projectBased: true, - name: 'Session Replay', + name: 'Session replay', }, [Scene.ReplaySingle]: { projectBased: true, - name: 'Replay Recording', + name: 'Replay recording', }, [Scene.ReplayPlaylist]: { projectBased: true, - name: 'Replay Playlist', + name: 'Replay playlist', }, [Scene.Person]: { projectBased: true, @@ -98,7 +98,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.PersonsManagement]: { projectBased: true, - name: 'Persons & Groups', + name: 'People & groups', }, [Scene.Action]: { projectBased: true, @@ -106,7 +106,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.Group]: { projectBased: true, - name: 'Persons & Groups', + name: 'People & groups', }, [Scene.Pipeline]: { projectBased: true, @@ -118,7 +118,7 @@ export const sceneConfigurations: Partial> = { }, 
[Scene.Experiments]: { projectBased: true, - name: 'Experiments', + name: 'A/B testing', }, [Scene.Experiment]: { projectBased: true, @@ -126,7 +126,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.FeatureFlags]: { projectBased: true, - name: 'Feature Flags', + name: 'Feature flags', }, [Scene.FeatureFlag]: { projectBased: true, @@ -145,27 +145,27 @@ export const sceneConfigurations: Partial> = { }, [Scene.DataWarehouse]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehousePosthog]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehouseExternal]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehouseSavedQueries]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehouseSettings]: { projectBased: true, - name: 'Data Warehouse Settings', + name: 'Data warehouse settings', }, [Scene.DataWarehouseTable]: { projectBased: true, - name: 'Data Warehouse Table', + name: 'Data warehouse table', }, [Scene.EarlyAccessFeatures]: { projectBased: true, @@ -187,14 +187,14 @@ export const sceneConfigurations: Partial> = { }, [Scene.SavedInsights]: { projectBased: true, - name: 'Insights', + name: 'Product analytics', }, [Scene.ProjectHomepage]: { projectBased: true, name: 'Homepage', }, [Scene.IntegrationsRedirect]: { - name: 'Integrations Redirect', + name: 'Integrations redirect', }, [Scene.Products]: { projectBased: true, @@ -206,7 +206,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.ToolbarLaunch]: { projectBased: true, - name: 'Launch Toolbar', + name: 'Launch toolbar', }, [Scene.Site]: { projectBased: true, diff --git a/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts b/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts index a302c21fbdb76..58c1e8e23a806 100644 --- 
a/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts +++ b/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts @@ -2,6 +2,7 @@ import { kea, props, path, selectors } from 'kea' import { Breadcrumb, SessionRecordingType } from '~/types' import type { sessionRecordingDetailLogicType } from './sessionRecordingDetailLogicType' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' export interface SessionRecordingDetailLogicProps { id?: SessionRecordingType['id'] @@ -12,13 +13,15 @@ export const sessionRecordingDetailLogic = kea( props({} as SessionRecordingDetailLogicProps), selectors({ breadcrumbs: [ - () => [(_, props) => props.id], + () => [(_, props) => props.id as SessionRecordingType['id']], (sessionRecordingId): Breadcrumb[] => [ { + key: Scene.Replay, name: `Replay`, path: urls.replay(), }, { + key: sessionRecordingId, name: sessionRecordingId ?? 'Not Found', path: sessionRecordingId ? urls.replaySingle(sessionRecordingId) : undefined, }, diff --git a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts index 54ef82ab8da18..596692d8ca162 100644 --- a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts +++ b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts @@ -1,5 +1,5 @@ import { BuiltLogic, connect, kea, listeners, path, reducers, selectors } from 'kea' -import { Breadcrumb, PersonType, RecordingSnapshot, SessionRecordingType } from '~/types' +import { Breadcrumb, PersonType, RecordingSnapshot, ReplayTabs, SessionRecordingType } from '~/types' import { urls } from 'scenes/urls' import { loaders } from 'kea-loaders' @@ -14,6 +14,7 @@ import { eventWithTime } from '@rrweb/types' import type { sessionRecordingDataLogicType } from '../player/sessionRecordingDataLogicType' import { 
prepareRecordingSnapshots, sessionRecordingDataLogic } from '../player/sessionRecordingDataLogic' import { dayjs } from 'lib/dayjs' +import { Scene } from 'scenes/sceneTypes' export type ExportedSessionRecordingFileV1 = { version: '2022-12-02' @@ -196,10 +197,12 @@ export const sessionRecordingFilePlaybackLogic = kea [], (): Breadcrumb[] => [ { - name: `Recordings`, + key: Scene.Replay, + name: `Session replay`, path: urls.replay(), }, { + key: ReplayTabs.FilePlayback, name: 'Import', }, ], diff --git a/frontend/src/scenes/session-recordings/player/PlayerMeta.scss b/frontend/src/scenes/session-recordings/player/PlayerMeta.scss index 3312579e0b711..2089db1b92dd8 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMeta.scss +++ b/frontend/src/scenes/session-recordings/player/PlayerMeta.scss @@ -79,4 +79,14 @@ } } } + + .Link { + .posthog-3000 & { + color: var(--default); + + &:hover { + color: var(--primary-3000); + } + } + } } diff --git a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx index 08d675cb62db1..ecc5f95f3ead0 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx @@ -166,12 +166,12 @@ export function PlayerMeta(): JSX.Element { )}
-
+
{!sessionPerson || !startTime ? ( ) : (
- + {'·'} diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx index d0fd56e93e16b..c0059f905f168 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx @@ -10,7 +10,7 @@ import { PlaylistPopoverButton } from './playlist-popover/PlaylistPopover' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' import { NotebookNodeType } from '~/types' -import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' +import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' import { sessionPlayerModalLogic } from './modal/sessionPlayerModalLogic' import { personsModalLogic } from 'scenes/trends/persons-modal/personsModalLogic' import { IconNotebook } from 'scenes/notebooks/IconNotebook' diff --git a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss index f57dc4c405164..dadd79e777ec8 100644 --- a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss +++ b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss @@ -83,6 +83,14 @@ pointer-events: none; transition: opacity 0.2s ease-in-out; } + + .LemonButton--tertiary { + .posthog-3000 & { + &:hover { + color: var(--primary-3000); + } + } + } } &--inspector-focus { diff --git a/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss b/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss index 1ee0c5586e7d0..53e2b3f3ca9a0 100644 --- a/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss +++ b/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss @@ -136,7 +136,7 @@ } &--primary { - --tick-color: var(--primary); + 
--tick-color: var(--primary-3000); } .PlayerSeekbarTick__line { diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss index b821b4b8d4c21..b2a7191f56ce2 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss @@ -11,4 +11,8 @@ border-radius: var(--radius) 0 0 var(--radius); transition: transform 200ms linear; will-change: transform; + + .posthog-3000 & { + background-color: var(--primary-3000); + } } diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx index 119951b6a6160..046ba542d8dd6 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx @@ -1,11 +1,12 @@ import { LemonButton, LemonDivider } from '@posthog/lemon-ui' import { IconOpenInNew } from 'lib/lemon-ui/icons' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' -import { capitalizeFirstLetter, autoCaptureEventToDescription, insightUrlForEvent } from 'lib/utils' +import { capitalizeFirstLetter, autoCaptureEventToDescription } from 'lib/utils' import { InspectorListItemEvent } from '../playerInspectorLogic' import { SimpleKeyValueList } from './SimpleKeyValueList' import { Spinner } from 'lib/lemon-ui/Spinner' import { ErrorDisplay } from 'lib/components/Errors/ErrorDisplay' +import { insightUrlForEvent } from 'scenes/insights/utils' export interface ItemEventProps { item: InspectorListItemEvent diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx index 
96c4a9e01a143..592d926958224 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx @@ -140,7 +140,7 @@ export function ItemPerformanceEvent({ expanded, setExpanded, }: ItemPerformanceEvent): JSX.Element { - const [activeTab, setActiveTab] = useState<'timings' | 'headers' | 'payload' | 'response_body'>('timings') + const [activeTab, setActiveTab] = useState<'timings' | 'headers' | 'payload' | 'response_body' | 'raw'>('timings') const bytes = humanizeBytes(item.encoded_body_size || item.decoded_body_size || 0) const startTime = item.start_time || item.fetch_start || 0 @@ -176,7 +176,11 @@ export function ItemPerformanceEvent({ return acc } - if (['response_headers', 'request_headers', 'request_body', 'response_body', 'response_status'].includes(key)) { + if ( + ['response_headers', 'request_headers', 'request_body', 'response_body', 'response_status', 'raw'].includes( + key + ) + ) { return acc } @@ -392,6 +396,17 @@ export function ItemPerformanceEvent({ ), } : false, + // raw is only available if the feature flag is enabled + // TODO before proper release we should put raw behind its own flag + { + key: 'raw', + label: 'Json', + content: ( + + {JSON.stringify(item.raw, null, 2)} + + ), + }, ]} /> @@ -470,6 +485,11 @@ function StatusRow({ item }: { item: PerformanceEvent }): JSX.Element | null { let statusRow = null let methodRow = null + let fromDiskCache = false + if (item.transfer_size === 0 && item.response_body && item.response_status && item.response_status < 400) { + fromDiskCache = true + } + if (item.response_status) { const statusDescription = `${item.response_status} ${friendlyHttpStatus[item.response_status] || ''}` @@ -483,7 +503,10 @@ function StatusRow({ item }: { item: PerformanceEvent }): JSX.Element | null { statusRow = (
Status code
- {statusDescription} +
+ {statusDescription} + {fromDiskCache && (from cache)} +
) } diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/Timing/NetworkRequestTiming.stories.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/Timing/NetworkRequestTiming.stories.tsx index a02e9bf3dce03..815eefed8bdfb 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/Timing/NetworkRequestTiming.stories.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/Timing/NetworkRequestTiming.stories.tsx @@ -19,27 +19,30 @@ export function Basic(): JSX.Element { + /** * There are defined sections to performance measurement. We may have data for some or all of them * @@ -109,85 +110,114 @@ function colorForSection(section: (typeof perfSections)[number]): string { * * see https://nicj.net/resourcetiming-in-practice/ */ -function calculatePerformanceParts(perfEntry: PerformanceEvent): Record { +export function calculatePerformanceParts(perfEntry: PerformanceEvent): PerformanceMeasures { const performanceParts: Record = {} - if (perfEntry.redirect_start && perfEntry.redirect_end) { - performanceParts['redirect'] = { - start: perfEntry.redirect_start, - end: perfEntry.redirect_end, - color: colorForSection('redirect'), + if (isPresent(perfEntry.redirect_start) && isPresent(perfEntry.redirect_end)) { + if (perfEntry.redirect_end - perfEntry.redirect_start > 0) { + performanceParts['redirect'] = { + start: perfEntry.redirect_start, + end: perfEntry.redirect_end, + color: colorForSection('redirect'), + } } } - if (perfEntry.fetch_start && perfEntry.domain_lookup_start) { - performanceParts['app cache'] = { - start: perfEntry.fetch_start, - end: perfEntry.domain_lookup_start, - color: colorForSection('app cache'), + if (isPresent(perfEntry.fetch_start) && isPresent(perfEntry.domain_lookup_start)) { + if (perfEntry.domain_lookup_start - perfEntry.fetch_start > 0) { + performanceParts['app cache'] = { + start: perfEntry.fetch_start, + end: perfEntry.domain_lookup_start, + color: 
colorForSection('app cache'), + } } } - if (perfEntry.domain_lookup_end && perfEntry.domain_lookup_start) { - performanceParts['dns lookup'] = { - start: perfEntry.domain_lookup_start, - end: perfEntry.domain_lookup_end, - color: colorForSection('dns lookup'), + if (isPresent(perfEntry.domain_lookup_end) && isPresent(perfEntry.domain_lookup_start)) { + if (perfEntry.domain_lookup_end - perfEntry.domain_lookup_start > 0) { + performanceParts['dns lookup'] = { + start: perfEntry.domain_lookup_start, + end: perfEntry.domain_lookup_end, + color: colorForSection('dns lookup'), + } } } - if (perfEntry.connect_end && perfEntry.connect_start) { - performanceParts['connection time'] = { - start: perfEntry.connect_start, - end: perfEntry.connect_end, - color: colorForSection('connection time'), - } - - if (perfEntry.secure_connection_start) { - performanceParts['tls time'] = { - start: perfEntry.secure_connection_start, + if (isPresent(perfEntry.connect_end) && isPresent(perfEntry.connect_start)) { + if (perfEntry.connect_end - perfEntry.connect_start > 0) { + performanceParts['connection time'] = { + start: perfEntry.connect_start, end: perfEntry.connect_end, - color: colorForSection('tls time'), - reducedHeight: true, + color: colorForSection('connection time'), + } + + if (isPresent(perfEntry.secure_connection_start) && perfEntry.secure_connection_start > 0) { + performanceParts['tls time'] = { + start: perfEntry.secure_connection_start, + end: perfEntry.connect_end, + color: colorForSection('tls time'), + reducedHeight: true, + } } } } - if (perfEntry.connect_end && perfEntry.request_start && perfEntry.connect_end !== perfEntry.request_start) { - performanceParts['request queuing time'] = { - start: perfEntry.connect_end, - end: perfEntry.request_start, - color: colorForSection('request queuing time'), + if ( + isPresent(perfEntry.connect_end) && + isPresent(perfEntry.request_start) && + perfEntry.connect_end !== perfEntry.request_start + ) { + if 
(perfEntry.request_start - perfEntry.connect_end > 0) { + performanceParts['request queuing time'] = { + start: perfEntry.connect_end, + end: perfEntry.request_start, + color: colorForSection('request queuing time'), + } } } - if (perfEntry.response_start && perfEntry.request_start) { - performanceParts['waiting for first byte'] = { - start: perfEntry.request_start, - end: perfEntry.response_start, - color: colorForSection('waiting for first byte'), + if (isPresent(perfEntry.response_start) && isPresent(perfEntry.request_start)) { + if (perfEntry.response_start - perfEntry.request_start > 0) { + performanceParts['waiting for first byte'] = { + start: perfEntry.request_start, + end: perfEntry.response_start, + color: colorForSection('waiting for first byte'), + } } } - if (perfEntry.response_start && perfEntry.response_end) { - performanceParts['receiving response'] = { - start: perfEntry.response_start, - end: perfEntry.response_end, - color: colorForSection('receiving response'), + if (isPresent(perfEntry.response_start) && isPresent(perfEntry.response_end)) { + if (perfEntry.response_end - perfEntry.response_start > 0) { + // if loading from disk cache then response_start is 0 but fetch_start is not + let start = perfEntry.response_start + if (perfEntry.response_start === 0 && isPresent(perfEntry.fetch_start)) { + start = perfEntry.fetch_start + } + performanceParts['receiving response'] = { + start: start, + end: perfEntry.response_end, + color: colorForSection('receiving response'), + } } } - if (perfEntry.response_end && perfEntry.load_event_end) { - performanceParts['document processing'] = { - start: perfEntry.response_end, - end: perfEntry.load_event_end, - color: colorForSection('document processing'), + if (isPresent(perfEntry.response_end) && isPresent(perfEntry.load_event_end)) { + if (perfEntry.load_event_end - perfEntry.response_end > 0) { + performanceParts['document processing'] = { + start: perfEntry.response_end, + end: perfEntry.load_event_end, + 
color: colorForSection('document processing'), + } } } return performanceParts } +function percentage(partDuration: number, totalDuration: number, min: number): number { + return Math.min(Math.max(min, (partDuration / totalDuration) * 100), 100) +} + function percentagesWithinEventRange({ partStart, partEnd, @@ -203,20 +233,20 @@ function percentagesWithinEventRange({ const partStartRelativeToTimeline = partStart - rangeStart const partDuration = partEnd - partStart - const partPercentage = Math.max(0.1, (partDuration / totalDuration) * 100) //less than 0.1% is not visible - const partStartPercentage = (partStartRelativeToTimeline / totalDuration) * 100 + const partPercentage = percentage(partDuration, totalDuration, 0.1) + const partStartPercentage = percentage(partStartRelativeToTimeline, totalDuration, 0) return { startPercentage: `${partStartPercentage}%`, widthPercentage: `${partPercentage}%` } } -const TimeLineView = ({ performanceEvent }: { performanceEvent: PerformanceEvent }): JSX.Element => { +const TimeLineView = ({ performanceEvent }: { performanceEvent: PerformanceEvent }): JSX.Element | null => { const rangeStart = performanceEvent.start_time - const rangeEnd = performanceEvent.response_end + const rangeEnd = performanceEvent.load_event_end ? performanceEvent.load_event_end : performanceEvent.response_end if (typeof rangeStart === 'number' && typeof rangeEnd === 'number') { - const performanceParts = calculatePerformanceParts(performanceEvent) + const timings = calculatePerformanceParts(performanceEvent) return (
{perfSections.map((section) => { - const matchedSection = performanceParts[section] + const matchedSection = timings[section] const start = matchedSection?.start const end = matchedSection?.end const partDuration = end - start @@ -263,7 +293,7 @@ const TimeLineView = ({ performanceEvent }: { performanceEvent: PerformanceEvent
) } - return Cannot render performance timeline for this request + return null } const TableView = ({ performanceEvent }: { performanceEvent: PerformanceEvent }): JSX.Element => { @@ -283,11 +313,15 @@ export const NetworkRequestTiming = ({ }): JSX.Element | null => { const [timelineMode, setTimelineMode] = useState(true) + // if timeline view renders null then we fall back to table view + const timelineView = timelineMode ? : null + return (
setTimelineMode(!timelineMode)} data-attr={`switch-timing-to-${timelineMode ? 'table' : 'timeline'}-view`} @@ -296,11 +330,11 @@ export const NetworkRequestTiming = ({
- {timelineMode ? ( - - ) : ( - - )} + {timelineMode && timelineView ? timelineView : }
) } + +function isPresent(x: number | undefined): x is number { + return typeof x === 'number' +} diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/Timing/calculatePerformanceParts.test.ts b/frontend/src/scenes/session-recordings/player/inspector/components/Timing/calculatePerformanceParts.test.ts new file mode 100644 index 0000000000000..24f59d7f4af3d --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/inspector/components/Timing/calculatePerformanceParts.test.ts @@ -0,0 +1,192 @@ +import { mapRRWebNetworkRequest } from 'scenes/session-recordings/player/inspector/performance-event-utils' +import { InitiatorType } from 'posthog-js' +import { calculatePerformanceParts } from 'scenes/session-recordings/player/inspector/components/Timing/NetworkRequestTiming' + +jest.mock('lib/colors', () => { + return { + getSeriesColor: jest.fn(() => '#000000'), + } +}) + +describe('calculatePerformanceParts', () => { + it('can calculate TTFB', () => { + const perfEvent = { + connect_end: 9525.599999964237, + connect_start: 9525.599999964237, + decoded_body_size: 18260, + domain_lookup_end: 9525.599999964237, + domain_lookup_start: 9525.599999964237, + duration: 935.5, + encoded_body_size: 18260, + entry_type: 'resource', + fetch_start: 9525.599999964237, + initiator_type: 'fetch', + name: 'http://localhost:8000/api/organizations/@current/plugins/repository/', + next_hop_protocol: 'http/1.1', + redirect_end: 0, + redirect_start: 0, + render_blocking_status: 'non-blocking', + request_start: 9803.099999964237, + response_end: 10461.099999964237, + response_start: 10428.399999976158, + response_status: 200, + secure_connection_start: 0, + start_time: 9525.599999964237, + time_origin: '1699990397357', + timestamp: 1699990406882, + transfer_size: 18560, + window_id: '018bcf51-b1f0-7fe0-ac05-10543621f4f2', + worker_start: 0, + uuid: '12345', + distinct_id: '23456', + session_id: 'abcde', + pageview_id: 'fghij', + current_url: 
'http://localhost:8000/insights', + } + + expect(calculatePerformanceParts(perfEvent)).toEqual({ + 'request queuing time': { + color: '#000000', + end: 9803.099999964237, + start: 9525.599999964237, + }, + + 'waiting for first byte': { + color: '#000000', + end: 10428.399999976158, + start: 9803.099999964237, + }, + 'receiving response': { + color: '#000000', + end: 10461.099999964237, + start: 10428.399999976158, + }, + }) + }) + + it('can handle gravatar timings', () => { + const gravatarReqRes = { + name: 'https://www.gravatar.com/avatar/2e7d95b60efbe947f71009a1af1ba8d0?s=96&d=404', + entryType: 'resource', + initiatorType: 'fetch' as InitiatorType, + deliveryType: '', + nextHopProtocol: '', + renderBlockingStatus: 'non-blocking', + workerStart: 0, + redirectStart: 0, + redirectEnd: 0, + domainLookupStart: 0, + domainLookupEnd: 0, + connectStart: 0, + secureConnectionStart: 0, + connectEnd: 0, + requestStart: 0, + responseStart: 0, + firstInterimResponseStart: 0, + // only fetch start and response end + // and transfer size is 0 + // loaded from disk cache + startTime: 18229, + fetchStart: 18228.5, + responseEnd: 18267.5, + endTime: 18268, + duration: 39, + transferSize: 0, + encodedBodySize: 0, + decodedBodySize: 0, + responseStatus: 200, + serverTiming: [], + timeOrigin: 1700296048424, + timestamp: 1700296066652, + method: 'GET', + status: 200, + requestHeaders: {}, + requestBody: null, + responseHeaders: { + 'cache-control': 'max-age=300', + 'content-length': '13127', + 'content-type': 'image/png', + expires: 'Sat, 18 Nov 2023 08:32:46 GMT', + 'last-modified': 'Wed, 02 Feb 2022 09:11:05 GMT', + }, + responseBody: '�PNGblah', + } + const mappedToPerfEvent = mapRRWebNetworkRequest(gravatarReqRes, 'windowId', 1700296066652) + expect(calculatePerformanceParts(mappedToPerfEvent)).toEqual({ + // 'app cache' not included - end would be before beginning + // 'connection time' has 0 length + // 'dns lookup' has 0 length + // 'redirect has 0 length + // 'tls time' has 
0 length + // TTFB has 0 length + 'receiving response': { + color: '#000000', + end: 18267.5, + start: 18228.5, + }, + }) + }) + + it('can handle no TLS connection timing', () => { + const tlsFreeReqRes = { + name: 'http://localhost:8000/decide/?v=3&ip=1&_=1700319068450&ver=1.91.1', + entryType: 'resource', + startTime: 6648, + duration: 93.40000003576279, + initiatorType: 'xmlhttprequest' as InitiatorType, + deliveryType: '', + nextHopProtocol: 'http/1.1', + renderBlockingStatus: 'non-blocking', + workerStart: 0, + redirectStart: 0, + redirectEnd: 0, + fetchStart: 6647.699999988079, + domainLookupStart: 6648.800000011921, + domainLookupEnd: 6648.800000011921, + connectStart: 6648.800000011921, + secureConnectionStart: 0, + connectEnd: 6649.300000011921, + requestStart: 6649.5, + responseStart: 6740.800000011921, + firstInterimResponseStart: 0, + responseEnd: 6741.100000023842, + transferSize: 2383, + encodedBodySize: 2083, + decodedBodySize: 2083, + responseStatus: 200, + serverTiming: [], + endTime: 6741, + timeOrigin: 1700319061802, + timestamp: 1700319068449, + isInitial: true, + } + const mappedToPerfEvent = mapRRWebNetworkRequest(tlsFreeReqRes, 'windowId', 1700319068449) + expect(calculatePerformanceParts(mappedToPerfEvent)).toEqual({ + 'app cache': { + color: '#000000', + end: 6648.800000011921, + start: 6647.699999988079, + }, + 'connection time': { + color: '#000000', + end: 6649.300000011921, + start: 6648.800000011921, + }, + 'waiting for first byte': { + color: '#000000', + end: 6740.800000011921, + start: 6649.5, + }, + 'receiving response': { + color: '#000000', + end: 6741.100000023842, + start: 6740.800000011921, + }, + 'request queuing time': { + color: '#000000', + end: 6649.5, + start: 6649.300000011921, + }, + }) + }) +}) diff --git a/frontend/src/scenes/session-recordings/player/inspector/performance-event-utils.ts b/frontend/src/scenes/session-recordings/player/inspector/performance-event-utils.ts index 51d9770d0e41f..8d432f4b0d6a5 100644 --- 
a/frontend/src/scenes/session-recordings/player/inspector/performance-event-utils.ts +++ b/frontend/src/scenes/session-recordings/player/inspector/performance-event-utils.ts @@ -1,10 +1,9 @@ import { eventWithTime } from '@rrweb/types' -import posthog from 'posthog-js' +import { CapturedNetworkRequest } from 'posthog-js' import { PerformanceEvent } from '~/types' const NETWORK_PLUGIN_NAME = 'posthog/network@1' const RRWEB_NETWORK_PLUGIN_NAME = 'rrweb/network@1' -const IGNORED_POSTHOG_PATHS = ['/s/', '/e/', '/i/v0/e/'] export const PerformanceEventReverseMapping: { [key: number]: keyof PerformanceEvent } = { // BASE_PERFORMANCE_EVENT_COLUMNS @@ -58,8 +57,97 @@ export const PerformanceEventReverseMapping: { [key: number]: keyof PerformanceE 40: 'timestamp', } +export const RRWebPerformanceEventReverseMapping: Record = { + // BASE_PERFORMANCE_EVENT_COLUMNS + entryType: 'entry_type', + timeOrigin: 'time_origin', + name: 'name', + + // RESOURCE_EVENT_COLUMNS + startTime: 'start_time', + redirectStart: 'redirect_start', + redirectEnd: 'redirect_end', + workerStart: 'worker_start', + fetchStart: 'fetch_start', + domainLookupStart: 'domain_lookup_start', + domainLookupEnd: 'domain_lookup_end', + connectStart: 'connect_start', + secureConnectionStart: 'secure_connection_start', + connectEnd: 'connect_end', + requestStart: 'request_start', + responseStart: 'response_start', + responseEnd: 'response_end', + decodedBodySize: 'decoded_body_size', + encodedBodySize: 'encoded_body_size', + initiatorType: 'initiator_type', + nextHopProtocol: 'next_hop_protocol', + renderBlockingStatus: 'render_blocking_status', + responseStatus: 'response_status', + transferSize: 'transfer_size', + + // LARGEST_CONTENTFUL_PAINT_EVENT_COLUMNS + largestContentfulPaintElement: 'largest_contentful_paint_element', + largestContentfulPaintRenderTime: 'largest_contentful_paint_render_time', + largestContentfulPaintLoadTime: 'largest_contentful_paint_load_time', + largestContentfulPaintSize: 
'largest_contentful_paint_size', + largestContentfulPaintId: 'largest_contentful_paint_id', + largestContentfulPaintUrl: 'largest_contentful_paint_url', + + // NAVIGATION_EVENT_COLUMNS + domComplete: 'dom_complete', + domContentLoadedEvent: 'dom_content_loaded_event', + domInteractive: 'dom_interactive', + loadEventEnd: 'load_event_end', + loadEventStart: 'load_event_start', + redirectCount: 'redirect_count', + navigationType: 'navigation_type', + unloadEventEnd: 'unload_event_end', + unloadEventStart: 'unload_event_start', + + // Added after v1 + duration: 'duration', + timestamp: 'timestamp', + + //rrweb/network@1 + isInitial: 'is_initial', + requestHeaders: 'request_headers', + responseHeaders: 'response_headers', + requestBody: 'request_body', + responseBody: 'response_body', + method: 'method', +} + +export function mapRRWebNetworkRequest( + capturedRequest: CapturedNetworkRequest, + windowId: string, + timestamp: PerformanceEvent['timestamp'] +): PerformanceEvent { + const data: Partial = { + timestamp: timestamp, + window_id: windowId, + raw: capturedRequest, + } + + Object.entries(RRWebPerformanceEventReverseMapping).forEach(([key, value]) => { + if (key in capturedRequest) { + data[value] = capturedRequest[key] + } + }) + + // KLUDGE: this shouldn't be necessary but let's display correctly while we figure out why it is. 
+ if (!data.name && 'url' in capturedRequest) { + data.name = capturedRequest.url as string | undefined + } + + return data as PerformanceEvent +} + export function matchNetworkEvents(snapshotsByWindowId: Record): PerformanceEvent[] { - const eventsMapping: Record> = {} + // we only support rrweb/network@1 events or posthog/network@1 events in any one recording + // apart from during testing, where we might have both + // if we have both, we only display posthog/network@1 events + const events: PerformanceEvent[] = [] + const rrwebEvents: PerformanceEvent[] = [] // we could do this in one pass, but it's easier to log missing events // when we have all the posthog/network@1 events first @@ -83,93 +171,27 @@ export function matchNetworkEvents(snapshotsByWindowId: Record { - const snapshots = snapshotsByWindowId[1] - snapshots.forEach((snapshot: eventWithTime) => { if ( snapshot.type === 6 && // RRWeb plugin event type snapshot.data.plugin === RRWEB_NETWORK_PLUGIN_NAME ) { const payload = snapshot.data.payload as any + if (!Array.isArray(payload.requests) || payload.requests.length === 0) { return } payload.requests.forEach((capturedRequest: any) => { - const matchedURL = eventsMapping[capturedRequest.url] - - const matchedStartTime = matchedURL ? 
matchedURL[capturedRequest.startTime] : null - - if (matchedStartTime && matchedStartTime.length === 1) { - matchedStartTime[0].response_status = capturedRequest.status - matchedStartTime[0].request_headers = capturedRequest.requestHeaders - matchedStartTime[0].request_body = capturedRequest.requestBody - matchedStartTime[0].response_headers = capturedRequest.responseHeaders - matchedStartTime[0].response_body = capturedRequest.responseBody - matchedStartTime[0].method = capturedRequest.method - } else if (matchedStartTime && matchedStartTime.length > 1) { - // find in eventsMapping[capturedRequest.url][capturedRequest.startTime] by matching capturedRequest.endTime and element.response_end - const matchedEndTime = matchedStartTime.find( - (x) => - typeof x.response_end === 'number' && - Math.round(x.response_end) === capturedRequest.endTime - ) - if (matchedEndTime) { - matchedEndTime.response_status = capturedRequest.status - matchedEndTime.request_headers = capturedRequest.requestHeaders - matchedEndTime.request_body = capturedRequest.requestBody - matchedEndTime.response_headers = capturedRequest.responseHeaders - matchedEndTime.response_body = capturedRequest.responseBody - matchedEndTime.method = capturedRequest.method - } else { - const capturedURL = new URL(capturedRequest.url) - const capturedPath = capturedURL.pathname - - if (!IGNORED_POSTHOG_PATHS.some((x) => capturedPath === x)) { - posthog.capture('Had matches but still could not match rrweb/network@1 event', { - rrwebNetworkEvent: payload, - possibleMatches: matchedStartTime, - totalMatchedURLs: Object.keys(eventsMapping).length, - }) - } - } - } else { - const capturedURL = new URL(capturedRequest.url) - const capturedPath = capturedURL.pathname - if (!IGNORED_POSTHOG_PATHS.some((x) => capturedPath === x)) { - posthog.capture('Could not match rrweb/network@1 event', { - rrwebNetworkEvent: payload, - possibleMatches: eventsMapping[capturedRequest.url], - totalMatchedURLs: 
Object.keys(eventsMapping).length, - }) - } - } + const data: PerformanceEvent = mapRRWebNetworkRequest(capturedRequest, windowId, snapshot.timestamp) + + rrwebEvents.push(data) }) } }) }) - // now flatten the eventsMapping into a single array - return Object.values(eventsMapping).reduce((acc: PerformanceEvent[], eventsByURL) => { - Object.values(eventsByURL).forEach((eventsByTime) => { - acc.push(...eventsByTime) - }) - return acc - }, []) + return events.length ? events : rrwebEvents } diff --git a/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx b/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx index 4e34628a2f797..4c2003842efff 100644 --- a/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx +++ b/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx @@ -5,7 +5,7 @@ import { Form } from 'kea-forms' import { IconCopy } from 'lib/lemon-ui/icons' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' import { Field } from 'lib/forms/Field' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { playerShareLogic, PlayerShareLogicProps } from './playerShareLogic' import { SharingModalContent } from 'lib/components/Sharing/SharingModal' import { captureException } from '@sentry/react' diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx index d3acb8f7aa308..a2c34cfb26a1b 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx @@ -162,7 +162,7 @@ function PinnedIndicator(): JSX.Element | null { function ViewedIndicator(props: { viewed: boolean }): JSX.Element | null { return !props.viewed ? ( -
+
) : null } diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss index 455928f1dad82..379fe80fd42cc 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss @@ -20,6 +20,12 @@ width: 25%; overflow: hidden; height: 100%; + + .text-link { + .posthog-3000 & { + color: var(--default); + } + } } .SessionRecordingsPlaylist__player { @@ -64,11 +70,11 @@ transition: background-color 200ms ease, border 200ms ease; &--active { - border-left-color: var(--primary); + border-left-color: var(--primary-3000); } &:hover { - background-color: var(--primary-highlight); + background-color: var(--primary-3000-highlight); } .SessionRecordingPreview__property-icon:hover { diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx index ec3aa4b9a723c..90368ee82d0cc 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx @@ -23,7 +23,7 @@ import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { SessionRecordingsPlaylistSettings } from './SessionRecordingsPlaylistSettings' import { SessionRecordingsPlaylistTroubleshooting } from './SessionRecordingsPlaylistTroubleshooting' -import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' +import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' import { LemonTableLoader } from 'lib/lemon-ui/LemonTable/LemonTableLoader' import { DraggableToNotebook } from 'scenes/notebooks/AddToNotebook/DraggableToNotebook' import { range } from 'd3' diff --git 
a/frontend/src/scenes/session-recordings/playlist/playlistUtils.ts b/frontend/src/scenes/session-recordings/playlist/playlistUtils.ts index 9767d4b41809a..68ae4384e0504 100644 --- a/frontend/src/scenes/session-recordings/playlist/playlistUtils.ts +++ b/frontend/src/scenes/session-recordings/playlist/playlistUtils.ts @@ -2,7 +2,8 @@ import { PropertyOperator, RecordingFilters, SessionRecordingPlaylistType } from import { cohortsModelType } from '~/models/cohortsModelType' import { toLocalFilters } from 'scenes/insights/filters/ActionFilter/entityFilterLogic' import { getDisplayNameFromEntityFilter } from 'scenes/insights/utils' -import { convertPropertyGroupToProperties, deleteWithUndo, genericOperatorMap } from 'lib/utils' +import { genericOperatorMap } from 'lib/utils' +import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { getKeyMapping } from 'lib/taxonomy' import api from 'lib/api' import { lemonToast } from 'lib/lemon-ui/lemonToast' @@ -11,6 +12,7 @@ import { router } from 'kea-router' import { urls } from 'scenes/urls' import { openBillingPopupModal } from 'scenes/billing/BillingPopup' import { PLAYLIST_LIMIT_REACHED_MESSAGE } from 'scenes/session-recordings/sessionRecordingsLogic' +import { convertPropertyGroupToProperties } from 'lib/components/PropertyFilters/utils' function getOperatorSymbol(operator: PropertyOperator | null): string { if (!operator) { diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts index f5e310872f570..761c1f0f5e2ef 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts @@ -17,6 +17,7 @@ import type { sessionRecordingsPlaylistSceneLogicType } from './sessionRecording import { PINNED_RECORDINGS_LIMIT } from './sessionRecordingsPlaylistLogic' import api 
from 'lib/api' import { addRecordingToPlaylist, removeRecordingFromPlaylist } from '../player/utils/playerUtils' +import { Scene } from 'scenes/sceneTypes' export interface SessionRecordingsPlaylistLogicProps { shortId: string @@ -135,14 +136,17 @@ export const sessionRecordingsPlaylistSceneLogic = kea [s.playlist], (playlist): Breadcrumb[] => [ { + key: Scene.Replay, name: 'Replay', path: urls.replay(), }, { + key: ReplayTabs.Playlists, name: 'Playlists', path: urls.replay(ReplayTabs.Playlists), }, { + key: playlist?.short_id || 'new', name: playlist?.name || playlist?.derived_name || '(Untitled)', path: urls.replayPlaylist(playlist?.short_id || ''), }, diff --git a/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts b/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts index 10d58cdcb3d07..be26144e3d296 100644 --- a/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts +++ b/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts @@ -5,11 +5,12 @@ import { actionToUrl, router, urlToAction } from 'kea-router' import type { sessionRecordingsLogicType } from './sessionRecordingsLogicType' import { SESSION_RECORDINGS_PLAYLIST_FREE_COUNT } from 'lib/constants' import { capitalizeFirstLetter } from 'lib/utils' +import { Scene } from 'scenes/sceneTypes' export const humanFriendlyTabName = (tab: ReplayTabs): string => { switch (tab) { case ReplayTabs.Recent: - return 'Recent Recordings' + return 'Recent recordings' case ReplayTabs.Playlists: return 'Playlists' case ReplayTabs.FilePlayback: @@ -48,11 +49,13 @@ export const sessionRecordingsLogic = kea([ const breadcrumbs: Breadcrumb[] = [] if (tab !== ReplayTabs.Recent) { breadcrumbs.push({ + key: Scene.Replay, name: 'Replay', path: urls.replay(), }) } breadcrumbs.push({ + key: tab, name: humanFriendlyTabName(tab), }) diff --git a/frontend/src/scenes/settings/Settings.tsx b/frontend/src/scenes/settings/Settings.tsx index 80097630a0486..d4b99a7750532 100644 --- 
a/frontend/src/scenes/settings/Settings.tsx +++ b/frontend/src/scenes/settings/Settings.tsx @@ -1,8 +1,8 @@ import { LemonBanner, LemonButton, LemonDivider } from '@posthog/lemon-ui' import { IconChevronRight, IconLink } from 'lib/lemon-ui/icons' -import { SettingsLogicProps, settingsLogic } from './settingsLogic' +import { settingsLogic } from './settingsLogic' import { useActions, useValues } from 'kea' -import { SettingLevelIds } from './types' +import { SettingLevelIds, SettingsLogicProps } from './types' import clsx from 'clsx' import { capitalizeFirstLetter } from 'lib/utils' import { useResizeBreakpoints } from 'lib/hooks/useResizeObserver' diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx index 2d8304b40ca7f..a950609c6fa02 100644 --- a/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx +++ b/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx @@ -40,7 +40,7 @@ export function ConfigureSAMLModal(): JSX.Element { {`${siteUrl}/complete/saml/`} - {configureSAMLModalId ?? 
undefined} + {configureSAMLModalId || 'unknown'} {siteUrl} diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx index 548a89ad5ca5a..f62668714b831 100644 --- a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx +++ b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifiedDomains.tsx @@ -12,11 +12,11 @@ import { SSOSelect } from './SSOSelect' import { VerifyDomainModal } from './VerifyDomainModal' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { Link } from 'lib/lemon-ui/Link' -import { UPGRADE_LINK } from 'lib/constants' import { LemonSwitch } from 'lib/lemon-ui/LemonSwitch/LemonSwitch' import { ConfigureSAMLModal } from './ConfigureSAMLModal' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' import { IconInfo } from '@posthog/icons' +import { urls } from 'scenes/urls' const iconStyle = { marginRight: 4, fontSize: '1.15em', paddingTop: 2 } @@ -140,11 +140,7 @@ function VerifiedDomainsTable(): JSX.Element { render: function SSOEnforcement(_, { sso_enforcement, is_verified, id, has_saml }, index) { if (!isSSOEnforcementAvailable) { return index === 0 ? ( - + Upgrade to enable SSO enforcement @@ -170,11 +166,7 @@ function VerifiedDomainsTable(): JSX.Element { render: function SAML(_, { is_verified, saml_acs_url, saml_entity_id, saml_x509_cert, has_saml }, index) { if (!isSAMLAvailable) { return index === 0 ? 
( - + Upgrade to enable SAML ) : ( diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx index 6fcc0606652c1..f461bb2ef737b 100644 --- a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx +++ b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx @@ -51,9 +51,11 @@ export function VerifyDomainModal(): JSX.Element {
{domainBeingVerified?.verification_challenge}
- + {domainBeingVerified && ( + + )}
diff --git a/frontend/src/scenes/settings/settingsLogic.ts b/frontend/src/scenes/settings/settingsLogic.ts index b754950febf11..3b72938fd742c 100644 --- a/frontend/src/scenes/settings/settingsLogic.ts +++ b/frontend/src/scenes/settings/settingsLogic.ts @@ -3,21 +3,11 @@ import { SettingsMap } from './SettingsMap' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' -import { SettingSection, Setting, SettingSectionId, SettingLevelId, SettingId } from './types' +import { SettingSection, Setting, SettingSectionId, SettingLevelId, SettingId, SettingsLogicProps } from './types' import type { settingsLogicType } from './settingsLogicType' import { urls } from 'scenes/urls' -import { copyToClipboard } from 'lib/utils' - -export type SettingsLogicProps = { - logicKey?: string - // Optional - if given, renders only the given level - settingLevelId?: SettingLevelId - // Optional - if given, renders only the given section - sectionId?: SettingSectionId - // Optional - if given, renders only the given setting - settingId?: SettingId -} +import { copyToClipboard } from 'lib/utils/copyToClipboard' export const settingsLogic = kea([ props({} as SettingsLogicProps), diff --git a/frontend/src/scenes/settings/settingsSceneLogic.ts b/frontend/src/scenes/settings/settingsSceneLogic.ts index ecd2b85d06e3f..5fcc8bb4b9b18 100644 --- a/frontend/src/scenes/settings/settingsSceneLogic.ts +++ b/frontend/src/scenes/settings/settingsSceneLogic.ts @@ -10,6 +10,7 @@ import { SettingSectionId, SettingLevelId, SettingLevelIds } from './types' import type { settingsSceneLogicType } from './settingsSceneLogicType' import { settingsLogic } from './settingsLogic' +import { Scene } from 'scenes/sceneTypes' export const settingsSceneLogic = kea([ path(['scenes', 'settings', 'settingsSceneLogic']), @@ -28,10 +29,12 @@ export const settingsSceneLogic = kea([ (s) => [s.selectedLevel, s.selectedSectionId, s.sections], (selectedLevel, selectedSectionId): 
Breadcrumb[] => [ { + key: Scene.Settings, name: `Settings`, path: urls.settings('project'), }, { + key: selectedSectionId || selectedLevel, name: selectedSectionId ? SettingsMap.find((x) => x.id === selectedSectionId)?.title : capitalizeFirstLetter(selectedLevel), diff --git a/frontend/src/scenes/settings/types.ts b/frontend/src/scenes/settings/types.ts index 30ee8324d0ebe..038da8dd71126 100644 --- a/frontend/src/scenes/settings/types.ts +++ b/frontend/src/scenes/settings/types.ts @@ -1,5 +1,14 @@ -import { FEATURE_FLAGS } from 'lib/constants' -import { EitherMembershipLevel } from 'lib/utils/permissioning' +import { EitherMembershipLevel, FEATURE_FLAGS } from 'lib/constants' + +export type SettingsLogicProps = { + logicKey?: string + // Optional - if given, renders only the given level + settingLevelId?: SettingLevelId + // Optional - if given, renders only the given section + sectionId?: SettingSectionId + // Optional - if given, renders only the given setting + settingId?: SettingId +} export type SettingLevelId = 'user' | 'project' | 'organization' export const SettingLevelIds: SettingLevelId[] = ['project', 'organization', 'user'] diff --git a/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx b/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx index f9f79327e1ba4..aca059f1e1fd5 100644 --- a/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx +++ b/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx @@ -90,7 +90,9 @@ function PersonalAPIKeysTable(): JSX.Element { dataIndex: 'value', render: function RenderValue(value) { return value ? 
( - {`${value}`} + + {String(value)} + ) : ( secret ) diff --git a/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts b/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts index 54314396d9ae8..4097d6997f895 100644 --- a/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts +++ b/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts @@ -3,7 +3,7 @@ import { kea, path, listeners } from 'kea' import api from 'lib/api' import { PersonalAPIKeyType } from '~/types' import type { personalAPIKeysLogicType } from './personalAPIKeysLogicType' -import { copyToClipboard } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' import { lemonToast } from 'lib/lemon-ui/lemonToast' export const personalAPIKeysLogic = kea([ diff --git a/frontend/src/scenes/sites/siteLogic.ts b/frontend/src/scenes/sites/siteLogic.ts index 7476f4eb51ab9..06ad0e13fdf98 100644 --- a/frontend/src/scenes/sites/siteLogic.ts +++ b/frontend/src/scenes/sites/siteLogic.ts @@ -2,6 +2,7 @@ import { kea, props, selectors, path } from 'kea' import { Breadcrumb } from '~/types' import type { siteLogicType } from './siteLogicType' +import { Scene } from 'scenes/sceneTypes' export interface SiteLogicProps { url: string @@ -15,9 +16,11 @@ export const siteLogic = kea([ (_, p) => [p.url], (url): Breadcrumb[] => [ { + key: Scene.Site, name: `Site`, }, { + key: url, name: url, }, ], diff --git a/frontend/src/scenes/surveys/SurveyTemplates.scss b/frontend/src/scenes/surveys/SurveyTemplates.scss index e0706c96e65df..c9622e7624689 100644 --- a/frontend/src/scenes/surveys/SurveyTemplates.scss +++ b/frontend/src/scenes/surveys/SurveyTemplates.scss @@ -1,17 +1,17 @@ @import '../../styles/mixins'; .SurveyTemplateContainer { - flex: 1; - display: flex; align-items: center; + background: var(--bg-light); + border-radius: var(--radius); border: 1px solid var(--border); - border-radius: 6px; + display: flex; + flex: 1; min-height: 300px; - margin-top: 2px; &:hover { cursor: pointer; - 
border-color: var(--primary-light); + border-color: var(--primary-3000-hover); } .SurveyTemplate { diff --git a/frontend/src/scenes/surveys/SurveyTemplates.tsx b/frontend/src/scenes/surveys/SurveyTemplates.tsx index 5eee9ba520d9a..759245b2f174e 100644 --- a/frontend/src/scenes/surveys/SurveyTemplates.tsx +++ b/frontend/src/scenes/surveys/SurveyTemplates.tsx @@ -28,7 +28,7 @@ export function SurveyTemplates(): JSX.Element { } /> -
+
{defaultSurveyTemplates.map((template, idx) => { return (
- + {showSurveysDisabledBanner ? ( ([ (s) => [s.survey], (survey: Survey): Breadcrumb[] => [ { + key: Scene.Surveys, name: 'Surveys', path: urls.surveys(), }, - ...(survey?.name ? [{ name: survey.name }] : []), + { key: survey?.id || 'new', name: survey.name }, ], ], dataTableQuery: [ diff --git a/frontend/src/scenes/surveys/surveyViewViz.tsx b/frontend/src/scenes/surveys/surveyViewViz.tsx index 0b5786ab2c109..a8fbec2df8fcd 100644 --- a/frontend/src/scenes/surveys/surveyViewViz.tsx +++ b/frontend/src/scenes/surveys/surveyViewViz.tsx @@ -483,8 +483,10 @@ export function OpenTextViz({ return (
-
- {event.properties[surveyResponseField]} +
+ {typeof event.properties[surveyResponseField] !== 'string' + ? JSON.stringify(event.properties[surveyResponseField]) + : event.properties[surveyResponseField]}
([ () => [], (): Breadcrumb[] => [ { + key: Scene.Surveys, name: 'Surveys', path: urls.surveys(), }, diff --git a/frontend/src/scenes/teamLogic.tsx b/frontend/src/scenes/teamLogic.tsx index 26fadbed4ed66..32f877f7ee5c7 100644 --- a/frontend/src/scenes/teamLogic.tsx +++ b/frontend/src/scenes/teamLogic.tsx @@ -1,5 +1,5 @@ import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea' -import api from 'lib/api' +import api, { ApiConfig } from 'lib/api' import type { teamLogicType } from './teamLogicType' import { CorrelationConfigType, PropertyOperator, TeamPublicType, TeamType } from '~/types' import { userLogic } from './userLogic' @@ -206,6 +206,11 @@ export const teamLogic = kea([ ], })), listeners(({ actions }) => ({ + loadCurrentTeamSuccess: ({ currentTeam }) => { + if (currentTeam) { + ApiConfig.setCurrentTeamId(currentTeam.id) + } + }, createTeamSuccess: () => { organizationLogic.actions.loadCurrentOrganization() }, diff --git a/frontend/src/scenes/trends/trendsDataLogic.ts b/frontend/src/scenes/trends/trendsDataLogic.ts index 3c4571cbbdbe8..83b6996957d91 100644 --- a/frontend/src/scenes/trends/trendsDataLogic.ts +++ b/frontend/src/scenes/trends/trendsDataLogic.ts @@ -26,6 +26,7 @@ export const trendsDataLogic = kea([ 'interval', 'breakdown', 'showValueOnSeries', + 'showLabelOnSeries', 'showPercentStackView', 'supportsPercentStackView', 'trendsFilter', @@ -36,6 +37,7 @@ export const trendsDataLogic = kea([ 'isNonTimeSeriesDisplay', 'isSingleSeries', 'hasLegend', + 'vizSpecificOptions', ], ], actions: [insightVizDataLogic(props), ['setInsightData', 'updateInsightFilter']], @@ -55,7 +57,7 @@ export const trendsDataLogic = kea([ ], }), - selectors({ + selectors(({ values }) => ({ results: [ (s) => [s.insightData], (insightData: TrendAPIResponse | null): TrendResult[] => { @@ -129,7 +131,12 @@ export const trendsDataLogic = kea([ } }, ], - }), + + pieChartVizOptions: [ + () => [() => values.vizSpecificOptions], + (vizSpecificOptions) 
=> vizSpecificOptions?.[ChartDisplayType.ActionsPie], + ], + })), listeners(({ actions, values }) => ({ loadMoreBreakdownValues: async () => { diff --git a/frontend/src/scenes/trends/viz/ActionsPie.tsx b/frontend/src/scenes/trends/viz/ActionsPie.tsx index 286f76650c55b..4b76cc9f56618 100644 --- a/frontend/src/scenes/trends/viz/ActionsPie.tsx +++ b/frontend/src/scenes/trends/viz/ActionsPie.tsx @@ -2,7 +2,7 @@ import './ActionsPie.scss' import { useState, useEffect } from 'react' import { getSeriesColor } from 'lib/colors' import { useValues } from 'kea' -import { ChartParams, GraphType, GraphDataset } from '~/types' +import { ChartParams, GraphType, GraphDataset, ChartDisplayType } from '~/types' import { insightLogic } from 'scenes/insights/insightLogic' import { formatAggregationAxisValue } from 'scenes/insights/aggregationAxisFormat' import { openPersonsModal } from '../persons-modal/PersonsModal' @@ -15,7 +15,12 @@ import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel' import { formatBreakdownLabel } from 'scenes/insights/utils' import { trendsDataLogic } from '../trendsDataLogic' -export function ActionsPie({ inSharedMode, inCardView, showPersonsModal = true }: ChartParams): JSX.Element | null { +export function ActionsPie({ + inSharedMode, + inCardView, + showPersonsModal = true, + context, +}: ChartParams): JSX.Element | null { const [data, setData] = useState(null) const [total, setTotal] = useState(0) @@ -29,10 +34,16 @@ export function ActionsPie({ inSharedMode, inCardView, showPersonsModal = true } trendsFilter, formula, showValueOnSeries, + showLabelOnSeries, supportsPercentStackView, showPercentStackView, + pieChartVizOptions, } = useValues(trendsDataLogic(insightProps)) + const renderingMetadata = context?.chartRenderingMetadata?.[ChartDisplayType.ActionsPie] + + const showAggregation = !pieChartVizOptions?.hideAggregation + function updateData(): void { const _data = [...indexedResults].sort((a, b) => b.aggregated_value - 
a.aggregated_value) const days = _data.length > 0 ? _data[0].days : [] @@ -69,6 +80,27 @@ export function ActionsPie({ inSharedMode, inCardView, showPersonsModal = true } } }, [indexedResults, hiddenLegendKeys]) + const onClick = + renderingMetadata?.onSegmentClick || + (!showPersonsModal || formula + ? undefined + : (payload) => { + const { points, index, crossDataset } = payload + const dataset = points.referencePoint.dataset + const label = dataset.labels?.[index] + + const urls = urlsForDatasets(crossDataset, index) + const selectedUrl = urls[index]?.value + + if (selectedUrl) { + openPersonsModal({ + urls, + urlsIndex: index, + title: , + }) + } + }) + return data ? ( data[0] && data[0].labels ? (
@@ -86,33 +118,18 @@ export function ActionsPie({ inSharedMode, inCardView, showPersonsModal = true } trendsFilter={trendsFilter} formula={formula} showValueOnSeries={showValueOnSeries} + showLabelOnSeries={showLabelOnSeries} supportsPercentStackView={supportsPercentStackView} showPercentStackView={showPercentStackView} - onClick={ - !showPersonsModal || formula - ? undefined - : (payload) => { - const { points, index, crossDataset } = payload - const dataset = points.referencePoint.dataset - const label = dataset.labels?.[index] - - const urls = urlsForDatasets(crossDataset, index) - const selectedUrl = urls[index]?.value - - if (selectedUrl) { - openPersonsModal({ - urls, - urlsIndex: index, - title: , - }) - } - } - } + onClick={onClick} + disableHoverOffset={pieChartVizOptions?.disableHoverOffset} />
-

- {formatAggregationAxisValue(trendsFilter, total)} -

+ {showAggregation && ( +

+ {formatAggregationAxisValue(trendsFilter, total)} +

+ )}
{inCardView && trendsFilter?.show_legend && }
diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index d84ac0cfa7473..51a66af2ff593 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -7,11 +7,11 @@ import { InsightShortId, ReplayTabs, PipelineTabs, + AppMetricsUrlParams, PipelineAppTabs, } from '~/types' import { combineUrl } from 'kea-router' import { ExportOptions } from '~/exporter/types' -import { AppMetricsUrlParams } from './apps/appMetricsSceneLogic' import { PluginTab } from './plugins/types' import { toParams } from 'lib/utils' import { SettingId, SettingLevelId, SettingSectionId } from './settings/types' diff --git a/frontend/src/scenes/userLogic.ts b/frontend/src/scenes/userLogic.ts index 8964fc32ab291..251eace6e5cd5 100644 --- a/frontend/src/scenes/userLogic.ts +++ b/frontend/src/scenes/userLogic.ts @@ -1,10 +1,9 @@ -import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea' +import { actions, afterMount, kea, listeners, path, reducers, selectors } from 'kea' import api from 'lib/api' import type { userLogicType } from './userLogicType' import { AvailableFeature, OrganizationBasicType, ProductKey, UserType } from '~/types' import posthog from 'posthog-js' import { getAppContext } from 'lib/utils/getAppContext' -import { preflightLogic } from './PreflightCheck/preflightLogic' import { lemonToast } from 'lib/lemon-ui/lemonToast' import { loaders } from 'kea-loaders' import { forms } from 'kea-forms' @@ -17,9 +16,6 @@ export interface UserDetailsFormType { export const userLogic = kea([ path(['scenes', 'userLogic']), - connect({ - values: [preflightLogic, ['preflight']], - }), actions(() => ({ loadUser: (resetOnFailure?: boolean) => ({ resetOnFailure }), updateCurrentTeam: (teamId: number, destination?: string) => ({ teamId, destination }), diff --git a/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx index fc20cfbf9d691..ff4ac124db98a 100644 --- 
a/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx +++ b/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx @@ -2,11 +2,11 @@ import { QueryContext, QueryContextColumnComponent, QueryContextColumnTitleCompo import { DataTableNode, InsightVizNode, NodeKind, WebStatsBreakdown } from '~/queries/schema' import { UnexpectedNeverError } from 'lib/utils' import { useActions, useValues } from 'kea' -import { GeographyTab, webAnalyticsLogic } from 'scenes/web-analytics/webAnalyticsLogic' +import { DeviceTab, GeographyTab, webAnalyticsLogic } from 'scenes/web-analytics/webAnalyticsLogic' import { useCallback, useMemo } from 'react' import { Query } from '~/queries/Query/Query' import { countryCodeToFlag, countryCodeToName } from 'scenes/insights/views/WorldMap' -import { PropertyFilterType } from '~/types' +import { GraphPointPayload, PropertyFilterType } from '~/types' import { ChartDisplayType } from '~/types' const PercentageCell: QueryContextColumnComponent = ({ value }) => { @@ -173,9 +173,12 @@ export const webAnalyticsDataTableQueryContext: QueryContext = { } export const WebStatsTrendTile = ({ query }: { query: InsightVizNode }): JSX.Element => { - const { togglePropertyFilter, setGeographyTab } = useActions(webAnalyticsLogic) - const { hasCountryFilter } = useValues(webAnalyticsLogic) + const { togglePropertyFilter, setGeographyTab, setDeviceTab } = useActions(webAnalyticsLogic) + const { hasCountryFilter, deviceTab, hasDeviceTypeFilter, hasBrowserFilter, hasOSFilter } = + useValues(webAnalyticsLogic) const { key: worldMapPropertyName } = webStatsBreakdownToPropertyName(WebStatsBreakdown.Country) + const { key: deviceTypePropertyName } = webStatsBreakdownToPropertyName(WebStatsBreakdown.DeviceType) + const onWorldMapClick = useCallback( (breakdownValue: string) => { togglePropertyFilter(PropertyFilterType.Event, worldMapPropertyName, breakdownValue) @@ -187,6 +190,33 @@ export const WebStatsTrendTile = ({ query }: { query: InsightVizNode }): JSX.Ele 
[togglePropertyFilter, worldMapPropertyName] ) + const onDeviceTilePieChartClick = useCallback( + (graphPoint: GraphPointPayload) => { + if (graphPoint.seriesId == null) { + return + } + const dataset = graphPoint.crossDataset?.[graphPoint.seriesId] + if (!dataset) { + return + } + const breakdownValue = dataset.breakdownValues?.[graphPoint.index] + if (!breakdownValue) { + return + } + togglePropertyFilter(PropertyFilterType.Event, deviceTypePropertyName, breakdownValue) + + // switch to a different tab if we can, try them in this order: DeviceType Browser OS + if (deviceTab !== DeviceTab.DEVICE_TYPE && !hasDeviceTypeFilter) { + setDeviceTab(DeviceTab.DEVICE_TYPE) + } else if (deviceTab !== DeviceTab.BROWSER && !hasBrowserFilter) { + setDeviceTab(DeviceTab.BROWSER) + } else if (deviceTab !== DeviceTab.OS && !hasOSFilter) { + setDeviceTab(DeviceTab.OS) + } + }, + [togglePropertyFilter, deviceTypePropertyName, deviceTab, hasDeviceTypeFilter, hasBrowserFilter, hasOSFilter] + ) + const context = useMemo((): QueryContext => { return { ...webAnalyticsDataTableQueryContext, @@ -201,6 +231,9 @@ export const WebStatsTrendTile = ({ query }: { query: InsightVizNode }): JSX.Ele } }, }, + [ChartDisplayType.ActionsPie]: { + onSegmentClick: onDeviceTilePieChartClick, + }, }, } }, [onWorldMapClick]) diff --git a/frontend/src/scenes/web-analytics/WebDashboard.tsx b/frontend/src/scenes/web-analytics/WebDashboard.tsx index 1a01cd4051ab5..830183d3ea037 100644 --- a/frontend/src/scenes/web-analytics/WebDashboard.tsx +++ b/frontend/src/scenes/web-analytics/WebDashboard.tsx @@ -152,7 +152,7 @@ const WebQuery = ({ query }: { query: QuerySchema }): JSX.Element => { export const WebAnalyticsDashboard = (): JSX.Element => { return ( -
+
diff --git a/frontend/src/scenes/web-analytics/WebTabs.tsx b/frontend/src/scenes/web-analytics/WebTabs.tsx index d96e72e9e5746..ede21aab56cb9 100644 --- a/frontend/src/scenes/web-analytics/WebTabs.tsx +++ b/frontend/src/scenes/web-analytics/WebTabs.tsx @@ -59,7 +59,7 @@ export const WebTabs = ({
-
{activeTab?.content}
+
{activeTab?.content}
) } diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts index 43a3740b04bff..f8f2284f26eed 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts @@ -184,7 +184,7 @@ export const webAnalyticsLogic = kea([ }, ], deviceTab: [ - DeviceTab.BROWSER as string, + DeviceTab.DEVICE_TYPE as string, { setDeviceTab: (_, { tab }) => tab, }, @@ -449,6 +449,39 @@ export const webAnalyticsLogic = kea([ activeTabId: deviceTab, setTabId: actions.setDeviceTab, tabs: [ + { + id: DeviceTab.DEVICE_TYPE, + title: 'Top Device Types', + linkText: 'Device Type', + query: { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + breakdown: { breakdown: '$device_type', breakdown_type: 'event' }, + dateRange, + series: [ + { + event: '$pageview', + kind: NodeKind.EventsNode, + math: BaseMathType.UniqueUsers, + }, + ], + trendsFilter: { + display: ChartDisplayType.ActionsPie, + show_labels_on_series: true, + }, + filterTestAccounts: true, + properties: webAnalyticsFilters, + }, + hidePersonsModal: true, + vizSpecificOptions: { + [ChartDisplayType.ActionsPie]: { + disableHoverOffset: true, + hideAggregation: true, + }, + }, + }, + }, { id: DeviceTab.BROWSER, title: 'Top browsers', @@ -479,21 +512,6 @@ export const webAnalyticsLogic = kea([ }, }, }, - { - id: DeviceTab.DEVICE_TYPE, - title: 'Top device types', - linkText: 'Device type', - query: { - full: true, - kind: NodeKind.DataTableNode, - source: { - kind: NodeKind.WebStatsTableQuery, - properties: webAnalyticsFilters, - breakdownBy: WebStatsBreakdown.DeviceType, - dateRange, - }, - }, - }, ], }, { @@ -619,6 +637,24 @@ export const webAnalyticsLogic = kea([ return webAnalyticsFilters.some((filter) => filter.key === '$geoip_country_code') }, ], + hasDeviceTypeFilter: [ + (s) => [s.webAnalyticsFilters], + (webAnalyticsFilters: WebAnalyticsPropertyFilters) => { + return 
webAnalyticsFilters.some((filter) => filter.key === '$device_type') + }, + ], + hasBrowserFilter: [ + (s) => [s.webAnalyticsFilters], + (webAnalyticsFilters: WebAnalyticsPropertyFilters) => { + return webAnalyticsFilters.some((filter) => filter.key === '$browser') + }, + ], + hasOSFilter: [ + (s) => [s.webAnalyticsFilters], + (webAnalyticsFilters: WebAnalyticsPropertyFilters) => { + return webAnalyticsFilters.some((filter) => filter.key === '$os') + }, + ], })), loaders(() => ({ // load the status check query here and pass the response into the component, so the response @@ -661,12 +697,38 @@ export const webAnalyticsLogic = kea([ shouldShowGeographyTile: { _default: null as boolean | null, loadShouldShowGeographyTile: async (): Promise => { - const response = await api.propertyDefinitions.list({ - event_names: ['$pageview'], - properties: ['$geoip_country_code'], - }) - const countryCodeDefinition = response.results.find((r) => r.name === '$geoip_country_code') - return !!countryCodeDefinition && !isDefinitionStale(countryCodeDefinition) + const [propertiesResponse, pluginsResponse, pluginsConfigResponse] = await Promise.allSettled([ + api.propertyDefinitions.list({ + event_names: ['$pageview'], + properties: ['$geoip_country_code'], + }), + api.loadPaginatedResults('api/organizations/@current/plugins'), + api.loadPaginatedResults('api/plugin_config'), + ]) + + const hasNonStaleCountryCodeDefinition = + propertiesResponse.status === 'fulfilled' && + propertiesResponse.value.results.some( + (property) => property.name === '$geoip_country_code' && !isDefinitionStale(property) + ) + + if (!hasNonStaleCountryCodeDefinition) { + return false + } + + const geoIpPlugin = + pluginsResponse.status === 'fulfilled' && + pluginsResponse.value.find( + (plugin) => plugin.url === 'https://www.npmjs.com/package/@posthog/geoip-plugin' + ) + const geoIpPluginId = geoIpPlugin ? 
geoIpPlugin.id : undefined + + const geoIpPluginConfig = + isNotNil(geoIpPluginId) && + pluginsConfigResponse.status === 'fulfilled' && + pluginsConfigResponse.value.find((plugin) => plugin.id === geoIpPluginId) + + return !!geoIpPluginConfig && geoIpPluginConfig.enabled }, }, })), diff --git a/frontend/src/styles/fonts.scss b/frontend/src/styles/fonts.scss index 9633ed07675ee..60443636e9b6b 100644 --- a/frontend/src/styles/fonts.scss +++ b/frontend/src/styles/fonts.scss @@ -29,3 +29,39 @@ unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; } + +/* Matter; bold (800); latin */ +@font-face { + font-family: MatterSQ; + font-style: normal; + font-weight: 800; + font-display: swap; + + // src: url('../../public/MatterSQ-Bold.woff2') format('woff2'), url('../../public/MatterSQ-Bold.woff') format('woff'); + src: url('https://d1sdjtjk6xzm7.cloudfront.net/MatterSQ-Bold.woff2') format('woff2'), + url('https://d1sdjtjk6xzm7.cloudfront.net/MatterSQ-Bold.woff') format('woff'); +} + +/* Matter; bold (700); latin */ +@font-face { + font-family: MatterSQ; + font-style: normal; + font-weight: 700; + font-display: swap; + + // src: url('../../public/MatterSQ-SemiBold.woff2') format('woff2'), url('../../public/MatterSQ-SemiBold.woff') format('woff'); + src: url('https://d1sdjtjk6xzm7.cloudfront.net/MatterSQ-SemiBold.woff2') format('woff2'), + url('https://d1sdjtjk6xzm7.cloudfront.net/MatterSQ-SemiBold.woff') format('woff'); +} + +/* Matter; medium (500); latin */ +@font-face { + font-family: MatterSQ; + font-style: normal; + font-weight: 500; + font-display: swap; + + // src: url('../../public/MatterSQ-Medium.woff2') format('woff2'), url('../../public/MatterSQ-Medium.woff') format('woff'); + src: url('https://d1sdjtjk6xzm7.cloudfront.net/public/MatterSQ-Medium.woff2') format('woff2'), + url('https://d1sdjtjk6xzm7.cloudfront.net/public/MatterSQ-Medium.woff') 
format('woff'); +} diff --git a/frontend/src/styles/global.scss b/frontend/src/styles/global.scss index fe3c66cd4b75b..69d3d66238b01 100644 --- a/frontend/src/styles/global.scss +++ b/frontend/src/styles/global.scss @@ -395,7 +395,7 @@ input::-ms-clear { cursor: pointer; div:nth-child(1) { - background: var(--primary); + background: var(--primary-3000); } div:nth-child(2) { @@ -526,10 +526,13 @@ input::-ms-clear { body { // Until we have 3000 rolled out we fallback to standard colors --text-3000: var(--default); + --text-secondary-3000: var(--text-secondary); --muted-3000: var(--muted); --trace-3000: var(--trace-3000-light); --primary-3000: var(--primary); + --primary-3000-highlight: var(--primary-highlight); --primary-3000-hover: var(--primary-light); + --primary-3000-active: var(--primary-dark); --secondary-3000: var(--secondary); --secondary-3000-hover: var(--secondary-light); --accent-3000: var(--side); @@ -539,16 +542,20 @@ body { --glass-bg-3000: var(--bg-light); --glass-border-3000: var(--border); --bg-light: #fff; - --link: var(--primary); + --bg-table: var(--bg-light); + --link: var(--primary-3000); touch-action: manipulation; // Disable double-tap-to-zoom on mobile, making taps slightly snappier &.posthog-3000[theme='light'] { --text-3000: var(--text-3000-light); + --text-secondary-3000: var(--text-secondary-3000-light); --muted-3000: var(--muted-3000-light); --trace-3000: var(--trace-3000-light); --primary-3000: var(--primary-3000-light); + --primary-3000-highlight: var(--primary-3000-highlight-light); --primary-3000-hover: var(--primary-3000-hover-light); + --primary-3000-active: var(--primary-3000-active-light); --secondary-3000: var(--secondary-3000-light); --secondary-3000-hover: var(--secondary-3000-hover-light); --accent-3000: var(--accent-3000-light); @@ -558,6 +565,7 @@ body { --glass-bg-3000: var(--glass-bg-3000-light); --glass-border-3000: var(--glass-border-3000-light); --bg-light: #fff; + --bg-table: #f9faf7; --link: var(--link-3000-light); 
--shadow-elevation-3000: var(--shadow-elevation-3000-light); --primary-3000-frame-bg: var(--primary-3000-frame-bg-light); @@ -565,6 +573,7 @@ body { --primary-3000-button-bg: var(--primary-3000-button-bg-light); --primary-3000-button-border: var(--primary-3000-button-border-light); --primary-3000-button-border-hover: var(--primary-3000-button-border-hover-light); + --primary-alt-highlight-3000: var(--primary-alt-highlight-3000-light); --secondary-3000-frame-bg: var(--secondary-3000-frame-bg-light); --secondary-3000-frame-border: var(--secondary-3000-frame-border-light); --secondary-3000-button-bg: var(--secondary-3000-button-bg-light); @@ -574,10 +583,13 @@ body { &.posthog-3000[theme='dark'] { --text-3000: var(--text-3000-dark); + --text-secondary-3000: var(--text-secondary-3000-dark); --muted-3000: var(--muted-3000-dark); --trace-3000: var(--trace-3000-dark); --primary-3000: var(--primary-3000-dark); + --primary-3000-highlight: var(--primary-3000-highlight-dark); --primary-3000-hover: var(--primary-3000-hover-dark); + --primary-3000-active: var(--primary-3000-active-dark); --secondary-3000: var(--secondary-3000-dark); --secondary-3000-hover: var(--secondary-3000-hover-dark); --accent-3000: var(--accent-3000-dark); @@ -587,6 +599,7 @@ body { --glass-bg-3000: var(--glass-bg-3000-dark); --glass-border-3000: var(--glass-border-3000-dark); --bg-light: var(--accent-3000); + --bg-table: #232429; --brand-key: #fff; // In dark mode the black in PostHog's logo is replaced with white for proper contrast --link: var(--link-3000-dark); --shadow-elevation-3000: var(--shadow-elevation-3000-dark); @@ -595,15 +608,23 @@ body { --primary-3000-button-bg: var(--primary-3000-button-bg-dark); --primary-3000-button-border: var(--primary-3000-button-border-dark); --primary-3000-button-border-hover: var(--primary-3000-button-border-hover-dark); + --primary-alt-highlight-3000: var(--primary-alt-highlight-3000-dark); --secondary-3000-frame-bg: var(--secondary-3000-frame-bg-dark); 
--secondary-3000-frame-border: var(--secondary-3000-frame-border-dark); --secondary-3000-button-bg: var(--secondary-3000-button-bg-dark); --secondary-3000-button-border: var(--secondary-3000-button-border-dark); --secondary-3000-button-border-hover: var(--secondary-3000-button-border-hover-dark); + --data-color-2: #7f26d9; + --data-color-3: #3e7a76; + --data-color-4: #bf0d6c; + --data-color-5: #f0474f; + --data-color-6: #b36114; + --data-color-10: #6576b3; } &.posthog-3000 { --default: var(--text-3000); + --text-secondary: var(--text-secondary-3000); --muted: var(--muted-3000); --muted-alt: var(--muted-3000); --primary-alt: var(--text-3000); @@ -611,10 +632,28 @@ body { --border-bold: var(--border-bold-3000); --mid: var(--bg-3000); --side: var(--bg-3000); + --primary-alt-highlight: var(--primary-alt-highlight-3000); + --data-color-1: #1d4aff; + --data-color-10: #35416b; background: var(--bg-3000); overflow: hidden; // Each area handles scrolling individually (e.g. navbar, scene, side panel) + .LemonButton, + .Link { + .text-link { + color: var(--text-3000); + } + + &:hover { + .text-link { + color: var(--primary-3000); + } + } + } + + --shadow-elevation: var(--shadow-elevation-3000); + * > { ::-webkit-scrollbar { width: 0.5rem; @@ -634,6 +673,13 @@ body { } } } + + h1, + h2, + h3, + h4 { + font-family: var(--font-title); + } } h1, diff --git a/frontend/src/styles/vars.scss b/frontend/src/styles/vars.scss index 1bc41d70b2014..fe7b07bb77ffa 100644 --- a/frontend/src/styles/vars.scss +++ b/frontend/src/styles/vars.scss @@ -103,7 +103,7 @@ $colors: ( 'border-bold': rgb(0 0 0 / 24%), 'border-active': rgb(0 0 0 / 36%), 'transparent': transparent, - 'link': var(--link), + 'link': var(--primary-3000), // Colors of the PostHog logo 'brand-blue': #1d4aff, 'brand-red': #f54e00, @@ -113,19 +113,25 @@ $colors: ( // This becomes white in dark mode // PostHog 3000 'text-3000-light': #111, + 'text-secondary-3000-light': rgba(#111, 0.7), 'muted-3000-light': rgba(#111, 0.5), 
'trace-3000-light': rgba(#111, 0.25), - 'primary-3000-light': rgba(#000, 0.75), - 'primary-3000-hover-light': #000, + 'primary-3000-light': #f54e01, + 'primary-3000-highlight-light': rgba(#f54e01, 0.1), + 'primary-3000-hover-light': #f54e01, + 'primary-3000-active-light': #f54e01, + 'secondary-3000-light': rgba(#cfd1c2, 0.6), 'secondary-3000-hover-light': #cfd1c2, 'accent-3000-light': #eeefe9, 'bg-3000-light': #f3f4ef, + 'bg-hover-3000-light': #f3f4ef, 'border-3000-light': #dadbd2, 'border-bold-3000-light': #c1c2b9, 'glass-bg-3000-light': #e4e5deb3, 'glass-border-3000-light': #e4e5de, - 'link-3000-light': var(--primary), + + 'link-3000-light': #f54e00, 'primary-3000-frame-bg-light': #eb9d2a, 'primary-3000-frame-border-light': #c28926, 'primary-3000-button-bg-light': #fff, @@ -141,24 +147,32 @@ $colors: ( 'shadow-elevation-3000-light': 0 2px 0 var(--border-3000-light), 'shadow-elevation-3000-dark': 0 2px 0 var(--border-3000-dark), 'text-3000-dark': #fff, + 'text-secondary-3000-dark': rgba(#fff, 0.7), 'muted-3000-dark': rgba(#fff, 0.5), 'trace-3000-dark': rgba(#fff, 0.25), - 'primary-3000-dark': var(--primary), - 'primary-3000-hover-dark': var(--primary-light), + 'primary-3000-dark': #f7a503, + 'primary-3000-highlight-dark': rgba(#f7a503, 0.1), + 'primary-3000-hover-dark': #f7a503, + 'primary-3000-active-dark': #f7a503, + 'primary-alt-highlight-3000-light': #e5e7e0, + 'secondary-3000-dark': #1d1f27, 'secondary-3000-hover-dark': #575d77, - 'accent-3000-dark': #1d1f27, - 'bg-3000-dark': #151619, - 'border-3000-dark': #2b2c32, + 'accent-3000-dark': #21242b, + 'bg-3000-dark': #1d1f27, + 'bg-hover-3000-dark': #292b36, + 'border-3000-dark': #35373e, 'border-bold-3000-dark': #3f4046, - 'glass-bg-3000-dark': #1d1f27b3, + 'glass-bg-3000-dark': #21242bb3, 'glass-border-3000-dark': var(--border-3000-dark), - 'link-3000-dark': rgb(47 129 247), + 'link-3000-dark': #f1a82c, + 'primary-3000-frame-bg-dark': #926826, 'primary-3000-frame-border-dark': #a97a2f, 
'primary-3000-button-bg-dark': #e0a045, 'primary-3000-button-border-dark': #b17816, 'primary-3000-button-border-hover-dark': #8e5b03, + 'primary-alt-highlight-3000-dark': #232429, 'secondary-3000-frame-bg-dark': #323232, 'secondary-3000-frame-border-dark': #383838, @@ -168,6 +182,7 @@ $colors: ( // The derived colors 'text-3000': var(--text-3000), + 'text-secondary-3000': var(--text-secondary-3000), 'muted-3000': var(--muted-3000), 'trace-3000': var(--trace-3000), 'primary-3000': var(--primary-3000), @@ -176,10 +191,11 @@ $colors: ( 'secondary-3000-hover': var(--secondary-3000-hover), 'accent-3000': var(--accent-3000), 'bg-3000': var(--bg-3000), + 'bg-hover-3000': var(--bg-hover-3000), 'border-3000': var(--border-3000), 'border-bold-3000': var(--border-bold-3000), 'glass-bg-3000': var(--glass-bg-3000), - 'glass-border-3000': var(--glass-border-3000), + 'glass-border-3000': var(--border-3000), 'link-3000': var(--link-3000), // 'bg-light': var(--accent-3000), 'primary-3000-frame-bg': var(--primary-3000-frame-bg), @@ -227,6 +243,8 @@ $_lifecycle_dormant: $_danger; --font-semibold: 600; --font-sans: -apple-system, blinkmacsystemfont, 'Inter', 'Segoe UI', 'Roboto', 'Helvetica Neue', helvetica, arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol'; + --font-title: 'MatterSQ', -apple-system, blinkmacsystemfont, 'Inter', 'Segoe UI', 'Roboto', 'Helvetica Neue', + helvetica, arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol'; --font-mono: ui-monospace, 'SFMono-Regular', 'SF Mono', 'Menlo', 'Consolas', 'Liberation Mono', monospace; // Dashboard item colors @@ -238,21 +256,22 @@ $_lifecycle_dormant: $_danger; // Tag colors --purple-light: #dcb1e3; - // Data colors (e.g. insight series). 
Note: colors.ts relies on these values being hexadecimal - --data-brand-blue: var(--primary); - --data-purple: #621da6; - --data-viridian: #42827e; - --data-magenta: #ce0e74; - --data-vermilion: #f14f58; - --data-brown: #7c440e; - --data-green: #529a0a; - --data-blue: #0476fb; - --data-pink: #fe729e; - --data-navy: var(--primary-alt); - --data-turquoise: #41cbc4; - --data-brick: #b64b02; - --data-yellow: #e4a604; - --data-lilac: #a56eff; + //// Data colors (e.g. insight series). Note: colors.ts relies on these values being hexadecimal + --data-color-1: #1d4aff; + --data-color-2: #621da6; + --data-color-3: #42827e; + --data-color-4: #ce0e74; + --data-color-5: #f14f58; + --data-color-6: #7c440e; + --data-color-7: #529a0a; + --data-color-8: #0476fb; + --data-color-9: #fe729e; + --data-color-10: #35416b; + --data-color-11: #41cbc4; + --data-color-12: #b64b02; + --data-color-13: #e4a604; + --data-color-14: #a56eff; + --data-color-15: #30d5c8; // Lifecycle series --lifecycle-new: #{$_lifecycle_new}; @@ -266,7 +285,6 @@ $_lifecycle_dormant: $_danger; // Funnels // TODO: unify with lib/colors.ts, getGraphColors() - --funnel-default: var(--primary); --funnel-background: var(--border-light); --funnel-axis: var(--border); --funnel-grid: #ddd; @@ -279,7 +297,7 @@ $_lifecycle_dormant: $_danger; --recording-seekbar-red: var(--brand-red); --recording-hover-event: var(--primary-bg-hover); --recording-hover-event-mid: var(--primary-bg-active); - --recording-hover-event-dark: var(--primary); + --recording-hover-event-dark: var(--primary-3000); --recording-current-event: #eef2ff; --recording-current-event-dark: var(--primary-alt); --recording-failure-event: #fee9e2; @@ -317,7 +335,7 @@ $_lifecycle_dormant: $_danger; // which means they aren't available in the toolbar --toastify-color-dark: var(--accent-3000-dark); --toastify-color-light: var(--bg-light); - --toastify-color-info: var(--primary); + --toastify-color-info: var(--primary-3000); --toastify-color-success: var(--success); 
--toastify-color-warning: var(--warning); --toastify-color-error: var(--danger); diff --git a/frontend/src/test/init.ts b/frontend/src/test/init.ts index dc896f740e8c9..32e4e2b7d8110 100644 --- a/frontend/src/test/init.ts +++ b/frontend/src/test/init.ts @@ -7,6 +7,7 @@ import { MOCK_DEFAULT_TEAM } from 'lib/api.mock' import { dayjs } from 'lib/dayjs' import { organizationLogic } from 'scenes/organizationLogic' import { teamLogic } from 'scenes/teamLogic' +import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' process.on('unhandledRejection', (err) => { console.warn(err) @@ -33,6 +34,7 @@ export function initKeaTests(mountCommonLogic = true, teamForWindowContext: Team ;(history as any).replaceState = history.replace initKea({ beforePlugins: [testUtilsPlugin], routerLocation: history.location, routerHistory: history }) if (mountCommonLogic) { + preflightLogic.mount() teamLogic.mount() organizationLogic.mount() } diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 27f80e6b98922..1273d98ef07e7 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -279,6 +279,8 @@ export interface ExplicitTeamMemberType extends BaseMemberType { effective_level: OrganizationMembershipLevel } +export type EitherMemberType = OrganizationMemberType | ExplicitTeamMemberType + /** * While OrganizationMemberType and ExplicitTeamMemberType refer to actual Django models, * this interface is only used in the frontend for fusing the data from these models together. @@ -1149,6 +1151,10 @@ export interface PerformanceEvent { request_body?: Body response_body?: Body method?: string + + //rrweb/network@1 - i.e. not in ClickHouse table + is_initial?: boolean + raw?: Record } export interface CurrentBillCycleType { @@ -1752,6 +1758,7 @@ export interface TrendsFilterType extends FilterType { aggregation_axis_prefix?: string // a prefix to add to the aggregation axis e.g. £ aggregation_axis_postfix?: string // a postfix to add to the aggregation axis e.g. 
% show_values_on_series?: boolean + show_labels_on_series?: boolean show_percent_stack_view?: boolean } @@ -2690,6 +2697,11 @@ export interface KeyMapping { system?: boolean } +export interface KeyMappingInterface { + event: Record + element: Record +} + export interface TileParams { title: string targetPath: string @@ -2753,16 +2765,30 @@ export interface DateMappingOption { defaultInterval?: IntervalType } -export interface Breadcrumb { +interface BreadcrumbBase { + /** E.g. scene identifier or item ID. Particularly important if `onRename` is used. */ + key: string | number /** Name to display. */ name: string | null | undefined /** Symbol, e.g. a lettermark or a profile picture. */ symbol?: React.ReactNode - /** Path to link to. */ - path?: string /** Whether to show a custom popover */ popover?: Pick } +interface LinkBreadcrumb extends BreadcrumbBase { + /** Path to link to. */ + path?: string + onRename?: never +} +interface RenamableBreadcrumb extends BreadcrumbBase { + path?: never + /** When this is set, an "Edit" button shows up next to the title */ + onRename?: (newName: string) => Promise +} +export type Breadcrumb = LinkBreadcrumb | RenamableBreadcrumb +export type FinalizedBreadcrumb = + | (LinkBreadcrumb & { globalKey: string }) + | (RenamableBreadcrumb & { globalKey: string }) export enum GraphType { Bar = 'bar', @@ -3396,6 +3422,23 @@ export enum SDKTag { export type SDKInstructionsMap = Partial> +export interface AppMetricsUrlParams { + tab?: AppMetricsTab + from?: string + error?: [string, string] +} + +export enum AppMetricsTab { + Logs = 'logs', + ProcessEvent = 'processEvent', + OnEvent = 'onEvent', + ComposeWebhook = 'composeWebhook', + ExportEvents = 'exportEvents', + ScheduledTask = 'scheduledTask', + HistoricalExports = 'historical_exports', + History = 'history', +} + export enum SidePanelTab { Notebooks = 'notebook', Support = 'support', diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 
7ad1758c3c617..27497f398a6fa 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0015_add_verified_properties otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0364_team_external_data_workspace_rows +posthog: 0366_alter_action_created_by sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/package.json b/package.json index c99c80f987667..e510b90210c08 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,7 @@ "build:esbuild": "node frontend/build.mjs", "schema:build": "pnpm run schema:build:json && pnpm run schema:build:python", "schema:build:json": "ts-json-schema-generator -f tsconfig.json --path 'frontend/src/queries/schema.ts' --no-type-check > frontend/src/queries/schema.json && prettier --write frontend/src/queries/schema.json", - "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py", + "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py", "grammar:build": "npm run grammar:build:python && npm run grammar:build:cpp", "grammar:build:python": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4", "grammar:build:cpp": "cd posthog/hogql/grammar && antlr -o ../../../hogql_parser -Dlanguage=Cpp HogQLLexer.g4 && antlr -o 
../../../hogql_parser -visitor -no-listener -Dlanguage=Cpp HogQLParser.g4", @@ -76,7 +76,7 @@ "@medv/finder": "^2.1.0", "@microlink/react-json-view": "^1.21.3", "@monaco-editor/react": "4.4.6", - "@posthog/icons": "0.2.0", + "@posthog/icons": "0.4.10", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", "@rrweb/types": "^2.0.0-alpha.11", @@ -136,7 +136,7 @@ "monaco-editor": "^0.39.0", "papaparse": "^5.4.1", "pmtiles": "^2.11.0", - "posthog-js": "1.91.1", + "posthog-js": "1.92.1", "posthog-js-lite": "2.0.0-alpha5", "prettier": "^2.8.8", "prop-types": "^15.7.2", @@ -311,7 +311,7 @@ ], "!(posthog/hogql/grammar/*)*.{py,pyi}": [ "ruff format", - "ruff check" + "ruff check --fix" ] }, "browserslist": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5369926d5c3d6..bb52460b7206a 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,4 +1,4 @@ -lockfileVersion: '6.0' +lockfileVersion: '6.1' settings: autoInstallPeers: true @@ -36,8 +36,8 @@ dependencies: specifier: 4.4.6 version: 4.4.6(monaco-editor@0.39.0)(react-dom@18.2.0)(react@18.2.0) '@posthog/icons': - specifier: 0.2.0 - version: 0.2.0(react-dom@18.2.0)(react@18.2.0) + specifier: 0.4.10 + version: 0.4.10(react-dom@18.2.0)(react@18.2.0) '@posthog/plugin-scaffold': specifier: ^1.4.4 version: 1.4.4 @@ -216,8 +216,8 @@ dependencies: specifier: ^2.11.0 version: 2.11.0 posthog-js: - specifier: 1.91.1 - version: 1.91.1 + specifier: 1.92.1 + version: 1.92.1 posthog-js-lite: specifier: 2.0.0-alpha5 version: 2.0.0-alpha5 @@ -958,6 +958,11 @@ packages: resolution: {integrity: sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==} engines: {node: '>=6.9.0'} + /@babel/helper-string-parser@7.23.4: + resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helper-validator-identifier@7.22.20: resolution: {integrity: 
sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==} engines: {node: '>=6.9.0'} @@ -1000,12 +1005,12 @@ packages: dependencies: '@babel/types': 7.23.0 - /@babel/parser@7.23.3: - resolution: {integrity: sha512-uVsWNvlVsIninV2prNz/3lHCb+5CJ+e+IUBfbjToAHODtfGYLfCFuY4AU7TskI+dAKk+njsPiBjq1gKTvZOBaw==} + /@babel/parser@7.23.4: + resolution: {integrity: sha512-vf3Xna6UEprW+7t6EtOmFpHNAuxw3xqPZghy+brsnusscJRW5BMUzzHZc5ICjULee81WeUV2jjakG09MDglJXQ==} engines: {node: '>=6.0.0'} hasBin: true dependencies: - '@babel/types': 7.23.3 + '@babel/types': 7.23.4 dev: true /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.22.5(@babel/core@7.22.10): @@ -2122,11 +2127,11 @@ packages: '@babel/helper-validator-identifier': 7.22.20 to-fast-properties: 2.0.0 - /@babel/types@7.23.3: - resolution: {integrity: sha512-OZnvoH2l8PK5eUvEcUyCt/sXgr/h+UWpVuBbOljwcrAgUl6lpchoQ++PHGyQy1AtYnVA6CEq3y5xeEI10brpXw==} + /@babel/types@7.23.4: + resolution: {integrity: sha512-7uIFwVYpoplT5jp/kVv6EF93VaJ8H+Yn5IczYiaAi98ajzjfoZfslet/e0sLh+wVBjb2qqIut1b0S26VSafsSQ==} engines: {node: '>=6.9.0'} dependencies: - '@babel/helper-string-parser': 7.22.5 + '@babel/helper-string-parser': 7.23.4 '@babel/helper-validator-identifier': 7.22.20 to-fast-properties: 2.0.0 dev: true @@ -3417,8 +3422,8 @@ packages: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} dev: false - /@posthog/icons@0.2.0(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-ftFoIropSJaFbxzzt1eGOgJCsbK0+L5KDdxKcpbhl4nMbmCEI/awzj98l+0pp/JAJzDrAsqEou7MvdJrntOGbw==} + /@posthog/icons@0.4.10(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-92/pvHxVSWpNri8XoT9cfLfzf7RRvYGn8qMM6vUhMwkebBiurg8/oQHY1rZ0GcKLvCvzyAtgIr4o/N7ma9kWlQ==} peerDependencies: react: '>=16.14.0' react-dom: '>=16.14.0' @@ -5896,8 +5901,8 @@ packages: /@types/babel__core@7.20.4: resolution: {integrity: 
sha512-mLnSC22IC4vcWiuObSRjrLd9XcBTGf59vUSoq2jkQDJ/QQ8PMI9rSuzE+aEV8karUMbskw07bKYoUJCKTUaygg==} dependencies: - '@babel/parser': 7.23.3 - '@babel/types': 7.23.3 + '@babel/parser': 7.23.4 + '@babel/types': 7.23.4 '@types/babel__generator': 7.6.7 '@types/babel__template': 7.4.4 '@types/babel__traverse': 7.20.4 @@ -5912,7 +5917,7 @@ packages: /@types/babel__generator@7.6.7: resolution: {integrity: sha512-6Sfsq+EaaLrw4RmdFWE9Onp63TOUue71AWb4Gpa6JxzgTYtimbM086WnYTy2U67AofR++QKCo08ZP6pwx8YFHQ==} dependencies: - '@babel/types': 7.23.3 + '@babel/types': 7.23.4 dev: true /@types/babel__template@7.4.3: @@ -5925,8 +5930,8 @@ packages: /@types/babel__template@7.4.4: resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==} dependencies: - '@babel/parser': 7.23.3 - '@babel/types': 7.23.3 + '@babel/parser': 7.23.4 + '@babel/types': 7.23.4 dev: true /@types/babel__traverse@7.20.3: @@ -5938,7 +5943,7 @@ packages: /@types/babel__traverse@7.20.4: resolution: {integrity: sha512-mSM/iKUk5fDDrEV/e83qY+Cr3I1+Q3qqTuEn++HAWYjEa1+NxZr6CNrcJGf2ZTnq4HoFGC3zaTPZTobCzCFukA==} dependencies: - '@babel/types': 7.23.3 + '@babel/types': 7.23.4 dev: true /@types/body-parser@1.19.4: @@ -15782,8 +15787,8 @@ packages: resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==} dev: false - /posthog-js@1.91.1: - resolution: {integrity: sha512-Pj4mqCT8p4JdEXOwdZ1lNFU4W8a+Uv7zZs3FIBvvFnnXcMak8Fr8ns6RTTdWo3UQqZGD7iuarYcwTYI8E5UHdA==} + /posthog-js@1.92.1: + resolution: {integrity: sha512-xtuTfM/acfDauiEfIdKF6d911KUZQ7RLii2COAYEoPWr3cVUFoNUoRQz9QJvgDlV2j22Zwl+mnXacUeua+Yi1A==} dependencies: fflate: 0.4.8 dev: false diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 4880c77e8f86d..ccb6670af4584 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -13,8 +13,8 @@ from statshog.defaults.django import statsd from posthog.api.geoip import get_geoip_properties 
-from posthog.api.utils import get_project_id, get_token from posthog.api.survey import SURVEY_TARGETING_FLAG_PREFIX +from posthog.api.utils import get_project_id, get_token from posthog.database_healthcheck import DATABASE_FOR_FLAG_MATCHING from posthog.exceptions import RequestParsingError, generate_exception_response from posthog.logging.timing import timed @@ -222,9 +222,16 @@ def get_decide(request: HttpRequest): else False ) - if settings.NEW_ANALYTICS_CAPTURE_TEAM_IDS and str(team.id) in settings.NEW_ANALYTICS_CAPTURE_TEAM_IDS: - if random() < settings.NEW_ANALYTICS_CAPTURE_SAMPLING_RATE: - response["analytics"] = {"endpoint": settings.NEW_ANALYTICS_CAPTURE_ENDPOINT} + if str(team.id) not in settings.NEW_ANALYTICS_CAPTURE_EXCLUDED_TEAM_IDS: + if ( + "*" in settings.NEW_ANALYTICS_CAPTURE_TEAM_IDS + or str(team.id) in settings.NEW_ANALYTICS_CAPTURE_TEAM_IDS + ): + if random() < settings.NEW_ANALYTICS_CAPTURE_SAMPLING_RATE: + response["analytics"] = {"endpoint": settings.NEW_ANALYTICS_CAPTURE_ENDPOINT} + + if settings.ELEMENT_CHAIN_AS_STRING_TEAMS and str(team.id) in settings.ELEMENT_CHAIN_AS_STRING_TEAMS: + response["elementsChainAsString"] = True if team.session_recording_opt_in and ( on_permitted_recording_domain(team, request) or not team.recording_domains diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index f513e9e74b6a4..45bd8e077de1f 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -503,11 +503,11 @@ def local_evaluation(self, request: request.Request, **kwargs): should_send_cohorts = "send_cohorts" in request.GET cohorts = {} - seen_cohorts_cache: Dict[str, Cohort] = {} + seen_cohorts_cache: Dict[int, Cohort] = {} if should_send_cohorts: seen_cohorts_cache = { - str(cohort.pk): cohort + cohort.pk: cohort for cohort in Cohort.objects.using(DATABASE_FOR_LOCAL_EVALUATION).filter( team_id=self.team_id, deleted=False ) @@ -547,12 +547,11 @@ def local_evaluation(self, request: request.Request, **kwargs): ): 
# don't duplicate queries for already added cohorts if id not in cohorts: - parsed_cohort_id = str(id) - if parsed_cohort_id in seen_cohorts_cache: - cohort = seen_cohorts_cache[parsed_cohort_id] + if id in seen_cohorts_cache: + cohort = seen_cohorts_cache[id] else: cohort = Cohort.objects.using(DATABASE_FOR_LOCAL_EVALUATION).get(id=id) - seen_cohorts_cache[parsed_cohort_id] = cohort + seen_cohorts_cache[id] = cohort if not cohort.is_static: cohorts[cohort.pk] = cohort.properties.to_dict() diff --git a/posthog/api/insight.py b/posthog/api/insight.py index a31f2dd9dbe05..20ec5e93d0619 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -21,7 +21,6 @@ from rest_framework.settings import api_settings from rest_framework_csv import renderers as csvrenderers from sentry_sdk import capture_exception -from statshog.defaults.django import statsd from posthog import schema from posthog.api.documentation import extend_schema @@ -32,6 +31,7 @@ TrendResultsSerializer, TrendSerializer, ) +from posthog.clickhouse.cancel import cancel_query_on_cluster from posthog.api.routing import StructuredViewSetMixin from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin @@ -43,7 +43,6 @@ synchronously_update_cache, ) from posthog.caching.insights_api import should_refresh_insight -from posthog.client import sync_execute from posthog.constants import ( BREAKDOWN_VALUES_LIMIT, INSIGHT, @@ -95,7 +94,6 @@ ClickHouseSustainedRateThrottle, ) from posthog.settings import CAPTURE_TIME_TO_SEE_DATA, SITE_URL -from posthog.settings.data_stores import CLICKHOUSE_CLUSTER from prometheus_client import Counter from posthog.user_permissions import UserPermissionsSerializerMixin from posthog.utils import ( @@ -1034,11 +1032,7 @@ def activity(self, request: request.Request, **kwargs): def cancel(self, request: request.Request, **kwargs): if "client_query_id" not in request.data: raise 
serializers.ValidationError({"client_query_id": "Field is required."}) - sync_execute( - f"KILL QUERY ON CLUSTER '{CLICKHOUSE_CLUSTER}' WHERE query_id LIKE %(client_query_id)s", - {"client_query_id": f"{self.team.pk}_{request.data['client_query_id']}%"}, - ) - statsd.incr("clickhouse.query.cancellation_requested", tags={"team_id": self.team.pk}) + cancel_query_on_cluster(team_id=self.team.pk, client_query_id=request.data["client_query_id"]) return Response(status=status.HTTP_201_CREATED) @action(methods=["POST"], detail=False) diff --git a/posthog/api/organization_feature_flag.py b/posthog/api/organization_feature_flag.py index d149de721dccb..44648bd2cd0f2 100644 --- a/posthog/api/organization_feature_flag.py +++ b/posthog/api/organization_feature_flag.py @@ -12,6 +12,7 @@ from posthog.api.routing import StructuredViewSetMixin from posthog.api.feature_flag import FeatureFlagSerializer from posthog.api.feature_flag import CanEditFeatureFlag +from posthog.api.shared import UserBasicSerializer from posthog.models import FeatureFlag, Team from posthog.models.cohort import Cohort from posthog.models.filters.filter import Filter @@ -44,15 +45,10 @@ def retrieve(self, request, *args, **kwargs): { "flag_id": flag.id, "team_id": flag.team_id, - "created_by": { - "id": flag.created_by.id, - "uuid": flag.created_by.uuid, - "distinct_id": flag.created_by.distinct_id, - "first_name": flag.created_by.first_name, - "email": flag.created_by.email, - "is_email_verified": flag.created_by.is_email_verified, - }, - "filters": flag.filters, + "created_by": UserBasicSerializer(flag.created_by).data + if hasattr(flag, "created_by") and flag.created_by + else None, + "filters": flag.get_filters(), "created_at": flag.created_at, "active": flag.active, } @@ -101,7 +97,7 @@ def copy_flags(self, request, *args, **kwargs): continue # get all linked cohorts, sorted by creation order - seen_cohorts_cache: Dict[str, Cohort] = {} + seen_cohorts_cache: Dict[int, Cohort] = {} sorted_cohort_ids = 
flag_to_copy.get_cohort_ids( seen_cohorts_cache=seen_cohorts_cache, sort_by_topological_order=True ) @@ -111,7 +107,7 @@ def copy_flags(self, request, *args, **kwargs): # create cohorts in the destination project if len(sorted_cohort_ids): for cohort_id in sorted_cohort_ids: - original_cohort = seen_cohorts_cache[str(cohort_id)] + original_cohort = seen_cohorts_cache[cohort_id] # search in destination project by name destination_cohort = Cohort.objects.filter( @@ -125,10 +121,13 @@ def copy_flags(self, request, *args, **kwargs): ).property_groups for prop in prop_group.flat: - if prop.type == "cohort": - original_child_cohort_id = prop.value - original_child_cohort = seen_cohorts_cache[str(original_child_cohort_id)] - prop.value = name_to_dest_cohort_id[original_child_cohort.name] + if prop.type == "cohort" and not isinstance(prop.value, list): + try: + original_child_cohort_id = int(prop.value) + original_child_cohort = seen_cohorts_cache[original_child_cohort_id] + prop.value = name_to_dest_cohort_id[original_child_cohort.name] + except (ValueError, TypeError): + continue destination_cohort_serializer = CohortSerializer( data={ @@ -155,14 +154,17 @@ def copy_flags(self, request, *args, **kwargs): props = group.get("properties", []) for prop in props: if isinstance(prop, dict) and prop.get("type") == "cohort": - original_cohort_id = prop["value"] - cohort_name = (seen_cohorts_cache[str(original_cohort_id)]).name - prop["value"] = name_to_dest_cohort_id[cohort_name] + try: + original_cohort_id = int(prop["value"]) + cohort_name = (seen_cohorts_cache[original_cohort_id]).name + prop["value"] = name_to_dest_cohort_id[cohort_name] + except (ValueError, TypeError): + continue flag_data = { "key": flag_to_copy.key, "name": flag_to_copy.name, - "filters": flag_to_copy.filters, + "filters": flag_to_copy.get_filters(), "active": flag_to_copy.active, "rollout_percentage": flag_to_copy.rollout_percentage, "ensure_experience_continuity": 
flag_to_copy.ensure_experience_continuity, diff --git a/posthog/api/query.py b/posthog/api/query.py index 224aedce40464..021139911cb96 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -1,11 +1,11 @@ import json import re -from typing import Dict, Optional, cast, Any, List +import uuid +from typing import Dict -from django.http import HttpResponse, JsonResponse +from django.http import JsonResponse from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, OpenApiResponse -from pydantic import BaseModel from rest_framework import viewsets from rest_framework.decorators import action from rest_framework.exceptions import ParseError, ValidationError, NotAuthenticated @@ -17,46 +17,31 @@ from posthog import schema from posthog.api.documentation import extend_schema +from posthog.api.services.query import process_query from posthog.api.routing import StructuredViewSetMixin +from posthog.clickhouse.client.execute_async import ( + cancel_query, + enqueue_process_query_task, + get_query_status, +) from posthog.clickhouse.query_tagging import tag_queries from posthog.errors import ExposedCHQueryError from posthog.hogql.ai import PromptUnclear, write_sql_from_prompt -from posthog.hogql.database.database import create_hogql_database, serialize_database from posthog.hogql.errors import HogQLException -from posthog.hogql.metadata import get_hogql_metadata -from posthog.hogql.modifiers import create_default_modifiers_for_team -from posthog.hogql_queries.query_runner import get_query_runner -from posthog.models import Team from posthog.models.user import User from posthog.permissions import ( ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission, ) -from posthog.queries.time_to_see_data.serializers import ( - SessionEventsQuerySerializer, - SessionsQuerySerializer, -) -from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions from posthog.rate_limit import ( AIBurstRateThrottle, 
AISustainedRateThrottle, TeamRateThrottle, ) -from posthog.schema import HogQLMetadata +from posthog.schema import QueryStatus from posthog.utils import refresh_requested_by_client -QUERY_WITH_RUNNER = [ - "LifecycleQuery", - "TrendsQuery", - "WebOverviewQuery", - "WebTopSourcesQuery", - "WebTopClicksQuery", - "WebTopPagesQuery", - "WebStatsTableQuery", -] -QUERY_WITH_RUNNER_NO_CACHE = ["EventsQuery", "PersonsQuery", "HogQLQuery", "SessionsTimelineQuery"] - class QueryThrottle(TeamRateThrottle): scope = "query" @@ -116,40 +101,73 @@ def get_throttles(self): OpenApiParameter( "client_query_id", OpenApiTypes.STR, - description="Client provided query ID. Can be used to cancel queries.", + description="Client provided query ID. Can be used to retrieve the status or cancel the query.", + ), + OpenApiParameter( + "async", + OpenApiTypes.BOOL, + description=( + "(Experimental) " + "Whether to run the query asynchronously. Defaults to False." + " If True, the `id` of the query can be used to check the status and to cancel it." 
+ ), ), ], responses={ 200: OpenApiResponse(description="Query results"), }, ) - def list(self, request: Request, **kw) -> HttpResponse: - self._tag_client_query_id(request.GET.get("client_query_id")) - query_json = QuerySchemaParser.validate_query(self._query_json_from_request(request)) - # allow lists as well as dicts in response with safe=False - try: - return JsonResponse(process_query(self.team, query_json, request=request), safe=False) - except HogQLException as e: - raise ValidationError(str(e)) - except ExposedCHQueryError as e: - raise ValidationError(str(e), e.code_name) - - def post(self, request, *args, **kwargs): + def create(self, request, *args, **kwargs) -> JsonResponse: request_json = request.data query_json = request_json.get("query") - self._tag_client_query_id(request_json.get("client_query_id")) - # allow lists as well as dicts in response with safe=False + query_async = request_json.get("async", False) + refresh_requested = refresh_requested_by_client(request) + + client_query_id = request_json.get("client_query_id") or uuid.uuid4().hex + self._tag_client_query_id(client_query_id) + + if query_async: + query_id = enqueue_process_query_task( + team_id=self.team.pk, + query_json=query_json, + query_id=client_query_id, + refresh_requested=refresh_requested, + ) + return JsonResponse(QueryStatus(id=query_id, team_id=self.team.pk).model_dump(), safe=False) + try: - return JsonResponse(process_query(self.team, query_json, request=request), safe=False) - except HogQLException as e: - raise ValidationError(str(e)) - except ExposedCHQueryError as e: - raise ValidationError(str(e), e.code_name) + result = process_query(self.team, query_json, refresh_requested=refresh_requested) + return JsonResponse(result, safe=False) + except (HogQLException, ExposedCHQueryError) as e: + raise ValidationError(str(e), getattr(e, "code_name", None)) except Exception as e: self.handle_column_ch_error(e) capture_exception(e) raise e + @extend_schema( + 
description="(Experimental)", + responses={ + 200: OpenApiResponse(description="Query status"), + }, + ) + @extend_schema( + description="(Experimental)", + responses={ + 200: OpenApiResponse(description="Query status"), + }, + ) + def retrieve(self, request: Request, pk=None, *args, **kwargs) -> JsonResponse: + status = get_query_status(team_id=self.team.pk, query_id=pk) + return JsonResponse(status.__dict__, safe=False) + + @extend_schema( + description="(Experimental)", + ) + def destroy(self, request, pk=None, *args, **kwargs): + cancel_query(self.team.pk, pk) + return Response(status=204) + @action(methods=["GET"], detail=False) def draft_sql(self, request: Request, *args, **kwargs) -> Response: if not isinstance(request.user, User): @@ -177,8 +195,10 @@ def handle_column_ch_error(self, error): return def _tag_client_query_id(self, query_id: str | None): - if query_id is not None: - tag_queries(client_query_id=query_id) + if query_id is None: + return + + tag_queries(client_query_id=query_id) def _query_json_from_request(self, request): if request.method == "POST": @@ -205,73 +225,3 @@ def parsing_error(ex): except (json.JSONDecodeError, UnicodeDecodeError) as error_main: raise ValidationError("Invalid JSON: %s" % (str(error_main))) return query - - -def _unwrap_pydantic(response: Any) -> Dict | List: - if isinstance(response, list): - return [_unwrap_pydantic(item) for item in response] - - elif isinstance(response, BaseModel): - resp1: Dict[str, Any] = {} - for key in response.__fields__.keys(): - resp1[key] = _unwrap_pydantic(getattr(response, key)) - return resp1 - - elif isinstance(response, dict): - resp2: Dict[str, Any] = {} - for key in response.keys(): - resp2[key] = _unwrap_pydantic(response.get(key)) - return resp2 - - return response - - -def _unwrap_pydantic_dict(response: Any) -> Dict: - return cast(dict, _unwrap_pydantic(response)) - - -def process_query( - team: Team, - query_json: Dict, - in_export_context: Optional[bool] = False, - request: 
Optional[Request] = None, -) -> Dict: - # query_json has been parsed by QuerySchemaParser - # it _should_ be impossible to end up in here with a "bad" query - query_kind = query_json.get("kind") - tag_queries(query=query_json) - - if query_kind in QUERY_WITH_RUNNER: - refresh_requested = refresh_requested_by_client(request) if request else False - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) - return _unwrap_pydantic_dict(query_runner.run(refresh_requested=refresh_requested)) - elif query_kind in QUERY_WITH_RUNNER_NO_CACHE: - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) - return _unwrap_pydantic_dict(query_runner.calculate()) - elif query_kind == "HogQLMetadata": - metadata_query = HogQLMetadata.model_validate(query_json) - metadata_response = get_hogql_metadata(query=metadata_query, team=team) - return _unwrap_pydantic_dict(metadata_response) - elif query_kind == "DatabaseSchemaQuery": - database = create_hogql_database(team.pk, modifiers=create_default_modifiers_for_team(team)) - return serialize_database(database) - elif query_kind == "TimeToSeeDataSessionsQuery": - sessions_query_serializer = SessionsQuerySerializer(data=query_json) - sessions_query_serializer.is_valid(raise_exception=True) - return {"results": get_sessions(sessions_query_serializer).data} - elif query_kind == "TimeToSeeDataQuery": - serializer = SessionEventsQuerySerializer( - data={ - "team_id": team.pk, - "session_start": query_json["sessionStart"], - "session_end": query_json["sessionEnd"], - "session_id": query_json["sessionId"], - } - ) - serializer.is_valid(raise_exception=True) - return get_session_events(serializer) or {} - else: - if query_json.get("source"): - return process_query(team, query_json["source"]) - - raise ValidationError(f"Unsupported query kind: {query_kind}") diff --git a/posthog/api/search.py b/posthog/api/search.py index e346c3a905737..a48f716f902f7 100644 --- a/posthog/api/search.py 
+++ b/posthog/api/search.py @@ -1,10 +1,11 @@ +import functools import re from typing import Any from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector from django.db.models import Model, Value, CharField, F, QuerySet -from django.db.models.functions import Cast +from django.db.models.functions import Cast, JSONObject # type: ignore from django.http import HttpResponse -from rest_framework import viewsets +from rest_framework import viewsets, serializers from rest_framework.permissions import IsAuthenticated from rest_framework.request import Request from rest_framework.response import Response @@ -12,25 +13,91 @@ from posthog.api.routing import StructuredViewSetMixin from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission from posthog.models import Action, Cohort, Insight, Dashboard, FeatureFlag, Experiment, Team +from posthog.models.notebook.notebook import Notebook LIMIT = 25 +ENTITY_MAP = { + "insight": { + "klass": Insight, + "search_fields": {"name": "A", "derived_name": "B", "description": "C"}, + "extra_fields": ["name", "derived_name", "description"], + }, + "dashboard": { + "klass": Dashboard, + "search_fields": {"name": "A", "description": "C"}, + "extra_fields": ["name", "description"], + }, + "experiment": { + "klass": Experiment, + "search_fields": {"name": "A", "description": "C"}, + "extra_fields": ["name", "description"], + }, + "feature_flag": {"klass": FeatureFlag, "search_fields": {"key": "A", "name": "C"}, "extra_fields": ["key", "name"]}, + "notebook": { + "klass": Notebook, + "search_fields": {"title": "A", "text_content": "C"}, + "extra_fields": ["title", "text_content"], + }, + "action": { + "klass": Action, + "search_fields": {"name": "A", "description": "C"}, + "extra_fields": ["name", "description"], + }, + "cohort": { + "klass": Cohort, + "search_fields": {"name": "A", "description": "C"}, + "extra_fields": ["name", "description"], + }, +} +""" +Map of entity names to 
their class, search_fields and extra_fields. + +The value in search_fields corresponds to the PostgreSQL weighting i.e. A, B, C or D. +""" + + +class QuerySerializer(serializers.Serializer): + """Validates and formats query params.""" + + q = serializers.CharField(required=False, default="") + entities = serializers.MultipleChoiceField(required=False, choices=list(ENTITY_MAP.keys())) + + def validate_q(self, value: str): + return process_query(value) + + class SearchViewSet(StructuredViewSetMixin, viewsets.ViewSet): permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] def list(self, request: Request, **kw) -> HttpResponse: - query = process_query(request.GET.get("q", "").strip()) - counts = {} + # parse query params + query_serializer = QuerySerializer(data=self.request.query_params) + query_serializer.is_valid(raise_exception=True) + params = query_serializer.validated_data + + counts = {key: None for key in ENTITY_MAP} + # get entities to search from params or default to all entities + entities = params["entities"] if len(params["entities"]) > 0 else set(ENTITY_MAP.keys()) + query = params["q"] # empty queryset to union things onto it qs = Dashboard.objects.annotate(type=Value("empty", output_field=CharField())).filter(team=self.team).none() - for klass in (Action, Cohort, Insight, Dashboard, Experiment, FeatureFlag): - klass_qs, type = class_queryset(klass, team=self.team, query=query) + # add entities + for entity_meta in [ENTITY_MAP[entity] for entity in entities]: + klass_qs, entity_name = class_queryset( + klass=entity_meta.get("klass"), # type: ignore + team=self.team, + query=query, + search_fields=entity_meta.get("search_fields"), # type: ignore + extra_fields=entity_meta.get("extra_fields"), + ) qs = qs.union(klass_qs) - counts[type] = klass_qs.count() + counts[entity_name] = klass_qs.count() + # order by rank if query: qs = qs.order_by("-rank") @@ -54,32 +121,48 @@ def process_query(query: str): 
return query -def class_queryset(klass: type[Model], team: Team, query: str | None): +def class_queryset( + klass: type[Model], + team: Team, + query: str | None, + search_fields: dict[str, str], + extra_fields: dict | None, +): """Builds a queryset for the class.""" - type = class_to_type(klass) - values = ["type", "result_id", "name"] + entity_type = class_to_entity_name(klass) + values = ["type", "result_id", "extra_fields"] - qs: QuerySet[Any] = klass.objects.filter(team=team) - qs = qs.annotate(type=Value(type, output_field=CharField())) + qs: QuerySet[Any] = klass.objects.filter(team=team) # filter team + qs = qs.annotate(type=Value(entity_type, output_field=CharField())) # entity type - if type == "insight": + # entity id + if entity_type == "insight" or entity_type == "notebook": qs = qs.annotate(result_id=F("short_id")) else: qs = qs.annotate(result_id=Cast("pk", CharField())) + # extra fields + if extra_fields: + qs = qs.annotate(extra_fields=JSONObject(**{field: field for field in extra_fields})) + else: + qs = qs.annotate(extra_fields=JSONObject()) + + # full-text search rank if query: + search_vectors = [SearchVector(key, weight=value, config="simple") for key, value in search_fields.items()] + combined_vector = functools.reduce(lambda a, b: a + b, search_vectors) qs = qs.annotate( - rank=SearchRank( - SearchVector("name", config="simple"), SearchQuery(query, config="simple", search_type="raw") - ) + rank=SearchRank(combined_vector, SearchQuery(query, config="simple", search_type="raw")), ) qs = qs.filter(rank__gt=0.05) values.append("rank") + # specify fields to fetch qs = qs.values(*values) - return qs, type + + return qs, entity_type -def class_to_type(klass: type[Model]): +def class_to_entity_name(klass: type[Model]): """Converts the class name to snake case.""" return re.sub("(?!^)([A-Z]+)", r"_\1", klass.__name__).lower() diff --git a/posthog/api/services/__init__.py b/posthog/api/services/__init__.py new file mode 100644 index 
0000000000000..e69de29bb2d1d diff --git a/posthog/api/services/query.py b/posthog/api/services/query.py new file mode 100644 index 0000000000000..1ef831bde1b82 --- /dev/null +++ b/posthog/api/services/query.py @@ -0,0 +1,97 @@ +import structlog +from typing import Any, Dict, List, Optional, cast + +from pydantic import BaseModel +from rest_framework.exceptions import ValidationError + +from posthog.clickhouse.query_tagging import tag_queries +from posthog.hogql.database.database import create_hogql_database, serialize_database +from posthog.hogql.metadata import get_hogql_metadata +from posthog.hogql.modifiers import create_default_modifiers_for_team +from posthog.hogql_queries.query_runner import get_query_runner +from posthog.models import Team +from posthog.queries.time_to_see_data.serializers import SessionEventsQuerySerializer, SessionsQuerySerializer +from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions +from posthog.schema import HogQLMetadata + +logger = structlog.get_logger(__name__) + +QUERY_WITH_RUNNER = [ + "LifecycleQuery", + "TrendsQuery", + "WebOverviewQuery", + "WebTopSourcesQuery", + "WebTopClicksQuery", + "WebTopPagesQuery", + "WebStatsTableQuery", +] +QUERY_WITH_RUNNER_NO_CACHE = ["EventsQuery", "PersonsQuery", "HogQLQuery", "SessionsTimelineQuery"] + + +def _unwrap_pydantic(response: Any) -> Dict | List: + if isinstance(response, list): + return [_unwrap_pydantic(item) for item in response] + + elif isinstance(response, BaseModel): + resp1: Dict[str, Any] = {} + for key in response.__fields__.keys(): + resp1[key] = _unwrap_pydantic(getattr(response, key)) + return resp1 + + elif isinstance(response, dict): + resp2: Dict[str, Any] = {} + for key in response.keys(): + resp2[key] = _unwrap_pydantic(response.get(key)) + return resp2 + + return response + + +def _unwrap_pydantic_dict(response: Any) -> Dict: + return cast(dict, _unwrap_pydantic(response)) + + +def process_query( + team: Team, + query_json: Dict, + 
in_export_context: Optional[bool] = False, + refresh_requested: Optional[bool] = False, +) -> Dict: + # query_json has been parsed by QuerySchemaParser + # it _should_ be impossible to end up in here with a "bad" query + query_kind = query_json.get("kind") + tag_queries(query=query_json) + + if query_kind in QUERY_WITH_RUNNER: + query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + return _unwrap_pydantic_dict(query_runner.run(refresh_requested=refresh_requested)) + elif query_kind in QUERY_WITH_RUNNER_NO_CACHE: + query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + return _unwrap_pydantic_dict(query_runner.calculate()) + elif query_kind == "HogQLMetadata": + metadata_query = HogQLMetadata.model_validate(query_json) + metadata_response = get_hogql_metadata(query=metadata_query, team=team) + return _unwrap_pydantic_dict(metadata_response) + elif query_kind == "DatabaseSchemaQuery": + database = create_hogql_database(team.pk, modifiers=create_default_modifiers_for_team(team)) + return serialize_database(database) + elif query_kind == "TimeToSeeDataSessionsQuery": + sessions_query_serializer = SessionsQuerySerializer(data=query_json) + sessions_query_serializer.is_valid(raise_exception=True) + return {"results": get_sessions(sessions_query_serializer).data} + elif query_kind == "TimeToSeeDataQuery": + serializer = SessionEventsQuerySerializer( + data={ + "team_id": team.pk, + "session_start": query_json["sessionStart"], + "session_end": query_json["sessionEnd"], + "session_id": query_json["sessionId"], + } + ) + serializer.is_valid(raise_exception=True) + return get_session_events(serializer) or {} + else: + if query_json.get("source"): + return process_query(team, query_json["source"]) + + raise ValidationError(f"Unsupported query kind: {query_kind}") diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index 655c44eff5ab9..f0c0385bd2ccc 
100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -88,6 +88,22 @@ LIMIT 21 /*controller='team-detail',route='api/projects/%28%3FP%3Cid%3E%5B%5E/.%5D%2B%29/%3F%24'*/ ' --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.10 + ' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 2) + ' +--- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.2 ' SELECT "posthog_organizationmembership"."id", diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index e2b852a604b20..9916b9afadbbf 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -1440,8 +1440,8 @@ "posthog_experiment"."filters", "posthog_experiment"."parameters", "posthog_experiment"."secondary_metrics", - "posthog_experiment"."feature_flag_id", "posthog_experiment"."created_by_id", + "posthog_experiment"."feature_flag_id", "posthog_experiment"."start_date", "posthog_experiment"."end_date", "posthog_experiment"."created_at", diff --git a/posthog/api/test/batch_exports/test_delete.py b/posthog/api/test/batch_exports/test_delete.py index 20375cecbb768..cc07ed4675151 100644 --- a/posthog/api/test/batch_exports/test_delete.py +++ 
b/posthog/api/test/batch_exports/test_delete.py @@ -241,3 +241,48 @@ def test_deletes_are_partitioned_by_team_id(client: HttpClient): # Make sure we can still get the export with the right user response = get_batch_export(client, team.pk, batch_export_id) assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db(transaction=True) +def test_delete_batch_export_even_without_underlying_schedule(client: HttpClient): + """Test deleting a BatchExport completes even if underlying Schedule was already deleted.""" + temporal = sync_connect() + + destination_data = { + "type": "S3", + "config": { + "bucket_name": "my-production-s3-bucket", + "region": "us-east-1", + "prefix": "posthog-events/", + "aws_access_key_id": "abc123", + "aws_secret_access_key": "secret", + }, + } + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": "hour", + } + + organization = create_organization("Test Org") + team = create_team(organization) + user = create_user("test@user.com", "Test User", organization) + client.force_login(user) + + with start_test_worker(temporal): + batch_export = create_batch_export_ok(client, team.pk, batch_export_data) + batch_export_id = batch_export["id"] + + handle = temporal.get_schedule_handle(batch_export_id) + async_to_sync(handle.delete)() + + with pytest.raises(RPCError): + describe_schedule(temporal, batch_export_id) + + delete_batch_export_ok(client, team.pk, batch_export_id) + + response = get_batch_export(client, team.pk, batch_export_id) + assert response.status_code == status.HTTP_404_NOT_FOUND + + with pytest.raises(RPCError): + describe_schedule(temporal, batch_export_id) diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index 9579c2447d4ae..8c670c3243529 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -2991,6 +2991,34 @@ def test_decide_new_capture_activation(self, *args): 
self.assertEqual(response.status_code, 200) self.assertFalse("analytics" in response.json()) + with self.settings(NEW_ANALYTICS_CAPTURE_TEAM_IDS={"0", "*"}, NEW_ANALYTICS_CAPTURE_SAMPLING_RATE=1.0): + response = self._post_decide(api_version=3) + self.assertEqual(response.status_code, 200) + self.assertTrue("analytics" in response.json()) + self.assertEqual(response.json()["analytics"]["endpoint"], "/i/v0/e/") + + with self.settings( + NEW_ANALYTICS_CAPTURE_TEAM_IDS={"*"}, + NEW_ANALYTICS_CAPTURE_EXCLUDED_TEAM_IDS={str(self.team.id)}, + NEW_ANALYTICS_CAPTURE_SAMPLING_RATE=1.0, + ): + response = self._post_decide(api_version=3) + self.assertEqual(response.status_code, 200) + self.assertFalse("analytics" in response.json()) + + def test_decide_element_chain_as_string(self, *args): + self.client.logout() + with self.settings(ELEMENT_CHAIN_AS_STRING_TEAMS={str(self.team.id)}): + response = self._post_decide(api_version=3) + self.assertEqual(response.status_code, 200) + self.assertTrue("elementsChainAsString" in response.json()) + self.assertTrue(response.json()["elementsChainAsString"]) + + with self.settings(ELEMENT_CHAIN_AS_STRING_TEAMS={"0"}): + response = self._post_decide(api_version=3) + self.assertEqual(response.status_code, 200) + self.assertFalse("elementsChainAsString" in response.json()) + class TestDatabaseCheckForDecide(BaseTest, QueryMatchingTest): """ diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index 0c6581a389561..24b0ae0fa2ba4 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -1171,6 +1171,34 @@ def test_getting_flags_is_not_nplus1(self) -> None: response = self.client.get(f"/api/projects/{self.team.id}/feature_flags") self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_getting_flags_with_no_creator(self) -> None: + FeatureFlag.objects.all().delete() + + self.client.post( + f"/api/projects/{self.team.id}/feature_flags/", + data={ + "name": 
f"flag", + "key": f"flag_0", + "filters": {"groups": [{"rollout_percentage": 5}]}, + }, + format="json", + ).json() + + FeatureFlag.objects.create( + created_by=None, + team=self.team, + key="flag_role_access", + name="Flag role access", + ) + + with self.assertNumQueries(FuzzyInt(11, 12)): + response = self.client.get(f"/api/projects/{self.team.id}/feature_flags") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.json()["results"]), 2) + sorted_results = sorted(response.json()["results"], key=lambda x: x["key"]) + self.assertEqual(sorted_results[1]["created_by"], None) + self.assertEqual(sorted_results[1]["key"], "flag_role_access") + @patch("posthog.api.feature_flag.report_user_action") def test_my_flags(self, mock_capture): self.client.post( diff --git a/posthog/api/test/test_feature_flag_utils.py b/posthog/api/test/test_feature_flag_utils.py index dd6108d7ff54c..157fe4f5be9c9 100644 --- a/posthog/api/test/test_feature_flag_utils.py +++ b/posthog/api/test/test_feature_flag_utils.py @@ -51,9 +51,9 @@ def create_cohort(name): cohort_ids = {cohorts["a"].pk, cohorts["b"].pk, cohorts["c"].pk} seen_cohorts_cache = { - str(cohorts["a"].pk): cohorts["a"], - str(cohorts["b"].pk): cohorts["b"], - str(cohorts["c"].pk): cohorts["c"], + cohorts["a"].pk: cohorts["a"], + cohorts["b"].pk: cohorts["b"], + cohorts["c"].pk: cohorts["c"], } # (a)-->(c)-->(b) @@ -68,6 +68,6 @@ def create_cohort(name): def test_empty_cohorts_set(self): cohort_ids: Set[int] = set() - seen_cohorts_cache: Dict[str, Cohort] = {} + seen_cohorts_cache: Dict[int, Cohort] = {} topologically_sorted_cohort_ids = sort_cohorts_topologically(cohort_ids, seen_cohorts_cache) self.assertEqual(topologically_sorted_cohort_ids, []) diff --git a/posthog/api/test/test_organization_feature_flag.py b/posthog/api/test/test_organization_feature_flag.py index 103756d0c4911..78e72269b20bb 100644 --- a/posthog/api/test/test_organization_feature_flag.py +++ 
b/posthog/api/test/test_organization_feature_flag.py @@ -53,7 +53,7 @@ def test_get_feature_flag_success(self): "email": self.user.email, "is_email_verified": self.user.is_email_verified, }, - "filters": flag.filters, + "filters": flag.get_filters(), "created_at": flag.created_at.strftime("%Y-%m-%dT%H:%M:%S.%f") + "Z", "active": flag.active, } @@ -243,6 +243,29 @@ def test_copy_feature_flag_update_existing(self): set(flag_response.keys()), ) + def test_copy_feature_flag_with_old_legacy_flags(self): + url = f"/api/organizations/{self.organization.id}/feature_flags/copy_flags" + target_project = self.team_2 + + flag_to_copy = FeatureFlag.objects.create( + team=self.team_1, + created_by=self.user, + key="flag-to-copy-here", + filters={}, + rollout_percentage=self.rollout_percentage_to_copy, + ) + + data = { + "feature_flag_key": flag_to_copy.key, + "from_project": self.feature_flag_to_copy.team_id, + "target_project_ids": [target_project.id], + } + response = self.client.post(url, data) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.json()["success"]), 1) + self.assertEqual(len(response.json()["failed"]), 0) + def test_copy_feature_flag_update_override_deleted(self): target_project = self.team_2 target_project_2 = Team.objects.create(organization=self.organization) @@ -581,8 +604,8 @@ def connect(parent, child): # get topological order of the original cohorts original_cohorts_cache = {} for _, cohort in cohorts.items(): - original_cohorts_cache[str(cohort.id)] = cohort - original_cohort_ids = {int(str_id) for str_id in original_cohorts_cache.keys()} + original_cohorts_cache[cohort.id] = cohort + original_cohort_ids = {cohort_id for cohort_id in original_cohorts_cache.keys()} topologically_sorted_original_cohort_ids = sort_cohorts_topologically( original_cohort_ids, original_cohorts_cache ) @@ -593,7 +616,7 @@ def connect(parent, child): def traverse(cohort, index): expected_cohort_id = 
topologically_sorted_original_cohort_ids_reversed[index] - expected_name = original_cohorts_cache[str(expected_cohort_id)].name + expected_name = original_cohorts_cache[expected_cohort_id].name self.assertEqual(expected_name, cohort.name) prop = cohort.filters["properties"]["values"][0] diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index b49cd25b83287..ff03704605014 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -1,11 +1,11 @@ import json +from unittest import mock from unittest.mock import patch -from urllib.parse import quote from freezegun import freeze_time from rest_framework import status -from posthog.api.query import process_query +from posthog.api.services.query import process_query from posthog.models.property_definition import PropertyDefinition, PropertyType from posthog.models.utils import UUIDT from posthog.schema import ( @@ -336,51 +336,9 @@ def test_person_property_filter(self): response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json() self.assertEqual(len(response["results"]), 2) - def test_json_undefined_constant_error(self): - response = self.client.get( - f"/api/projects/{self.team.id}/query/?query=%7B%22kind%22%3A%22EventsQuery%22%2C%22select%22%3A%5B%22*%22%5D%2C%22limit%22%3AInfinity%7D" - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_input", - "detail": "Unsupported constant found in JSON: Infinity", - "attr": None, - }, - ) - - response = self.client.get( - f"/api/projects/{self.team.id}/query/?query=%7B%22kind%22%3A%22EventsQuery%22%2C%22select%22%3A%5B%22*%22%5D%2C%22limit%22%3ANaN%7D" - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_input", - "detail": "Unsupported constant found in JSON: NaN", - 
"attr": None, - }, - ) - def test_safe_clickhouse_error_passed_through(self): query = {"kind": "EventsQuery", "select": ["timestamp + 'string'"]} - # Safe errors are passed through in GET requests - response_get = self.client.get(f"/api/projects/{self.team.id}/query/?query={quote(json.dumps(query))}") - self.assertEqual(response_get.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response_get.json(), - self.validation_error_response( - "Illegal types DateTime64(6, 'UTC') and String of arguments of function plus: " - "While processing toTimeZone(timestamp, 'UTC') + 'string'.", - "illegal_type_of_argument", - ), - ) - - # Safe errors are passed through in POST requests too response_post = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query}) self.assertEqual(response_post.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -396,11 +354,6 @@ def test_safe_clickhouse_error_passed_through(self): def test_unsafe_clickhouse_error_is_swallowed(self, sqlparse_format_mock): query = {"kind": "EventsQuery", "select": ["timestamp"]} - # Unsafe errors are swallowed in GET requests (in this case we should not expose malformed SQL) - response_get = self.client.get(f"/api/projects/{self.team.id}/query/?query={quote(json.dumps(query))}") - self.assertEqual(response_get.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - - # Unsafe errors are swallowed in POST requests too response_post = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query}) self.assertEqual(response_post.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) @@ -832,3 +785,87 @@ def test_full_hogql_query_values(self): ) self.assertEqual(response.get("results", [])[0][0], 20) + + +class TestQueryRetrieve(APIBaseTest): + def setUp(self): + super().setUp() + self.team_id = self.team.pk + self.valid_query_id = "12345" + self.invalid_query_id = "invalid-query-id" + self.redis_client_mock = mock.Mock() + self.redis_get_patch = 
mock.patch("posthog.redis.get_client", return_value=self.redis_client_mock) + self.redis_get_patch.start() + + def tearDown(self): + self.redis_get_patch.stop() + + def test_with_valid_query_id(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "error": False, + "complete": True, + "results": ["result1", "result2"], + } + ).encode() + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json()["complete"], True, response.content) + + def test_with_invalid_query_id(self): + self.redis_client_mock.get.return_value = None + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.invalid_query_id}/") + self.assertEqual(response.status_code, 404) + + def test_completed_query(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "complete": True, + "results": ["result1", "result2"], + } + ).encode() + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + self.assertTrue(response.json()["complete"]) + + def test_running_query(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "complete": False, + } + ).encode() + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + self.assertFalse(response.json()["complete"]) + + def test_failed_query(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "error": True, + "error_message": "Query failed", + } + ).encode() + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + 
self.assertTrue(response.json()["error"]) + + def test_destroy(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "error": True, + "error_message": "Query failed", + } + ).encode() + response = self.client.delete(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 204) + self.redis_client_mock.delete.assert_called_once() diff --git a/posthog/api/test/test_search.py b/posthog/api/test/test_search.py index 3324dc18db6f7..543d2d5adc048 100644 --- a/posthog/api/test/test_search.py +++ b/posthog/api/test/test_search.py @@ -5,15 +5,22 @@ from posthog.api.search import process_query from posthog.test.base import APIBaseTest -from posthog.models import Dashboard, FeatureFlag, Team +from posthog.models import Dashboard, FeatureFlag, Team, Insight class TestSearch(APIBaseTest): + insight_1: Insight + dashboard_1: Dashboard + def setUp(self): super().setUp() + Insight.objects.create(team=self.team, derived_name="derived name") + self.insight_1 = Insight.objects.create(team=self.team, name="second insight") + Insight.objects.create(team=self.team, name="third insight") + Dashboard.objects.create(team=self.team, created_by=self.user) - Dashboard.objects.create(name="second dashboard", team=self.team, created_by=self.user) + self.dashboard_1 = Dashboard.objects.create(name="second dashboard", team=self.team, created_by=self.user) Dashboard.objects.create(name="third dashboard", team=self.team, created_by=self.user) FeatureFlag.objects.create(key="a", team=self.team, created_by=self.user) @@ -24,25 +31,71 @@ def test_search(self): response = self.client.get("/api/projects/@current/search?q=sec") self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.json()["results"]), 2) + self.assertEqual(len(response.json()["results"]), 3) self.assertEqual(response.json()["counts"]["action"], 0) 
self.assertEqual(response.json()["counts"]["dashboard"], 1) self.assertEqual(response.json()["counts"]["feature_flag"], 1) + self.assertEqual(response.json()["counts"]["insight"], 1) def test_search_without_query(self): response = self.client.get("/api/projects/@current/search") self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.json()["results"]), 6) + self.assertEqual(len(response.json()["results"]), 9) self.assertEqual(response.json()["counts"]["action"], 0) self.assertEqual(response.json()["counts"]["dashboard"], 3) self.assertEqual(response.json()["counts"]["feature_flag"], 3) + self.assertEqual(response.json()["counts"]["insight"], 3) + + def test_search_filtered_by_entity(self): + response = self.client.get("/api/projects/@current/search?q=sec&entities=insight&entities=dashboard") + + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.json()["results"]), 2) + self.assertEqual(response.json()["counts"]["dashboard"], 1) + self.assertEqual(response.json()["counts"]["insight"], 1) + + def test_response_format_and_ids(self): + response = self.client.get("/api/projects/@current/search?q=sec&entities=insight&entities=dashboard") + + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.json()["results"][0], + { + "rank": response.json()["results"][0]["rank"], + "type": "dashboard", + "result_id": str(self.dashboard_1.id), + "extra_fields": {"description": "", "name": "second dashboard"}, + }, + ) + self.assertEqual( + response.json()["results"][1], + { + "rank": response.json()["results"][1]["rank"], + "type": "insight", + "result_id": self.insight_1.short_id, + "extra_fields": { + "derived_name": None, + "name": "second insight", + "description": None, + }, + }, + ) + + def test_extra_fields(self): + response = self.client.get("/api/projects/@current/search?entities=insight") + + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.json()["results"][0]["extra_fields"], + 
{"derived_name": "derived name", "description": None, "name": None}, + ) def test_search_with_fully_invalid_query(self): response = self.client.get("/api/projects/@current/search?q=%3E") self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.json()["results"]), 6) + self.assertEqual(len(response.json()["results"]), 9) self.assertEqual(response.json()["counts"]["action"], 0) self.assertEqual(response.json()["counts"]["dashboard"], 3) self.assertEqual(response.json()["counts"]["feature_flag"], 3) diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py index e106dd6cbddf2..00c101e4487ee 100644 --- a/posthog/api/test/test_signup.py +++ b/posthog/api/test/test_signup.py @@ -3,9 +3,9 @@ from typing import Dict, Optional, cast from unittest import mock from unittest.mock import ANY, patch +from zoneinfo import ZoneInfo import pytest -from zoneinfo import ZoneInfo from django.core import mail from django.urls.base import reverse from django.utils import timezone @@ -543,6 +543,7 @@ def test_social_signup_with_allowed_domain_on_self_hosted( @patch("posthoganalytics.capture") @mock.patch("ee.billing.billing_manager.BillingManager.update_billing_distinct_ids") + @mock.patch("ee.billing.billing_manager.BillingManager.update_billing_customer_email") @mock.patch("social_core.backends.base.BaseAuth.request") @mock.patch("posthog.api.authentication.get_instance_available_sso_providers") @mock.patch("posthog.tasks.user_identify.identify_task") @@ -553,11 +554,13 @@ def test_social_signup_with_allowed_domain_on_cloud( mock_sso_providers, mock_request, mock_update_distinct_ids, + mock_update_billing_customer_email, mock_capture, ): with self.is_cloud(True): self.run_test_for_allowed_domain(mock_sso_providers, mock_request, mock_capture) assert mock_update_distinct_ids.called_once() + assert mock_update_billing_customer_email.called_once() @mock.patch("social_core.backends.base.BaseAuth.request") 
@mock.patch("posthog.api.authentication.get_instance_available_sso_providers") diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py index 8924667fbbea8..0431abea04817 100644 --- a/posthog/api/test/test_team.py +++ b/posthog/api/test/test_team.py @@ -1,6 +1,7 @@ import json from typing import List, cast -from unittest.mock import ANY, MagicMock, patch +from unittest import mock +from unittest.mock import MagicMock, call, patch from asgiref.sync import sync_to_async from django.core.cache import cache @@ -219,15 +220,16 @@ def test_delete_team_own_second(self, mock_capture: MagicMock, mock_delete_bulky AsyncDeletion.objects.filter(team_id=team.id, deletion_type=DeletionType.Team, key=str(team.id)).count(), 1, ) - mock_capture.assert_called_once_with( - self.user.distinct_id, - "team deleted", - properties={}, - groups={ - "instance": ANY, - "organization": str(self.organization.id), - "project": str(self.team.uuid), - }, + mock_capture.assert_has_calls( + calls=[ + call( + self.user.distinct_id, + "membership level changed", + properties={"new_level": 8, "previous_level": 1}, + groups=mock.ANY, + ), + call(self.user.distinct_id, "team deleted", properties={}, groups=mock.ANY), + ] ) mock_delete_bulky_postgres_data.assert_called_once_with(team_ids=[team.pk]) diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py index 8d6005ec663f8..cef17ab628f32 100644 --- a/posthog/batch_exports/http.py +++ b/posthog/batch_exports/http.py @@ -2,6 +2,7 @@ from typing import Any import posthoganalytics +import structlog from django.db import transaction from django.utils.timezone import now from rest_framework import mixins, request, response, serializers, viewsets @@ -27,6 +28,7 @@ BatchExportIdError, BatchExportServiceError, BatchExportServiceRPCError, + BatchExportServiceScheduleNotFound, backfill_export, cancel_running_batch_export_backfill, delete_schedule, @@ -49,6 +51,8 @@ from posthog.temporal.client import sync_connect from 
posthog.utils import relative_date_parse +logger = structlog.get_logger(__name__) + def validate_date_input(date_input: Any) -> dt.datetime: """Parse any datetime input as a proper dt.datetime. @@ -320,10 +324,22 @@ def unpause(self, request: request.Request, *args, **kwargs) -> response.Respons return response.Response({"paused": False}) def perform_destroy(self, instance: BatchExport): - """Perform a BatchExport destroy by clearing Temporal and Django state.""" - instance.deleted = True + """Perform a BatchExport destroy by clearing Temporal and Django state. + + If the underlying Temporal Schedule doesn't exist, we ignore the error and proceed with the delete anyways. + The Schedule could have been manually deleted causing Django and Temporal to go out of sync. For whatever reason, + since we are deleting, we assume that we can recover from this state by finishing the delete operation by calling + instance.save(). + """ temporal = sync_connect() - delete_schedule(temporal, str(instance.pk)) + + instance.deleted = True + + try: + delete_schedule(temporal, str(instance.pk)) + except BatchExportServiceScheduleNotFound as e: + logger.warning("The Schedule %s could not be deleted as it was not found", e.schedule_id) + instance.save() for backfill in BatchExportBackfill.objects.filter(batch_export=instance): diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index fc74d6f51f253..38cecda263aaa 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -3,6 +3,7 @@ from dataclasses import asdict, dataclass, fields from uuid import UUID +import temporalio from asgiref.sync import async_to_sync from temporalio.client import ( Client, @@ -163,6 +164,14 @@ class BatchExportServiceRPCError(BatchExportServiceError): """Exception raised when the underlying Temporal RPC fails.""" +class BatchExportServiceScheduleNotFound(BatchExportServiceRPCError): + """Exception raised when the underlying Temporal RPC fails because a 
schedule was not found.""" + + def __init__(self, schedule_id: str): + self.schedule_id = schedule_id + super().__init__(f"The Temporal Schedule {schedule_id} was not found (maybe it was deleted?)") + + def pause_batch_export(temporal: Client, batch_export_id: str, note: str | None = None) -> None: """Pause this BatchExport. @@ -250,7 +259,14 @@ async def unpause_schedule(temporal: Client, schedule_id: str, note: str | None async def delete_schedule(temporal: Client, schedule_id: str) -> None: """Delete a Temporal Schedule.""" handle = temporal.get_schedule_handle(schedule_id) - await handle.delete() + + try: + await handle.delete() + except temporalio.service.RPCError as e: + if e.status == temporalio.service.RPCStatusCode.NOT_FOUND: + raise BatchExportServiceScheduleNotFound(schedule_id) + else: + raise BatchExportServiceRPCError() from e @async_to_sync diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py index be11c4ffe48b5..f7ee632e2ad48 100644 --- a/posthog/caching/calculate_results.py +++ b/posthog/caching/calculate_results.py @@ -141,7 +141,7 @@ def calculate_for_query_based_insight( ) # local import to avoid circular reference - from posthog.api.query import process_query + from posthog.api.services.query import process_query # TODO need to properly check that hogql is enabled? 
return cache_key, cache_type, process_query(team, insight.query, True) diff --git a/posthog/celery.py b/posthog/celery.py index 46b1e3b402f71..53c67214783ee 100644 --- a/posthog/celery.py +++ b/posthog/celery.py @@ -395,24 +395,19 @@ def redis_heartbeat(): @app.task(ignore_result=True, bind=True) -def enqueue_clickhouse_execute_with_progress( - self, team_id, query_id, query, args=None, settings=None, with_column_types=False -): +def process_query_task(self, team_id, query_id, query_json, in_export_context=False, refresh_requested=False): """ - Kick off query with progress reporting - Iterate over the progress status - Save status to redis + Kick off query Once complete save results to redis """ - from posthog.client import execute_with_progress - - execute_with_progress( - team_id, - query_id, - query, - args, - settings, - with_column_types, + from posthog.client import execute_process_query + + execute_process_query( + team_id=team_id, + query_id=query_id, + query_json=query_json, + in_export_context=in_export_context, + refresh_requested=refresh_requested, task_id=self.request.id, ) @@ -515,10 +510,10 @@ def pg_row_count(): CLICKHOUSE_TABLES = [ - "events", + "sharded_events", "person", "person_distinct_id2", - "session_replay_events", + "sharded_session_replay_events", "log_entries", ] if not is_cloud(): @@ -540,9 +535,8 @@ def clickhouse_lag(): ) for table in CLICKHOUSE_TABLES: try: - QUERY = ( - """select max(_timestamp) observed_ts, now() now_ts, now() - max(_timestamp) as lag from {table};""" - ) + QUERY = """SELECT max(_timestamp) observed_ts, now() now_ts, now() - max(_timestamp) as lag + FROM {table}""" query = QUERY.format(table=table) lag = sync_execute(query)[0][2] statsd.gauge( @@ -688,9 +682,8 @@ def clickhouse_row_count(): ) for table in CLICKHOUSE_TABLES: try: - QUERY = ( - """select count(1) freq from {table} where _timestamp >= toStartOfDay(date_sub(DAY, 2, now()));""" - ) + QUERY = """SELECT sum(rows) rows from system.parts + WHERE table = 
'{table}' and active;""" query = QUERY.format(table=table) rows = sync_execute(query)[0][0] row_count_gauge.labels(table_name=table).set(rows) @@ -745,10 +738,11 @@ def clickhouse_part_count(): from posthog.client import sync_execute QUERY = """ - select table, count(1) freq - from system.parts - group by table - order by freq desc; + SELECT table, count(1) freq + FROM system.parts + WHERE active + GROUP BY table + ORDER BY freq DESC; """ rows = sync_execute(QUERY) diff --git a/posthog/clickhouse/cancel.py b/posthog/clickhouse/cancel.py new file mode 100644 index 0000000000000..e05eea7ad3d64 --- /dev/null +++ b/posthog/clickhouse/cancel.py @@ -0,0 +1,14 @@ +from statshog.defaults.django import statsd + +from posthog.api.services.query import logger +from posthog.clickhouse.client import sync_execute +from posthog.settings import CLICKHOUSE_CLUSTER + + +def cancel_query_on_cluster(team_id: int, client_query_id: str) -> None: + result = sync_execute( + f"KILL QUERY ON CLUSTER '{CLICKHOUSE_CLUSTER}' WHERE query_id LIKE %(client_query_id)s", + {"client_query_id": f"{team_id}_{client_query_id}%"}, + ) + logger.info("Cancelled query %s for team %s, result: %s", client_query_id, team_id, result) + statsd.incr("clickhouse.query.cancellation_requested", tags={"team_id": team_id}) diff --git a/posthog/clickhouse/client/__init__.py b/posthog/clickhouse/client/__init__.py index f2ad255c395e1..a249ebbabb4ad 100644 --- a/posthog/clickhouse/client/__init__.py +++ b/posthog/clickhouse/client/__init__.py @@ -1,8 +1,8 @@ from posthog.clickhouse.client.execute import query_with_columns, sync_execute -from posthog.clickhouse.client.execute_async import execute_with_progress +from posthog.clickhouse.client.execute_async import execute_process_query __all__ = [ "sync_execute", "query_with_columns", - "execute_with_progress", + "execute_process_query", ] diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 3bb28c3f20075..7e42d52d4836c 
100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -1,172 +1,93 @@ -import hashlib +import datetime import json -import time -from dataclasses import asdict as dataclass_asdict -from dataclasses import dataclass -from time import perf_counter -from typing import Any, Optional - -from posthog import celery -from clickhouse_driver import Client as SyncClient -from django.conf import settings as app_settings -from statshog.defaults.django import statsd - -from posthog import redis -from posthog.celery import enqueue_clickhouse_execute_with_progress -from posthog.clickhouse.client.execute import _prepare_query -from posthog.errors import wrap_query_error -from posthog.settings import ( - CLICKHOUSE_CA, - CLICKHOUSE_DATABASE, - CLICKHOUSE_HOST, - CLICKHOUSE_PASSWORD, - CLICKHOUSE_SECURE, - CLICKHOUSE_USER, - CLICKHOUSE_VERIFY, -) - -REDIS_STATUS_TTL = 600 # 10 minutes - - -@dataclass -class QueryStatus: - team_id: int - num_rows: float = 0 - total_rows: float = 0 - error: bool = False - complete: bool = False - error_message: str = "" - results: Any = None - start_time: Optional[float] = None - end_time: Optional[float] = None - task_id: Optional[str] = None - - -def generate_redis_results_key(query_id): - REDIS_KEY_PREFIX_ASYNC_RESULTS = "query_with_progress" - key = f"{REDIS_KEY_PREFIX_ASYNC_RESULTS}:{query_id}" - return key - - -def execute_with_progress( +import uuid + +import structlog +from rest_framework.exceptions import NotFound + +from posthog import celery, redis +from posthog.celery import process_query_task +from posthog.clickhouse.query_tagging import tag_queries +from posthog.schema import QueryStatus + +logger = structlog.get_logger(__name__) + +REDIS_STATUS_TTL_SECONDS = 600 # 10 minutes +REDIS_KEY_PREFIX_ASYNC_RESULTS = "query_async" + + +class QueryNotFoundError(NotFound): + pass + + +class QueryRetrievalError(Exception): + pass + + +def generate_redis_results_key(query_id: str, team_id: int) -> 
str: + return f"{REDIS_KEY_PREFIX_ASYNC_RESULTS}:{team_id}:{query_id}" + + +def execute_process_query( team_id, query_id, - query, - args=None, - settings=None, - with_column_types=False, - update_freq=0.2, + query_json, + in_export_context, + refresh_requested, task_id=None, ): - """ - Kick off query with progress reporting - Iterate over the progress status - Save status to redis - Once complete save results to redis - """ - - key = generate_redis_results_key(query_id) - ch_client = SyncClient( - host=CLICKHOUSE_HOST, - database=CLICKHOUSE_DATABASE, - secure=CLICKHOUSE_SECURE, - user=CLICKHOUSE_USER, - password=CLICKHOUSE_PASSWORD, - ca_certs=CLICKHOUSE_CA, - verify=CLICKHOUSE_VERIFY, - settings={"max_result_rows": "10000"}, - ) + key = generate_redis_results_key(query_id, team_id) redis_client = redis.get_client() - start_time = perf_counter() - - prepared_sql, prepared_args, tags = _prepare_query(client=ch_client, query=query, args=args) + from posthog.models import Team + from posthog.api.services.query import process_query - query_status = QueryStatus(team_id, task_id=task_id) + team = Team.objects.get(pk=team_id) - start_time = time.time() + query_status = QueryStatus( + id=query_id, + team_id=team_id, + task_id=task_id, + complete=False, + error=True, # Assume error in case nothing below ends up working + start_time=datetime.datetime.utcnow(), + ) + value = query_status.model_dump_json() try: - progress = ch_client.execute_with_progress( - prepared_sql, - params=prepared_args, - settings=settings, - with_column_types=with_column_types, + tag_queries(client_query_id=query_id, team_id=team_id) + results = process_query( + team=team, query_json=query_json, in_export_context=in_export_context, refresh_requested=refresh_requested ) - for num_rows, total_rows in progress: - query_status = QueryStatus( - team_id=team_id, - num_rows=num_rows, - total_rows=total_rows, - complete=False, - error=False, - error_message="", - results=None, - start_time=start_time, - 
task_id=task_id, - ) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) - time.sleep(update_freq) - else: - rv = progress.get_result() - query_status = QueryStatus( - team_id=team_id, - num_rows=query_status.num_rows, - total_rows=query_status.total_rows, - complete=True, - error=False, - start_time=query_status.start_time, - end_time=time.time(), - error_message="", - results=rv, - task_id=task_id, - ) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) - + logger.info("Got results for team %s query %s", team_id, query_id) + query_status.complete = True + query_status.error = False + query_status.results = results + query_status.expiration_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=REDIS_STATUS_TTL_SECONDS) + query_status.end_time = datetime.datetime.utcnow() + value = query_status.model_dump_json() except Exception as err: - err = wrap_query_error(err) - tags["failed"] = True - tags["reason"] = type(err).__name__ - statsd.incr("clickhouse_sync_execution_failure") - query_status = QueryStatus( - team_id=team_id, - num_rows=query_status.num_rows, - total_rows=query_status.total_rows, - complete=False, - error=True, - start_time=query_status.start_time, - end_time=time.time(), - error_message=str(err), - results=None, - task_id=task_id, - ) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) - + query_status.results = None # Clear results in case they are faulty + query_status.error_message = str(err) + logger.error("Error processing query for team %s query %s: %s", team_id, query_id, err) + value = query_status.model_dump_json() raise err finally: - ch_client.disconnect() + redis_client.set(key, value, ex=REDIS_STATUS_TTL_SECONDS) - execution_time = perf_counter() - start_time - statsd.timing("clickhouse_sync_execution_time", execution_time * 1000.0) - - if app_settings.SHELL_PLUS_PRINT_SQL: - print("Execution time: %.6fs" % 
(execution_time,)) # noqa T201 - - -def enqueue_execute_with_progress( +def enqueue_process_query_task( team_id, - query, - args=None, - settings=None, - with_column_types=False, - bypass_celery=False, + query_json, query_id=None, + refresh_requested=False, + bypass_celery=False, force=False, ): if not query_id: - query_id = _query_hash(query, team_id, args) - key = generate_redis_results_key(query_id) + query_id = uuid.uuid4().hex + + key = generate_redis_results_key(query_id, team_id) redis_client = redis.get_client() if force: @@ -187,49 +108,53 @@ def enqueue_execute_with_progress( # If we've seen this query before return the query_id and don't resubmit it. return query_id - # Immediately set status so we don't have race with celery - query_status = QueryStatus(team_id=team_id, start_time=time.time()) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) + # Immediately set status, so we don't have race with celery + query_status = QueryStatus(id=query_id, team_id=team_id) + redis_client.set(key, query_status.model_dump_json(), ex=REDIS_STATUS_TTL_SECONDS) if bypass_celery: # Call directly ( for testing ) - enqueue_clickhouse_execute_with_progress(team_id, query_id, query, args, settings, with_column_types) + process_query_task(team_id, query_id, query_json, in_export_context=True, refresh_requested=refresh_requested) else: - enqueue_clickhouse_execute_with_progress.delay(team_id, query_id, query, args, settings, with_column_types) + task = process_query_task.delay( + team_id, query_id, query_json, in_export_context=True, refresh_requested=refresh_requested + ) + query_status.task_id = task.id + redis_client.set(key, query_status.model_dump_json(), ex=REDIS_STATUS_TTL_SECONDS) return query_id -def get_status_or_results(team_id, query_id): - """ - Returns QueryStatus data class - QueryStatus data class contains either: - Current status of running query - Results of completed query - Error payload of failed query - """ +def 
get_query_status(team_id, query_id): redis_client = redis.get_client() - key = generate_redis_results_key(query_id) + key = generate_redis_results_key(query_id, team_id) + try: byte_results = redis_client.get(key) - if byte_results: - str_results = byte_results.decode("utf-8") - else: - return QueryStatus(team_id, error=True, error_message="Query is unknown to backend") - query_status = QueryStatus(**json.loads(str_results)) - if query_status.team_id != team_id: - raise Exception("Requesting team is not executing team") except Exception as e: - query_status = QueryStatus(team_id, error=True, error_message=str(e)) - return query_status + raise QueryRetrievalError(f"Error retrieving query {query_id} for team {team_id}") from e + if not byte_results: + raise QueryNotFoundError(f"Query {query_id} not found for team {team_id}") -def _query_hash(query: str, team_id: int, args: Any) -> str: - """ - Takes a query and returns a hex encoded hash of the query and args - """ - if args: - key = hashlib.md5((str(team_id) + query + json.dumps(args)).encode("utf-8")).hexdigest() - else: - key = hashlib.md5((str(team_id) + query).encode("utf-8")).hexdigest() - return key + return QueryStatus(**json.loads(byte_results)) + + +def cancel_query(team_id, query_id): + query_status = get_query_status(team_id, query_id) + + if query_status.task_id: + logger.info("Got task id %s, attempting to revoke", query_status.task_id) + celery.app.control.revoke(query_status.task_id, terminate=True) + + from posthog.clickhouse.cancel import cancel_query_on_cluster + + logger.info("Revoked task id %s, attempting to cancel on cluster", query_status.task_id) + cancel_query_on_cluster(team_id, query_id) + + redis_client = redis.get_client() + key = generate_redis_results_key(query_id, team_id) + logger.info("Deleting redis query key %s", key) + redis_client.delete(key) + + return True diff --git a/posthog/clickhouse/client/test/__snapshots__/test_execute_async.ambr 
b/posthog/clickhouse/client/test/__snapshots__/test_execute_async.ambr new file mode 100644 index 0000000000000..282191d2015c7 --- /dev/null +++ b/posthog/clickhouse/client/test/__snapshots__/test_execute_async.ambr @@ -0,0 +1,8 @@ +# name: ClickhouseClientTestCase.test_async_query_client + ' + SELECT plus(1, 1) + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=600, + allow_experimental_object_type=1 + ' +--- diff --git a/posthog/clickhouse/client/test/test_execute_async.py b/posthog/clickhouse/client/test/test_execute_async.py new file mode 100644 index 0000000000000..4958c23b3f0a0 --- /dev/null +++ b/posthog/clickhouse/client/test/test_execute_async.py @@ -0,0 +1,153 @@ +import uuid +from unittest.mock import patch + +from django.test import TestCase + +from posthog.clickhouse.client import execute_async as client +from posthog.client import sync_execute +from posthog.hogql.errors import HogQLException +from posthog.models import Organization, Team +from posthog.test.base import ClickhouseTestMixin, snapshot_clickhouse_queries + + +def build_query(sql): + return { + "kind": "HogQLQuery", + "query": sql, + } + + +class ClickhouseClientTestCase(TestCase, ClickhouseTestMixin): + def setUp(self): + self.organization = Organization.objects.create(name="test") + self.team = Team.objects.create(organization=self.organization) + self.team_id = self.team.pk + + @snapshot_clickhouse_queries + def test_async_query_client(self): + query = build_query("SELECT 1+1") + team_id = self.team_id + query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True) + result = client.get_query_status(team_id, query_id) + self.assertFalse(result.error, result.error_message) + self.assertTrue(result.complete) + self.assertEqual(result.results["results"], [[2]]) + + def test_async_query_client_errors(self): + query = build_query("SELECT WOW SUCH DATA FROM NOWHERE THIS WILL CERTAINLY WORK") + self.assertRaises( + HogQLException, + client.enqueue_process_query_task, + 
**{"team_id": (self.team_id), "query_json": query, "bypass_celery": True}, + ) + query_id = uuid.uuid4().hex + try: + client.enqueue_process_query_task(self.team_id, query, query_id=query_id, bypass_celery=True) + except Exception: + pass + + result = client.get_query_status(self.team_id, query_id) + self.assertTrue(result.error) + self.assertRegex(result.error_message, "Unknown table") + + def test_async_query_client_uuid(self): + query = build_query("SELECT toUUID('00000000-0000-0000-0000-000000000000')") + team_id = self.team_id + query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True) + result = client.get_query_status(team_id, query_id) + self.assertFalse(result.error, result.error_message) + self.assertTrue(result.complete) + self.assertEqual(result.results["results"], [["00000000-0000-0000-0000-000000000000"]]) + + def test_async_query_client_does_not_leak(self): + query = build_query("SELECT 1+1") + team_id = self.team_id + wrong_team = 5 + query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True) + + try: + client.get_query_status(wrong_team, query_id) + except Exception as e: + self.assertEqual(str(e), f"Query {query_id} not found for team {wrong_team}") + + @patch("posthog.clickhouse.client.execute_async.process_query_task") + def test_async_query_client_is_lazy(self, execute_sync_mock): + query = build_query("SELECT 4 + 4") + query_id = uuid.uuid4().hex + team_id = self.team_id + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again (for good measure!) 
+ client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Assert that we only called clickhouse once + execute_sync_mock.assert_called_once() + + @patch("posthog.clickhouse.client.execute_async.process_query_task") + def test_async_query_client_is_lazy_but_not_too_lazy(self, execute_sync_mock): + query = build_query("SELECT 8 + 8") + query_id = uuid.uuid4().hex + team_id = self.team_id + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again, but with force + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True, force=True) + + # Try the same query again (for good measure!) + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Assert that we called clickhouse twice + self.assertEqual(execute_sync_mock.call_count, 2) + + @patch("posthog.clickhouse.client.execute_async.process_query_task") + def test_async_query_client_manual_query_uuid(self, execute_sync_mock): + # This is a unique test because technically in the test pattern `SELECT 8 + 8` is already + # in redis. This tests to make sure it is treated as a unique run of that query + query = build_query("SELECT 8 + 8") + team_id = self.team_id + query_id = "I'm so unique" + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again, but with force + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True, force=True) + + # Try the same query again (for good measure!) + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Assert that we called clickhouse twice + self.assertEqual(execute_sync_mock.call_count, 2) + + def test_client_strips_comments_from_request(self): + """ + To ensure we can easily copy queries from `system.query_log` in e.g. + Metabase, we strip comments from the query we send. 
Metabase doesn't + display multilined output. + + See https://github.com/metabase/metabase/issues/14253 + + Note I'm not really testing much complexity, I trust that those will + come out as failures in other tests. + """ + from posthog.clickhouse.query_tagging import tag_queries + + # First add in the request information that should be added to the sql. + # We check this to make sure it is not removed by the comment stripping + with self.capture_select_queries() as sqls: + tag_queries(kind="request", id="1") + sync_execute( + query=""" + -- this request returns 1 + SELECT 1 + """ + ) + self.assertEqual(len(sqls), 1) + first_query = sqls[0] + self.assertIn(f"SELECT 1", first_query) + self.assertNotIn("this request returns", first_query) + + # Make sure it still includes the "annotation" comment that includes + # request routing information for debugging purposes + self.assertIn("/* request:1 */", first_query) diff --git a/posthog/clickhouse/plugin_log_entries.py b/posthog/clickhouse/plugin_log_entries.py index 1f4f7c70d7146..1ac1cb0759ce7 100644 --- a/posthog/clickhouse/plugin_log_entries.py +++ b/posthog/clickhouse/plugin_log_entries.py @@ -25,7 +25,7 @@ PLUGIN_LOG_ENTRIES_TABLE_ENGINE = lambda: ReplacingMergeTree(PLUGIN_LOG_ENTRIES_TABLE, ver="_timestamp") PLUGIN_LOG_ENTRIES_TABLE_SQL = lambda: ( PLUGIN_LOG_ENTRIES_TABLE_BASE_SQL - + """PARTITION BY plugin_id ORDER BY (team_id, id) + + """PARTITION BY toYYYYMMDD(timestamp) ORDER BY (team_id, plugin_id, plugin_config_id, timestamp) {ttl_period} SETTINGS index_granularity=512 """ diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr index b260abb7ce1d0..cd975ff0f823c 100644 --- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr +++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr @@ -1367,7 +1367,7 @@ , _offset UInt64 ) ENGINE = 
ReplicatedReplacingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_noshard/posthog.plugin_log_entries', '{replica}-{shard}', _timestamp) - PARTITION BY plugin_id ORDER BY (team_id, id) + PARTITION BY toYYYYMMDD(timestamp) ORDER BY (team_id, plugin_id, plugin_config_id, timestamp) SETTINGS index_granularity=512 @@ -2166,7 +2166,7 @@ , _offset UInt64 ) ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_noshard/posthog.plugin_log_entries', '{replica}-{shard}', _timestamp) - PARTITION BY plugin_id ORDER BY (team_id, id) + PARTITION BY toYYYYMMDD(timestamp) ORDER BY (team_id, plugin_id, plugin_config_id, timestamp) SETTINGS index_granularity=512 diff --git a/posthog/event_usage.py b/posthog/event_usage.py index fa69f0c23662b..7cd1945d37df4 100644 --- a/posthog/event_usage.py +++ b/posthog/event_usage.py @@ -196,6 +196,26 @@ def report_bulk_invited( ) +def report_user_organization_membership_level_changed( + user: User, + organization: Organization, + new_level: int, + previous_level: int, +) -> None: + """ + Triggered after a user's membership level in an organization is changed. 
+ """ + posthoganalytics.capture( + user.distinct_id, + "membership level changed", + properties={ + "new_level": new_level, + "previous_level": previous_level, + }, + groups=groups(organization), + ) + + def report_user_action(user: User, event: str, properties: Dict = {}): posthoganalytics.capture( user.distinct_id, diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index 9d619c23175b6..5695a0d0be2e5 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -126,26 +126,35 @@ def property_to_expr( elif len(value) == 1: value = value[0] else: - exprs = [ - property_to_expr( - Property( - type=property.type, - key=property.key, - operator=property.operator, - value=v, - ), - team, - scope, + if operator in [PropertyOperator.exact, PropertyOperator.is_not]: + op = ( + ast.CompareOperationOp.In + if operator == PropertyOperator.exact + else ast.CompareOperationOp.NotIn ) - for v in value - ] - if ( - operator == PropertyOperator.is_not - or operator == PropertyOperator.not_icontains - or operator == PropertyOperator.not_regex - ): - return ast.And(exprs=exprs) - return ast.Or(exprs=exprs) + + return ast.CompareOperation( + op=op, + left=ast.Field(chain=["properties", property.key]), + right=ast.Tuple(exprs=[ast.Constant(value=v) for v in value]), + ) + else: + exprs = [ + property_to_expr( + Property( + type=property.type, + key=property.key, + operator=property.operator, + value=v, + ), + team, + scope, + ) + for v in value + ] + if operator == PropertyOperator.not_icontains or operator == PropertyOperator.not_regex: + return ast.And(exprs=exprs) + return ast.Or(exprs=exprs) chain = ["person", "properties"] if property.type == "person" and scope != "person" else ["properties"] field = ast.Field(chain=chain + [property.key]) diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py index c7e8c82713b15..751b9fb46b860 100644 --- a/posthog/hogql/query.py +++ b/posthog/hogql/query.py @@ -22,6 +22,8 @@ from posthog.client import 
sync_execute from posthog.schema import HogQLQueryResponse, HogQLFilters, HogQLQueryModifiers +EXPORT_CONTEXT_MAX_EXECUTION_TIME = 600 + def execute_hogql_query( query: Union[str, ast.SelectQuery, ast.SelectUnionQuery], @@ -119,6 +121,10 @@ def execute_hogql_query( ) ) + settings = settings or HogQLGlobalSettings() + if in_export_context: + settings.max_execution_time = EXPORT_CONTEXT_MAX_EXECUTION_TIME + # Print the ClickHouse SQL query with timings.measure("print_ast"): clickhouse_context = HogQLContext( @@ -131,7 +137,7 @@ def execute_hogql_query( select_query, context=clickhouse_context, dialect="clickhouse", - settings=settings or HogQLGlobalSettings(), + settings=settings, ) timings_dict = timings.to_dict() diff --git a/posthog/hogql/test/__snapshots__/test_resolver.ambr b/posthog/hogql/test/__snapshots__/test_resolver.ambr new file mode 100644 index 0000000000000..78223c03c2b66 --- /dev/null +++ b/posthog/hogql/test/__snapshots__/test_resolver.ambr @@ -0,0 +1,2984 @@ +# name: TestResolver.test_asterisk_expander_from_subquery_table + ' + { + select: [ + { + chain: [ + "uuid" + ] + type: { + name: "uuid" + table_type: { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: { + name: "$group_0" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + }, + $group_1: { + name: "$group_1" + table_type: + }, + $group_2: { + name: "$group_2" + table_type: + }, + $group_3: { + name: "$group_3" + table_type: + }, + $group_4: { + name: "$group_4" + table_type: + }, + $session_id: { + name: 
"$session_id" + table_type: + }, + created_at: { + name: "created_at" + table_type: + }, + distinct_id: { + name: "distinct_id" + table_type: + }, + elements_chain: { + name: "elements_chain" + table_type: + }, + event: { + name: "event" + table_type: + }, + properties: { + name: "properties" + table_type: + }, + timestamp: { + name: "timestamp" + table_type: + }, + uuid: { + name: "uuid" + table_type: + } + } + ctes: {} + tables: { + events: + } + } + } + }, + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: + } + }, + { + chain: [ + "properties" + ] + type: { + name: "properties" + table_type: + } + }, + { + chain: [ + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + }, + { + chain: [ + "distinct_id" + ] + type: { + name: "distinct_id" + table_type: + } + }, + { + chain: [ + "elements_chain" + ] + type: { + name: "elements_chain" + table_type: + } + }, + { + chain: [ + "created_at" + ] + type: { + name: "created_at" + table_type: + } + }, + { + chain: [ + "$session_id" + ] + type: { + name: "$session_id" + table_type: + } + }, + { + chain: [ + "$group_0" + ] + type: { + name: "$group_0" + table_type: + } + }, + { + chain: [ + "$group_1" + ] + type: { + name: "$group_1" + table_type: + } + }, + { + chain: [ + "$group_2" + ] + type: { + name: "$group_2" + table_type: + } + }, + { + chain: [ + "$group_3" + ] + type: { + name: "$group_3" + table_type: + } + }, + { + chain: [ + "$group_4" + ] + type: { + name: "$group_4" + table_type: + } + } + ] + select_from: { + table: { + select: [ + { + chain: [ + "uuid" + ] + type: + }, + { + chain: [ + "event" + ] + type: + }, + { + chain: [ + "properties" + ] + type: + }, + { + chain: [ + "timestamp" + ] + type: + }, + { + chain: [ + "distinct_id" + ] + type: + }, + { + chain: [ + "elements_chain" + ] + type: + }, + { + chain: [ + "created_at" + ] + type: + }, + { + chain: [ + "$session_id" + ] + type: + }, + { + chain: [ + "$group_0" + ] + type: + }, + { + chain: [ + "$group_1" + ] + type: + 
}, + { + chain: [ + "$group_2" + ] + type: + }, + { + chain: [ + "$group_3" + ] + type: + }, + { + chain: [ + "$group_4" + ] + type: + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [ + + ] + columns: { + $group_0: , + $group_1: , + $group_2: , + $group_3: , + $group_4: , + $session_id: , + created_at: , + distinct_id: , + elements_chain: , + event: , + properties: , + timestamp: , + uuid: + } + ctes: {} + tables: {} + } + } + ' +--- +# name: TestResolver.test_asterisk_expander_select_union + ' + { + select: [ + { + chain: [ + "uuid" + ] + type: { + name: "uuid" + table_type: { + types: [ + { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: { + name: "$group_0" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + }, + $group_1: { + name: "$group_1" + table_type: + }, + $group_2: { + name: "$group_2" + table_type: + }, + $group_3: { + name: "$group_3" + table_type: + }, + $group_4: { + name: "$group_4" + table_type: + }, + $session_id: { + name: "$session_id" + table_type: + }, + created_at: { + name: "created_at" + table_type: + }, + distinct_id: { + name: "distinct_id" + table_type: + }, + elements_chain: { + name: "elements_chain" + table_type: + }, + event: { + name: "event" + table_type: + }, + properties: { + name: "properties" + table_type: + }, + timestamp: { + name: "timestamp" + table_type: + }, + uuid: { + name: "uuid" + table_type: + } + } + ctes: {} + tables: { + 
events: + } + }, + { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: { + name: "$group_0" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + }, + $group_1: { + name: "$group_1" + table_type: + }, + $group_2: { + name: "$group_2" + table_type: + }, + $group_3: { + name: "$group_3" + table_type: + }, + $group_4: { + name: "$group_4" + table_type: + }, + $session_id: { + name: "$session_id" + table_type: + }, + created_at: { + name: "created_at" + table_type: + }, + distinct_id: { + name: "distinct_id" + table_type: + }, + elements_chain: { + name: "elements_chain" + table_type: + }, + event: { + name: "event" + table_type: + }, + properties: { + name: "properties" + table_type: + }, + timestamp: { + name: "timestamp" + table_type: + }, + uuid: { + name: "uuid" + table_type: + } + } + ctes: {} + tables: { + events: + } + } + ] + } + } + }, + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: + } + }, + { + chain: [ + "properties" + ] + type: { + name: "properties" + table_type: + } + }, + { + chain: [ + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + }, + { + chain: [ + "distinct_id" + ] + type: { + name: "distinct_id" + table_type: + } + }, + { + chain: [ + "elements_chain" + ] + type: { + name: "elements_chain" + table_type: + } + }, + { + chain: [ + "created_at" + ] + type: { + name: "created_at" + table_type: + } + }, + { + chain: [ + "$session_id" + ] + type: { + name: "$session_id" + table_type: + } + }, + { + chain: [ + "$group_0" + ] + 
type: { + name: "$group_0" + table_type: + } + }, + { + chain: [ + "$group_1" + ] + type: { + name: "$group_1" + table_type: + } + }, + { + chain: [ + "$group_2" + ] + type: { + name: "$group_2" + table_type: + } + }, + { + chain: [ + "$group_3" + ] + type: { + name: "$group_3" + table_type: + } + }, + { + chain: [ + "$group_4" + ] + type: { + name: "$group_4" + table_type: + } + } + ] + select_from: { + table: { + select_queries: [ + { + select: [ + { + chain: [ + "uuid" + ] + type: + }, + { + chain: [ + "event" + ] + type: + }, + { + chain: [ + "properties" + ] + type: + }, + { + chain: [ + "timestamp" + ] + type: + }, + { + chain: [ + "distinct_id" + ] + type: + }, + { + chain: [ + "elements_chain" + ] + type: + }, + { + chain: [ + "created_at" + ] + type: + }, + { + chain: [ + "$session_id" + ] + type: + }, + { + chain: [ + "$group_0" + ] + type: + }, + { + chain: [ + "$group_1" + ] + type: + }, + { + chain: [ + "$group_2" + ] + type: + }, + { + chain: [ + "$group_3" + ] + type: + }, + { + chain: [ + "$group_4" + ] + type: + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: + }, + { + select: [ + { + chain: [ + "uuid" + ] + type: + }, + { + chain: [ + "event" + ] + type: + }, + { + chain: [ + "properties" + ] + type: + }, + { + chain: [ + "timestamp" + ] + type: + }, + { + chain: [ + "distinct_id" + ] + type: + }, + { + chain: [ + "elements_chain" + ] + type: + }, + { + chain: [ + "created_at" + ] + type: + }, + { + chain: [ + "$session_id" + ] + type: + }, + { + chain: [ + "$group_0" + ] + type: + }, + { + chain: [ + "$group_1" + ] + type: + }, + { + chain: [ + "$group_2" + ] + type: + }, + { + chain: [ + "$group_3" + ] + type: + }, + { + chain: [ + "$group_4" + ] + type: + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: + } + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [ + + ] + columns: { + $group_0: , + $group_1: , + $group_2: , + $group_3: , + 
$group_4: , + $session_id: , + created_at: , + distinct_id: , + elements_chain: , + event: , + properties: , + timestamp: , + uuid: + } + ctes: {} + tables: {} + } + } + ' +--- +# name: TestResolver.test_asterisk_expander_subquery + ' + { + select: [ + { + chain: [ + "a" + ] + type: { + name: "a" + table_type: { + aliases: { + a: { + alias: "a" + type: { + data_type: "int" + } + }, + b: { + alias: "b" + type: { + data_type: "int" + } + } + } + anonymous_tables: [] + columns: { + a: , + b: + } + ctes: {} + tables: {} + } + } + }, + { + chain: [ + "b" + ] + type: { + name: "b" + table_type: + } + } + ] + select_from: { + table: { + select: [ + { + alias: "a" + expr: { + type: + value: 1 + } + type: + }, + { + alias: "b" + expr: { + type: + value: 2 + } + type: + } + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [ + + ] + columns: { + a: , + b: + } + ctes: {} + tables: {} + } + } + ' +--- +# name: TestResolver.test_asterisk_expander_subquery_alias + ' + { + select: [ + { + chain: [ + "a" + ] + type: { + name: "a" + table_type: { + alias: "x" + select_query_type: { + aliases: { + a: { + alias: "a" + type: { + data_type: "int" + } + }, + b: { + alias: "b" + type: { + data_type: "int" + } + } + } + anonymous_tables: [] + columns: { + a: , + b: + } + ctes: {} + tables: {} + } + } + } + }, + { + chain: [ + "b" + ] + type: { + name: "b" + table_type: + } + } + ] + select_from: { + alias: "x" + table: { + select: [ + { + alias: "a" + expr: { + type: + value: 1 + } + type: + }, + { + alias: "b" + expr: { + type: + value: 2 + } + type: + } + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + a: , + b: + } + ctes: {} + tables: { + x: + } + } + } + ' +--- +# name: TestResolver.test_asterisk_expander_table + ' + { + select: [ + { + chain: [ + "uuid" + ] + type: { + name: "uuid" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + 
created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + }, + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: + } + }, + { + chain: [ + "properties" + ] + type: { + name: "properties" + table_type: + } + }, + { + chain: [ + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + }, + { + chain: [ + "distinct_id" + ] + type: { + name: "distinct_id" + table_type: + } + }, + { + chain: [ + "elements_chain" + ] + type: { + name: "elements_chain" + table_type: + } + }, + { + chain: [ + "created_at" + ] + type: { + name: "created_at" + table_type: + } + }, + { + chain: [ + "$session_id" + ] + type: { + name: "$session_id" + table_type: + } + }, + { + chain: [ + "$group_0" + ] + type: { + name: "$group_0" + table_type: + } + }, + { + chain: [ + "$group_1" + ] + type: { + name: "$group_1" + table_type: + } + }, + { + chain: [ + "$group_2" + ] + type: { + name: "$group_2" + table_type: + } + }, + { + chain: [ + "$group_3" + ] + type: { + name: "$group_3" + table_type: + } + }, + { + chain: [ + "$group_4" + ] + type: { + name: "$group_4" + table_type: + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: , + $group_1: , + $group_2: , + $group_3: , + $group_4: , + $session_id: , + created_at: , + distinct_id: , + elements_chain: , + event: , + properties: , + timestamp: , + uuid: + } + ctes: {} + tables: { + events: + } + } + } + ' +--- +# name: TestResolver.test_asterisk_expander_table_alias + ' + { + select: [ + { + chain: [ + "uuid" + ] + type: { + name: "uuid" + table_type: { + alias: "e" + table_type: 
{ + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + } + }, + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: + } + }, + { + chain: [ + "properties" + ] + type: { + name: "properties" + table_type: + } + }, + { + chain: [ + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + }, + { + chain: [ + "distinct_id" + ] + type: { + name: "distinct_id" + table_type: + } + }, + { + chain: [ + "elements_chain" + ] + type: { + name: "elements_chain" + table_type: + } + }, + { + chain: [ + "created_at" + ] + type: { + name: "created_at" + table_type: + } + }, + { + chain: [ + "$session_id" + ] + type: { + name: "$session_id" + table_type: + } + }, + { + chain: [ + "$group_0" + ] + type: { + name: "$group_0" + table_type: + } + }, + { + chain: [ + "$group_1" + ] + type: { + name: "$group_1" + table_type: + } + }, + { + chain: [ + "$group_2" + ] + type: { + name: "$group_2" + table_type: + } + }, + { + chain: [ + "$group_3" + ] + type: { + name: "$group_3" + table_type: + } + }, + { + chain: [ + "$group_4" + ] + type: { + name: "$group_4" + table_type: + } + } + ] + select_from: { + alias: "e" + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: , + $group_1: , + $group_2: , + $group_3: , + $group_4: , + $session_id: , + created_at: , + distinct_id: , + elements_chain: , + event: , + properties: , + timestamp: , + uuid: + } + ctes: {} + tables: { + e: + } + } + } + ' +--- +# name: 
TestResolver.test_call_type + ' + { + select: [ + { + args: [ + { + chain: [ + "timestamp" + ] + type: { + name: "timestamp" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + } + ] + distinct: False + name: "max" + type: { + arg_types: [ + { + data_type: "datetime" + } + ] + name: "max" + return_type: { + data_type: "unknown" + } + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: {} + ctes: {} + tables: { + events: + } + } + } + ' +--- +# name: TestResolver.test_resolve_boolean_operation_types + ' + { + select: [ + { + exprs: [ + { + type: { + data_type: "int" + } + value: 1 + }, + { + type: { + data_type: "int" + } + value: 1 + } + ] + type: { + data_type: "bool" + } + }, + { + exprs: [ + { + type: { + data_type: "int" + } + value: 1 + }, + { + type: { + data_type: "int" + } + value: 1 + } + ] + type: { + data_type: "bool" + } + }, + { + expr: { + type: { + data_type: "bool" + } + value: True + } + type: { + data_type: "bool" + } + } + ] + type: { + aliases: {} + anonymous_tables: [] + columns: {} + ctes: {} + tables: {} + } + } + ' +--- +# name: TestResolver.test_resolve_constant_type + ' + { + select: [ + { + type: { + data_type: "int" + } + value: 1 + }, + { + type: { + data_type: "str" + } + value: "boo" + }, + { + type: { + data_type: "bool" + } + value: True + }, + { + type: { + data_type: "float" + } + value: 1.1232 + }, + { + type: { + data_type: "unknown" + } + }, + { + type: { + 
data_type: "date" + } + value: 2020-01-10 + }, + { + type: { + data_type: "datetime" + } + value: 2020-01-10 00:00:00+00:00 + }, + { + type: { + data_type: "uuid" + } + value: 00000000-0000-4000-8000-000000000000 + }, + { + type: { + data_type: "array" + item_type: { + data_type: "unknown" + } + } + value: [] + }, + { + type: { + data_type: "array" + item_type: { + data_type: "int" + } + } + value: [ + 1, + 2 + ] + }, + { + type: { + data_type: "tuple" + item_types: [ + { + data_type: "int" + }, + { + data_type: "int" + }, + { + data_type: "int" + } + ] + } + value: (1, 2, 3) + } + ] + type: { + aliases: {} + anonymous_tables: [] + columns: {} + ctes: {} + tables: {} + } + } + ' +--- +# name: TestResolver.test_resolve_events_table + ' + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + }, + { + chain: [ + "events", + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + timestamp: + } + ctes: {} + tables: { + events: + } + } + where: { + left: { + chain: [ + "events", + "event" + ] + type: { + name: "event" + table_type: + } + } + op: "==" + right: { + type: { + data_type: "str" + } + value: "test" + } + type: { + data_type: "bool" + } + } + } + ' +--- +# name: TestResolver.test_resolve_events_table_alias + ' + { + select: [ + { + chain: [ + "event" + 
] + type: { + name: "event" + table_type: { + alias: "e" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + } + }, + { + chain: [ + "e", + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + } + ] + select_from: { + alias: "e" + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + timestamp: + } + ctes: {} + tables: { + e: + } + } + where: { + left: { + chain: [ + "e", + "event" + ] + type: { + name: "event" + table_type: + } + } + op: "==" + right: { + type: { + data_type: "str" + } + value: "test" + } + type: { + data_type: "bool" + } + } + } + ' +--- +# name: TestResolver.test_resolve_events_table_column_alias + ' + { + select: [ + { + alias: "ee" + expr: { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + alias: "e" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + } + } + type: { + alias: "ee" + type: + } + }, + { + chain: [ + "ee" + ] + type: + }, + { + alias: "e" + expr: { + chain: [ + "ee" 
+ ] + type: + } + type: { + alias: "e" + type: + } + }, + { + chain: [ + "e", + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + } + ] + select_from: { + alias: "e" + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: { + e: , + ee: + } + anonymous_tables: [] + columns: { + e: , + ee: , + timestamp: + } + ctes: {} + tables: { + e: + } + } + where: { + left: { + chain: [ + "e", + "event" + ] + type: { + name: "event" + table_type: + } + } + op: "==" + right: { + type: { + data_type: "str" + } + value: "test" + } + type: { + data_type: "bool" + } + } + } + ' +--- +# name: TestResolver.test_resolve_events_table_column_alias_inside_subquery + ' + { + select: [ + { + chain: [ + "b" + ] + type: { + name: "b" + table_type: { + alias: "e" + select_query_type: { + aliases: { + b: { + alias: "b" + type: { + name: "event" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + }, + c: { + alias: "c" + type: { + name: "timestamp" + table_type: + } + } + } + anonymous_tables: [] + columns: { + b: , + c: + } + ctes: {} + tables: { + events: + } + } + } + } + } + ] + select_from: { + alias: "e" + table: { + select: [ + { + alias: "b" + expr: { + chain: [ + "event" + ] + type: + } + type: + }, + { + alias: "c" + expr: { + chain: [ + "timestamp" + ] + type: + } + type: + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + b: + } + ctes: {} + 
tables: { + e: + } + } + where: { + left: { + chain: [ + "e", + "b" + ] + type: { + name: "b" + table_type: + } + } + op: "==" + right: { + type: { + data_type: "str" + } + value: "test" + } + type: { + data_type: "bool" + } + } + } + ' +--- +# name: TestResolver.test_resolve_lazy_events_pdi_person_table + ' + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + }, + { + chain: [ + "pdi", + "person", + "id" + ] + type: { + name: "id" + table_type: { + field: "person" + lazy_join: { + from_field: "person_id", + join_function: , + join_table: { + fields: { + created_at: {}, + id: {}, + is_identified: {}, + pdi: {}, + properties: {}, + team_id: {} + } + } + } + table_type: { + field: "pdi" + lazy_join: { + from_field: "distinct_id", + join_function: , + join_table: { + fields: { + distinct_id: {}, + person: {}, + person_id: {}, + team_id: {} + } + } + } + table_type: + } + } + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + id: + } + ctes: {} + tables: { + events: + } + } + } + ' +--- +# name: TestResolver.test_resolve_lazy_events_pdi_person_table_aliased + ' + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + alias: "e" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + 
distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + } + }, + { + chain: [ + "e", + "pdi", + "person", + "id" + ] + type: { + name: "id" + table_type: { + field: "person" + lazy_join: { + from_field: "person_id", + join_function: , + join_table: { + fields: { + created_at: {}, + id: {}, + is_identified: {}, + pdi: {}, + properties: {}, + team_id: {} + } + } + } + table_type: { + field: "pdi" + lazy_join: { + from_field: "distinct_id", + join_function: , + join_table: { + fields: { + distinct_id: {}, + person: {}, + person_id: {}, + team_id: {} + } + } + } + table_type: + } + } + } + } + ] + select_from: { + alias: "e" + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + id: + } + ctes: {} + tables: { + e: + } + } + } + ' +--- +# name: TestResolver.test_resolve_lazy_events_pdi_table + ' + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + }, + { + chain: [ + "pdi", + "person_id" + ] + type: { + name: "person_id" + table_type: { + field: "pdi" + lazy_join: { + from_field: "distinct_id", + join_function: , + join_table: 
{ + fields: { + distinct_id: {}, + person: {}, + person_id: {}, + team_id: {} + } + } + } + table_type: + } + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + person_id: + } + ctes: {} + tables: { + events: + } + } + } + ' +--- +# name: TestResolver.test_resolve_lazy_events_pdi_table_aliased + ' + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + alias: "e" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + } + }, + { + chain: [ + "e", + "pdi", + "person_id" + ] + type: { + name: "person_id" + table_type: { + field: "pdi" + lazy_join: { + from_field: "distinct_id", + join_function: , + join_table: { + fields: { + distinct_id: {}, + person: {}, + person_id: {}, + team_id: {} + } + } + } + table_type: + } + } + } + ] + select_from: { + alias: "e" + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + person_id: + } + ctes: {} + tables: { + e: + } + } + } + ' +--- +# name: TestResolver.test_resolve_lazy_pdi_person_table + ' + { + select: [ + { + chain: [ + "distinct_id" + ] + type: { + name: "distinct_id" + table_type: { + table: { + fields: { + distinct_id: {}, + person: {}, + person_id: {}, + team_id: {} + } + } + } + } + }, + { + chain: [ + "person", + "id" + ] + type: { + name: "id" + table_type: { + field: "person" + lazy_join: { + from_field: "person_id", + 
join_function: , + join_table: { + fields: { + created_at: {}, + id: {}, + is_identified: {}, + pdi: {}, + properties: {}, + team_id: {} + } + } + } + table_type: + } + } + } + ] + select_from: { + table: { + chain: [ + "person_distinct_ids" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + distinct_id: , + id: + } + ctes: {} + tables: { + person_distinct_ids: + } + } + } + ' +--- +# name: TestResolver.test_resolve_union_all + ' + { + select_queries: [ + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + }, + { + chain: [ + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + timestamp: + } + ctes: {} + tables: { + events: + } + } + }, + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: 
{} + } + } + } + } + }, + { + chain: [ + "timestamp" + ] + type: { + name: "timestamp" + table_type: + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + timestamp: + } + ctes: {} + tables: { + events: + } + } + } + ] + type: { + types: [ + , + + ] + } + } + ' +--- +# name: TestResolver.test_resolve_virtual_events_poe + ' + { + select: [ + { + chain: [ + "event" + ] + type: { + name: "event" + table_type: { + table: { + fields: { + $group_0: {}, + $group_1: {}, + $group_2: {}, + $group_3: {}, + $group_4: {}, + $session_id: {}, + created_at: {}, + distinct_id: {}, + elements_chain: {}, + event: {}, + goe_0: {}, + goe_1: {}, + goe_2: {}, + goe_3: {}, + goe_4: {}, + group_0: {}, + group_1: {}, + group_2: {}, + group_3: {}, + group_4: {}, + override: {}, + override_person_id: {}, + pdi: {}, + person: {}, + person_id: {}, + poe: {}, + properties: {}, + session: {}, + team_id: {}, + timestamp: {}, + uuid: {} + } + } + } + } + }, + { + chain: [ + "poe", + "id" + ] + type: { + name: "id" + table_type: { + field: "poe" + table_type: + virtual_table: { + fields: { + created_at: {}, + id: {}, + properties: {} + } + } + } + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + event: , + id: + } + ctes: {} + tables: { + events: + } + } + } + ' +--- diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py index c0ed528ea4da9..ecdfecee28671 100644 --- a/posthog/hogql/test/test_property.py +++ b/posthog/hogql/test/test_property.py @@ -163,7 +163,7 @@ def test_property_to_expr_event_list(self): # positive self.assertEqual( self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "exact"}), - self._parse_expr("properties.a = 'b' or properties.a = 'c'"), + self._parse_expr("properties.a IN ('b', 'c')"), ) self.assertEqual( 
self._property_to_expr( @@ -183,7 +183,7 @@ def test_property_to_expr_event_list(self): # negative self.assertEqual( self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "is_not"}), - self._parse_expr("properties.a != 'b' and properties.a != 'c'"), + self._parse_expr("properties.a NOT IN ('b', 'c')"), ) self.assertEqual( self._property_to_expr( diff --git a/posthog/hogql/test/test_resolver.py b/posthog/hogql/test/test_resolver.py index f2ee1d812ea65..069e633e0a457 100644 --- a/posthog/hogql/test/test_resolver.py +++ b/posthog/hogql/test/test_resolver.py @@ -1,6 +1,6 @@ from datetime import timezone, datetime, date from typing import Optional, Dict, cast - +import pytest from django.test import override_settings from uuid import UUID @@ -10,12 +10,12 @@ from posthog.hogql.context import HogQLContext from posthog.hogql.database.database import create_hogql_database from posthog.hogql.database.models import ( - LazyJoin, FieldTraverser, StringJSONDatabaseField, StringDatabaseField, DateTimeDatabaseField, ) +from posthog.hogql.test.utils import pretty_dataclasses from posthog.hogql.visitor import clone_expr from posthog.hogql.parser import parse_select from posthog.hogql.printer import print_ast, print_prepared_ast @@ -44,42 +44,11 @@ def setUp(self): self.database = create_hogql_database(self.team.pk) self.context = HogQLContext(database=self.database, team_id=self.team.pk) + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_events_table(self): expr = self._select("SELECT event, events.timestamp FROM events WHERE events.event = 'test'") expr = resolve_types(expr, self.context) - - events_table_type = ast.TableType(table=self.database.events) - event_field_type = ast.FieldType(name="event", table_type=events_table_type) - timestamp_field_type = ast.FieldType(name="timestamp", table_type=events_table_type) - select_query_type = ast.SelectQueryType( - columns={"event": event_field_type, "timestamp": timestamp_field_type}, - 
tables={"events": events_table_type}, - ) - - expected = ast.SelectQuery( - select=[ - ast.Field(chain=["event"], type=event_field_type), - ast.Field(chain=["events", "timestamp"], type=timestamp_field_type), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=events_table_type), - type=events_table_type, - ), - where=ast.CompareOperation( - left=ast.Field(chain=["events", "event"], type=event_field_type), - op=ast.CompareOperationOp.Eq, - right=ast.Constant(value="test", type=ast.StringType()), - type=ast.BooleanType(), - ), - type=select_query_type, - ) - - # asserting individually to help debug if something is off - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot def test_will_not_run_twice(self): expr = self._select("SELECT event, events.timestamp FROM events WHERE events.event = 'test'") @@ -91,186 +60,23 @@ def test_will_not_run_twice(self): "Type already resolved for SelectQuery (SelectQueryType). 
Can't run again.", ) + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_events_table_alias(self): expr = self._select("SELECT event, e.timestamp FROM events e WHERE e.event = 'test'") expr = resolve_types(expr, self.context) + assert pretty_dataclasses(expr) == self.snapshot - events_table_type = ast.TableType(table=self.database.events) - events_table_alias_type = ast.TableAliasType(alias="e", table_type=events_table_type) - event_field_type = ast.FieldType(name="event", table_type=events_table_alias_type) - timestamp_field_type = ast.FieldType(name="timestamp", table_type=events_table_alias_type) - select_query_type = ast.SelectQueryType( - columns={"event": event_field_type, "timestamp": timestamp_field_type}, - tables={"e": events_table_alias_type}, - ) - - expected = ast.SelectQuery( - select=[ - ast.Field(chain=["event"], type=event_field_type), - ast.Field(chain=["e", "timestamp"], type=timestamp_field_type), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=events_table_type), - alias="e", - type=events_table_alias_type, - ), - where=ast.CompareOperation( - left=ast.Field(chain=["e", "event"], type=event_field_type), - op=ast.CompareOperationOp.Eq, - right=ast.Constant(value="test", type=ast.StringType()), - type=ast.BooleanType(), - ), - type=select_query_type, - ) - - # asserting individually to help debug if something is off - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) - + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_events_table_column_alias(self): expr = self._select("SELECT event as ee, ee, ee as e, e.timestamp FROM events e WHERE e.event = 'test'") expr = resolve_types(expr, self.context) + assert pretty_dataclasses(expr) == self.snapshot - events_table_type = ast.TableType(table=self.database.events) - 
events_table_alias_type = ast.TableAliasType(alias="e", table_type=events_table_type) - event_field_type = ast.FieldType(name="event", table_type=events_table_alias_type) - timestamp_field_type = ast.FieldType(name="timestamp", table_type=events_table_alias_type) - - select_query_type = ast.SelectQueryType( - aliases={ - "ee": ast.FieldAliasType(alias="ee", type=event_field_type), - "e": ast.FieldAliasType( - alias="e", - type=ast.FieldAliasType(alias="ee", type=event_field_type), - ), - }, - columns={ - "ee": ast.FieldAliasType(alias="ee", type=event_field_type), - "e": ast.FieldAliasType( - alias="e", - type=ast.FieldAliasType(alias="ee", type=event_field_type), - ), - "timestamp": timestamp_field_type, - }, - tables={"e": events_table_alias_type}, - ) - - expected = ast.SelectQuery( - select=[ - ast.Alias( - alias="ee", - expr=ast.Field(chain=["event"], type=event_field_type), - type=select_query_type.aliases["ee"], - ), - ast.Field(chain=["ee"], type=select_query_type.aliases["ee"]), - ast.Alias( - alias="e", - expr=ast.Field(chain=["ee"], type=select_query_type.aliases["ee"]), - type=select_query_type.aliases["e"], # is ee ? 
- ), - ast.Field(chain=["e", "timestamp"], type=timestamp_field_type), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=events_table_type), - alias="e", - type=select_query_type.tables["e"], - ), - where=ast.CompareOperation( - left=ast.Field(chain=["e", "event"], type=event_field_type), - op=ast.CompareOperationOp.Eq, - right=ast.Constant(value="test", type=ast.StringType()), - type=ast.BooleanType(), - ), - type=select_query_type, - ) - # asserting individually to help debug if something is off - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) - + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_events_table_column_alias_inside_subquery(self): expr = self._select("SELECT b FROM (select event as b, timestamp as c from events) e WHERE e.b = 'test'") expr = resolve_types(expr, self.context) - inner_events_table_type = ast.TableType(table=self.database.events) - inner_event_field_type = ast.FieldAliasType( - alias="b", - type=ast.FieldType(name="event", table_type=inner_events_table_type), - ) - timestamp_field_type = ast.FieldType(name="timestamp", table_type=inner_events_table_type) - timstamp_alias_type = ast.FieldAliasType(alias="c", type=timestamp_field_type) - inner_select_type = ast.SelectQueryType( - aliases={ - "b": inner_event_field_type, - "c": ast.FieldAliasType(alias="c", type=timestamp_field_type), - }, - columns={ - "b": inner_event_field_type, - "c": ast.FieldAliasType(alias="c", type=timestamp_field_type), - }, - tables={ - "events": inner_events_table_type, - }, - ) - select_alias_type = ast.SelectQueryAliasType(alias="e", select_query_type=inner_select_type) - expected = ast.SelectQuery( - select=[ - ast.Field( - chain=["b"], - type=ast.FieldType( - name="b", - table_type=ast.SelectQueryAliasType(alias="e", 
select_query_type=inner_select_type), - ), - ), - ], - select_from=ast.JoinExpr( - table=ast.SelectQuery( - select=[ - ast.Alias( - alias="b", - expr=ast.Field(chain=["event"], type=inner_event_field_type.type), - type=inner_event_field_type, - ), - ast.Alias( - alias="c", - expr=ast.Field(chain=["timestamp"], type=timestamp_field_type), - type=timstamp_alias_type, - ), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=inner_events_table_type), - type=inner_events_table_type, - ), - type=inner_select_type, - ), - alias="e", - type=select_alias_type, - ), - where=ast.CompareOperation( - left=ast.Field( - chain=["e", "b"], - type=ast.FieldType(name="b", table_type=select_alias_type), - ), - op=ast.CompareOperationOp.Eq, - right=ast.Constant(value="test", type=ast.StringType()), - type=ast.BooleanType(), - ), - type=ast.SelectQueryType( - aliases={}, - columns={"b": ast.FieldType(name="b", table_type=select_alias_type)}, - tables={"e": select_alias_type}, - ), - ) - # asserting individually to help debug if something is off - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot def test_resolve_subquery_no_field_access(self): # From ClickHouse's GitHub: "Aliases defined outside of subquery are not visible in subqueries (but see below)." 
@@ -281,6 +87,7 @@ def test_resolve_subquery_no_field_access(self): expr = resolve_types(expr, self.context) self.assertEqual(str(e.exception), "Unable to resolve field: e") + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_constant_type(self): with freeze_time("2020-01-10 00:00:00"): expr = self._select( @@ -295,66 +102,13 @@ def test_resolve_constant_type(self): }, ) expr = resolve_types(expr, self.context) - expected = ast.SelectQuery( - select=[ - ast.Constant(value=1, type=ast.IntegerType()), - ast.Constant(value="boo", type=ast.StringType()), - ast.Constant(value=True, type=ast.BooleanType()), - ast.Constant(value=1.1232, type=ast.FloatType()), - ast.Constant(value=None, type=ast.UnknownType()), - ast.Constant(value=date(2020, 1, 10), type=ast.DateType()), - ast.Constant( - value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc), - type=ast.DateTimeType(), - ), - ast.Constant( - value=UUID("00000000-0000-4000-8000-000000000000"), - type=ast.UUIDType(), - ), - ast.Constant(value=[], type=ast.ArrayType(item_type=ast.UnknownType())), - ast.Constant(value=[1, 2], type=ast.ArrayType(item_type=ast.IntegerType())), - ast.Constant( - value=(1, 2, 3), - type=ast.TupleType( - item_types=[ - ast.IntegerType(), - ast.IntegerType(), - ast.IntegerType(), - ] - ), - ), - ], - type=ast.SelectQueryType(aliases={}, columns={}, tables={}), - ) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_boolean_operation_types(self): expr = self._select("SELECT 1 and 1, 1 or 1, not true") expr = resolve_types(expr, self.context) - expected = ast.SelectQuery( - select=[ - ast.And( - exprs=[ - ast.Constant(value=1, type=ast.IntegerType()), - ast.Constant(value=1, type=ast.IntegerType()), - ], - type=ast.BooleanType(), - ), - ast.Or( - exprs=[ - ast.Constant(value=1, type=ast.IntegerType()), - ast.Constant(value=1, type=ast.IntegerType()), - ], - type=ast.BooleanType(), - ), 
- ast.Not( - expr=ast.Constant(value=True, type=ast.BooleanType()), - type=ast.BooleanType(), - ), - ], - type=ast.SelectQueryType(aliases={}, columns={}, tables={}), - ) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot def test_resolve_errors(self): queries = [ @@ -369,388 +123,53 @@ def test_resolve_errors(self): resolve_types(self._select(query), self.context) self.assertIn("Unable to resolve field:", str(e.exception)) + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_lazy_pdi_person_table(self): expr = self._select("select distinct_id, person.id from person_distinct_ids") expr = resolve_types(expr, self.context) - pdi_table_type = ast.LazyTableType(table=self.database.person_distinct_ids) - expected = ast.SelectQuery( - select=[ - ast.Field( - chain=["distinct_id"], - type=ast.FieldType(name="distinct_id", table_type=pdi_table_type), - ), - ast.Field( - chain=["person", "id"], - type=ast.FieldType( - name="id", - table_type=ast.LazyJoinType( - table_type=pdi_table_type, - field="person", - lazy_join=self.database.person_distinct_ids.fields.get("person"), - ), - ), - ), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["person_distinct_ids"], type=pdi_table_type), - type=pdi_table_type, - ), - type=ast.SelectQueryType( - aliases={}, - anonymous_tables=[], - columns={ - "distinct_id": ast.FieldType(name="distinct_id", table_type=pdi_table_type), - "id": ast.FieldType( - name="id", - table_type=ast.LazyJoinType( - table_type=pdi_table_type, - lazy_join=self.database.person_distinct_ids.fields.get("person"), - field="person", - ), - ), - }, - tables={"person_distinct_ids": pdi_table_type}, - ), - ) - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot + 
@pytest.mark.usefixtures("unittest_snapshot") def test_resolve_lazy_events_pdi_table(self): expr = self._select("select event, pdi.person_id from events") expr = resolve_types(expr, self.context) - events_table_type = ast.TableType(table=self.database.events) - expected = ast.SelectQuery( - select=[ - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_type), - ), - ast.Field( - chain=["pdi", "person_id"], - type=ast.FieldType( - name="person_id", - table_type=ast.LazyJoinType( - table_type=events_table_type, - field="pdi", - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - ), - ), - ), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=events_table_type), - type=events_table_type, - ), - type=ast.SelectQueryType( - aliases={}, - anonymous_tables=[], - columns={ - "event": ast.FieldType(name="event", table_type=events_table_type), - "person_id": ast.FieldType( - name="person_id", - table_type=ast.LazyJoinType( - table_type=events_table_type, - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - field="pdi", - ), - ), - }, - tables={"events": events_table_type}, - ), - ) - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_lazy_events_pdi_table_aliased(self): expr = self._select("select event, e.pdi.person_id from events e") expr = resolve_types(expr, self.context) - events_table_type = ast.TableType(table=self.database.events) - events_table_alias_type = ast.TableAliasType(table_type=events_table_type, alias="e") - expected = ast.SelectQuery( - select=[ - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_alias_type), - ), - ast.Field( - 
chain=["e", "pdi", "person_id"], - type=ast.FieldType( - name="person_id", - table_type=ast.LazyJoinType( - table_type=events_table_alias_type, - field="pdi", - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - ), - ), - ), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=events_table_type), - alias="e", - type=events_table_alias_type, - ), - type=ast.SelectQueryType( - aliases={}, - anonymous_tables=[], - columns={ - "event": ast.FieldType(name="event", table_type=events_table_alias_type), - "person_id": ast.FieldType( - name="person_id", - table_type=ast.LazyJoinType( - table_type=events_table_alias_type, - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - field="pdi", - ), - ), - }, - tables={"e": events_table_alias_type}, - ), - ) - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_lazy_events_pdi_person_table(self): expr = self._select("select event, pdi.person.id from events") expr = resolve_types(expr, self.context) - events_table_type = ast.TableType(table=self.database.events) - expected = ast.SelectQuery( - select=[ - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_type), - ), - ast.Field( - chain=["pdi", "person", "id"], - type=ast.FieldType( - name="id", - table_type=ast.LazyJoinType( - table_type=ast.LazyJoinType( - table_type=events_table_type, - field="pdi", - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - ), - field="person", - lazy_join=cast( - LazyJoin, - cast(LazyJoin, self.database.events.fields.get("pdi")).join_table.fields.get("person"), - ), - ), - ), - ), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], 
type=events_table_type), - type=events_table_type, - ), - type=ast.SelectQueryType( - aliases={}, - anonymous_tables=[], - columns={ - "event": ast.FieldType(name="event", table_type=events_table_type), - "id": ast.FieldType( - name="id", - table_type=ast.LazyJoinType( - table_type=ast.LazyJoinType( - table_type=events_table_type, - field="pdi", - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - ), - field="person", - lazy_join=cast( - LazyJoin, - cast(LazyJoin, self.database.events.fields.get("pdi")).join_table.fields.get("person"), - ), - ), - ), - }, - tables={"events": events_table_type}, - ), - ) - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_lazy_events_pdi_person_table_aliased(self): expr = self._select("select event, e.pdi.person.id from events e") expr = resolve_types(expr, self.context) - events_table_type = ast.TableType(table=self.database.events) - events_table_alias_type = ast.TableAliasType(table_type=events_table_type, alias="e") - expected = ast.SelectQuery( - select=[ - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_alias_type), - ), - ast.Field( - chain=["e", "pdi", "person", "id"], - type=ast.FieldType( - name="id", - table_type=ast.LazyJoinType( - table_type=ast.LazyJoinType( - table_type=events_table_alias_type, - field="pdi", - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - ), - field="person", - lazy_join=cast( - LazyJoin, - cast(LazyJoin, self.database.events.fields.get("pdi")).join_table.fields.get("person"), - ), - ), - ), - ), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=events_table_type), - alias="e", - 
type=events_table_alias_type, - ), - type=ast.SelectQueryType( - aliases={}, - anonymous_tables=[], - columns={ - "event": ast.FieldType(name="event", table_type=events_table_alias_type), - "id": ast.FieldType( - name="id", - table_type=ast.LazyJoinType( - table_type=ast.LazyJoinType( - table_type=events_table_alias_type, - field="pdi", - lazy_join=cast(LazyJoin, self.database.events.fields.get("pdi")), - ), - field="person", - lazy_join=cast( - LazyJoin, - cast(LazyJoin, self.database.events.fields.get("pdi")).join_table.fields.get("person"), - ), - ), - ), - }, - tables={"e": events_table_alias_type}, - ), - ) - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_virtual_events_poe(self): expr = self._select("select event, poe.id from events") expr = resolve_types(expr, self.context) - events_table_type = ast.TableType(table=self.database.events) - expected = ast.SelectQuery( - select=[ - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_type), - ), - ast.Field( - chain=["poe", "id"], - type=ast.FieldType( - name="id", - table_type=ast.VirtualTableType( - table_type=events_table_type, - field="poe", - virtual_table=self.database.events.fields["poe"], - ), - ), - ), - ], - select_from=ast.JoinExpr( - table=ast.Field(chain=["events"], type=events_table_type), - type=events_table_type, - ), - type=ast.SelectQueryType( - aliases={}, - anonymous_tables=[], - columns={ - "event": ast.FieldType(name="event", table_type=events_table_type), - "id": ast.FieldType( - name="id", - table_type=ast.VirtualTableType( - table_type=events_table_type, - field="poe", - virtual_table=self.database.events.fields.get("poe"), - ), - ), - }, - 
tables={"events": events_table_type}, - ), - ) - self.assertEqual(expr.select, expected.select) - self.assertEqual(expr.select_from, expected.select_from) - self.assertEqual(expr.where, expected.where) - self.assertEqual(expr.type, expected.type) - self.assertEqual(expr, expected) + assert pretty_dataclasses(expr) == self.snapshot + @pytest.mark.usefixtures("unittest_snapshot") def test_resolve_union_all(self): node = self._select("select event, timestamp from events union all select event, timestamp from events") node = resolve_types(node, self.context) + assert pretty_dataclasses(node) == self.snapshot - events_table_type = ast.TableType(table=self.database.events) - self.assertEqual( - node.select_queries[0].select, - [ - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_type), - ), - ast.Field( - chain=["timestamp"], - type=ast.FieldType(name="timestamp", table_type=events_table_type), - ), - ], - ) - self.assertEqual( - node.select_queries[1].select, - [ - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_type), - ), - ast.Field( - chain=["timestamp"], - type=ast.FieldType(name="timestamp", table_type=events_table_type), - ), - ], - ) - + @pytest.mark.usefixtures("unittest_snapshot") def test_call_type(self): node = self._select("select max(timestamp) from events") node = resolve_types(node, self.context) - expected = [ - ast.Call( - name="max", - # NB! timestamp was resolved to a DateTimeType for the Call's arg type. 
- type=ast.CallType( - name="max", - arg_types=[ast.DateTimeType()], - return_type=ast.UnknownType(), - ), - args=[ - ast.Field( - chain=["timestamp"], - type=ast.FieldType( - name="timestamp", - table_type=ast.TableType(table=self.database.events), - ), - ) - ], - ), - ] - self.assertEqual(node.select, expected) + assert pretty_dataclasses(node) == self.snapshot def test_ctes_loop(self): with self.assertRaises(ResolverException) as e: @@ -760,10 +179,7 @@ def test_ctes_loop(self): def test_ctes_basic_column(self): expr = self._print_hogql("with 1 as cte select cte from events") expected = self._print_hogql("select 1 from events") - self.assertEqual( - expr, - expected, - ) + self.assertEqual(expr, expected) def test_ctes_recursive_column(self): self.assertEqual( @@ -815,282 +231,40 @@ def test_ctes_subquery_recursion(self): ) @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) + @pytest.mark.usefixtures("unittest_snapshot") def test_asterisk_expander_table(self): self.setUp() # rebuild self.database with PERSON_ON_EVENTS_OVERRIDE=False node = self._select("select * from events") node = resolve_types(node, self.context) - - events_table_type = ast.TableType(table=self.database.events) - self.assertEqual( - node.select, - [ - ast.Field( - chain=["uuid"], - type=ast.FieldType(name="uuid", table_type=events_table_type), - ), - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_type), - ), - ast.Field( - chain=["properties"], - type=ast.FieldType(name="properties", table_type=events_table_type), - ), - ast.Field( - chain=["timestamp"], - type=ast.FieldType(name="timestamp", table_type=events_table_type), - ), - ast.Field( - chain=["distinct_id"], - type=ast.FieldType(name="distinct_id", table_type=events_table_type), - ), - ast.Field( - chain=["elements_chain"], - type=ast.FieldType(name="elements_chain", table_type=events_table_type), - ), - ast.Field( - chain=["created_at"], - 
type=ast.FieldType(name="created_at", table_type=events_table_type), - ), - ast.Field( - chain=["$session_id"], - type=ast.FieldType(name="$session_id", table_type=events_table_type), - ), - ast.Field( - chain=["$group_0"], - type=ast.FieldType(name="$group_0", table_type=events_table_type), - ), - ast.Field( - chain=["$group_1"], - type=ast.FieldType(name="$group_1", table_type=events_table_type), - ), - ast.Field( - chain=["$group_2"], - type=ast.FieldType(name="$group_2", table_type=events_table_type), - ), - ast.Field( - chain=["$group_3"], - type=ast.FieldType(name="$group_3", table_type=events_table_type), - ), - ast.Field( - chain=["$group_4"], - type=ast.FieldType(name="$group_4", table_type=events_table_type), - ), - ], - ) + assert pretty_dataclasses(node) == self.snapshot @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) + @pytest.mark.usefixtures("unittest_snapshot") def test_asterisk_expander_table_alias(self): self.setUp() # rebuild self.database with PERSON_ON_EVENTS_OVERRIDE=False node = self._select("select * from events e") node = resolve_types(node, self.context) + assert pretty_dataclasses(node) == self.snapshot - events_table_type = ast.TableType(table=self.database.events) - events_table_alias_type = ast.TableAliasType(table_type=events_table_type, alias="e") - self.assertEqual( - node.select, - [ - ast.Field( - chain=["uuid"], - type=ast.FieldType(name="uuid", table_type=events_table_alias_type), - ), - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=events_table_alias_type), - ), - ast.Field( - chain=["properties"], - type=ast.FieldType(name="properties", table_type=events_table_alias_type), - ), - ast.Field( - chain=["timestamp"], - type=ast.FieldType(name="timestamp", table_type=events_table_alias_type), - ), - ast.Field( - chain=["distinct_id"], - type=ast.FieldType(name="distinct_id", table_type=events_table_alias_type), - ), - ast.Field( - chain=["elements_chain"], - 
type=ast.FieldType(name="elements_chain", table_type=events_table_alias_type), - ), - ast.Field( - chain=["created_at"], - type=ast.FieldType(name="created_at", table_type=events_table_alias_type), - ), - ast.Field( - chain=["$session_id"], - type=ast.FieldType(name="$session_id", table_type=events_table_alias_type), - ), - ast.Field( - chain=["$group_0"], - type=ast.FieldType(name="$group_0", table_type=events_table_alias_type), - ), - ast.Field( - chain=["$group_1"], - type=ast.FieldType(name="$group_1", table_type=events_table_alias_type), - ), - ast.Field( - chain=["$group_2"], - type=ast.FieldType(name="$group_2", table_type=events_table_alias_type), - ), - ast.Field( - chain=["$group_3"], - type=ast.FieldType(name="$group_3", table_type=events_table_alias_type), - ), - ast.Field( - chain=["$group_4"], - type=ast.FieldType(name="$group_4", table_type=events_table_alias_type), - ), - ], - ) - + @pytest.mark.usefixtures("unittest_snapshot") def test_asterisk_expander_subquery(self): node = self._select("select * from (select 1 as a, 2 as b)") node = resolve_types(node, self.context) - select_subquery_type = ast.SelectQueryType( - aliases={ - "a": ast.FieldAliasType(alias="a", type=ast.IntegerType()), - "b": ast.FieldAliasType(alias="b", type=ast.IntegerType()), - }, - columns={ - "a": ast.FieldAliasType(alias="a", type=ast.IntegerType()), - "b": ast.FieldAliasType(alias="b", type=ast.IntegerType()), - }, - tables={}, - anonymous_tables=[], - ) - self.assertEqual( - node.select, - [ - ast.Field( - chain=["a"], - type=ast.FieldType(name="a", table_type=select_subquery_type), - ), - ast.Field( - chain=["b"], - type=ast.FieldType(name="b", table_type=select_subquery_type), - ), - ], - ) + assert pretty_dataclasses(node) == self.snapshot + @pytest.mark.usefixtures("unittest_snapshot") def test_asterisk_expander_subquery_alias(self): node = self._select("select x.* from (select 1 as a, 2 as b) x") node = resolve_types(node, self.context) - select_subquery_type = 
ast.SelectQueryAliasType( - alias="x", - select_query_type=ast.SelectQueryType( - aliases={ - "a": ast.FieldAliasType(alias="a", type=ast.IntegerType()), - "b": ast.FieldAliasType(alias="b", type=ast.IntegerType()), - }, - columns={ - "a": ast.FieldAliasType(alias="a", type=ast.IntegerType()), - "b": ast.FieldAliasType(alias="b", type=ast.IntegerType()), - }, - tables={}, - anonymous_tables=[], - ), - ) - self.assertEqual( - node.select, - [ - ast.Field( - chain=["a"], - type=ast.FieldType(name="a", table_type=select_subquery_type), - ), - ast.Field( - chain=["b"], - type=ast.FieldType(name="b", table_type=select_subquery_type), - ), - ], - ) + assert pretty_dataclasses(node) == self.snapshot @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) + @pytest.mark.usefixtures("unittest_snapshot") def test_asterisk_expander_from_subquery_table(self): self.setUp() # rebuild self.database with PERSON_ON_EVENTS_OVERRIDE=False node = self._select("select * from (select * from events)") node = resolve_types(node, self.context) - - events_table_type = ast.TableType(table=self.database.events) - inner_select_type = ast.SelectQueryType( - tables={"events": events_table_type}, - anonymous_tables=[], - aliases={}, - columns={ - "uuid": ast.FieldType(name="uuid", table_type=events_table_type), - "event": ast.FieldType(name="event", table_type=events_table_type), - "properties": ast.FieldType(name="properties", table_type=events_table_type), - "timestamp": ast.FieldType(name="timestamp", table_type=events_table_type), - "distinct_id": ast.FieldType(name="distinct_id", table_type=events_table_type), - "elements_chain": ast.FieldType(name="elements_chain", table_type=events_table_type), - "created_at": ast.FieldType(name="created_at", table_type=events_table_type), - "$session_id": ast.FieldType(name="$session_id", table_type=events_table_type), - "$group_0": ast.FieldType(name="$group_0", table_type=events_table_type), - "$group_1": 
ast.FieldType(name="$group_1", table_type=events_table_type), - "$group_2": ast.FieldType(name="$group_2", table_type=events_table_type), - "$group_3": ast.FieldType(name="$group_3", table_type=events_table_type), - "$group_4": ast.FieldType(name="$group_4", table_type=events_table_type), - }, - ) - - self.assertEqual( - node.select, - [ - ast.Field( - chain=["uuid"], - type=ast.FieldType(name="uuid", table_type=inner_select_type), - ), - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=inner_select_type), - ), - ast.Field( - chain=["properties"], - type=ast.FieldType(name="properties", table_type=inner_select_type), - ), - ast.Field( - chain=["timestamp"], - type=ast.FieldType(name="timestamp", table_type=inner_select_type), - ), - ast.Field( - chain=["distinct_id"], - type=ast.FieldType(name="distinct_id", table_type=inner_select_type), - ), - ast.Field( - chain=["elements_chain"], - type=ast.FieldType(name="elements_chain", table_type=inner_select_type), - ), - ast.Field( - chain=["created_at"], - type=ast.FieldType(name="created_at", table_type=inner_select_type), - ), - ast.Field( - chain=["$session_id"], - type=ast.FieldType(name="$session_id", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_0"], - type=ast.FieldType(name="$group_0", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_1"], - type=ast.FieldType(name="$group_1", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_2"], - type=ast.FieldType(name="$group_2", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_3"], - type=ast.FieldType(name="$group_3", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_4"], - type=ast.FieldType(name="$group_4", table_type=inner_select_type), - ), - ], - ) + assert pretty_dataclasses(node) == self.snapshot def test_asterisk_expander_multiple_table_error(self): node = self._select("select * from (select 1 as a, 2 as b) x left join (select 1 as a, 2 as b) y on x.a = 
y.a") @@ -1102,95 +276,12 @@ def test_asterisk_expander_multiple_table_error(self): ) @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) + @pytest.mark.usefixtures("unittest_snapshot") def test_asterisk_expander_select_union(self): self.setUp() # rebuild self.database with PERSON_ON_EVENTS_OVERRIDE=False node = self._select("select * from (select * from events union all select * from events)") node = resolve_types(node, self.context) - - events_table_type = ast.TableType(table=self.database.events) - inner_select_type = ast.SelectUnionQueryType( - types=[ - ast.SelectQueryType( - tables={"events": events_table_type}, - anonymous_tables=[], - aliases={}, - columns={ - "uuid": ast.FieldType(name="uuid", table_type=events_table_type), - "event": ast.FieldType(name="event", table_type=events_table_type), - "properties": ast.FieldType(name="properties", table_type=events_table_type), - "timestamp": ast.FieldType(name="timestamp", table_type=events_table_type), - "distinct_id": ast.FieldType(name="distinct_id", table_type=events_table_type), - "elements_chain": ast.FieldType(name="elements_chain", table_type=events_table_type), - "created_at": ast.FieldType(name="created_at", table_type=events_table_type), - "$session_id": ast.FieldType(name="$session_id", table_type=events_table_type), - "$group_0": ast.FieldType(name="$group_0", table_type=events_table_type), - "$group_1": ast.FieldType(name="$group_1", table_type=events_table_type), - "$group_2": ast.FieldType(name="$group_2", table_type=events_table_type), - "$group_3": ast.FieldType(name="$group_3", table_type=events_table_type), - "$group_4": ast.FieldType(name="$group_4", table_type=events_table_type), - }, - ) - ] - * 2 - ) - - self.assertEqual( - node.select, - [ - ast.Field( - chain=["uuid"], - type=ast.FieldType(name="uuid", table_type=inner_select_type), - ), - ast.Field( - chain=["event"], - type=ast.FieldType(name="event", table_type=inner_select_type), - ), - ast.Field( 
- chain=["properties"], - type=ast.FieldType(name="properties", table_type=inner_select_type), - ), - ast.Field( - chain=["timestamp"], - type=ast.FieldType(name="timestamp", table_type=inner_select_type), - ), - ast.Field( - chain=["distinct_id"], - type=ast.FieldType(name="distinct_id", table_type=inner_select_type), - ), - ast.Field( - chain=["elements_chain"], - type=ast.FieldType(name="elements_chain", table_type=inner_select_type), - ), - ast.Field( - chain=["created_at"], - type=ast.FieldType(name="created_at", table_type=inner_select_type), - ), - ast.Field( - chain=["$session_id"], - type=ast.FieldType(name="$session_id", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_0"], - type=ast.FieldType(name="$group_0", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_1"], - type=ast.FieldType(name="$group_1", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_2"], - type=ast.FieldType(name="$group_2", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_3"], - type=ast.FieldType(name="$group_3", table_type=inner_select_type), - ), - ast.Field( - chain=["$group_4"], - type=ast.FieldType(name="$group_4", table_type=inner_select_type), - ), - ], - ) + assert pretty_dataclasses(node) == self.snapshot def test_lambda_parent_scope(self): # does not raise @@ -1234,7 +325,8 @@ def test_visit_hogqlx_tag(self): node = cast(ast.SelectQuery, resolve_types(node, self.context)) table_node = cast(ast.SelectQuery, node).select_from.table expected = ast.SelectQuery( - select=[ast.Field(chain=["event"])], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])) + select=[ast.Field(chain=["event"])], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), ) assert clone_expr(table_node, clear_types=True) == expected diff --git a/posthog/hogql/test/utils.py b/posthog/hogql/test/utils.py index 8e5fc45313a0f..7e46c620c997a 100644 --- a/posthog/hogql/test/utils.py +++ b/posthog/hogql/test/utils.py @@ -1,3 +1,8 @@ 
+import dataclasses +import json +from pydantic import BaseModel + + def pretty_print_in_tests(query: str, team_id: int) -> str: return ( query.replace("SELECT", "\nSELECT") @@ -9,3 +14,52 @@ def pretty_print_in_tests(query: str, team_id: int) -> str: .replace("SETTINGS", "\nSETTINGS") .replace(f"team_id, {team_id})", "team_id, 420)") ) + + +def pretty_dataclasses(obj, seen=None, indent=0): + if seen is None: + seen = set() + + indent_space = " " * indent + next_indent = " " * (indent + 2) + + if isinstance(obj, BaseModel): + obj = obj.model_dump() + + if dataclasses.is_dataclass(obj): + obj_id = id(obj) + if obj_id in seen: + return "" + seen.add(obj_id) + + field_strings = [] + fields = sorted(dataclasses.fields(obj), key=lambda f: f.name) + for f in fields: + value = getattr(obj, f.name) + if value is not None: + formatted_value = pretty_dataclasses(value, seen, indent + 2) + field_strings.append(f"{next_indent}{f.name}: {formatted_value}") + + return "{\n" + "\n".join(field_strings) + "\n" + indent_space + "}" + + elif isinstance(obj, list): + if len(obj) == 0: + return "[]" + elements = [pretty_dataclasses(item, seen, indent + 2) for item in obj] + return "[\n" + ",\n".join(next_indent + element for element in elements) + "\n" + indent_space + "]" + + elif isinstance(obj, dict): + if len(obj) == 0: + return "{}" + sorted_items = sorted(obj.items()) + key_value_pairs = [f"{k}: {pretty_dataclasses(v, seen, indent + 2)}" for k, v in sorted_items] + return "{\n" + ",\n".join(next_indent + pair for pair in key_value_pairs) + "\n" + indent_space + "}" + + elif isinstance(obj, str): + return json.dumps(obj) + + elif callable(obj): + return "" + + else: + return str(obj) diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py index f6afdfd591e85..f7499741cd51e 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py +++ 
b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py @@ -420,10 +420,10 @@ def test_trends_breakdowns(self): assert len(response.results) == 4 assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari"] - assert response.results[0]["label"] == f"$pageview - Chrome" - assert response.results[1]["label"] == f"$pageview - Edge" - assert response.results[2]["label"] == f"$pageview - Firefox" - assert response.results[3]["label"] == f"$pageview - Safari" + assert response.results[0]["label"] == f"Chrome" + assert response.results[1]["label"] == f"Edge" + assert response.results[2]["label"] == f"Firefox" + assert response.results[3]["label"] == f"Safari" assert response.results[0]["count"] == 6 assert response.results[1]["count"] == 1 assert response.results[2]["count"] == 2 @@ -479,11 +479,11 @@ def test_trends_breakdowns_histogram(self): "[32.5,40.01]", ] - assert response.results[0]["label"] == '$pageview - ["",""]' - assert response.results[1]["label"] == "$pageview - [10.0,17.5]" - assert response.results[2]["label"] == "$pageview - [17.5,25.0]" - assert response.results[3]["label"] == "$pageview - [25.0,32.5]" - assert response.results[4]["label"] == "$pageview - [32.5,40.01]" + assert response.results[0]["label"] == '["",""]' + assert response.results[1]["label"] == "[10.0,17.5]" + assert response.results[2]["label"] == "[17.5,25.0]" + assert response.results[3]["label"] == "[25.0,32.5]" + assert response.results[4]["label"] == "[32.5,40.01]" assert response.results[0]["data"] == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] assert response.results[1]["data"] == [0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0] @@ -554,14 +554,47 @@ def test_trends_breakdowns_hogql(self): assert len(response.results) == 4 assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari"] + assert response.results[0]["label"] == f"Chrome" + assert response.results[1]["label"] == f"Edge" + assert response.results[2]["label"] == f"Firefox" + assert response.results[3]["label"] 
== f"Safari" + assert response.results[0]["count"] == 6 + assert response.results[1]["count"] == 1 + assert response.results[2]["count"] == 2 + assert response.results[3]["count"] == 1 + + def test_trends_breakdowns_multiple_hogql(self): + self._create_test_events() + + response = self._run_trends_query( + "2020-01-09", + "2020-01-20", + IntervalType.day, + [EventsNode(event="$pageview"), EventsNode(event="$pageleave")], + None, + BreakdownFilter(breakdown_type=BreakdownType.hogql, breakdown="properties.$browser"), + ) + + breakdown_labels = [result["breakdown_value"] for result in response.results] + + assert len(response.results) == 8 + assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari", "Chrome", "Edge", "Firefox", "Safari"] assert response.results[0]["label"] == f"$pageview - Chrome" assert response.results[1]["label"] == f"$pageview - Edge" assert response.results[2]["label"] == f"$pageview - Firefox" assert response.results[3]["label"] == f"$pageview - Safari" + assert response.results[4]["label"] == f"$pageleave - Chrome" + assert response.results[5]["label"] == f"$pageleave - Edge" + assert response.results[6]["label"] == f"$pageleave - Firefox" + assert response.results[7]["label"] == f"$pageleave - Safari" assert response.results[0]["count"] == 6 assert response.results[1]["count"] == 1 assert response.results[2]["count"] == 2 assert response.results[3]["count"] == 1 + assert response.results[4]["count"] == 3 + assert response.results[5]["count"] == 1 + assert response.results[6]["count"] == 1 + assert response.results[7]["count"] == 1 def test_trends_breakdowns_and_compare(self): self._create_test_events() @@ -626,10 +659,10 @@ def test_trends_breakdown_and_aggregation_query_orchestration(self): assert len(response.results) == 4 assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari"] - assert response.results[0]["label"] == f"$pageview - Chrome" - assert response.results[1]["label"] == f"$pageview - Edge" - assert 
response.results[2]["label"] == f"$pageview - Firefox" - assert response.results[3]["label"] == f"$pageview - Safari" + assert response.results[0]["label"] == f"Chrome" + assert response.results[1]["label"] == f"Edge" + assert response.results[2]["label"] == f"Firefox" + assert response.results[3]["label"] == f"Safari" assert response.results[0]["data"] == [ 0, diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index a7ceb785f18d8..3aac186437f1c 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -136,7 +136,7 @@ def calculate(self): timings.extend(response.timings) - res.extend(self.build_series_response(response, series_with_extra)) + res.extend(self.build_series_response(response, series_with_extra, len(queries))) if ( self.query.trendsFilter is not None @@ -147,7 +147,7 @@ def calculate(self): return TrendsQueryResponse(results=res, timings=timings) - def build_series_response(self, response: HogQLQueryResponse, series: SeriesWithExtras): + def build_series_response(self, response: HogQLQueryResponse, series: SeriesWithExtras, series_count: int): if response.results is None: return [] @@ -246,7 +246,13 @@ def get_value(name: str, val: Any): series_object["label"] = "{} - {}".format(series_object["label"], cohort_name) series_object["breakdown_value"] = get_value("breakdown_value", val) else: - series_object["label"] = "{} - {}".format(series_object["label"], get_value("breakdown_value", val)) + # If there's multiple series, include the object label in the series label + if series_count > 1: + series_object["label"] = "{} - {}".format( + series_object["label"], get_value("breakdown_value", val) + ) + else: + series_object["label"] = get_value("breakdown_value", val) series_object["breakdown_value"] = get_value("breakdown_value", val) res.append(series_object) diff --git 
a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index 4c8b2b857eec3..201fad05baf8c 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -48,17 +48,21 @@ def property_filters_without_pathname(self) -> List[Union[EventPropertyFilter, P return [p for p in self.query.properties if p.key != "$pathname"] def session_where(self, include_previous_period: Optional[bool] = None): - properties = [ - parse_expr( - "events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))", - placeholders={ - "date_from": self.query_date_range.previous_period_date_from_as_hogql() - if include_previous_period - else self.query_date_range.date_from_as_hogql(), - "date_to": self.query_date_range.date_to_as_hogql(), - }, - ) - ] + self.property_filters_without_pathname + properties = ( + [ + parse_expr( + "events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))", + placeholders={ + "date_from": self.query_date_range.previous_period_date_from_as_hogql() + if include_previous_period + else self.query_date_range.date_from_as_hogql(), + "date_to": self.query_date_range.date_to_as_hogql(), + }, + ) + ] + + self.property_filters_without_pathname + + self._test_account_filters + ) return property_to_expr( properties, self.team, @@ -91,17 +95,29 @@ def session_having(self, include_previous_period: Optional[bool] = None): ) def events_where(self): - properties = [ - parse_expr( - "events.timestamp >= {date_from}", - placeholders={"date_from": self.query_date_range.date_from_as_hogql()}, - ) - ] + self.query.properties + properties = ( + [ + parse_expr( + "events.timestamp >= {date_from}", + placeholders={"date_from": self.query_date_range.date_from_as_hogql()}, + ) + ] + + self.query.properties + + self._test_account_filters + ) + return 
property_to_expr( properties, self.team, ) + @cached_property + def _test_account_filters(self): + if isinstance(self.team.test_account_filters, list) and len(self.team.test_account_filters) > 0: + return self.team.test_account_filters + else: + return [] + def _is_stale(self, cached_result_package): date_to = self.query_date_range.date_to() interval = self.query_date_range.interval_name diff --git a/posthog/migrations/0365_update_created_by_flag_constraint.py b/posthog/migrations/0365_update_created_by_flag_constraint.py new file mode 100644 index 0000000000000..e8912598ae3fd --- /dev/null +++ b/posthog/migrations/0365_update_created_by_flag_constraint.py @@ -0,0 +1,85 @@ +# Generated by Django 3.2.19 on 2023-11-09 10:35 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0364_team_external_data_workspace_rows"), + ] + + # :TRICKY: + # We are replacing the original generated migration: + # migrations.AlterField( + # model_name='experiment', + # name='created_by', + # field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + # ), + # migrations.AlterField( + # model_name='featureflag', + # name='created_by', + # field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + # ), + # with one that adds the 'NOT VALID' directive, which applies the constraint only for inserts/updates. + # This ensures the table is not locked when creating the new constraint. + # A follow up migration will validate the constraint. + # The code here is exactly the same as the one generated by the default migration, except for the 'NOT VALID' directive. 
+ + operations = [ + # make the created_by column nullable in experiments & flags + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.AlterField( + model_name="experiment", + name="created_by", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="posthog.user", + ), + ), + migrations.AlterField( + model_name="featureflag", + name="created_by", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="posthog.user", + ), + ), + ], + database_operations=[ + # We add -- existing-table-constraint-ignore to ignore the constraint validation in CI. + # This should be safe, because we are making the constraint NOT VALID, so doesn't lock things up for long. + migrations.RunSQL( + """ + SET CONSTRAINTS "posthog_experiment_created_by_id_b40aea95_fk_posthog_user_id" IMMEDIATE; -- existing-table-constraint-ignore + ALTER TABLE "posthog_experiment" DROP CONSTRAINT "posthog_experiment_created_by_id_b40aea95_fk_posthog_user_id"; -- existing-table-constraint-ignore + ALTER TABLE "posthog_experiment" ALTER COLUMN "created_by_id" DROP NOT NULL; + ALTER TABLE "posthog_experiment" ADD CONSTRAINT "posthog_experiment_created_by_id_b40aea95_fk_posthog_user_id" FOREIGN KEY ("created_by_id") REFERENCES "posthog_user" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; -- existing-table-constraint-ignore + """, + reverse_sql=""" + SET CONSTRAINTS "posthog_experiment_created_by_id_b40aea95_fk_posthog_user_id" IMMEDIATE; + ALTER TABLE "posthog_experiment" DROP CONSTRAINT "posthog_experiment_created_by_id_b40aea95_fk_posthog_user_id"; + ALTER TABLE "posthog_experiment" ALTER COLUMN "created_by_id" SET NOT NULL; + ALTER TABLE "posthog_experiment" ADD CONSTRAINT "posthog_experiment_created_by_id_b40aea95_fk_posthog_user_id" FOREIGN KEY ("created_by_id") REFERENCES "posthog_user" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; + """, + ), + migrations.RunSQL( + """SET CONSTRAINTS 
"posthog_featureflag_created_by_id_4571fe1a_fk_posthog_user_id" IMMEDIATE; -- existing-table-constraint-ignore + ALTER TABLE "posthog_featureflag" DROP CONSTRAINT "posthog_featureflag_created_by_id_4571fe1a_fk_posthog_user_id"; -- existing-table-constraint-ignore + ALTER TABLE "posthog_featureflag" ALTER COLUMN "created_by_id" DROP NOT NULL; + ALTER TABLE "posthog_featureflag" ADD CONSTRAINT "posthog_featureflag_created_by_id_4571fe1a_fk_posthog_user_id" FOREIGN KEY ("created_by_id") REFERENCES "posthog_user" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; -- existing-table-constraint-ignore + """, + reverse_sql=""" + SET CONSTRAINTS "posthog_featureflag_created_by_id_4571fe1a_fk_posthog_user_id" IMMEDIATE; + ALTER TABLE "posthog_featureflag" DROP CONSTRAINT "posthog_featureflag_created_by_id_4571fe1a_fk_posthog_user_id"; + ALTER TABLE "posthog_featureflag" ALTER COLUMN "created_by_id" SET NOT NULL; + ALTER TABLE "posthog_featureflag" ADD CONSTRAINT "posthog_featureflag_created_by_id_4571fe1a_fk_posthog_user_id" FOREIGN KEY ("created_by_id") REFERENCES "posthog_user" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; + -- existing-table-constraint-ignore + """, + ), + ], + ), + ] diff --git a/posthog/migrations/0366_alter_action_created_by.py b/posthog/migrations/0366_alter_action_created_by.py new file mode 100644 index 0000000000000..996183b7625bf --- /dev/null +++ b/posthog/migrations/0366_alter_action_created_by.py @@ -0,0 +1,21 @@ +# Generated by Django 3.2.19 on 2023-11-21 14:02 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0365_update_created_by_flag_constraint"), + ] + + operations = [ + migrations.AlterField( + model_name="action", + name="created_by", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + ), + ), + ] diff --git 
a/posthog/models/action/action.py b/posthog/models/action/action.py index 368100fcbc978..698957f8dbafd 100644 --- a/posthog/models/action/action.py +++ b/posthog/models/action/action.py @@ -20,7 +20,7 @@ class Meta: team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) description: models.TextField = models.TextField(blank=True, default="") created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE, null=True, blank=True) + created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) deleted: models.BooleanField = models.BooleanField(default=False) events: models.ManyToManyField = models.ManyToManyField("Event", blank=True) post_to_slack: models.BooleanField = models.BooleanField(default=False) diff --git a/posthog/models/cohort/test/test_util.py b/posthog/models/cohort/test/test_util.py index d8ff051a0bb41..dce0258746828 100644 --- a/posthog/models/cohort/test/test_util.py +++ b/posthog/models/cohort/test/test_util.py @@ -508,3 +508,39 @@ def test_dependent_cohorts_for_complex_nested_cohort(self): self.assertEqual(get_dependent_cohorts(cohort3), [cohort2, cohort1]) self.assertEqual(get_dependent_cohorts(cohort4), [cohort1]) self.assertEqual(get_dependent_cohorts(cohort5), [cohort4, cohort1, cohort2]) + + def test_dependent_cohorts_ignore_invalid_ids(self): + cohort1 = _create_cohort( + team=self.team, + name="cohort1", + groups=[{"properties": [{"key": "name", "value": "test", "type": "person"}]}], + ) + + cohort2 = _create_cohort( + team=self.team, + name="cohort2", + groups=[ + { + "properties": [ + {"key": "id", "value": cohort1.pk, "type": "cohort"}, + {"key": "id", "value": "invalid-key", "type": "cohort"}, + ] + } + ], + ) + + cohort3 = _create_cohort( + team=self.team, + name="cohorte", + groups=[ + { + "properties": [ + {"key": "id", "value": cohort2.pk, "type": "cohort"}, + 
{"key": "id", "value": "invalid-key", "type": "cohort"}, + ] + } + ], + ) + + self.assertEqual(get_dependent_cohorts(cohort2), [cohort1]) + self.assertEqual(get_dependent_cohorts(cohort3), [cohort2, cohort1]) diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py index abd4e6c89920c..c4201fbaf3f47 100644 --- a/posthog/models/cohort/util.py +++ b/posthog/models/cohort/util.py @@ -440,7 +440,7 @@ def get_all_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> List[int]: def get_dependent_cohorts( cohort: Cohort, using_database: str = "default", - seen_cohorts_cache: Optional[Dict[str, Cohort]] = None, + seen_cohorts_cache: Optional[Dict[int, Cohort]] = None, ) -> List[Cohort]: if seen_cohorts_cache is None: seen_cohorts_cache = {} @@ -449,28 +449,40 @@ def get_dependent_cohorts( seen_cohort_ids = set() seen_cohort_ids.add(cohort.id) - queue = [prop.value for prop in cohort.properties.flat if prop.type == "cohort"] + queue = [] + for prop in cohort.properties.flat: + if prop.type == "cohort" and not isinstance(prop.value, list): + try: + queue.append(int(prop.value)) + except (ValueError, TypeError): + continue while queue: cohort_id = queue.pop() try: - parsed_cohort_id = str(cohort_id) - if parsed_cohort_id in seen_cohorts_cache: - cohort = seen_cohorts_cache[parsed_cohort_id] + if cohort_id in seen_cohorts_cache: + cohort = seen_cohorts_cache[cohort_id] else: cohort = Cohort.objects.using(using_database).get(pk=cohort_id) - seen_cohorts_cache[parsed_cohort_id] = cohort + seen_cohorts_cache[cohort_id] = cohort if cohort.id not in seen_cohort_ids: cohorts.append(cohort) seen_cohort_ids.add(cohort.id) - queue += [prop.value for prop in cohort.properties.flat if prop.type == "cohort"] + + for prop in cohort.properties.flat: + if prop.type == "cohort" and not isinstance(prop.value, list): + try: + queue.append(int(prop.value)) + except (ValueError, TypeError): + continue + except Cohort.DoesNotExist: continue return cohorts -def 
sort_cohorts_topologically(cohort_ids: Set[int], seen_cohorts_cache: Dict[str, Cohort]) -> List[int]: +def sort_cohorts_topologically(cohort_ids: Set[int], seen_cohorts_cache: Dict[int, Cohort]) -> List[int]: """ Sorts the given cohorts in an order where cohorts with no dependencies are placed first, followed by cohorts that depend on the preceding ones. It ensures that each cohort in the sorted list @@ -492,13 +504,13 @@ def traverse(cohort): # add child dependency_graph[cohort.id].append(int(prop.value)) - neighbor_cohort = seen_cohorts_cache[str(prop.value)] + neighbor_cohort = seen_cohorts_cache[int(prop.value)] if cohort.id not in seen: seen.add(cohort.id) traverse(neighbor_cohort) for cohort_id in cohort_ids: - cohort = seen_cohorts_cache[str(cohort_id)] + cohort = seen_cohorts_cache[int(cohort_id)] traverse(cohort) # post-order DFS (children first, then the parent) diff --git a/posthog/models/experiment.py b/posthog/models/experiment.py index ea970c5b2db12..74e631b6fab8c 100644 --- a/posthog/models/experiment.py +++ b/posthog/models/experiment.py @@ -23,8 +23,8 @@ class Experiment(models.Model): # A list of filters for secondary metrics secondary_metrics: models.JSONField = models.JSONField(default=list, null=True) + created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) feature_flag: models.ForeignKey = models.ForeignKey("FeatureFlag", blank=False, on_delete=models.RESTRICT) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE) start_date: models.DateTimeField = models.DateTimeField(null=True) end_date: models.DateTimeField = models.DateTimeField(null=True) created_at: models.DateTimeField = models.DateTimeField(default=timezone.now) diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py index 36379563aa7f7..c339abe44d0ed 100644 --- a/posthog/models/feature_flag/feature_flag.py +++ b/posthog/models/feature_flag/feature_flag.py @@ -37,7 
+37,7 @@ class Meta: rollout_percentage: models.IntegerField = models.IntegerField(null=True, blank=True) team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE) + created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) created_at: models.DateTimeField = models.DateTimeField(default=timezone.now) deleted: models.BooleanField = models.BooleanField(default=False) active: models.BooleanField = models.BooleanField(default=True) @@ -134,7 +134,7 @@ def get_filters(self): def transform_cohort_filters_for_easy_evaluation( self, using_database: str = "default", - seen_cohorts_cache: Optional[Dict[str, Cohort]] = None, + seen_cohorts_cache: Optional[Dict[int, Cohort]] = None, ): """ Expands cohort filters into person property filters when possible. @@ -168,18 +168,17 @@ def transform_cohort_filters_for_easy_evaluation( for prop in props: if prop.get("type") == "cohort": cohort_condition = True - cohort_id = prop.get("value") + cohort_id = int(prop.get("value")) if cohort_id: if len(props) > 1: # We cannot expand this cohort condition if it's not the only property in its group. 
return self.conditions try: - parsed_cohort_id = str(cohort_id) - if parsed_cohort_id in seen_cohorts_cache: - cohort = seen_cohorts_cache[parsed_cohort_id] + if cohort_id in seen_cohorts_cache: + cohort = seen_cohorts_cache[cohort_id] else: cohort = Cohort.objects.using(using_database).get(pk=cohort_id) - seen_cohorts_cache[parsed_cohort_id] = cohort + seen_cohorts_cache[cohort_id] = cohort except Cohort.DoesNotExist: return self.conditions if not cohort_condition: @@ -259,7 +258,7 @@ def transform_cohort_filters_for_easy_evaluation( def get_cohort_ids( self, using_database: str = "default", - seen_cohorts_cache: Optional[Dict[str, Cohort]] = None, + seen_cohorts_cache: Optional[Dict[int, Cohort]] = None, sort_by_topological_order=False, ) -> List[int]: from posthog.models.cohort.util import get_dependent_cohorts, sort_cohorts_topologically @@ -272,14 +271,13 @@ def get_cohort_ids( props = condition.get("properties", []) for prop in props: if prop.get("type") == "cohort": - cohort_id = prop.get("value") + cohort_id = int(prop.get("value")) try: - parsed_cohort_id = str(cohort_id) - if parsed_cohort_id in seen_cohorts_cache: - cohort: Cohort = seen_cohorts_cache[parsed_cohort_id] + if cohort_id in seen_cohorts_cache: + cohort: Cohort = seen_cohorts_cache[cohort_id] else: cohort = Cohort.objects.using(using_database).get(pk=cohort_id) - seen_cohorts_cache[parsed_cohort_id] = cohort + seen_cohorts_cache[cohort_id] = cohort cohort_ids.add(cohort.pk) cohort_ids.update( diff --git a/posthog/models/feature_flag/permissions.py b/posthog/models/feature_flag/permissions.py index 95d39636c4c07..8f766b4fccc60 100644 --- a/posthog/models/feature_flag/permissions.py +++ b/posthog/models/feature_flag/permissions.py @@ -12,7 +12,7 @@ def can_user_edit_feature_flag(request, feature_flag): else: if not request.user.organization.is_feature_available(AvailableFeature.ROLE_BASED_ACCESS): return True - if feature_flag.created_by == request.user: + if hasattr(feature_flag, "created_by") 
and feature_flag.created_by and feature_flag.created_by == request.user: return True if ( request.user.organization_memberships.get(organization=request.user.organization).level diff --git a/posthog/models/organization.py b/posthog/models/organization.py index 869ba9f0f6e75..461c5f777f568 100644 --- a/posthog/models/organization.py +++ b/posthog/models/organization.py @@ -24,12 +24,7 @@ from posthog.cloud_utils import is_cloud from posthog.constants import MAX_SLUG_LENGTH, AvailableFeature from posthog.email import is_email_available -from posthog.models.utils import ( - LowercaseSlugField, - UUIDModel, - create_with_slug, - sane_repr, -) +from posthog.models.utils import LowercaseSlugField, UUIDModel, create_with_slug, sane_repr from posthog.redis import get_client from posthog.utils import absolute_uri @@ -416,3 +411,19 @@ def ensure_organization_membership_consistency(sender, instance: OrganizationMem save_user = True if save_user: instance.user.save() + + +@receiver(models.signals.pre_save, sender=OrganizationMembership) +def organization_membership_saved(sender: Any, instance: OrganizationMembership, **kwargs: Any) -> None: + from posthog.event_usage import report_user_organization_membership_level_changed + + try: + old_instance = OrganizationMembership.objects.get(id=instance.id) + if old_instance.level != instance.level: + # the level has been changed + report_user_organization_membership_level_changed( + instance.user, instance.organization, instance.level, old_instance.level + ) + except OrganizationMembership.DoesNotExist: + # The instance is new, or we are setting up test data + pass diff --git a/posthog/models/test/test_organization_model.py b/posthog/models/test/test_organization_model.py index f140dcc862f26..8c35602a64be5 100644 --- a/posthog/models/test/test_organization_model.py +++ b/posthog/models/test/test_organization_model.py @@ -1,8 +1,10 @@ from unittest import mock +from unittest.mock import patch from django.utils import timezone from 
posthog.models import Organization, OrganizationInvite, Plugin +from posthog.models.organization import OrganizationMembership from posthog.plugins.test.mock import mocked_plugin_requests_get from posthog.plugins.test.plugin_archives import HELLO_WORLD_PLUGIN_GITHUB_ZIP from posthog.test.base import BaseTest @@ -77,3 +79,28 @@ def test_update_available_features_ignored_if_usage_info_exists(self): new_org.usage = {"events": {"usage": 1000, "limit": None}} new_org.update_available_features() assert new_org.available_features == ["test1", "test2"] + + +class TestOrganizationMembership(BaseTest): + @patch("posthoganalytics.capture") + def test_event_sent_when_membership_level_changed( + self, + mock_capture, + ): + user = self._create_user("user1") + organization = Organization.objects.create(name="Test Org") + membership = OrganizationMembership.objects.create(user=user, organization=organization, level=1) + mock_capture.assert_not_called() + # change the level + membership.level = 15 + membership.save() + # check that the event was sent + mock_capture.assert_called_once_with( + user.distinct_id, + "membership level changed", + properties={ + "new_level": 15, + "previous_level": 1, + }, + groups=mock.ANY, + ) diff --git a/posthog/models/test/test_user_model.py b/posthog/models/test/test_user_model.py index fe26931522eac..9c07f36b16466 100644 --- a/posthog/models/test/test_user_model.py +++ b/posthog/models/test/test_user_model.py @@ -10,6 +10,7 @@ def test_create_user_with_distinct_id(self): self.assertNotEqual(user.distinct_id, None) def test_analytics_metadata(self): + self.maxDiff = None # One org, one team, anonymized organization, team, user = User.objects.bootstrap( organization_name="Test Org", @@ -32,6 +33,7 @@ def test_analytics_metadata(self): "team_member_count_all": 1, "completed_onboarding_once": False, "organization_id": str(organization.id), + "current_organization_membership_level": 15, "project_id": str(team.uuid), "project_setup_complete": False, 
"has_password_set": True, @@ -67,6 +69,7 @@ def test_analytics_metadata(self): "team_member_count_all": 2, "completed_onboarding_once": True, "organization_id": str(self.organization.id), + "current_organization_membership_level": 1, "project_id": str(self.team.uuid), "project_setup_complete": True, "has_password_set": True, diff --git a/posthog/models/user.py b/posthog/models/user.py index 423936747e2cc..b25c12776fb1b 100644 --- a/posthog/models/user.py +++ b/posthog/models/user.py @@ -1,14 +1,5 @@ from functools import cached_property -from typing import ( - Any, - Callable, - Dict, - List, - Optional, - Tuple, - Type, - TypedDict, -) +from typing import Any, Callable, Dict, List, Optional, Tuple, Type, TypedDict from django.contrib.auth.models import AbstractUser, BaseUserManager from django.db import models, transaction @@ -237,6 +228,8 @@ def join( # We don't need to check for ExplicitTeamMembership as none can exist for a completely new member self.current_team = organization.teams.order_by("id").filter(access_control=False).first() self.save() + if level == OrganizationMembership.Level.OWNER and not self.current_organization.customer_id: + self.update_billing_customer_email(organization) self.update_billing_distinct_ids(organization) return membership @@ -268,6 +261,12 @@ def update_billing_distinct_ids(self, organization: Organization) -> None: if is_cloud() and get_cached_instance_license() is not None: BillingManager(get_cached_instance_license()).update_billing_distinct_ids(organization) + def update_billing_customer_email(self, organization: Organization) -> None: + from ee.billing.billing_manager import BillingManager # avoid circular import + + if is_cloud() and get_cached_instance_license() is not None: + BillingManager(get_cached_instance_license()).update_billing_customer_email(organization) + def get_analytics_metadata(self): team_member_count_all: int = ( OrganizationMembership.objects.filter(organization__in=self.organizations.all()) @@ -276,6 
+275,10 @@ def get_analytics_metadata(self): .count() ) + current_organization_membership = None + if self.organization: + current_organization_membership = self.organization.memberships.filter(user=self).first() + project_setup_complete = False if self.team and self.team.completed_snippet_onboarding and self.team.ingested_event: project_setup_complete = True @@ -294,6 +297,9 @@ def get_analytics_metadata(self): ).exists(), # has completed the onboarding at least for one project # properties dependent on current project / org below "organization_id": str(self.organization.id) if self.organization else None, + "current_organization_membership_level": current_organization_membership.level + if current_organization_membership + else None, "project_id": str(self.team.uuid) if self.team else None, "project_setup_complete": project_setup_complete, "joined_at": self.date_joined, diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py index 63b7024d3d6bf..2dfe50e24b7d9 100644 --- a/posthog/queries/test/test_trends.py +++ b/posthog/queries/test/test_trends.py @@ -474,14 +474,14 @@ def test_trends_breakdown_cumulative(self): self.team, ) - self.assertEqual(response[0]["label"], "sign up - none") + self.assertEqual(response[0]["label"], "none") self.assertEqual(response[0]["labels"][4], "1-Jan-2020") self.assertEqual(response[0]["data"], [0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0]) - self.assertEqual(response[1]["label"], "sign up - other_value") + self.assertEqual(response[1]["label"], "other_value") self.assertEqual(response[1]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0]) - self.assertEqual(response[2]["label"], "sign up - value") + self.assertEqual(response[2]["label"], "value") self.assertEqual(response[2]["data"], [0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0]) def test_trends_single_aggregate_dau(self): @@ -919,13 +919,14 @@ def test_trends_breakdown_single_aggregate_cohorts(self): ) for result in event_response: - if result["label"] == "sign 
up - cohort1": + if result["label"] == "cohort1": self.assertEqual(result["aggregated_value"], 2) - elif result["label"] == "sign up - cohort2": + elif result["label"] == "cohort2": self.assertEqual(result["aggregated_value"], 2) - elif result["label"] == "sign up - cohort3": + elif result["label"] == "cohort3": self.assertEqual(result["aggregated_value"], 3) else: + self.assertEqual(result["label"], "all users") self.assertEqual(result["aggregated_value"], 7) def test_trends_breakdown_single_aggregate(self): @@ -3869,7 +3870,7 @@ def test_breakdown_by_empty_cohort(self): self.team, ) - self.assertEqual(event_response[0]["label"], "$pageview - all users") + self.assertEqual(event_response[0]["label"], "all users") self.assertEqual(sum(event_response[0]["data"]), 1) @also_test_with_person_on_events_v2 @@ -3935,15 +3936,15 @@ def test_breakdown_by_cohort(self): counts[res["label"]] = sum(res["data"]) break_val[res["label"]] = res["breakdown_value"] - self.assertEqual(counts["watched movie - cohort1"], 1) - self.assertEqual(counts["watched movie - cohort2"], 3) - self.assertEqual(counts["watched movie - cohort3"], 4) - self.assertEqual(counts["watched movie - all users"], 7) + self.assertEqual(counts["cohort1"], 1) + self.assertEqual(counts["cohort2"], 3) + self.assertEqual(counts["cohort3"], 4) + self.assertEqual(counts["all users"], 7) - self.assertEqual(break_val["watched movie - cohort1"], cohort.pk) - self.assertEqual(break_val["watched movie - cohort2"], cohort2.pk) - self.assertEqual(break_val["watched movie - cohort3"], cohort3.pk) - self.assertEqual(break_val["watched movie - all users"], "all") + self.assertEqual(break_val["cohort1"], cohort.pk) + self.assertEqual(break_val["cohort2"], cohort2.pk) + self.assertEqual(break_val["cohort3"], cohort3.pk) + self.assertEqual(break_val["all users"], "all") self.assertEntityResponseEqual(event_response, action_response) @@ -4085,7 +4086,7 @@ def test_breakdown_by_person_property(self): for response in event_response: 
if response["breakdown_value"] == "person1": self.assertEqual(response["count"], 1) - self.assertEqual(response["label"], "watched movie - person1") + self.assertEqual(response["label"], "person1") if response["breakdown_value"] == "person2": self.assertEqual(response["count"], 3) if response["breakdown_value"] == "person3": @@ -4126,7 +4127,7 @@ def test_breakdown_by_person_property_for_person_on_events(self): for response in event_response: if response["breakdown_value"] == "person1": self.assertEqual(response["count"], 1) - self.assertEqual(response["label"], "watched movie - person1") + self.assertEqual(response["label"], "person1") if response["breakdown_value"] == "person2": self.assertEqual(response["count"], 3) if response["breakdown_value"] == "person3": @@ -4666,9 +4667,9 @@ def test_trends_aggregate_by_distinct_id(self): self.team, ) self.assertEqual(daily_response[0]["data"][0], 2) - self.assertEqual(daily_response[0]["label"], "sign up - some_val") + self.assertEqual(daily_response[0]["label"], "some_val") self.assertEqual(daily_response[1]["data"][0], 1) - self.assertEqual(daily_response[1]["label"], "sign up - none") + self.assertEqual(daily_response[1]["label"], "none") # MAU with freeze_time("2019-12-31T13:00:01Z"): @@ -4809,8 +4810,8 @@ def test_breakdown_filtering(self): ) self.assertEqual(response[0]["label"], "sign up - none") - self.assertEqual(response[2]["label"], "sign up - other_value") self.assertEqual(response[1]["label"], "sign up - value") + self.assertEqual(response[2]["label"], "sign up - other_value") self.assertEqual(response[3]["label"], "no events - none") self.assertEqual(sum(response[0]["data"]), 2) @@ -4869,9 +4870,9 @@ def test_breakdown_filtering_persons(self): ), self.team, ) - self.assertEqual(response[0]["label"], "sign up - none") - self.assertEqual(response[1]["label"], "sign up - test@gmail.com") - self.assertEqual(response[2]["label"], "sign up - test@posthog.com") + self.assertEqual(response[0]["label"], "none") + 
self.assertEqual(response[1]["label"], "test@gmail.com") + self.assertEqual(response[2]["label"], "test@posthog.com") self.assertEqual(response[0]["count"], 1) self.assertEqual(response[1]["count"], 1) @@ -4927,9 +4928,9 @@ def test_breakdown_filtering_persons_with_action_props(self): ), self.team, ) - self.assertEqual(response[0]["label"], "sign up - none") - self.assertEqual(response[1]["label"], "sign up - test@gmail.com") - self.assertEqual(response[2]["label"], "sign up - test@posthog.com") + self.assertEqual(response[0]["label"], "none") + self.assertEqual(response[1]["label"], "test@gmail.com") + self.assertEqual(response[2]["label"], "test@posthog.com") self.assertEqual(response[0]["count"], 1) self.assertEqual(response[1]["count"], 1) @@ -5003,8 +5004,8 @@ def test_breakdown_filtering_with_properties(self): ) response = sorted(response, key=lambda x: x["label"]) - self.assertEqual(response[0]["label"], "sign up - first url") - self.assertEqual(response[1]["label"], "sign up - second url") + self.assertEqual(response[0]["label"], "first url") + self.assertEqual(response[1]["label"], "second url") self.assertEqual(sum(response[0]["data"]), 1) self.assertEqual(response[0]["breakdown_value"], "first url") @@ -5086,7 +5087,7 @@ def test_breakdown_filtering_with_properties_in_new_format(self): ) response = sorted(response, key=lambda x: x["label"]) - self.assertEqual(response[0]["label"], "sign up - second url") + self.assertEqual(response[0]["label"], "second url") self.assertEqual(sum(response[0]["data"]), 1) self.assertEqual(response[0]["breakdown_value"], "second url") @@ -5170,8 +5171,8 @@ def test_mau_with_breakdown_filtering_and_prop_filter(self): self.team, ) - self.assertEqual(event_response[0]["label"], "sign up - some_val") - self.assertEqual(event_response[1]["label"], "sign up - some_val2") + self.assertEqual(event_response[0]["label"], "some_val") + self.assertEqual(event_response[1]["label"], "some_val2") 
self.assertEqual(sum(event_response[0]["data"]), 2) self.assertEqual(event_response[0]["data"][5], 1) @@ -5211,8 +5212,8 @@ def test_dau_with_breakdown_filtering(self): self.team, ) - self.assertEqual(event_response[1]["label"], "sign up - other_value") - self.assertEqual(event_response[2]["label"], "sign up - value") + self.assertEqual(event_response[1]["label"], "other_value") + self.assertEqual(event_response[2]["label"], "value") self.assertEqual(sum(event_response[1]["data"]), 1) self.assertEqual(event_response[1]["data"][5], 1) @@ -5256,8 +5257,8 @@ def test_dau_with_breakdown_filtering_with_sampling(self): self.team, ) - self.assertEqual(event_response[1]["label"], "sign up - other_value") - self.assertEqual(event_response[2]["label"], "sign up - value") + self.assertEqual(event_response[1]["label"], "other_value") + self.assertEqual(event_response[2]["label"], "value") self.assertEqual(sum(event_response[1]["data"]), 1) self.assertEqual(event_response[1]["data"][5], 1) @@ -5301,7 +5302,7 @@ def test_dau_with_breakdown_filtering_with_prop_filter(self): self.team, ) - self.assertEqual(event_response[0]["label"], "sign up - other_value") + self.assertEqual(event_response[0]["label"], "other_value") self.assertEqual(sum(event_response[0]["data"]), 1) self.assertEqual(event_response[0]["data"][5], 1) # property not defined diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py index e891190f6e310..458aabdc14198 100644 --- a/posthog/queries/trends/breakdown.py +++ b/posthog/queries/trends/breakdown.py @@ -676,7 +676,11 @@ def _breakdown_result_descriptors(self, breakdown_value, filter: Filter, entity: extra_label = self._determine_breakdown_label( breakdown_value, filter.breakdown_type, filter.breakdown, breakdown_value ) - label = "{} - {}".format(entity.name, extra_label) + if len(filter.entities) > 1: + # if there are multiple entities in the query, include the entity name in the labels + label = "{} - {}".format(entity.name, 
extra_label) + else: + label = extra_label additional_values = {"label": label} if filter.breakdown_type == "cohort": additional_values["breakdown_value"] = "all" if breakdown_value == ALL_USERS_COHORT_ID else breakdown_value diff --git a/posthog/schema.py b/posthog/schema.py index be7bb8619a4ce..46d107122cd8e 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -3,6 +3,7 @@ from __future__ import annotations +from datetime import datetime from enum import Enum from typing import Any, Dict, List, Optional, Union @@ -439,6 +440,23 @@ class PropertyOperator(str, Enum): max = "max" +class QueryStatus(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + complete: Optional[bool] = False + end_time: Optional[datetime] = None + error: Optional[bool] = False + error_message: Optional[str] = "" + expiration_time: Optional[datetime] = None + id: str + query_async: Optional[bool] = True + results: Optional[Any] = None + start_time: Optional[datetime] = None + task_id: Optional[str] = None + team_id: int + + class QueryTiming(BaseModel): model_config = ConfigDict( extra="forbid", @@ -553,6 +571,7 @@ class TrendsFilter(BaseModel): display: Optional[ChartDisplayType] = None formula: Optional[str] = None hidden_legend_indexes: Optional[List[float]] = None + show_labels_on_series: Optional[bool] = None show_legend: Optional[bool] = None show_percent_stack_view: Optional[bool] = None show_values_on_series: Optional[bool] = None @@ -571,6 +590,14 @@ class TrendsQueryResponse(BaseModel): timings: Optional[List[QueryTiming]] = None +class ActionsPie(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + disableHoverOffset: Optional[bool] = None + hideAggregation: Optional[bool] = None + + class RETENTION(BaseModel): model_config = ConfigDict( extra="forbid", @@ -584,6 +611,7 @@ class VizSpecificOptions(BaseModel): model_config = ConfigDict( extra="forbid", ) + ActionsPie: Optional[ActionsPie] = None RETENTION: Optional[RETENTION] = None diff --git 
a/posthog/settings/ingestion.py b/posthog/settings/ingestion.py index bab3d7f2506ae..b559f5726ca29 100644 --- a/posthog/settings/ingestion.py +++ b/posthog/settings/ingestion.py @@ -36,4 +36,7 @@ NEW_ANALYTICS_CAPTURE_ENDPOINT = os.getenv("NEW_CAPTURE_ENDPOINT", "/i/v0/e/") NEW_ANALYTICS_CAPTURE_TEAM_IDS = get_set(os.getenv("NEW_ANALYTICS_CAPTURE_TEAM_IDS", "")) +NEW_ANALYTICS_CAPTURE_EXCLUDED_TEAM_IDS = get_set(os.getenv("NEW_ANALYTICS_CAPTURE_EXCLUDED_TEAM_IDS", "")) NEW_ANALYTICS_CAPTURE_SAMPLING_RATE = get_from_env("NEW_ANALYTICS_CAPTURE_SAMPLING_RATE", type_cast=float, default=1.0) + +ELEMENT_CHAIN_AS_STRING_TEAMS = get_set(os.getenv("ELEMENT_CHAIN_AS_STRING_TEAMS", "")) diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py index 622798774ec1d..8f6fffd0c9f90 100644 --- a/posthog/tasks/exports/csv_exporter.py +++ b/posthog/tasks/exports/csv_exporter.py @@ -7,7 +7,7 @@ from django.http import QueryDict from sentry_sdk import capture_exception, push_scope -from posthog.api.query import process_query +from posthog.api.services.query import process_query from posthog.jwt import PosthogJwtAudience, encode_jwt from posthog.models.exported_asset import ExportedAsset, save_content from posthog.utils import absolute_uri diff --git a/posthog/tasks/test/__snapshots__/test_usage_report.ambr b/posthog/tasks/test/__snapshots__/test_usage_report.ambr index 92f4167485468..9cabd193acff2 100644 --- a/posthog/tasks/test/__snapshots__/test_usage_report.ambr +++ b/posthog/tasks/test/__snapshots__/test_usage_report.ambr @@ -267,7 +267,7 @@ FROM events WHERE team_id = 2 AND event = 'external data sync job' - AND parseDateTimeBestEffort(JSONExtractString(properties, 'start_time')) BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' + AND parseDateTimeBestEffort(JSONExtractString(properties, 'startTime')) BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' GROUP BY job_id, team) GROUP BY team diff --git 
a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index 79a7ab46b8ab2..a10a16e17893a 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -1017,7 +1017,7 @@ def test_external_data_rows_synced_response( properties={ "count": 10, "job_id": 10924, - "start_time": start_time, + "startTime": start_time, }, timestamp=now() - relativedelta(hours=i), team=self.analytics_team, @@ -1029,7 +1029,7 @@ def test_external_data_rows_synced_response( properties={ "count": 10, "job_id": 10924, - "start_time": start_time, + "startTime": start_time, }, timestamp=now() - relativedelta(hours=i, minutes=i), team=self.analytics_team, @@ -1042,7 +1042,7 @@ def test_external_data_rows_synced_response( properties={ "count": 10, "job_id": 10924, - "start_time": (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "startTime": (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ"), }, timestamp=now() - relativedelta(hours=i), team=self.analytics_team, diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index 9ffbc39227331..3e8d4907d4f3f 100644 --- a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -16,7 +16,6 @@ ) import requests -from retry import retry import structlog from dateutil import parser from django.conf import settings @@ -24,6 +23,7 @@ from django.db.models import Count, Q from posthoganalytics.client import Client from psycopg2 import sql +from retry import retry from sentry_sdk import capture_exception from posthog import version_requirement @@ -604,7 +604,7 @@ def get_teams_with_rows_synced_in_period(begin: datetime, end: datetime) -> List SELECT team, sum(rows_synced) FROM ( SELECT JSONExtractString(properties, 'job_id') AS job_id, distinct_id AS team, any(JSONExtractInt(properties, 'count')) AS rows_synced FROM events - WHERE team_id = %(team_to_query)s AND event = 'external data sync job' AND 
parseDateTimeBestEffort(JSONExtractString(properties, 'start_time')) BETWEEN %(begin)s AND %(end)s + WHERE team_id = %(team_to_query)s AND event = 'external data sync job' AND parseDateTimeBestEffort(JSONExtractString(properties, 'startTime')) BETWEEN %(begin)s AND %(end)s GROUP BY job_id, team ) GROUP BY team diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py index 53f8d5f855e5d..9561a8bf2ea35 100644 --- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py @@ -2,13 +2,17 @@ import datetime as dt import gzip import json +import os +import random import re +import unittest.mock from collections import deque from uuid import uuid4 import pytest import pytest_asyncio import responses +import snowflake.connector from django.conf import settings from django.test import override_settings from requests.models import PreparedRequest @@ -19,7 +23,6 @@ from temporalio.testing import WorkflowEnvironment from temporalio.worker import UnsandboxedWorkflowRunner, Worker -from posthog.temporal.tests.utils.datetimes import to_isoformat from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import acreate_batch_export, adelete_batch_export, afetch_batch_export_runs from posthog.temporal.workflows.batch_exports import ( @@ -36,6 +39,92 @@ pytestmark = [pytest.mark.asyncio, pytest.mark.django_db] +class FakeSnowflakeCursor: + """A fake Snowflake cursor that can fail on PUT and COPY queries.""" + + def __init__(self, *args, failure_mode: str | None = None, **kwargs): + self._execute_calls = [] + self._execute_async_calls = [] + self._sfqid = 1 + self._fail = failure_mode + + @property + def sfqid(self): + current = self._sfqid + self._sfqid += 1 + return current + + def execute(self, query, 
params=None, file_stream=None): + self._execute_calls.append({"query": query, "params": params, "file_stream": file_stream}) + + def execute_async(self, query, params=None, file_stream=None): + self._execute_async_calls.append({"query": query, "params": params, "file_stream": file_stream}) + + def get_results_from_sfqid(self, query_id): + pass + + def fetchone(self): + if self._fail == "put": + return ( + "test", + "test.gz", + 456, + 0, + "NONE", + "GZIP", + "FAILED", + "Some error on put", + ) + else: + return ( + "test", + "test.gz", + 456, + 0, + "NONE", + "GZIP", + "UPLOADED", + None, + ) + + def fetchall(self): + if self._fail == "copy": + return [("test", "LOAD FAILED", 100, 99, 1, 1, "Some error on copy", 3)] + else: + return [("test", "LOADED", 100, 99, 1, 1, "Some error on copy", 3)] + + +class FakeSnowflakeConnection: + def __init__( + self, + *args, + failure_mode: str | None = None, + **kwargs, + ): + self._cursors = [] + self._is_running = True + self.failure_mode = failure_mode + + def cursor(self) -> FakeSnowflakeCursor: + cursor = FakeSnowflakeCursor(failure_mode=self.failure_mode) + self._cursors.append(cursor) + return cursor + + def get_query_status_throw_if_error(self, query_id): + return snowflake.connector.constants.QueryStatus.SUCCESS + + def is_still_running(self, status): + current_status = self._is_running + self._is_running = not current_status + return current_status + + def __enter__(self): + return self + + def __exit__(self, *args, **kwargs): + pass + + def contains_queries_in_order(queries: list[str], *queries_to_find: str): """Check if a list of queries contains a list of queries in order.""" # We use a deque to pop the queries we find off the list of queries to @@ -204,21 +293,52 @@ def query_request_handler(request: PreparedRequest): return queries, staged_files +@pytest.fixture +def database(): + """Generate a unique database name for tests.""" + return f"test_batch_exports_{uuid4()}" + + +@pytest.fixture +def schema(): + 
"""Generate a unique schema name for tests.""" + return f"test_batch_exports_{uuid4()}" + + +@pytest.fixture +def table_name(ateam, interval): + return f"test_workflow_table_{ateam.pk}_{interval}" + + +@pytest.fixture +def snowflake_config(database, schema) -> dict[str, str]: + """Return a Snowflake configuration dictionary to use in tests. + + We set default configuration values to support tests against the Snowflake API + and tests that mock it. + """ + password = os.getenv("SNOWFLAKE_PASSWORD", "password") + warehouse = os.getenv("SNOWFLAKE_WAREHOUSE", "COMPUTE_WH") + account = os.getenv("SNOWFLAKE_ACCOUNT", "account") + username = os.getenv("SNOWFLAKE_USERNAME", "hazzadous") + + return { + "password": password, + "user": username, + "warehouse": warehouse, + "account": account, + "database": database, + "schema": schema, + } + + @pytest_asyncio.fixture -async def snowflake_batch_export(ateam, interval, temporal_client): +async def snowflake_batch_export(ateam, table_name, snowflake_config, interval, exclude_events, temporal_client): + """Manage BatchExport model (and associated Temporal Schedule) for tests""" destination_data = { "type": "Snowflake", - "config": { - "user": "hazzadous", - "password": "password", - "account": "account", - "database": "PostHog", - "schema": "test", - "warehouse": "COMPUTE_WH", - "table_name": "events", - }, + "config": {**snowflake_config, "table_name": table_name, "exclude_events": exclude_events}, } - batch_export_data = { "name": "my-production-snowflake-export", "destination": destination_data, @@ -238,7 +358,9 @@ async def snowflake_batch_export(ateam, interval, temporal_client): @pytest.mark.parametrize("interval", ["hour", "day"], indirect=True) -async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client, snowflake_batch_export, interval): +async def test_snowflake_export_workflow_exports_events( + ateam, clickhouse_client, database, schema, snowflake_batch_export, interval, table_name +): """Test 
that the whole workflow not just the activity works. It should update the batch export run status to completed, as well as updating the record @@ -247,7 +369,7 @@ async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta - (events, _, _) = await generate_test_events_in_clickhouse( + await generate_test_events_in_clickhouse( client=clickhouse_client, team_id=ateam.pk, start_time=data_interval_start, @@ -281,10 +403,12 @@ async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with responses.RequestsMock( - target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send" - ) as rsps, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2): - queries, staged_files = add_mock_snowflake_api(rsps) + with unittest.mock.patch( + "posthog.temporal.workflows.snowflake_batch_export.snowflake.connector.connect", + ) as mock, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + fake_conn = FakeSnowflakeConnection() + mock.return_value = fake_conn + await activity_environment.client.execute_workflow( SnowflakeBatchExportWorkflow.run, inputs, @@ -294,49 +418,27 @@ async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client retry_policy=RetryPolicy(maximum_attempts=1), ) - assert contains_queries_in_order( - queries, - 'USE DATABASE "PostHog"', - 'USE SCHEMA "test"', - 'CREATE TABLE IF NOT EXISTS "PostHog"."test"."events"', - # NOTE: we check that we at least have two PUT queries to - # ensure we hit the multi file upload code path - 'PUT file://.* @%"events"', - 'PUT file://.* @%"events"', - 'COPY INTO "events"', - ) + execute_calls = [] + for cursor in fake_conn._cursors: + for call in cursor._execute_calls: + execute_calls.append(call["query"]) - 
staged_data = "\n".join(staged_files) + execute_async_calls = [] + for cursor in fake_conn._cursors: + for call in cursor._execute_async_calls: + execute_async_calls.append(call["query"]) - # Check that the data is correct. - json_data = [json.loads(line) for line in staged_data.split("\n") if line] - # Pull out the fields we inserted only - json_data = [ - { - "uuid": event["uuid"], - "event": event["event"], - "timestamp": event["timestamp"], - "properties": event["properties"], - "person_id": event["person_id"], - } - for event in json_data + assert execute_calls[0:3] == [ + f'USE DATABASE "{database}"', + f'USE SCHEMA "{schema}"', + "SET ABORT_DETACHED_QUERY = FALSE", ] - json_data.sort(key=lambda x: x["timestamp"]) - # Drop _timestamp and team_id from events - expected_events = [] - for event in events: - expected_event = { - key: value - for key, value in event.items() - if key in ("uuid", "event", "timestamp", "properties", "person_id") - } - expected_event["timestamp"] = to_isoformat(event["timestamp"]) - expected_events.append(expected_event) - expected_events.sort(key=lambda x: x["timestamp"]) + assert all(query.startswith("PUT") for query in execute_calls[3:12]) + assert all(f"_{n}.jsonl" in query for n, query in enumerate(execute_calls[3:12])) - assert json_data[0] == expected_events[0] - assert json_data == expected_events + assert execute_async_calls[0].strip().startswith(f'CREATE TABLE IF NOT EXISTS "{table_name}"') + assert execute_async_calls[1].strip().startswith(f'COPY INTO "{table_name}"') runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) assert len(runs) == 1 @@ -451,11 +553,15 @@ async def test_snowflake_export_workflow_raises_error_on_put_fail( ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with responses.RequestsMock( - target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send" - ) as rsps, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2): - add_mock_snowflake_api(rsps, 
fail="put") + class FakeSnowflakeConnectionFailOnPut(FakeSnowflakeConnection): + def __init__(self, *args, **kwargs): + super().__init__(*args, failure_mode="put", **kwargs) + + with unittest.mock.patch( + "posthog.temporal.workflows.snowflake_batch_export.snowflake.connector.connect", + side_effect=FakeSnowflakeConnectionFailOnPut, + ): with pytest.raises(WorkflowFailureError) as exc_info: await activity_environment.client.execute_workflow( SnowflakeBatchExportWorkflow.run, @@ -513,11 +619,15 @@ async def test_snowflake_export_workflow_raises_error_on_copy_fail( ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with responses.RequestsMock( - target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send" - ) as rsps, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2): - add_mock_snowflake_api(rsps, fail="copy") + class FakeSnowflakeConnectionFailOnCopy(FakeSnowflakeConnection): + def __init__(self, *args, **kwargs): + super().__init__(*args, failure_mode="copy", **kwargs) + + with unittest.mock.patch( + "posthog.temporal.workflows.snowflake_batch_export.snowflake.connector.connect", + side_effect=FakeSnowflakeConnectionFailOnCopy, + ): with pytest.raises(WorkflowFailureError) as exc_info: await activity_environment.client.execute_workflow( SnowflakeBatchExportWorkflow.run, @@ -577,8 +687,11 @@ async def insert_into_snowflake_activity_mocked(_: SnowflakeInsertInputs) -> str assert run.latest_error == "ValueError: A useful error message" -async def test_snowflake_export_workflow_handles_cancellation(ateam, snowflake_batch_export): - """Test that Snowflake Export Workflow can gracefully handle cancellations when inserting Snowflake data.""" +async def test_snowflake_export_workflow_handles_cancellation_mocked(ateam, snowflake_batch_export): + """Test that Snowflake Export Workflow can gracefully handle cancellations when inserting Snowflake data. + + We mock the insert_into_snowflake_activity for this test. 
+ """ workflow_id = str(uuid4()) inputs = SnowflakeBatchExportInputs( team_id=ateam.pk, @@ -624,3 +737,462 @@ async def never_finish_activity(_: SnowflakeInsertInputs) -> str: run = runs[0] assert run.status == "Cancelled" assert run.latest_error == "Cancelled" + + +def assert_events_in_snowflake( + cursor: snowflake.connector.cursor.SnowflakeCursor, table_name: str, events: list, exclude_events: list[str] +): + """Assert provided events are present in Snowflake table.""" + cursor.execute(f'SELECT * FROM "{table_name}"') + + rows = cursor.fetchall() + + columns = {index: metadata.name for index, metadata in enumerate(cursor.description)} + json_columns = ("properties", "elements", "people_set", "people_set_once") + + # Rows are tuples, so we construct a dictionary using the metadata from cursor.description. + # We rely on the order of the columns in each row matching the order set in cursor.description. + # This seems to be the case, at least for now. + inserted_events = [ + { + columns[index]: json.loads(row[index]) + if columns[index] in json_columns and row[index] is not None + else row[index] + for index in columns.keys() + } + for row in rows + ] + inserted_events.sort(key=lambda x: (x["event"], x["timestamp"])) + + expected_events = [] + for event in events: + event_name = event.get("event") + + if exclude_events is not None and event_name in exclude_events: + continue + + properties = event.get("properties", None) + elements_chain = event.get("elements_chain", None) + expected_event = { + "distinct_id": event.get("distinct_id"), + "elements": json.dumps(elements_chain), + "event": event_name, + "ip": properties.get("$ip", None) if properties else None, + "properties": event.get("properties"), + "people_set": properties.get("$set", None) if properties else None, + "people_set_once": properties.get("$set_once", None) if properties else None, + "site_url": "", + "timestamp": dt.datetime.fromisoformat(event.get("timestamp")), + "team_id": event.get("team_id"), + 
"uuid": event.get("uuid"), + } + expected_events.append(expected_event) + + expected_events.sort(key=lambda x: (x["event"], x["timestamp"])) + + assert inserted_events[0] == expected_events[0] + assert inserted_events == expected_events + + +REQUIRED_ENV_VARS = ( + "SNOWFLAKE_WAREHOUSE", + "SNOWFLAKE_PASSWORD", + "SNOWFLAKE_ACCOUNT", + "SNOWFLAKE_USERNAME", +) + +SKIP_IF_MISSING_REQUIRED_ENV_VARS = pytest.mark.skipif( + any(env_var not in os.environ for env_var in REQUIRED_ENV_VARS), + reason="Snowflake required env vars are not set", +) + + +@pytest.fixture +def snowflake_cursor(snowflake_config): + """Manage a snowflake cursor that cleans up after we are done.""" + with snowflake.connector.connect( + user=snowflake_config["user"], + password=snowflake_config["password"], + account=snowflake_config["account"], + warehouse=snowflake_config["warehouse"], + ) as connection: + cursor = connection.cursor() + cursor.execute(f"CREATE DATABASE \"{snowflake_config['database']}\"") + cursor.execute(f"CREATE SCHEMA \"{snowflake_config['database']}\".\"{snowflake_config['schema']}\"") + cursor.execute(f"USE SCHEMA \"{snowflake_config['database']}\".\"{snowflake_config['schema']}\"") + + yield cursor + + cursor.execute(f"DROP DATABASE IF EXISTS \"{snowflake_config['database']}\" CASCADE") + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +@pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) +async def test_insert_into_snowflake_activity_inserts_data_into_snowflake_table( + clickhouse_client, activity_environment, snowflake_cursor, snowflake_config, exclude_events +): + """Test that the insert_into_snowflake_activity function inserts data into a PostgreSQL table. + + We use the generate_test_events_in_clickhouse function to generate several sets + of events. Some of these sets are expected to be exported, and others not. Expected + events are those that: + * Are created for the team_id of the batch export. 
+ * Are created in the date range of the batch export. + * Are not duplicates of other events that are in the same batch. + * Do not have an event name contained in the batch export's exclude_events. + + Once we have these events, we pass them to the assert_events_in_snowflake function to check + that they appear in the expected Snowflake table. This function runs against a real Snowflake + instance, so the environment should be populated with the necessary credentials. + """ + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + + team_id = random.randint(1, 1000000) + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=1000, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + if exclude_events: + for event_name in exclude_events: + await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=5, + count_outside_range=0, + count_other_team=0, + event_name=event_name, + ) + + table_name = f"test_insert_activity_table_{team_id}" + insert_inputs = SnowflakeInsertInputs( + team_id=team_id, + table_name=table_name, + data_interval_start=data_interval_start.isoformat(), + data_interval_end=data_interval_end.isoformat(), + exclude_events=exclude_events, + **snowflake_config, + ) + + await activity_environment.run(insert_into_snowflake_activity, insert_inputs) + + assert_events_in_snowflake( + cursor=snowflake_cursor, + table_name=table_name, + events=events, + exclude_events=exclude_events, + ) + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +@pytest.mark.parametrize("interval", ["hour", "day"], 
indirect=True) +@pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) +async def test_snowflake_export_workflow( + clickhouse_client, + snowflake_cursor, + interval, + snowflake_batch_export, + ateam, + exclude_events, +): + """Test Redshift Export Workflow end-to-end. + + The workflow should update the batch export run status to completed and produce the expected + records to the provided Redshift instance. + """ + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=100, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + if exclude_events: + for event_name in exclude_events: + await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=5, + count_outside_range=0, + count_other_team=0, + event_name=event_name, + ) + + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=ateam.pk, + batch_export_id=str(snowflake_batch_export.id), + data_interval_end=data_interval_end.isoformat(), + interval=interval, + **snowflake_batch_export.destination.config, + ) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[ + create_export_run, + insert_into_snowflake_activity, + update_export_run_status, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + await 
activity_environment.client.execute_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=10), + ) + + runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Completed" + + assert_events_in_snowflake( + cursor=snowflake_cursor, + table_name=snowflake_batch_export.destination.config["table_name"], + events=events, + exclude_events=exclude_events, + ) + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +@pytest.mark.parametrize("interval", ["hour", "day"], indirect=True) +@pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) +async def test_snowflake_export_workflow_with_many_files( + clickhouse_client, + snowflake_cursor, + interval, + snowflake_batch_export, + ateam, + exclude_events, +): + """Test Snowflake Export Workflow end-to-end with multiple file uploads. + + This test overrides the chunk size and sets it to 1 byte to trigger multiple file uploads. + We want to assert that all files are properly copied into the table. Of course, 1 byte limit + means we are uploading one file at a time, which is very innefficient. For this reason, this test + can take longer, so we keep the event count low and bump the Workflow timeout. 
+ """ + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=10, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=ateam.pk, + batch_export_id=str(snowflake_batch_export.id), + data_interval_end=data_interval_end.isoformat(), + interval=interval, + **snowflake_batch_export.destination.config, + ) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[ + create_export_run, + insert_into_snowflake_activity, + update_export_run_status, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + with override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + await activity_environment.client.execute_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=20), + ) + + runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Completed" + + assert_events_in_snowflake( + cursor=snowflake_cursor, + table_name=snowflake_batch_export.destination.config["table_name"], + events=events, + exclude_events=exclude_events, + ) + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +async def test_snowflake_export_workflow_handles_cancellation( + 
clickhouse_client, ateam, snowflake_batch_export, interval, snowflake_cursor +): + """Test that Snowflake Export Workflow can gracefully handle cancellations when inserting Snowflake data.""" + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=100, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=ateam.pk, + batch_export_id=str(snowflake_batch_export.id), + data_interval_end=data_interval_end.isoformat(), + interval=interval, + **snowflake_batch_export.destination.config, + ) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[ + create_export_run, + insert_into_snowflake_activity, + update_export_run_status, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + # We set the chunk size low on purpose to slow things down and give us time to cancel. + with override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + handle = await activity_environment.client.start_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + # We need to wait a bit for the activity to start running. 
+ await asyncio.sleep(5) + await handle.cancel() + + with pytest.raises(WorkflowFailureError): + await handle.result() + + runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Cancelled" + assert run.latest_error == "Cancelled" + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +async def test_insert_into_snowflake_activity_heartbeats( + clickhouse_client, + ateam, + snowflake_batch_export, + snowflake_cursor, + snowflake_config, + activity_environment, +): + """Test that the insert_into_snowflake_activity activity sends heartbeats. + + We use a function that runs on_heartbeat to check and track the heartbeat contents. + """ + data_interval_end = dt.datetime.fromisoformat("2023-04-20T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + events_in_files = [] + n_expected_files = 3 + + for i in range(1, n_expected_files + 1): + part_inserted_at = data_interval_end - snowflake_batch_export.interval_time_delta / i + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=1, + count_outside_range=0, + count_other_team=0, + duplicate=False, + inserted_at=part_inserted_at, + ) + events_in_files += events + + captured_details = [] + + def capture_heartbeat_details(*details): + """A function to track what we heartbeat.""" + nonlocal captured_details + + captured_details.append(details) + + activity_environment.on_heartbeat = capture_heartbeat_details + + table_name = f"test_insert_activity_table_{ateam.pk}" + insert_inputs = SnowflakeInsertInputs( + team_id=ateam.pk, + table_name=table_name, + data_interval_start=data_interval_start.isoformat(), + data_interval_end=data_interval_end.isoformat(), + **snowflake_config, + ) + + with override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + await 
activity_environment.run(insert_into_snowflake_activity, insert_inputs) + + assert n_expected_files == len(captured_details) + + for index, details_captured in enumerate(captured_details): + assert dt.datetime.fromisoformat( + details_captured[0] + ) == data_interval_end - snowflake_batch_export.interval_time_delta / (index + 1) + assert details_captured[1] == index + 1 + + assert_events_in_snowflake(snowflake_cursor, table_name, events_in_files, exclude_events=[]) diff --git a/posthog/temporal/utils.py b/posthog/temporal/utils.py index d8757e2949a29..efc19c9e8ef4a 100644 --- a/posthog/temporal/utils.py +++ b/posthog/temporal/utils.py @@ -1,35 +1,115 @@ -import asyncio -from functools import wraps -from typing import Any, Awaitable, Callable, TypeVar, cast +import collections.abc +import dataclasses +import datetime as dt +import typing -from temporalio import activity -F = TypeVar("F", bound=Callable[..., Awaitable[Any]]) +class EmptyHeartbeatError(Exception): + """Raised when an activity heartbeat is empty. + This is also the error we expect when no heartbeatting is happening, as the sequence will be empty. + """ + + def __init__(self): + super().__init__(f"Heartbeat details sequence is empty") + + +class NotEnoughHeartbeatValuesError(Exception): + """Raised when an activity heartbeat doesn't contain the right amount of values we expect.""" + + def __init__(self, details_len: int, expected: int): + super().__init__(f"Not enough values in heartbeat details (expected {expected}, got {details_len})") + + +class HeartbeatParseError(Exception): + """Raised when an activity heartbeat cannot be parsed into it's expected types.""" + + def __init__(self, field: str): + super().__init__(f"Parsing {field} from heartbeat details encountered an error") + + +@dataclasses.dataclass +class HeartbeatDetails: + """The batch export details included in every heartbeat. 
+ + Each batch export destination should subclass this and implement whatever details are specific to that + batch export and required to resume it. + + Attributes: + last_inserted_at: The last inserted_at we managed to upload or insert, depending on the destination. + _remaining: Any remaining values in the heartbeat_details tuple that we do not parse. + """ + + last_inserted_at: dt.datetime + _remaining: collections.abc.Sequence[typing.Any] + + @property + def total_details(self) -> int: + """The total number of details that we have parsed + those remaining to parse.""" + return (len(dataclasses.fields(self.__class__)) - 1) + len(self._remaining) + + @classmethod + def from_activity(cls, activity): + """Attempt to initialize HeartbeatDetails from an activity's info.""" + details = activity.info().heartbeat_details + + if len(details) == 0: + raise EmptyHeartbeatError() -def auto_heartbeater(fn: F) -> F: - # We want to ensure that the type hints from the original callable are - # available via our wrapper, so we use the functools wraps decorator - @wraps(fn) - async def wrapper(*args, **kwargs): - heartbeat_timeout = activity.info().heartbeat_timeout - heartbeat_task = None - if heartbeat_timeout: - # Heartbeat twice as often as the timeout - heartbeat_task = asyncio.create_task(heartbeat_every(heartbeat_timeout.total_seconds() / 2)) try: - return await fn(*args, **kwargs) - finally: - if heartbeat_task: - heartbeat_task.cancel() - # Wait for heartbeat cancellation to complete - await asyncio.wait([heartbeat_task]) + last_inserted_at = dt.datetime.fromisoformat(details[0]) + except (TypeError, ValueError) as e: + raise HeartbeatParseError("last_inserted_at") from e + + return cls(last_inserted_at, _remaining=details[1:]) + + +HeartbeatType = typing.TypeVar("HeartbeatType", bound=HeartbeatDetails) + + +async def should_resume_from_activity_heartbeat( + activity, heartbeat_type: typing.Type[HeartbeatType], logger +) -> tuple[bool, HeartbeatType | None]: + """Check 
if a batch export should resume from an activity's heartbeat details. + + We understand that a batch export should resume any time that we receive heartbeat details and + those details can be correctly parsed. However, the decision is ultimately up to the batch export + activity to decide if it must resume and how to do so. + + Returns: + A tuple with the first element indicating if the batch export should resume. If the first element + is True, the second tuple element will be the heartbeat details themselves, otherwise None. + """ + try: + heartbeat_details = heartbeat_type.from_activity(activity) + + except EmptyHeartbeatError: + # We don't log this as a warning/error because it's the expected exception when heartbeat is empty. + heartbeat_details = None + received = False + logger.debug("Did not receive details from previous activity execution") + + except NotEnoughHeartbeatValuesError: + heartbeat_details = None + received = False + logger.warning("Details from previous activity execution did not contain the expected amount of values") + + except HeartbeatParseError: + heartbeat_details = None + received = False + logger.warning("Details from previous activity execution could not be parsed.") - return cast(F, wrapper) + except Exception: + # We should start from the beginning, but we make a point to log unexpected errors. + # Ideally, any new exceptions should be added to the previous blocks after the first time and we will never land here. 
+ heartbeat_details = None + received = False + logger.exception("Did not receive details from previous activity Excecution due to an unexpected error") + else: + received = True + logger.debug( + f"Received details from previous activity: {heartbeat_details}", + ) -async def heartbeat_every(delay: float, *details: Any) -> None: - # Heartbeat every so often while not cancelled - while True: - await asyncio.sleep(delay) - activity.heartbeat(*details) + return received, heartbeat_details diff --git a/posthog/temporal/workflows/bigquery_batch_export.py b/posthog/temporal/workflows/bigquery_batch_export.py index 98f4a51d3c4d1..759b755427f2d 100644 --- a/posthog/temporal/workflows/bigquery_batch_export.py +++ b/posthog/temporal/workflows/bigquery_batch_export.py @@ -1,7 +1,8 @@ +import asyncio import contextlib +import dataclasses import datetime as dt import json -from dataclasses import dataclass from django.conf import settings from google.cloud import bigquery @@ -10,6 +11,10 @@ from temporalio.common import RetryPolicy from posthog.batch_exports.service import BigQueryBatchExportInputs +from posthog.temporal.utils import ( + HeartbeatDetails, + should_resume_from_activity_heartbeat, +) from posthog.temporal.workflows.base import PostHogWorkflow from posthog.temporal.workflows.batch_exports import ( BatchExportTemporaryFile, @@ -26,7 +31,7 @@ from posthog.temporal.workflows.metrics import get_bytes_exported_metric, get_rows_exported_metric -def load_jsonl_file_to_bigquery_table(jsonl_file, table, table_schema, bigquery_client): +async def load_jsonl_file_to_bigquery_table(jsonl_file, table, table_schema, bigquery_client): """Execute a COPY FROM query with given connection to copy contents of jsonl_file.""" job_config = bigquery.LoadJobConfig( source_format="NEWLINE_DELIMITED_JSON", @@ -34,10 +39,10 @@ def load_jsonl_file_to_bigquery_table(jsonl_file, table, table_schema, bigquery_ ) load_job = bigquery_client.load_table_from_file(jsonl_file, table, 
job_config=job_config, rewind=True) - load_job.result() + await asyncio.to_thread(load_job.result) -def create_table_in_bigquery( +async def create_table_in_bigquery( project_id: str, dataset_id: str, table_id: str, @@ -49,12 +54,19 @@ def create_table_in_bigquery( fully_qualified_name = f"{project_id}.{dataset_id}.{table_id}" table = bigquery.Table(fully_qualified_name, schema=table_schema) table.time_partitioning = bigquery.TimePartitioning(type_=bigquery.TimePartitioningType.DAY, field="timestamp") - table = bigquery_client.create_table(table, exists_ok=exists_ok) + table = await asyncio.to_thread(bigquery_client.create_table, table, exists_ok=exists_ok) return table -@dataclass +@dataclasses.dataclass +class BigQueryHeartbeatDetails(HeartbeatDetails): + """The BigQuery batch export details included in every heartbeat.""" + + pass + + +@dataclasses.dataclass class BigQueryInsertInputs: """Inputs for BigQuery.""" @@ -106,6 +118,15 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs): inputs.data_interval_end, ) + should_resume, details = await should_resume_from_activity_heartbeat(activity, BigQueryHeartbeatDetails, logger) + + if should_resume is True and details is not None: + data_interval_start = details.last_inserted_at.isoformat() + last_inserted_at = details.last_inserted_at + else: + data_interval_start = inputs.data_interval_start + last_inserted_at = None + async with get_client() as client: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") @@ -113,7 +134,7 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs): count = await get_rows_count( client=client, team_id=inputs.team_id, - interval_start=inputs.data_interval_start, + interval_start=data_interval_start, interval_end=inputs.data_interval_end, exclude_events=inputs.exclude_events, include_events=inputs.include_events, @@ -132,7 +153,7 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs): 
results_iterator = get_results_iterator( client=client, team_id=inputs.team_id, - interval_start=inputs.data_interval_start, + interval_start=data_interval_start, interval_end=inputs.data_interval_end, exclude_events=inputs.exclude_events, include_events=inputs.include_events, @@ -153,8 +174,24 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs): ] json_columns = ("properties", "elements", "set", "set_once") + result = None + + async def worker_shutdown_handler(): + """Handle the Worker shutting down by heart-beating our latest status.""" + await activity.wait_for_worker_shutdown() + logger.bind(last_inserted_at=last_inserted_at).debug("Worker shutting down!") + + if last_inserted_at is None: + # Don't heartbeat if worker shuts down before we could even send anything + # Just start from the beginning again. + return + + activity.heartbeat(last_inserted_at) + + asyncio.create_task(worker_shutdown_handler()) + with bigquery_client(inputs) as bq_client: - bigquery_table = create_table_in_bigquery( + bigquery_table = await create_table_in_bigquery( inputs.project_id, inputs.dataset_id, inputs.table_id, @@ -166,13 +203,13 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs): rows_exported = get_rows_exported_metric() bytes_exported = get_bytes_exported_metric() - def flush_to_bigquery(): + async def flush_to_bigquery(): logger.debug( "Loading %s records of size %s bytes", jsonl_file.records_since_last_reset, jsonl_file.bytes_since_last_reset, ) - load_jsonl_file_to_bigquery_table(jsonl_file, bigquery_table, table_schema, bq_client) + await load_jsonl_file_to_bigquery_table(jsonl_file, bigquery_table, table_schema, bq_client) rows_exported.add(jsonl_file.records_since_last_reset) bytes_exported.add(jsonl_file.bytes_since_last_reset) @@ -188,11 +225,20 @@ def flush_to_bigquery(): jsonl_file.write_records_to_jsonl([row]) if jsonl_file.tell() > settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES: - flush_to_bigquery() + await 
flush_to_bigquery() + + last_inserted_at = result["inserted_at"] + activity.heartbeat(last_inserted_at) + jsonl_file.reset() - if jsonl_file.tell() > 0: - flush_to_bigquery() + if jsonl_file.tell() > 0 and result is not None: + await flush_to_bigquery() + + last_inserted_at = result["inserted_at"] + activity.heartbeat(last_inserted_at) + + jsonl_file.reset() @workflow.defn(name="bigquery-export") @@ -263,6 +309,4 @@ async def run(self, inputs: BigQueryBatchExportInputs): "NotFound", ], update_inputs=update_inputs, - # Disable heartbeat timeout until we add heartbeat support. - heartbeat_timeout_seconds=None, ) diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py index fc29b414d2274..42e66f10d2ae3 100644 --- a/posthog/temporal/workflows/s3_batch_export.py +++ b/posthog/temporal/workflows/s3_batch_export.py @@ -276,7 +276,7 @@ class HeartbeatDetails(typing.NamedTuple): def from_activity_details(cls, details): last_uploaded_part_timestamp = details[0] upload_state = S3MultiPartUploadState(*details[1]) - return HeartbeatDetails(last_uploaded_part_timestamp, upload_state) + return cls(last_uploaded_part_timestamp, upload_state) @dataclass diff --git a/posthog/temporal/workflows/snowflake_batch_export.py b/posthog/temporal/workflows/snowflake_batch_export.py index 1831f87fa2f87..b216f20af0412 100644 --- a/posthog/temporal/workflows/snowflake_batch_export.py +++ b/posthog/temporal/workflows/snowflake_batch_export.py @@ -1,17 +1,28 @@ +import asyncio +import contextlib +import dataclasses import datetime as dt +import functools +import io import json -import tempfile -from dataclasses import dataclass +import typing import snowflake.connector from django.conf import settings -from snowflake.connector.cursor import SnowflakeCursor +from snowflake.connector.connection import SnowflakeConnection from temporalio import activity, workflow from temporalio.common import RetryPolicy from posthog.batch_exports.service import 
SnowflakeBatchExportInputs +from posthog.temporal.utils import ( + HeartbeatDetails, + HeartbeatParseError, + NotEnoughHeartbeatValuesError, + should_resume_from_activity_heartbeat, +) from posthog.temporal.workflows.base import PostHogWorkflow from posthog.temporal.workflows.batch_exports import ( + BatchExportTemporaryFile, CreateBatchExportRunInputs, UpdateBatchExportRunStatusInputs, create_export_run, @@ -43,7 +54,32 @@ def __init__(self, table_name: str, status: str, errors_seen: int, first_error: ) -@dataclass +@dataclasses.dataclass +class SnowflakeHeartbeatDetails(HeartbeatDetails): + """The Snowflake batch export details included in every heartbeat. + + Attributes: + file_no: The file number of the last file we managed to upload. + """ + + file_no: int + + @classmethod + def from_activity(cls, activity): + details = super().from_activity(activity) + + if details.total_details < 2: + raise NotEnoughHeartbeatValuesError(details.total_details, 2) + + try: + file_no = int(details._remaining[1]) + except (TypeError, ValueError) as e: + raise HeartbeatParseError("file_no") from e + + return cls(last_inserted_at=details.last_inserted_at, file_no=file_no, _remaining=details._remaining[2:]) + + +@dataclasses.dataclass class SnowflakeInsertInputs: """Inputs for Snowflake.""" @@ -66,23 +102,137 @@ class SnowflakeInsertInputs: include_events: list[str] | None = None -def put_file_to_snowflake_table(cursor: SnowflakeCursor, file_name: str, table_name: str): +def use_namespace(connection: SnowflakeConnection, database: str, schema: str) -> None: + """Switch to a namespace given by database and schema. + + This allows all queries that follow to ignore database and schema. + """ + cursor = connection.cursor() + cursor.execute(f'USE DATABASE "{database}"') + cursor.execute(f'USE SCHEMA "{schema}"') + + +@contextlib.contextmanager +def snowflake_connection(inputs) -> typing.Generator[SnowflakeConnection, None, None]: + """Context manager that yields a Snowflake connection. 
+ + Before yielding we ensure we are in the right namespace, and we set ABORT_DETACHED_QUERY + to FALSE to avoid Snowflake cancelling any async queries. + """ + with snowflake.connector.connect( + user=inputs.user, + password=inputs.password, + account=inputs.account, + warehouse=inputs.warehouse, + database=inputs.database, + schema=inputs.schema, + role=inputs.role, + ) as connection: + use_namespace(connection, inputs.database, inputs.schema) + connection.cursor().execute("SET ABORT_DETACHED_QUERY = FALSE") + + yield connection + + +async def execute_async_query( + connection: SnowflakeConnection, + query: str, + parameters: dict | None = None, + file_stream=None, + poll_interval: float = 1.0, +) -> str: + """Wrap Snowflake connector's polling API in a coroutine. + + This enables asynchronous execution of queries to release the event loop to execute other tasks + while we poll for a query to be done. For example, the event loop may use this time for heartbeating. + + Args: + connection: A SnowflakeConnection object as produced by snowflake.connector.connect. + query: A query string to run asynchronously. + parameters: An optional dictionary of parameters to bind to the query. + poll_interval: Specify how long to wait in between polls. + """ + cursor = connection.cursor() + + # Snowflake docs incorrectly state that the 'params' argument is named 'parameters'. + result = cursor.execute_async(query, params=parameters, file_stream=file_stream) + query_id = cursor.sfqid or result["queryId"] + + # Snowflake does a blocking HTTP request, so we send it to a thread. 
+ query_status = await asyncio.to_thread(connection.get_query_status_throw_if_error, query_id) + + while connection.is_still_running(query_status): + query_status = await asyncio.to_thread(connection.get_query_status_throw_if_error, query_id) + await asyncio.sleep(poll_interval) + + return query_id + + +async def create_table_in_snowflake(connection: SnowflakeConnection, table_name: str) -> None: + """Asynchronously create the table if it doesn't exist. + + Note that we use the same schema as the snowflake-plugin for backwards compatibility.""" + await execute_async_query( + connection, + f""" + CREATE TABLE IF NOT EXISTS "{table_name}" ( + "uuid" STRING, + "event" STRING, + "properties" VARIANT, + "elements" VARIANT, + "people_set" VARIANT, + "people_set_once" VARIANT, + "distinct_id" STRING, + "team_id" INTEGER, + "ip" STRING, + "site_url" STRING, + "timestamp" TIMESTAMP + ) + COMMENT = 'PostHog generated events table' + """, + ) + + +async def put_file_to_snowflake_table( + connection: SnowflakeConnection, + file: BatchExportTemporaryFile, + table_name: str, + file_no: int, +): """Executes a PUT query using the provided cursor to the provided table_name. + Sadly, Snowflake's execute_async does not work with PUT statements. So, we pass the execute + call to run_in_executor: Since execute ends up boiling down to blocking IO (HTTP request), + the event loop should not be locked up. + + We add a file_no to the file_name when executing PUT as Snowflake will reject any files with the same + name. Since batch exports re-use the same file, our name does not change, but we don't want Snowflake + to reject or overwrite our new data. + Args: - cursor: A Snowflake cursor to execute the PUT query. - file_name: The name of the file to PUT. - table_name: The name of the table where to PUT the file. + connection: A SnowflakeConnection object as produced by snowflake.connector.connect. + file: The name of the local file to PUT. 
+ table_name: The name of the Snowflake table where to PUT the file. + file_no: An int to identify which file number this is. Raises: TypeError: If we don't get a tuple back from Snowflake (should never happen). SnowflakeFileNotUploadedError: If the upload status is not 'UPLOADED'. """ - cursor.execute( - f""" - PUT file://{file_name} @%"{table_name}" - """ - ) + file.rewind() + + # We comply with the file-like interface of io.IOBase. + # So we ask mypy to be nice with us. + reader = io.BufferedReader(file) # type: ignore + query = f'PUT file://{file.name}_{file_no}.jsonl @%"{table_name}"' + cursor = connection.cursor() + + execute_put = functools.partial(cursor.execute, query, file_stream=reader) + + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, func=execute_put) + reader.detach() # BufferedReader closes the file otherwise. + result = cursor.fetchone() if not isinstance(result, tuple): # Mostly to appease mypy, as this query should always return a tuple. @@ -93,6 +243,55 @@ def put_file_to_snowflake_table(cursor: SnowflakeCursor, file_name: str, table_n raise SnowflakeFileNotUploadedError(table_name, status, message) +async def copy_loaded_files_to_snowflake_table( + connection: SnowflakeConnection, + table_name: str, +): + """Execute a COPY query in Snowflake to load any files PUT into the table. + + The query is executed asynchronously using Snowflake's polling API. + + Args: + connection: A SnowflakeConnection as returned by snowflake.connector.connect. + table_name: The table we are COPY-ing files into. 
+ """ + query = f""" + COPY INTO "{table_name}" + FILE_FORMAT = (TYPE = 'JSON') + MATCH_BY_COLUMN_NAME = CASE_SENSITIVE + PURGE = TRUE + """ + query_id = await execute_async_query(connection, query) + + cursor = connection.cursor() + cursor.get_results_from_sfqid(query_id) + results = cursor.fetchall() + + for query_result in results: + if not isinstance(query_result, tuple): + # Mostly to appease mypy, as this query should always return a tuple. + raise TypeError(f"Expected tuple from Snowflake COPY INTO query but got: '{type(query_result)}'") + + if len(query_result) < 2: + raise SnowflakeFileNotLoadedError( + table_name, + "NO STATUS", + 0, + query_result[0] if len(query_result) == 1 else "NO ERROR MESSAGE", + ) + + _, status = query_result[0:2] + + if status != "LOADED": + errors_seen, first_error = query_result[5:7] + raise SnowflakeFileNotLoadedError( + table_name, + status or "NO STATUS", + errors_seen or 0, + first_error or "NO ERROR MESSAGE", + ) + + @activity.defn async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): """Activity streams data from ClickHouse to Snowflake. 
@@ -106,6 +305,17 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): inputs.data_interval_end, ) + should_resume, details = await should_resume_from_activity_heartbeat(activity, SnowflakeHeartbeatDetails, logger) + + if should_resume is True and details is not None: + data_interval_start = details.last_inserted_at.isoformat() + last_inserted_at = details.last_inserted_at + file_no = details.file_no + else: + data_interval_start = inputs.data_interval_start + last_inserted_at = None + file_no = 0 + async with get_client() as client: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") @@ -113,7 +323,7 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): count = await get_rows_count( client=client, team_id=inputs.team_id, - interval_start=inputs.data_interval_start, + interval_start=data_interval_start, interval_end=inputs.data_interval_end, exclude_events=inputs.exclude_events, include_events=inputs.include_events, @@ -129,42 +339,31 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): logger.info("BatchExporting %s rows", count) - conn = snowflake.connector.connect( - user=inputs.user, - password=inputs.password, - account=inputs.account, - warehouse=inputs.warehouse, - database=inputs.database, - schema=inputs.schema, - role=inputs.role, - ) + rows_exported = get_rows_exported_metric() + bytes_exported = get_bytes_exported_metric() - try: - cursor = conn.cursor() - cursor.execute(f'USE DATABASE "{inputs.database}"') - cursor.execute(f'USE SCHEMA "{inputs.schema}"') - - # Create the table if it doesn't exist. Note that we use the same schema - # as the snowflake-plugin for backwards compatibility. 
- cursor.execute( - f""" - CREATE TABLE IF NOT EXISTS "{inputs.database}"."{inputs.schema}"."{inputs.table_name}" ( - "uuid" STRING, - "event" STRING, - "properties" VARIANT, - "elements" VARIANT, - "people_set" VARIANT, - "people_set_once" VARIANT, - "distinct_id" STRING, - "team_id" INTEGER, - "ip" STRING, - "site_url" STRING, - "timestamp" TIMESTAMP - ) - COMMENT = 'PostHog generated events table' - """ + async def flush_to_snowflake( + connection: SnowflakeConnection, + file: BatchExportTemporaryFile, + table_name: str, + file_no: int, + last: bool = False, + ): + logger.info( + "Putting %sfile %s containing %s records with size %s bytes", + "last " if last else "", + file_no, + file.records_since_last_reset, + file.bytes_since_last_reset, ) + await put_file_to_snowflake_table(connection, file, table_name, file_no) + rows_exported.add(file.records_since_last_reset) + bytes_exported.add(file.bytes_since_last_reset) + + with snowflake_connection(inputs) as connection: + await create_table_in_snowflake(connection, inputs.table_name) + results_iterator = get_results_iterator( client=client, team_id=inputs.team_id, @@ -173,118 +372,59 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): exclude_events=inputs.exclude_events, include_events=inputs.include_events, ) + result = None - local_results_file = tempfile.NamedTemporaryFile(suffix=".jsonl") - rows_in_file = 0 - - rows_exported = get_rows_exported_metric() - bytes_exported = get_bytes_exported_metric() - - def flush_to_snowflake(lrf: tempfile._TemporaryFileWrapper, rows_in_file: int): - lrf.flush() - put_file_to_snowflake_table(cursor, lrf.name, inputs.table_name) - rows_exported.add(rows_in_file) - bytes_exported.add(lrf.tell()) - - try: - while True: - try: - result = results_iterator.__next__() - - except StopIteration: - break - - except json.JSONDecodeError: - logger.info( - "Failed to decode a JSON value while iterating, potentially due to a ClickHouse error" - ) - # This is raised 
by aiochclient as we try to decode an error message from ClickHouse. - # So far, this error message only indicated that we were too slow consuming rows. - # So, we can resume from the last result. - if result is None: - # We failed right at the beginning - new_interval_start = None - else: - new_interval_start = result.get("inserted_at", None) - - if not isinstance(new_interval_start, str): - new_interval_start = inputs.data_interval_start - - results_iterator = get_results_iterator( - client=client, - team_id=inputs.team_id, - interval_start=new_interval_start, # This means we'll generate at least one duplicate. - interval_end=inputs.data_interval_end, - ) - continue - - if not result: - break - - # Write the results to a local file - local_results_file.write(json.dumps(result).encode("utf-8")) - local_results_file.write("\n".encode("utf-8")) - rows_in_file += 1 - - # Write results to Snowflake when the file reaches 50MB and - # reset the file, or if there is nothing else to write. - if ( - local_results_file.tell() - and local_results_file.tell() > settings.BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES - ): - logger.info("Uploading to Snowflake") - - # Flush the file to make sure everything is written - flush_to_snowflake(local_results_file, rows_in_file) - - # Delete the temporary file and create a new one - local_results_file.close() - local_results_file = tempfile.NamedTemporaryFile(suffix=".jsonl") - rows_in_file = 0 - - # Flush the file to make sure everything is written - flush_to_snowflake(local_results_file, rows_in_file) - - # We don't need the file anymore, close (and delete) it. - local_results_file.close() - cursor.execute( - f""" - COPY INTO "{inputs.table_name}" - FILE_FORMAT = (TYPE = 'JSON') - MATCH_BY_COLUMN_NAME = CASE_SENSITIVE - PURGE = TRUE - """ - ) - results = cursor.fetchall() - - for query_result in results: - if not isinstance(query_result, tuple): - # Mostly to appease mypy, as this query should always return a tuple. 
- raise TypeError(f"Expected tuple from Snowflake COPY INTO query but got: '{type(result)}'") - - if len(query_result) < 2: - raise SnowflakeFileNotLoadedError( - inputs.table_name, - "NO STATUS", - 0, - query_result[1] if len(query_result) == 1 else "NO ERROR MESSAGE", - ) - - _, status = query_result[0:2] - - if status != "LOADED": - errors_seen, first_error = query_result[5:7] - raise SnowflakeFileNotLoadedError( - inputs.table_name, - status or "NO STATUS", - errors_seen or 0, - first_error or "NO ERROR MESSAGE", - ) - - finally: - local_results_file.close() - finally: - conn.close() + + async def worker_shutdown_handler(): + """Handle the Worker shutting down by heart-beating our latest status.""" + await activity.wait_for_worker_shutdown() + logger.bind(last_inserted_at=last_inserted_at, file_no=file_no).debug("Worker shutting down!") + + if last_inserted_at is None: + # Don't heartbeat if worker shuts down before we could even send anything + # Just start from the beginning again. 
+ return + + activity.heartbeat(last_inserted_at, file_no) + + asyncio.create_task(worker_shutdown_handler()) + + with BatchExportTemporaryFile() as local_results_file: + for result in results_iterator: + record = { + "uuid": result["uuid"], + "event": result["event"], + "properties": result["properties"], + "elements": result["elements"], + "people_set": result["set"], + "people_set_once": result["set_once"], + "distinct_id": result["distinct_id"], + "team_id": result["team_id"], + "ip": result["ip"], + "site_url": result["site_url"], + "timestamp": result["timestamp"], + } + local_results_file.write_records_to_jsonl([record]) + + if local_results_file.tell() > settings.BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES: + await flush_to_snowflake(connection, local_results_file, inputs.table_name, file_no) + + last_inserted_at = result["inserted_at"] + file_no += 1 + + activity.heartbeat(last_inserted_at, file_no) + + local_results_file.reset() + + if local_results_file.tell() > 0 and result is not None: + await flush_to_snowflake(connection, local_results_file, inputs.table_name, file_no, last=True) + + last_inserted_at = result["inserted_at"] + file_no += 1 + + activity.heartbeat(last_inserted_at, file_no) + + await copy_loaded_files_to_snowflake_table(connection, inputs.table_name) @workflow.defn(name="snowflake-export") @@ -361,6 +501,4 @@ async def run(self, inputs: SnowflakeBatchExportInputs): "ForbiddenError", ], update_inputs=update_inputs, - # Disable heartbeat timeout until we add heartbeat support. 
- heartbeat_timeout_seconds=None, ) diff --git a/posthog/test/test_middleware.py b/posthog/test/test_middleware.py index b5efb9c731891..88e2ba6813f6e 100644 --- a/posthog/test/test_middleware.py +++ b/posthog/test/test_middleware.py @@ -116,7 +116,7 @@ class TestAutoProjectMiddleware(APIBaseTest): @classmethod def setUpTestData(cls): super().setUpTestData() - cls.base_app_num_queries = 40 + cls.base_app_num_queries = 41 # Create another team that the user does have access to cls.second_team = Team.objects.create(organization=cls.organization, name="Second Life") diff --git a/posthog/test/test_user_permissions.py b/posthog/test/test_user_permissions.py index da9faef7330ad..b0562dbca57af 100644 --- a/posthog/test/test_user_permissions.py +++ b/posthog/test/test_user_permissions.py @@ -321,7 +321,7 @@ def test_dashboard_efficiency(self): assert user_permissions.insight(insight).effective_privilege_level is not None def test_team_lookup_efficiency(self): - user = User.objects.create(email="test2@posthog.com") + user = User.objects.create(email="test2@posthog.com", distinct_id="test2") models = [] for _ in range(10): organization, membership, team = Organization.objects.bootstrap( diff --git a/posthog/warehouse/api/test/test_view_link.py b/posthog/warehouse/api/test/test_view_link.py index 3a2dcae6bf160..0bcb57e187b86 100644 --- a/posthog/warehouse/api/test/test_view_link.py +++ b/posthog/warehouse/api/test/test_view_link.py @@ -2,7 +2,7 @@ APIBaseTest, ) from posthog.warehouse.models import DataWarehouseViewLink, DataWarehouseSavedQuery -from posthog.api.query import process_query +from posthog.api.services.query import process_query class TestViewLinkQuery(APIBaseTest): diff --git a/posthog/warehouse/models/datawarehouse_saved_query.py b/posthog/warehouse/models/datawarehouse_saved_query.py index bca809bb30912..9117fa7c4eaf0 100644 --- a/posthog/warehouse/models/datawarehouse_saved_query.py +++ b/posthog/warehouse/models/datawarehouse_saved_query.py @@ -47,7 +47,7 @@ 
class Meta: ] def get_columns(self) -> Dict[str, str]: - from posthog.api.query import process_query + from posthog.api.services.query import process_query # TODO: catch and raise error response = process_query(self.team, self.query)