diff --git a/.eslintrc.js b/.eslintrc.js index 73da9704eee46..8847173b0c79f 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -27,12 +27,12 @@ module.exports = { }, extends: [ 'eslint:recommended', - 'plugin:@typescript-eslint/recommended', + 'plugin:@typescript-eslint/recommended-type-checked', 'plugin:react/recommended', 'plugin:eslint-comments/recommended', 'plugin:storybook/recommended', - 'prettier', 'plugin:compat/recommended', + 'prettier', ], globals, parser: '@typescript-eslint/parser', @@ -42,6 +42,7 @@ module.exports = { }, ecmaVersion: 2018, sourceType: 'module', + project: 'tsconfig.json' }, plugins: ['prettier', 'react', 'cypress', '@typescript-eslint', 'no-only-tests', 'jest', 'compat', 'posthog'], rules: { @@ -72,7 +73,27 @@ module.exports = { '@typescript-eslint/no-empty-function': 'off', '@typescript-eslint/no-inferrable-types': 'off', '@typescript-eslint/ban-ts-comment': 'off', - '@typescript-eslint/no-non-null-assertion': 'error', + '@typescript-eslint/require-await': 'off', // TODO: Enable - this rule is useful, but doesn't have an autofix + '@typescript-eslint/no-unsafe-assignment': 'off', + '@typescript-eslint/no-unsafe-member-access': 'off', + '@typescript-eslint/no-unsafe-enum-comparison': 'off', + '@typescript-eslint/no-unsafe-argument': 'off', + '@typescript-eslint/no-unsafe-return': 'off', + '@typescript-eslint/no-unsafe-call': 'off', + '@typescript-eslint/no-explicit-any': 'off', + '@typescript-eslint/restrict-template-expressions': 'off', + '@typescript-eslint/explicit-function-return-type': [ + 'error', + { + allowExpressions: true, + }, + ], + '@typescript-eslint/explicit-module-boundary-types': [ + 'error', + { + allowArgumentsExplicitlyTypedAsAny: true, + }, + ], curly: 'error', 'no-restricted-imports': [ 'error', @@ -230,43 +251,29 @@ module.exports = { ...globals, given: 'readonly', }, + rules: { + // The below complains needlessly about expect(api.createInvite).toHaveBeenCalledWith(...) 
+ '@typescript-eslint/unbound-method': 'off', + } }, { // disable these rules for files generated by kea-typegen files: ['*Type.ts', '*Type.tsx'], rules: { - '@typescript-eslint/no-explicit-any': ['off'], + 'no-restricted-imports': 'off', '@typescript-eslint/ban-types': ['off'], }, }, - { - // enable the rule specifically for TypeScript files - files: ['*.ts', '*.tsx'], - rules: { - '@typescript-eslint/no-explicit-any': ['off'], - '@typescript-eslint/explicit-function-return-type': [ - 'error', - { - allowExpressions: true, - }, - ], - '@typescript-eslint/explicit-module-boundary-types': [ - 'error', - { - allowArgumentsExplicitlyTypedAsAny: true, - }, - ], - }, - }, { files: ['*.js'], rules: { '@typescript-eslint/no-var-requires': 'off', + '@typescript-eslint/explicit-function-return-type': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', }, }, { files: 'eslint-rules/**/*', - extends: ['eslint:recommended'], rules: { '@typescript-eslint/no-var-requires': 'off', }, diff --git a/.github/workflows/ci-frontend.yml b/.github/workflows/ci-frontend.yml index 3095150406246..d290a0594bbf0 100644 --- a/.github/workflows/ci-frontend.yml +++ b/.github/workflows/ci-frontend.yml @@ -49,15 +49,15 @@ jobs: - name: Check formatting with prettier run: pnpm prettier:check - - name: Lint with ESLint - run: pnpm lint:js - - name: Lint with Stylelint run: pnpm lint:css - name: Generate logic types and run typescript with strict run: pnpm typegen:write && pnpm typescript:check + - name: Lint with ESLint + run: pnpm lint:js + - name: Check if "schema.json" is up to date run: pnpm schema:build:json && git diff --exit-code diff --git a/.run/Dev.run.xml b/.run/Dev.run.xml new file mode 100644 index 0000000000000..8e0efc8b0e7b3 --- /dev/null +++ b/.run/Dev.run.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index a9ed14cbd01cc..4047d14a6106b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,3 @@ # Changelog 
-Updates to the PostHog project can be found on [https://posthog.com/changelog](our changelog). \ No newline at end of file +Updates to the PostHog project can be found on [our changelog](https://posthog.com/changelog). diff --git a/cypress/e2e/actions.cy.ts b/cypress/e2e/actions.cy.ts index 9819b7d02cdab..356607c64bf8f 100644 --- a/cypress/e2e/actions.cy.ts +++ b/cypress/e2e/actions.cy.ts @@ -5,7 +5,7 @@ const createAction = (actionName: string): void => { cy.get('[data-attr=action-name-create]').should('exist') cy.get('[data-attr=action-name-create]').type(actionName) - cy.get('.ant-radio-group > :nth-child(3)').click() + cy.get('.LemonSegmentedButton > ul > :nth-child(3)').click() cy.get('[data-attr=edit-action-url-input]').click().type(Cypress.config().baseUrl) cy.get('[data-attr=save-action-button]').click() diff --git a/cypress/e2e/early-access-management.cy.ts b/cypress/e2e/early-access-management.cy.ts index 9a594d8d1c34c..8736a39ab945a 100644 --- a/cypress/e2e/early-access-management.cy.ts +++ b/cypress/e2e/early-access-management.cy.ts @@ -6,7 +6,7 @@ describe('Early Access Management', () => { it('Early access feature new and list', () => { // load an empty early access feature page cy.get('h1').should('contain', 'Early Access Management') - cy.title().should('equal', 'Early Access Management • PostHog') + cy.title().should('equal', 'Early access features • PostHog') cy.get('h2').should('contain', 'Create your first feature') cy.get('[data-attr="product-introduction-docs-link"]').should( 'contain', diff --git a/cypress/e2e/insights.cy.ts b/cypress/e2e/insights.cy.ts index 0e449825b2194..5157d21429ba9 100644 --- a/cypress/e2e/insights.cy.ts +++ b/cypress/e2e/insights.cy.ts @@ -24,7 +24,7 @@ describe('Insights', () => { cy.get('[data-attr=breadcrumb-0]').should('contain', 'Hogflix') cy.get('[data-attr=breadcrumb-1]').should('contain', 'Hogflix Demo App') - cy.get('[data-attr=breadcrumb-2]').should('have.text', 'Insights') + 
cy.get('[data-attr=breadcrumb-2]').should('have.text', 'Product analytics') cy.get('[data-attr=breadcrumb-3]').should('have.text', 'insight name') }) diff --git a/docker/clickhouse/config.xml b/docker/clickhouse/config.xml index f3f858be7d117..7047c93e5c5d8 100644 --- a/docker/clickhouse/config.xml +++ b/docker/clickhouse/config.xml @@ -20,17 +20,20 @@ - trace - test (not for production usage) - [1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114 + [1]: + https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114 --> trace /var/log/clickhouse-server/clickhouse-server.log /var/log/clickhouse-server/clickhouse-server.err.log 1000M 10 - + - + @@ -217,7 +225,8 @@ /path/to/ssl_ca_cert_file - none @@ -232,10 +241,12 @@ false - + - + /etc/clickhouse-server/server.crt /etc/clickhouse-server/server.key + true true sslv2,sslv3 @@ -264,24 +276,30 @@ - + 100 0 @@ -302,21 +320,25 @@ --> 0.9 - 4194304 - 0 - @@ -341,14 +363,18 @@ - - - + true @@ -644,14 +698,16 @@ - + localhost 9000 - + @@ -666,22 +722,28 @@ Example: "yandex.ru", "yandex.ru." and "www.yandex.ru" are different hosts. If port is explicitly specified in URL, the host:port is checked as a whole. If host specified here without port, any port with this host allowed. - "yandex.ru" -> "yandex.ru:443", "yandex.ru:80" etc. is allowed, but "yandex.ru:80" -> only "yandex.ru:80" is allowed. - If the host is specified as IP address, it is checked as specified in URL. Example: "[2a02:6b8:a::a]". - If there are redirects and support for redirects is enabled, every redirect (the Location field) is checked. + "yandex.ru" -> "yandex.ru:443", "yandex.ru:80" etc. is allowed, but "yandex.ru:80" -> only + "yandex.ru:80" is allowed. + If the host is specified as IP address, it is checked as specified in URL. Example: + "[2a02:6b8:a::a]". 
+ If there are redirects and support for redirects is enabled, every redirect (the Location field) is + checked. Host should be specified using the host xml tag: yandex.ru --> .* - @@ -701,7 +763,8 @@ @@ -710,7 +773,6 @@ - 3600 @@ -788,7 +850,8 @@ system query_log
toYYYYMM(event_date) - @@ -843,7 +909,8 @@ + Part log contains information about all actions with parts in MergeTree tables (creation, deletion, + merges, downloads).--> system part_log
@@ -852,8 +919,10 @@
- + system metric_log
@@ -933,7 +1003,8 @@ --> - + @@ -965,12 +1036,14 @@ --> - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + @@ -1032,7 +1107,8 @@ - + /var/lib/clickhouse/format_schemas/ - false - + false - + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 @@ -1183,4 +1267,4 @@ --> - + \ No newline at end of file diff --git a/docker/clickhouse/users-dev.xml b/docker/clickhouse/users-dev.xml index dd6e54d7c5de3..704e99ef9e961 100644 --- a/docker/clickhouse/users-dev.xml +++ b/docker/clickhouse/users-dev.xml @@ -15,7 +15,8 @@ with minimum number of different symbols between replica's hostname and local hostname (Hamming distance). in_order - first live replica is chosen in specified order. - first_or_random - if first replica one has higher number of errors, pick a random one from replicas with minimum number of errors. + first_or_random - if first replica one has higher number of errors, pick a random one from replicas + with minimum number of errors. --> random @@ -45,30 +46,39 @@ Password could be empty. If you want to specify SHA256, place it in 'password_sha256_hex' element. - Example: 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 - Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July 2019). + Example: + 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 + Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July + 2019). If you want to specify double SHA1, place it in 'password_double_sha1_hex' element. - Example: e395796d6546b1b65db9d665cd43f0e858dd4303 + Example: + e395796d6546b1b65db9d665cd43f0e858dd4303 - If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for authentication, + If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for + authentication, place its name in 'server' element inside 'ldap' element. 
Example: my_ldap_server - If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in the main config), + If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in + the main config), place 'kerberos' element instead of 'password' (and similar) elements. - The name part of the canonical principal name of the initiator must match the user name for authentication to succeed. - You can also place 'realm' element inside 'kerberos' element to further restrict authentication to only those requests + The name part of the canonical principal name of the initiator must match the user name for + authentication to succeed. + You can also place 'realm' element inside 'kerberos' element to further restrict authentication to + only those requests whose initiator's realm matches it. Example: Example: EXAMPLE.COM How to generate decent password: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha256sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha256sum | tr -d '-' In first line will be password and in second - corresponding SHA256. How to generate double SHA1: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' In first line will be password and in second - corresponding double SHA1. --> @@ -89,7 +99,8 @@ To check access, DNS query is performed, and all received addresses compared to peer address. Regular expression for host names. Example, ^server\d\d-\d\d-\d\.yandex\.ru$ To check access, DNS PTR query is performed for peer address and then regexp is applied. 
- Then, for result of PTR query, another DNS query is performed and all received addresses compared to peer address. + Then, for result of PTR query, another DNS query is performed and all received addresses compared + to peer address. Strongly recommended that regexp is ends with $ All results of DNS requests are cached till server restart. --> @@ -126,4 +137,4 @@ - + \ No newline at end of file diff --git a/docker/clickhouse/users.xml b/docker/clickhouse/users.xml index 49ac9f73e0de5..ece3df0f09fbe 100644 --- a/docker/clickhouse/users.xml +++ b/docker/clickhouse/users.xml @@ -15,7 +15,8 @@ with minimum number of different symbols between replica's hostname and local hostname (Hamming distance). in_order - first live replica is chosen in specified order. - first_or_random - if first replica one has higher number of errors, pick a random one from replicas with minimum number of errors. + first_or_random - if first replica one has higher number of errors, pick a random one from replicas + with minimum number of errors. --> random @@ -43,30 +44,39 @@ Password could be empty. If you want to specify SHA256, place it in 'password_sha256_hex' element. - Example: 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 - Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July 2019). + Example: + 65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5 + Restrictions of SHA256: impossibility to connect to ClickHouse using MySQL JS client (as of July + 2019). If you want to specify double SHA1, place it in 'password_double_sha1_hex' element. 
- Example: e395796d6546b1b65db9d665cd43f0e858dd4303 + Example: + e395796d6546b1b65db9d665cd43f0e858dd4303 - If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for authentication, + If you want to specify a previously defined LDAP server (see 'ldap_servers' in the main config) for + authentication, place its name in 'server' element inside 'ldap' element. Example: my_ldap_server - If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in the main config), + If you want to authenticate the user via Kerberos (assuming Kerberos is enabled, see 'kerberos' in + the main config), place 'kerberos' element instead of 'password' (and similar) elements. - The name part of the canonical principal name of the initiator must match the user name for authentication to succeed. - You can also place 'realm' element inside 'kerberos' element to further restrict authentication to only those requests + The name part of the canonical principal name of the initiator must match the user name for + authentication to succeed. + You can also place 'realm' element inside 'kerberos' element to further restrict authentication to + only those requests whose initiator's realm matches it. Example: Example: EXAMPLE.COM How to generate decent password: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha256sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha256sum | tr -d '-' In first line will be password and in second - corresponding SHA256. 
How to generate double SHA1: - Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' + Execute: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | + sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' In first line will be password and in second - corresponding double SHA1. --> @@ -87,7 +97,8 @@ To check access, DNS query is performed, and all received addresses compared to peer address. Regular expression for host names. Example, ^server\d\d-\d\d-\d\.yandex\.ru$ To check access, DNS PTR query is performed for peer address and then regexp is applied. - Then, for result of PTR query, another DNS query is performed and all received addresses compared to peer address. + Then, for result of PTR query, another DNS query is performed and all received addresses compared + to peer address. Strongly recommended that regexp is ends with $ All results of DNS requests are cached till server restart. 
--> @@ -124,4 +135,4 @@ - + \ No newline at end of file diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index 87838d0b39dcc..88addd2d7f416 100644 --- a/ee/api/test/test_billing.py +++ b/ee/api/test/test_billing.py @@ -43,6 +43,7 @@ def create_missing_billing_customer(**kwargs) -> CustomerInfo: usage_summary={ "events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}, + "rows_synced": {"limit": None, "usage": 0}, }, free_trial_until=None, available_features=[], @@ -96,6 +97,7 @@ def create_billing_customer(**kwargs) -> CustomerInfo: usage_summary={ "events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}, + "rows_synced": {"limit": None, "usage": 0}, }, free_trial_until=None, ) @@ -292,6 +294,7 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma "usage_summary": { "events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}, + "rows_synced": {"limit": None, "usage": 0}, }, "free_trial_until": None, } @@ -363,6 +366,7 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma "usage_summary": { "events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}, + "rows_synced": {"limit": None, "usage": 0}, }, "free_trial_until": None, "current_total_amount_usd": "0.00", @@ -521,6 +525,11 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma "todays_usage": 0, "usage": 0, }, + "rows_synced": { + "limit": None, + "todays_usage": 0, + "usage": 0, + }, "period": ["2022-10-07T11:12:48", "2022-11-07T11:12:48"], } @@ -556,6 +565,11 @@ def mock_implementation_missing_customer(url: str, headers: Any = None, params: "todays_usage": 0, "usage": 0, }, + "rows_synced": { + "limit": None, + "todays_usage": 0, + "usage": 0, + }, "period": ["2022-10-07T11:12:48", "2022-11-07T11:12:48"], } assert self.organization.customer_id == "cus_123" @@ -613,5 +627,6 @@ def mock_implementation(url: str, 
headers: Any = None, params: Any = None) -> Ma assert self.organization.usage == { "events": {"limit": None, "usage": 0, "todays_usage": 0}, "recordings": {"limit": None, "usage": 0, "todays_usage": 0}, + "rows_synced": {"limit": None, "usage": 0, "todays_usage": 0}, "period": ["2022-10-07T11:12:48", "2022-11-07T11:12:48"], } diff --git a/ee/billing/billing_manager.py b/ee/billing/billing_manager.py index c626083460ef4..5a8119c57df9b 100644 --- a/ee/billing/billing_manager.py +++ b/ee/billing/billing_manager.py @@ -225,6 +225,7 @@ def update_org_details(self, organization: Organization, billing_status: Billing usage_info = OrganizationUsageInfo( events=usage_summary["events"], recordings=usage_summary["recordings"], + rows_synced=usage_summary.get("rows_synced", None), period=[ data["billing_period"]["current_period_start"], data["billing_period"]["current_period_end"], diff --git a/ee/billing/quota_limiting.py b/ee/billing/quota_limiting.py index ae6eefcc0b77a..ef3e12a421575 100644 --- a/ee/billing/quota_limiting.py +++ b/ee/billing/quota_limiting.py @@ -17,6 +17,7 @@ convert_team_usage_rows_to_dict, get_teams_with_billable_event_count_in_period, get_teams_with_recording_count_in_period, + get_teams_with_rows_synced_in_period, ) from posthog.utils import get_current_day @@ -26,11 +27,13 @@ class QuotaResource(Enum): EVENTS = "events" RECORDINGS = "recordings" + ROWS_SYNCED = "rows_synced" OVERAGE_BUFFER = { QuotaResource.EVENTS: 0, QuotaResource.RECORDINGS: 1000, + QuotaResource.ROWS_SYNCED: 0, } @@ -53,7 +56,7 @@ def remove_limited_team_tokens(resource: QuotaResource, tokens: List[str]) -> No @cache_for(timedelta(seconds=30), background_refresh=True) -def list_limited_team_tokens(resource: QuotaResource) -> List[str]: +def list_limited_team_attributes(resource: QuotaResource) -> List[str]: now = timezone.now() redis_client = get_client() results = redis_client.zrangebyscore(f"{QUOTA_LIMITER_CACHE_KEY}{resource.value}", min=now.timestamp(), max="+inf") @@ -63,6 
+66,7 @@ def list_limited_team_tokens(resource: QuotaResource) -> List[str]: class UsageCounters(TypedDict): events: int recordings: int + rows_synced: int def org_quota_limited_until(organization: Organization, resource: QuotaResource) -> Optional[int]: @@ -70,6 +74,8 @@ def org_quota_limited_until(organization: Organization, resource: QuotaResource) return None summary = organization.usage.get(resource.value, {}) + if not summary: + return None usage = summary.get("usage", 0) todays_usage = summary.get("todays_usage", 0) limit = summary.get("limit") @@ -93,19 +99,34 @@ def sync_org_quota_limits(organization: Organization): if not organization.usage: return None - team_tokens: List[str] = [x for x in list(organization.teams.values_list("api_token", flat=True)) if x] - - if not team_tokens: - capture_exception(Exception(f"quota_limiting: No team tokens found for organization: {organization.id}")) - return - - for resource in [QuotaResource.EVENTS, QuotaResource.RECORDINGS]: + for resource in [QuotaResource.EVENTS, QuotaResource.RECORDINGS, QuotaResource.ROWS_SYNCED]: + team_attributes = get_team_attribute_by_quota_resource(organization, resource) quota_limited_until = org_quota_limited_until(organization, resource) if quota_limited_until: - add_limited_team_tokens(resource, {x: quota_limited_until for x in team_tokens}) + add_limited_team_tokens(resource, {x: quota_limited_until for x in team_attributes}) else: - remove_limited_team_tokens(resource, team_tokens) + remove_limited_team_tokens(resource, team_attributes) + + +def get_team_attribute_by_quota_resource(organization: Organization, resource: QuotaResource): + if resource in [QuotaResource.EVENTS, QuotaResource.RECORDINGS]: + team_tokens: List[str] = [x for x in list(organization.teams.values_list("api_token", flat=True)) if x] + + if not team_tokens: + capture_exception(Exception(f"quota_limiting: No team tokens found for organization: {organization.id}")) + return + + return team_tokens + + if resource == 
QuotaResource.ROWS_SYNCED: + team_ids: List[str] = [x for x in list(organization.teams.values_list("id", flat=True)) if x] + + if not team_ids: + capture_exception(Exception(f"quota_limiting: No team ids found for organization: {organization.id}")) + return + + return team_ids def set_org_usage_summary( @@ -125,8 +146,10 @@ def set_org_usage_summary( new_usage = copy.deepcopy(new_usage) - for field in ["events", "recordings"]: + for field in ["events", "recordings", "rows_synced"]: resource_usage = new_usage[field] # type: ignore + if not resource_usage: + continue if todays_usage: resource_usage["todays_usage"] = todays_usage[field] # type: ignore @@ -155,6 +178,9 @@ def update_all_org_billing_quotas(dry_run: bool = False) -> Dict[str, Dict[str, teams_with_recording_count_in_period=convert_team_usage_rows_to_dict( get_teams_with_recording_count_in_period(period_start, period_end) ), + teams_with_rows_synced_in_period=convert_team_usage_rows_to_dict( + get_teams_with_rows_synced_in_period(period_start, period_end) + ), ) teams: Sequence[Team] = list( @@ -171,6 +197,7 @@ def update_all_org_billing_quotas(dry_run: bool = False) -> Dict[str, Dict[str, team_report = UsageCounters( events=all_data["teams_with_event_count_in_period"].get(team.id, 0), recordings=all_data["teams_with_recording_count_in_period"].get(team.id, 0), + rows_synced=all_data["teams_with_rows_synced_in_period"].get(team.id, 0), ) org_id = str(team.organization.id) @@ -183,7 +210,7 @@ def update_all_org_billing_quotas(dry_run: bool = False) -> Dict[str, Dict[str, for field in team_report: org_report[field] += team_report[field] # type: ignore - quota_limited_orgs: Dict[str, Dict[str, int]] = {"events": {}, "recordings": {}} + quota_limited_orgs: Dict[str, Dict[str, int]] = {"events": {}, "recordings": {}, "rows_synced": {}} # We find all orgs that should be rate limited for org_id, todays_report in todays_usage_report.items(): @@ -195,7 +222,7 @@ def update_all_org_billing_quotas(dry_run: bool = 
False) -> Dict[str, Dict[str, if set_org_usage_summary(org, todays_usage=todays_report): org.save(update_fields=["usage"]) - for field in ["events", "recordings"]: + for field in ["events", "recordings", "rows_synced"]: quota_limited_until = org_quota_limited_until(org, QuotaResource(field)) if quota_limited_until: @@ -207,12 +234,13 @@ def update_all_org_billing_quotas(dry_run: bool = False) -> Dict[str, Dict[str, previously_quota_limited_team_tokens: Dict[str, Dict[str, int]] = { "events": {}, "recordings": {}, + "rows_synced": {}, } for field in quota_limited_orgs: - previously_quota_limited_team_tokens[field] = list_limited_team_tokens(QuotaResource(field)) + previously_quota_limited_team_tokens[field] = list_limited_team_attributes(QuotaResource(field)) - quota_limited_teams: Dict[str, Dict[str, int]] = {"events": {}, "recordings": {}} + quota_limited_teams: Dict[str, Dict[str, int]] = {"events": {}, "recordings": {}, "rows_synced": {}} # Convert the org ids to team tokens for team in teams: @@ -233,6 +261,7 @@ def update_all_org_billing_quotas(dry_run: bool = False) -> Dict[str, Dict[str, properties = { "quota_limited_events": quota_limited_orgs["events"].get(org_id, None), "quota_limited_recordings": quota_limited_orgs["events"].get(org_id, None), + "quota_limited_rows_synced": quota_limited_orgs["rows_synced"].get(org_id, None), } report_organization_action( diff --git a/ee/billing/test/test_quota_limiting.py b/ee/billing/test/test_quota_limiting.py index 3bdc70a06df9e..b8e68c235b2c5 100644 --- a/ee/billing/test/test_quota_limiting.py +++ b/ee/billing/test/test_quota_limiting.py @@ -9,7 +9,7 @@ from ee.billing.quota_limiting import ( QUOTA_LIMITER_CACHE_KEY, QuotaResource, - list_limited_team_tokens, + list_limited_team_attributes, org_quota_limited_until, replace_limited_team_tokens, set_org_usage_summary, @@ -47,15 +47,18 @@ def test_billing_rate_limit_not_set_if_missing_org_usage(self) -> None: result = update_all_org_billing_quotas() assert 
result["events"] == {} assert result["recordings"] == {} + assert result["rows_synced"] == {} assert self.redis_client.zrange(f"{QUOTA_LIMITER_CACHE_KEY}events", 0, -1) == [] assert self.redis_client.zrange(f"{QUOTA_LIMITER_CACHE_KEY}recordings", 0, -1) == [] + assert self.redis_client.zrange(f"{QUOTA_LIMITER_CACHE_KEY}rows_synced", 0, -1) == [] def test_billing_rate_limit(self) -> None: with self.settings(USE_TZ=False): self.organization.usage = { "events": {"usage": 99, "limit": 100}, "recordings": {"usage": 1, "limit": 100}, + "rows_synced": {"usage": 5, "limit": 100}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } self.organization.save() @@ -77,16 +80,19 @@ def test_billing_rate_limit(self) -> None: org_id = str(self.organization.id) assert result["events"] == {org_id: 1612137599} assert result["recordings"] == {} + assert result["rows_synced"] == {} assert self.redis_client.zrange(f"{QUOTA_LIMITER_CACHE_KEY}events", 0, -1) == [ self.team.api_token.encode("UTF-8") ] assert self.redis_client.zrange(f"{QUOTA_LIMITER_CACHE_KEY}recordings", 0, -1) == [] + assert self.redis_client.zrange(f"{QUOTA_LIMITER_CACHE_KEY}rows_synced", 0, -1) == [] self.organization.refresh_from_db() assert self.organization.usage == { "events": {"usage": 99, "limit": 100, "todays_usage": 10}, "recordings": {"usage": 1, "limit": 100, "todays_usage": 0}, + "rows_synced": {"usage": 5, "limit": 100, "todays_usage": 0}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } @@ -94,6 +100,7 @@ def test_set_org_usage_summary_updates_correctly(self): self.organization.usage = { "events": {"usage": 99, "limit": 100}, "recordings": {"usage": 1, "limit": 100}, + "rows_synced": {"usage": 5, "limit": 100}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } self.organization.save() @@ -101,6 +108,7 @@ def test_set_org_usage_summary_updates_correctly(self): new_usage = dict( events={"usage": 100, "limit": 100}, recordings={"usage": 2, "limit": 100}, + 
rows_synced={"usage": 6, "limit": 100}, period=[ "2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z", @@ -112,6 +120,7 @@ def test_set_org_usage_summary_updates_correctly(self): assert self.organization.usage == { "events": {"usage": 100, "limit": 100, "todays_usage": 0}, "recordings": {"usage": 2, "limit": 100, "todays_usage": 0}, + "rows_synced": {"usage": 6, "limit": 100, "todays_usage": 0}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } @@ -119,6 +128,7 @@ def test_set_org_usage_summary_does_nothing_if_the_same(self): self.organization.usage = { "events": {"usage": 99, "limit": 100, "todays_usage": 10}, "recordings": {"usage": 1, "limit": 100, "todays_usage": 11}, + "rows_synced": {"usage": 5, "limit": 100, "todays_usage": 11}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } self.organization.save() @@ -126,6 +136,7 @@ def test_set_org_usage_summary_does_nothing_if_the_same(self): new_usage = dict( events={"usage": 99, "limit": 100}, recordings={"usage": 1, "limit": 100}, + rows_synced={"usage": 5, "limit": 100}, period=[ "2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z", @@ -137,6 +148,7 @@ def test_set_org_usage_summary_does_nothing_if_the_same(self): assert self.organization.usage == { "events": {"usage": 99, "limit": 100, "todays_usage": 10}, "recordings": {"usage": 1, "limit": 100, "todays_usage": 11}, + "rows_synced": {"usage": 5, "limit": 100, "todays_usage": 11}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } @@ -144,15 +156,19 @@ def test_set_org_usage_summary_updates_todays_usage(self): self.organization.usage = { "events": {"usage": 99, "limit": 100, "todays_usage": 10}, "recordings": {"usage": 1, "limit": 100, "todays_usage": 11}, + "rows_synced": {"usage": 5, "limit": 100, "todays_usage": 11}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } self.organization.save() - assert set_org_usage_summary(self.organization, todays_usage={"events": 20, "recordings": 21}) + assert set_org_usage_summary( + 
self.organization, todays_usage={"events": 20, "recordings": 21, "rows_synced": 21} + ) assert self.organization.usage == { "events": {"usage": 99, "limit": 100, "todays_usage": 20}, "recordings": {"usage": 1, "limit": 100, "todays_usage": 21}, + "rows_synced": {"usage": 5, "limit": 100, "todays_usage": 21}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } @@ -163,6 +179,7 @@ def test_org_quota_limited_until(self): self.organization.usage = { "events": {"usage": 99, "limit": 100}, "recordings": {"usage": 1, "limit": 100}, + "rows_synced": {"usage": 99, "limit": 100}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } @@ -184,6 +201,11 @@ def test_org_quota_limited_until(self): self.organization.usage["recordings"]["usage"] = 1100 # Over limit + buffer assert org_quota_limited_until(self.organization, QuotaResource.RECORDINGS) == 1612137599 + assert org_quota_limited_until(self.organization, QuotaResource.ROWS_SYNCED) is None + + self.organization.usage["rows_synced"]["usage"] = 101 + assert org_quota_limited_until(self.organization, QuotaResource.ROWS_SYNCED) == 1612137599 + def test_over_quota_but_not_dropped_org(self): self.organization.usage = None assert org_quota_limited_until(self.organization, QuotaResource.EVENTS) is None @@ -191,12 +213,14 @@ def test_over_quota_but_not_dropped_org(self): self.organization.usage = { "events": {"usage": 100, "limit": 90}, "recordings": {"usage": 100, "limit": 90}, + "rows_synced": {"usage": 100, "limit": 90}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } self.organization.never_drop_data = True assert org_quota_limited_until(self.organization, QuotaResource.EVENTS) is None assert org_quota_limited_until(self.organization, QuotaResource.RECORDINGS) is None + assert org_quota_limited_until(self.organization, QuotaResource.ROWS_SYNCED) is None # reset for subsequent tests self.organization.never_drop_data = False @@ -208,21 +232,32 @@ def test_sync_org_quota_limits(self): now = 
timezone.now().timestamp() replace_limited_team_tokens(QuotaResource.EVENTS, {"1234": now + 10000}) + replace_limited_team_tokens(QuotaResource.ROWS_SYNCED, {"1337": now + 10000}) self.organization.usage = { "events": {"usage": 99, "limit": 100}, "recordings": {"usage": 1, "limit": 100}, + "rows_synced": {"usage": 35, "limit": 100}, "period": ["2021-01-01T00:00:00Z", "2021-01-31T23:59:59Z"], } sync_org_quota_limits(self.organization) - assert list_limited_team_tokens(QuotaResource.EVENTS) == ["1234"] + assert list_limited_team_attributes(QuotaResource.EVENTS) == ["1234"] + assert list_limited_team_attributes(QuotaResource.ROWS_SYNCED) == ["1337"] self.organization.usage["events"]["usage"] = 120 + self.organization.usage["rows_synced"]["usage"] = 120 sync_org_quota_limits(self.organization) - assert sorted(list_limited_team_tokens(QuotaResource.EVENTS)) == sorted( + assert sorted(list_limited_team_attributes(QuotaResource.EVENTS)) == sorted( ["1234", self.team.api_token, other_team.api_token] ) + # rows_synced uses teams, not tokens + assert sorted(list_limited_team_attributes(QuotaResource.ROWS_SYNCED)) == sorted( + ["1337", str(self.team.pk), str(other_team.pk)] + ) + self.organization.usage["events"]["usage"] = 80 + self.organization.usage["rows_synced"]["usage"] = 36 sync_org_quota_limits(self.organization) - assert sorted(list_limited_team_tokens(QuotaResource.EVENTS)) == sorted(["1234"]) + assert sorted(list_limited_team_attributes(QuotaResource.EVENTS)) == sorted(["1234"]) + assert sorted(list_limited_team_attributes(QuotaResource.ROWS_SYNCED)) == sorted(["1337"]) diff --git a/ee/clickhouse/test/test_client.py b/ee/clickhouse/test/test_client.py deleted file mode 100644 index ab5ba1b4a53e0..0000000000000 --- a/ee/clickhouse/test/test_client.py +++ /dev/null @@ -1,129 +0,0 @@ -from unittest.mock import patch - -import fakeredis -from clickhouse_driver.errors import ServerException -from django.test import TestCase - -from posthog.clickhouse.client import 
execute_async as client -from posthog.client import sync_execute -from posthog.test.base import ClickhouseTestMixin - - -class ClickhouseClientTestCase(TestCase, ClickhouseTestMixin): - def setUp(self): - self.redis_client = fakeredis.FakeStrictRedis() - - def test_async_query_client(self): - query = "SELECT 1+1" - team_id = 2 - query_id = client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - result = client.get_status_or_results(team_id, query_id) - self.assertFalse(result.error) - self.assertTrue(result.complete) - self.assertEqual(result.results, [[2]]) - - def test_async_query_client_errors(self): - query = "SELECT WOW SUCH DATA FROM NOWHERE THIS WILL CERTAINLY WORK" - team_id = 2 - self.assertRaises( - ServerException, - client.enqueue_execute_with_progress, - **{"team_id": team_id, "query": query, "bypass_celery": True}, - ) - try: - query_id = client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - except Exception: - pass - - result = client.get_status_or_results(team_id, query_id) - self.assertTrue(result.error) - self.assertRegex(result.error_message, "Code: 62.\nDB::Exception: Syntax error:") - - def test_async_query_client_does_not_leak(self): - query = "SELECT 1+1" - team_id = 2 - wrong_team = 5 - query_id = client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - result = client.get_status_or_results(wrong_team, query_id) - self.assertTrue(result.error) - self.assertEqual(result.error_message, "Requesting team is not executing team") - - @patch("posthog.clickhouse.client.execute_async.enqueue_clickhouse_execute_with_progress") - def test_async_query_client_is_lazy(self, execute_sync_mock): - query = "SELECT 4 + 4" - team_id = 2 - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Try the same query again - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Try the same query again (for good measure!) 
- client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Assert that we only called clickhouse once - execute_sync_mock.assert_called_once() - - @patch("posthog.clickhouse.client.execute_async.enqueue_clickhouse_execute_with_progress") - def test_async_query_client_is_lazy_but_not_too_lazy(self, execute_sync_mock): - query = "SELECT 8 + 8" - team_id = 2 - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Try the same query again, but with force - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True, force=True) - - # Try the same query again (for good measure!) - client.enqueue_execute_with_progress(team_id, query, bypass_celery=True) - - # Assert that we called clickhouse twice - self.assertEqual(execute_sync_mock.call_count, 2) - - @patch("posthog.clickhouse.client.execute_async.enqueue_clickhouse_execute_with_progress") - def test_async_query_client_manual_query_uuid(self, execute_sync_mock): - # This is a unique test because technically in the test pattern `SELECT 8 + 8` is already - # in redis. This tests to make sure it is treated as a unique run of that query - query = "SELECT 8 + 8" - team_id = 2 - query_id = "I'm so unique" - client.enqueue_execute_with_progress(team_id, query, query_id=query_id, bypass_celery=True) - - # Try the same query again, but with force - client.enqueue_execute_with_progress(team_id, query, query_id=query_id, bypass_celery=True, force=True) - - # Try the same query again (for good measure!) - client.enqueue_execute_with_progress(team_id, query, query_id=query_id, bypass_celery=True) - - # Assert that we called clickhouse twice - self.assertEqual(execute_sync_mock.call_count, 2) - - def test_client_strips_comments_from_request(self): - """ - To ensure we can easily copy queries from `system.query_log` in e.g. - Metabase, we strip comments from the query we send. Metabase doesn't - display multilined output. 
- - See https://github.com/metabase/metabase/issues/14253 - - Note I'm not really testing much complexity, I trust that those will - come out as failures in other tests. - """ - from posthog.clickhouse.query_tagging import tag_queries - - # First add in the request information that should be added to the sql. - # We check this to make sure it is not removed by the comment stripping - with self.capture_select_queries() as sqls: - tag_queries(kind="request", id="1") - sync_execute( - query=""" - -- this request returns 1 - SELECT 1 - """ - ) - self.assertEqual(len(sqls), 1) - first_query = sqls[0] - self.assertIn(f"SELECT 1", first_query) - self.assertNotIn("this request returns", first_query) - - # Make sure it still includes the "annotation" comment that includes - # request routing information for debugging purposes - self.assertIn("/* request:1 */", first_query) diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr index f039a2994204e..262c4a8e1e195 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr @@ -1,6 +1,6 @@ # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results ' - /* user_id:126 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:132 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events diff --git a/ee/clickhouse/views/test/test_clickhouse_trends.py b/ee/clickhouse/views/test/test_clickhouse_trends.py index 75ab015e39a15..8bf86c1524006 100644 --- a/ee/clickhouse/views/test/test_clickhouse_trends.py +++ b/ee/clickhouse/views/test/test_clickhouse_trends.py @@ -118,7 +118,7 @@ def test_includes_only_intervals_within_range(client: Client): { "action": ANY, 
"breakdown_value": cohort["id"], - "label": "$pageview - test cohort", + "label": "test cohort", "count": 3.0, "data": [1.0, 1.0, 1.0], # Prior to the fix this would also include '29-Aug-2021' @@ -827,14 +827,12 @@ def test_insight_trends_cumulative(self): ], ) data_response = get_trends_time_series_ok(self.client, request, self.team) - person_response = get_people_from_url_ok( - self.client, data_response["$pageview - val"]["2012-01-14"].person_url - ) + person_response = get_people_from_url_ok(self.client, data_response["val"]["2012-01-14"].person_url) - assert data_response["$pageview - val"]["2012-01-13"].value == 1 - assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val" - assert data_response["$pageview - val"]["2012-01-14"].value == 3 - assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012" + assert data_response["val"]["2012-01-13"].value == 1 + assert data_response["val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-14"].value == 3 + assert data_response["val"]["2012-01-14"].label == "14-Jan-2012" assert sorted([p["id"] for p in person_response]) == sorted( [str(created_people["p1"].uuid), str(created_people["p3"].uuid)] @@ -862,12 +860,12 @@ def test_insight_trends_cumulative(self): properties=[{"type": "person", "key": "key", "value": "some_val"}], ) data_response = get_trends_time_series_ok(self.client, request, self.team) - people = get_people_from_url_ok(self.client, data_response["$pageview - val"]["2012-01-14"].person_url) + people = get_people_from_url_ok(self.client, data_response["val"]["2012-01-14"].person_url) - assert data_response["$pageview - val"]["2012-01-13"].value == 1 - assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val" - assert data_response["$pageview - val"]["2012-01-14"].value == 3 - assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012" + assert data_response["val"]["2012-01-13"].value == 1 + assert 
data_response["val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-14"].value == 3 + assert data_response["val"]["2012-01-14"].label == "14-Jan-2012" assert sorted([p["id"] for p in people]) == sorted( [str(created_people["p1"].uuid), str(created_people["p3"].uuid)] @@ -894,12 +892,12 @@ def test_insight_trends_cumulative(self): ], ) data_response = get_trends_time_series_ok(self.client, request, self.team) - people = get_people_from_url_ok(self.client, data_response["$pageview - val"]["2012-01-14"].person_url) + people = get_people_from_url_ok(self.client, data_response["val"]["2012-01-14"].person_url) - assert data_response["$pageview - val"]["2012-01-13"].value == 1 - assert data_response["$pageview - val"]["2012-01-13"].breakdown_value == "val" - assert data_response["$pageview - val"]["2012-01-14"].value == 2 - assert data_response["$pageview - val"]["2012-01-14"].label == "14-Jan-2012" + assert data_response["val"]["2012-01-13"].value == 1 + assert data_response["val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-14"].value == 2 + assert data_response["val"]["2012-01-14"].label == "14-Jan-2012" assert sorted([p["id"] for p in people]) == sorted( [str(created_people["p1"].uuid), str(created_people["p3"].uuid)] @@ -933,12 +931,10 @@ def test_breakdown_with_filter(self): properties=[{"key": "key", "value": "oh", "operator": "not_icontains"}], ) data_response = get_trends_time_series_ok(self.client, params, self.team) - person_response = get_people_from_url_ok( - self.client, data_response["sign up - val"]["2012-01-13"].person_url - ) + person_response = get_people_from_url_ok(self.client, data_response["val"]["2012-01-13"].person_url) - assert data_response["sign up - val"]["2012-01-13"].value == 1 - assert data_response["sign up - val"]["2012-01-13"].breakdown_value == "val" + assert data_response["val"]["2012-01-13"].value == 1 + assert data_response["val"]["2012-01-13"].breakdown_value == "val" 
assert sorted([p["id"] for p in person_response]) == sorted([str(created_people["person1"].uuid)]) @@ -950,11 +946,9 @@ def test_breakdown_with_filter(self): events=[{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], ) aggregate_response = get_trends_aggregate_ok(self.client, params, self.team) - aggregate_person_response = get_people_from_url_ok( - self.client, aggregate_response["sign up - val"].person_url - ) + aggregate_person_response = get_people_from_url_ok(self.client, aggregate_response["val"].person_url) - assert aggregate_response["sign up - val"].value == 1 + assert aggregate_response["val"].value == 1 assert sorted([p["id"] for p in aggregate_person_response]) == sorted([str(created_people["person1"].uuid)]) def test_insight_trends_compare(self): diff --git a/frontend/__snapshots__/components-editable-field--default.png b/frontend/__snapshots__/components-editable-field--default.png index 2d16114431388..f68ba65618170 100644 Binary files a/frontend/__snapshots__/components-editable-field--default.png and b/frontend/__snapshots__/components-editable-field--default.png differ diff --git a/frontend/__snapshots__/components-product-empty-state--empty-with-action.png b/frontend/__snapshots__/components-product-empty-state--empty-with-action.png index dd10594e21d1c..4c6bc2766b5e4 100644 Binary files a/frontend/__snapshots__/components-product-empty-state--empty-with-action.png and b/frontend/__snapshots__/components-product-empty-state--empty-with-action.png differ diff --git a/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png b/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png index d9ed865218733..a93edc4abb8e1 100644 Binary files a/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png and b/frontend/__snapshots__/components-product-empty-state--not-empty-with-action.png differ diff --git 
a/frontend/__snapshots__/components-product-empty-state--product-introduction.png b/frontend/__snapshots__/components-product-empty-state--product-introduction.png index dd10594e21d1c..4c6bc2766b5e4 100644 Binary files a/frontend/__snapshots__/components-product-empty-state--product-introduction.png and b/frontend/__snapshots__/components-product-empty-state--product-introduction.png differ diff --git a/frontend/__snapshots__/components-properties-table--properties-table.png b/frontend/__snapshots__/components-properties-table--properties-table.png new file mode 100644 index 0000000000000..0ebb3a71ccb83 Binary files /dev/null and b/frontend/__snapshots__/components-properties-table--properties-table.png differ diff --git a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png index 2038fbe5c8bb2..02abe5eaa23a3 100644 Binary files a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png and b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png b/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png index b6109c6884322..90ae77d5ca04a 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png and b/frontend/__snapshots__/lemon-ui-lemon-table--empty-loading.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-table--loading.png b/frontend/__snapshots__/lemon-ui-lemon-table--loading.png index e5852c23bda01..f3f2287fcdb4c 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-table--loading.png and b/frontend/__snapshots__/lemon-ui-lemon-table--loading.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png index cba5e73d5578f..314ba79e762b7 100644 Binary files 
a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png index 13a21303d19ab..c6353784ceaf1 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png differ diff --git a/frontend/__snapshots__/scenes-app-dashboards--edit.png b/frontend/__snapshots__/scenes-app-dashboards--edit.png index ca634e775f0d7..0bd4f10c2b233 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--edit.png and b/frontend/__snapshots__/scenes-app-dashboards--edit.png differ diff --git a/frontend/__snapshots__/scenes-app-dashboards--show.png b/frontend/__snapshots__/scenes-app-dashboards--show.png index 0bac6702636e7..9f1dac8d8c809 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--show.png and b/frontend/__snapshots__/scenes-app-dashboards--show.png differ diff --git a/frontend/__snapshots__/scenes-app-events--event-explorer.png b/frontend/__snapshots__/scenes-app-events--event-explorer.png index 4ed82b3bdbbdd..7d3287e05481b 100644 Binary files a/frontend/__snapshots__/scenes-app-events--event-explorer.png and b/frontend/__snapshots__/scenes-app-events--event-explorer.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png index b954a89c3fe4f..189dc9741ea0f 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png 
b/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png index fd9d704276d4c..683e286506729 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png and b/frontend/__snapshots__/scenes-app-experiments--experiments-list-pay-gate.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--experiments-list.png b/frontend/__snapshots__/scenes-app-experiments--experiments-list.png index f6760a46e6b69..4072657487cb8 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--experiments-list.png and b/frontend/__snapshots__/scenes-app-experiments--experiments-list.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png index 34fc0266e6ee9..60c850a72b2db 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png index c640e778e8505..2d6a0dd22fbb2 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png index ff8d20c392180..a36d7365e7344 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png 
b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png index f46520b2f73dc..613e89cdbc0d8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png index d3727abe59c66..d2c639e108166 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png index 7f8fbe65c33f4..bcd656dcc26a3 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png index 7f37b4f88ea9e..93a2510edbd3e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png index 52b192f0436ff..88fce2b75ec39 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--webkit.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png index 9aa218739300b..234167ed34ceb 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png index f7301be8fcc9b..7ef2484ddde67 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png index 2426224527e72..8eaedc5c1d3fa 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png index 3f938630e0cee..87320f1ad5b19 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png index 97c300aec44da..92811307a20d4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png and 
b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png index 451a3e13a2d4f..ee0abcee40c6b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png index 2cd74f758f4f0..cd77f1bbd8447 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png index 5e437308fefd9..b0c052e1b37ff 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png index 3ac105cb27e6c..62ab313160740 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png index 0c68ed20b354c..0f65f00462135 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png and 
b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png index b93219c3d0204..20f8540e7af66 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png index a03a837b14ca3..5ba4dd941fcbf 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png index 57c8868d90d40..64b2955db1cfe 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png index 41354872d146c..7fde8e692b299 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png index 0c51f972a0c9e..a0d765fdb531a 100644 
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png index 1bc80b159fcf0..a41c083a673a9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png index 1fceceed5b2ba..10a781cafc5d0 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png index 7f8d3f622e01d..8760764ed3a04 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png index 91f6f6e560edd..c4212fc4ab28d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png index bd131be5920a7..4f11382fa8bb4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png index 38cb27172dece..a4c4a82e5d207 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle.png b/frontend/__snapshots__/scenes-app-insights--lifecycle.png index 89b738cf06472..b5af8e303507d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention--webkit.png index 38102732ee359..3f6259a8a2428 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png index cb52e0f1495f4..8e6390a225b40 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png index ee03a7556c872..998fedb12e86e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png 
b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png index 1e71a2fc11b12..6262a5918f1f1 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png index 3e2ab6d65836f..62dd6f95bbd41 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png index 9d791a3259380..0a8e1c20fdb87 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit.png b/frontend/__snapshots__/scenes-app-insights--retention-edit.png index 45c2836c18811..cd3d9f866c7f4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--retention.png b/frontend/__snapshots__/scenes-app-insights--retention.png index 9a7251ee3698d..5df60b1e00204 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--retention.png and b/frontend/__snapshots__/scenes-app-insights--retention.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png b/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png index b1f44c1341ec3..d226c57fe43df 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--stickiness--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png index fc56f416a79ff..c458ce60c87b2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png index 3cc58acbcacee..80e20f0a84a3e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness.png b/frontend/__snapshots__/scenes-app-insights--stickiness.png index e3a0289b2bde0..5f6daca8e6c78 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness.png and b/frontend/__snapshots__/scenes-app-insights--stickiness.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png index d3ad0d0416ed0..451decec4637d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png index 8f14499429e91..77b2e0087b84b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png 
b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png index a2d3654a7b019..39cb74dee61dd 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png index eacd277f8c466..85107049f853f 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png index d2c34da43a597..41ba6d59e1550 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png index bccdd29949efa..f70af9d2e782c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png index 689dcd9839e8d..f57645211428a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-area-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-area.png b/frontend/__snapshots__/scenes-app-insights--trends-area.png index bc2055bb61140..fdd0315060948 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights--trends-area.png and b/frontend/__snapshots__/scenes-app-insights--trends-area.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png index 9348697847367..5d60dee4f60b7 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png index 6542e66ded261..8df383d36fc1b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png index 9bfb5ce522765..3a7274819f516 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png index c0c5a2092f525..e32072ec0780d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png index 1f0039036ffa4..03c62e056870b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png and 
b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png index b854c985c0849..c9bd130f6cdab 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png index 932b2d3d43abc..ffaa1c4bf1988 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar.png b/frontend/__snapshots__/scenes-app-insights--trends-bar.png index e6b4d9722076b..74a5209277252 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png index b3a5b20f6394c..81275e0a9df95 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png index 5a1d695b843d6..ab2904ba751e9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png 
b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png index 9d275c34efbf3..966b934574728 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png index e308b6b409e0d..ca3343a84046c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png index e2d82fc57c0e0..8f97eb8c1a01c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png index 066c2201792a3..48a045de1d451 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-labels.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png index 0ac5093e23ca3..d4873d617014e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png 
b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png index 1990ae3d264c0..a158e442de8ff 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png index b3d789e752710..5e1fb2d08feb2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png index 42561f322039c..225acef8aacb3 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png index 8a47ca33b0fd2..c036f5a792f9e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png index 2788c12f3b2c7..ca3654b2fd11d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png b/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png index 939ae1dea2f56..d2c5a5a81dcc9 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-multi.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line.png b/frontend/__snapshots__/scenes-app-insights--trends-line.png index 975132fef1b19..49bbad9f70249 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line.png and b/frontend/__snapshots__/scenes-app-insights--trends-line.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png index f3161e700a738..a4feb23317dd1 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-number--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png index 20c2074825f8e..a34e6e1d5e272 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-number-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png index 80c580574079c..d9cf620c1a161 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-number-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-number.png b/frontend/__snapshots__/scenes-app-insights--trends-number.png index baf47a94ff5db..03b2456ee4782 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-number.png and b/frontend/__snapshots__/scenes-app-insights--trends-number.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png 
b/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png index 93b7e9b656117..660bb356237ca 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png index d33d21c2387f1..cadbd8a861f31 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png index e11265b7a23c5..646050c69e70b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png index f83b718feeb79..bd1f3ed00e233 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png index 6e89f0b8e8683..082bca2e23f47 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png 
b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png index 6177698ddf6dc..7a1c58bf02c1d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-labels.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png index dcb273e64439b..bc2f9c129b6f9 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png index a5b457de916b1..3e784f8c855d5 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png index c80f95a6464a1..886c9d33a0add 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie.png b/frontend/__snapshots__/scenes-app-insights--trends-pie.png index bea0f178cf14c..c6000ed1654c6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png index 106f7b70d1f54..cee544d679cc5 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--trends-table--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png index ff41c7540bfcf..4a32c1bf8df36 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png index 2d1533eb70717..dc9333bb1780d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png index 2f708f79c912c..f9e6a6f710268 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png index 18487bb88d559..ed70303f61142 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png index 38c6892a0c112..d71a4ba3b777d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png index 4a6fe4f1a4daf..43417b1c2f0ac 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table.png b/frontend/__snapshots__/scenes-app-insights--trends-table.png index bab2405d9ac61..2507ecadf9e8f 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table.png and b/frontend/__snapshots__/scenes-app-insights--trends-table.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png index f6af074e96923..94e1c6b5604ab 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png index 979febee96067..066eb2c1ac02a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png index d3a3dec473f0b..63e0e28282346 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--webkit.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png index fb29412159cd5..f3599c707dd15 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png index 7f099baaf9eab..a0f6e839e7e60 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png index 362964c3bcf89..140d24237944a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png index 91225e794f2b4..c908b3c0a5d9e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value.png b/frontend/__snapshots__/scenes-app-insights--trends-value.png index 94177c7f28870..ea8fd3d8fd413 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value.png and b/frontend/__snapshots__/scenes-app-insights--trends-value.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png index 
25eaf83180416..24462204e7738 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png index 5ecbf86975a4f..e11671b67ea0a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png index 4afd8b0955fbd..9fbed136e738d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map.png index 0050ed308e178..cbb62ce7deb40 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png index c2354f8a1112d..bd9ba9dbf8a97 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png index a09292dd4f0cb..ea0ad2f69c6ae 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png index 7199f4a937fbc..af38b73f25636 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths.png b/frontend/__snapshots__/scenes-app-insights--user-paths.png index 2d4d7040cd6f6..deec9d37f5b61 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths.png and b/frontend/__snapshots__/scenes-app-insights--user-paths.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png index e18ecc8d6a5a5..ae5e9601bdbb4 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration.png new file mode 100644 index 0000000000000..4268b9820e627 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs.png new file mode 100644 index 0000000000000..f7ea3ec3cff02 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics.png new file mode 100644 index 0000000000000..6e4f613841bdf Binary files /dev/null and 
b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png index 2922f7f7736ff..a000f5709360c 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png index 72113b0438d81..d5df660c362ee 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png index b9810d5bf2186..5655e24e12a0c 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png index 11799f73e5bb9..06bdfb7c4f880 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page.png differ diff --git a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png index 55081c3b37420..5850c12954b74 100644 Binary files a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png and 
b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage.png differ diff --git a/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png b/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png index c4c27f4db817a..7bb41e1837cc3 100644 Binary files a/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png and b/frontend/__snapshots__/scenes-app-recordings--recordings-play-lists.png differ diff --git a/frontend/__snapshots__/scenes-app-saved-insights--card-view.png b/frontend/__snapshots__/scenes-app-saved-insights--card-view.png index 8c6d47aa79795..fd1b5084ad4bd 100644 Binary files a/frontend/__snapshots__/scenes-app-saved-insights--card-view.png and b/frontend/__snapshots__/scenes-app-saved-insights--card-view.png differ diff --git a/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png b/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png index 7757d243d548c..7952158fdb025 100644 Binary files a/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png and b/frontend/__snapshots__/scenes-app-saved-insights--empty-state.png differ diff --git a/frontend/__snapshots__/scenes-app-saved-insights--list-view.png b/frontend/__snapshots__/scenes-app-saved-insights--list-view.png index 520f2c041c0a8..ae911ef6d0319 100644 Binary files a/frontend/__snapshots__/scenes-app-saved-insights--list-view.png and b/frontend/__snapshots__/scenes-app-saved-insights--list-view.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--survey-templates.png b/frontend/__snapshots__/scenes-app-surveys--survey-templates.png index 069a66dbfbb5b..d888557c99407 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--survey-templates.png and b/frontend/__snapshots__/scenes-app-surveys--survey-templates.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization.png b/frontend/__snapshots__/scenes-other-settings--settings-organization.png index 
05fee3ba22511..240b6b4c957e5 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization.png differ diff --git a/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.tsx b/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.tsx index cb120f597a5b1..2d64c4c5d32e0 100644 --- a/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.tsx +++ b/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.tsx @@ -120,9 +120,10 @@ function FeaturePreview({ feature }: { feature: EnrichedEarlyAccessFeature }): J /> { - await submitEarlyAccessFeatureFeedback(feedback) - setFeedback('') + onClick={() => { + void submitEarlyAccessFeatureFeedback(feedback).then(() => { + setFeedback('') + }) }} loading={activeFeedbackFlagKeyLoading} fullWidth diff --git a/frontend/src/layout/navigation-3000/Navigation.scss b/frontend/src/layout/navigation-3000/Navigation.scss index 780a3787f7295..e727859e8c19f 100644 --- a/frontend/src/layout/navigation-3000/Navigation.scss +++ b/frontend/src/layout/navigation-3000/Navigation.scss @@ -52,7 +52,14 @@ .LemonButton { min-height: 2.25rem !important; // Reduce minimum height - padding: 0.375rem !important; // Use a custom padding for the navbar only + + > span { + padding: 0.25rem !important; + } + + .LemonButton__content { + font-size: 0.813rem; + } } ul { @@ -64,7 +71,7 @@ } li + li { - margin-top: 1px; + margin-top: 0.25rem; } } } diff --git a/frontend/src/layout/navigation-3000/Navigation.tsx b/frontend/src/layout/navigation-3000/Navigation.tsx index 851a3fe2b86ab..522dfc2bef618 100644 --- a/frontend/src/layout/navigation-3000/Navigation.tsx +++ b/frontend/src/layout/navigation-3000/Navigation.tsx @@ -8,7 +8,7 @@ import './Navigation.scss' import { themeLogic } from './themeLogic' import { navigation3000Logic } from './navigationLogic' import clsx from 'clsx' -import { Scene, SceneConfig } from 'scenes/sceneTypes' +import { 
SceneConfig } from 'scenes/sceneTypes' import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { FEATURE_FLAGS } from 'lib/constants' import { SidePanel } from './sidepanel/SidePanel' @@ -18,7 +18,6 @@ export function Navigation({ sceneConfig, }: { children: ReactNode - scene: Scene | null sceneConfig: SceneConfig | null }): JSX.Element { useMountedLogic(themeLogic) diff --git a/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss b/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss index 75b3dae2df67d..c143a10085e9a 100644 --- a/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss +++ b/frontend/src/layout/navigation-3000/components/Breadcrumbs.scss @@ -26,33 +26,39 @@ font-size: calc(0.75rem + 0.0625rem * var(--breadcrumbs-compaction-rate)); line-height: 1rem; font-weight: 600; - user-select: none; pointer-events: auto; } -.Breadcrumbs3000__crumbs { - display: flex; - align-items: center; -} - .Breadcrumbs3000__trail { flex-grow: 1; flex-shrink: 1; - overflow-x: auto; + min-width: 0; +} + +.Breadcrumbs3000__crumbs { + height: 1rem; + margin-top: 0.25rem; + display: flex; + align-items: center; + overflow: visible; } .Breadcrumbs3000__here { + visibility: var(--breadcrumbs-title-large-visibility); position: relative; line-height: 1.2; - margin: calc(0.25rem * (1 - var(--breadcrumbs-compaction-rate))) 0 0; + margin: 0; + padding: calc(0.5rem * (1 - var(--breadcrumbs-compaction-rate))) 0 0; font-size: 1rem; font-weight: 700; overflow: hidden; - height: calc(1em * 1.2 * (1 - var(--breadcrumbs-compaction-rate))); + height: calc(1.2em * (1 - var(--breadcrumbs-compaction-rate))); + box-sizing: content-box; - > span { + > * { position: absolute; - bottom: 0; + bottom: 0.25rem; + height: 1.2em; } } @@ -70,9 +76,10 @@ } &.Breadcrumbs3000__breadcrumb--here { + visibility: var(--breadcrumbs-title-small-visibility); cursor: default; - > span { + > * { opacity: 1; transform: translateY(calc(100% * (1 - 
var(--breadcrumbs-compaction-rate)))); } diff --git a/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx b/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx index 344d36f55f47c..cbbcc6403ae48 100644 --- a/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx +++ b/frontend/src/layout/navigation-3000/components/Breadcrumbs.tsx @@ -1,13 +1,14 @@ -import React, { useEffect, useState } from 'react' +import React, { useLayoutEffect, useState } from 'react' import { useActions, useValues } from 'kea' import { IconArrowDropDown } from 'lib/lemon-ui/icons' import { Link } from 'lib/lemon-ui/Link' import './Breadcrumbs.scss' -import { Breadcrumb as IBreadcrumb } from '~/types' +import { FinalizedBreadcrumb } from '~/types' import clsx from 'clsx' import { Popover } from 'lib/lemon-ui/Popover/Popover' import { breadcrumbsLogic } from '~/layout/navigation/Breadcrumbs/breadcrumbsLogic' import { LemonSkeleton } from '@posthog/lemon-ui' +import { EditableField } from 'lib/components/EditableField/EditableField' const COMPACTION_DISTANCE = 44 @@ -17,27 +18,47 @@ const COMPACTION_DISTANCE = 44 * - The "Quick scene actions" buttons (zero or more buttons on the right) */ export function Breadcrumbs(): JSX.Element | null { - const { breadcrumbs } = useValues(breadcrumbsLogic) + const { breadcrumbs, renameState } = useValues(breadcrumbsLogic) const { setActionsContainer } = useActions(breadcrumbsLogic) const [compactionRate, setCompactionRate] = useState(0) - useEffect(() => { + useLayoutEffect(() => { function handleScroll(): void { const scrollTop = document.getElementsByTagName('main')[0].scrollTop - setCompactionRate(Math.min(scrollTop / COMPACTION_DISTANCE, 1)) + const newCompactionRate = Math.min(scrollTop / COMPACTION_DISTANCE, 1) + setCompactionRate(newCompactionRate) + if ( + renameState && + ((newCompactionRate > 0.5 && compactionRate <= 0.5) || + (newCompactionRate <= 0.5 && compactionRate > 0.5)) + ) { + // Transfer selection from the 
outgoing input to the incoming one + const [source, target] = newCompactionRate > 0.5 ? ['large', 'small'] : ['small', 'large'] + const sourceEl = document.querySelector(`input[name="item-name-${source}"]`) + const targetEl = document.querySelector(`input[name="item-name-${target}"]`) + if (sourceEl && targetEl) { + targetEl.focus() + targetEl.setSelectionRange(sourceEl.selectionStart || 0, sourceEl.selectionEnd || 0) + } + } } const main = document.getElementsByTagName('main')[0] main.addEventListener('scroll', handleScroll) return () => main.removeEventListener('scroll', handleScroll) - }, []) + }, [compactionRate]) return breadcrumbs.length ? (
@@ -65,14 +86,43 @@ export function Breadcrumbs(): JSX.Element | null { } interface BreadcrumbProps { - breadcrumb: IBreadcrumb + breadcrumb: FinalizedBreadcrumb index: number here?: boolean } function Breadcrumb({ breadcrumb, index, here }: BreadcrumbProps): JSX.Element { + const { renameState } = useValues(breadcrumbsLogic) + const { tentativelyRename, finishRenaming } = useActions(breadcrumbsLogic) const [popoverShown, setPopoverShown] = useState(false) + let nameElement: JSX.Element + if (breadcrumb.name != null && breadcrumb.onRename) { + nameElement = ( + tentativelyRename(breadcrumb.globalKey, newName)} + onSave={(newName) => { + void breadcrumb.onRename?.(newName) + }} + mode={renameState && renameState[0] === breadcrumb.globalKey ? 'edit' : 'view'} + onModeToggle={(newMode) => { + if (newMode === 'edit') { + tentativelyRename(breadcrumb.globalKey, breadcrumb.name as string) + } else { + finishRenaming() + } + setPopoverShown(false) + }} + compactButtons="xsmall" + editingIndication="underlined" + /> + ) + } else { + nameElement = {breadcrumb.name} + } + const Component = breadcrumb.path ? Link : 'div' const breadcrumbContent = ( - {breadcrumb.name} + {nameElement} {breadcrumb.popover && } ) @@ -118,13 +168,39 @@ function Breadcrumb({ breadcrumb, index, here }: BreadcrumbProps): JSX.Element { } interface HereProps { - breadcrumb: IBreadcrumb + breadcrumb: FinalizedBreadcrumb } function Here({ breadcrumb }: HereProps): JSX.Element { + const { renameState } = useValues(breadcrumbsLogic) + const { tentativelyRename, finishRenaming } = useActions(breadcrumbsLogic) + return (

- {breadcrumb.name || } + {breadcrumb.name == null ? ( + + ) : breadcrumb.onRename ? ( + tentativelyRename(breadcrumb.globalKey, newName)} + onSave={(newName) => { + void breadcrumb.onRename?.(newName) + }} + mode={renameState && renameState[0] === breadcrumb.globalKey ? 'edit' : 'view'} + onModeToggle={(newMode) => { + if (newMode === 'edit') { + tentativelyRename(breadcrumb.globalKey, breadcrumb.name as string) + } else { + finishRenaming() + } + }} + compactButtons="xsmall" + editingIndication="underlined" + /> + ) : ( + {breadcrumb.name} + )}

) } diff --git a/frontend/src/layout/navigation-3000/components/NavbarButton.tsx b/frontend/src/layout/navigation-3000/components/NavbarButton.tsx index 72e07508a9ebf..77d8ad33317b6 100644 --- a/frontend/src/layout/navigation-3000/components/NavbarButton.tsx +++ b/frontend/src/layout/navigation-3000/components/NavbarButton.tsx @@ -45,7 +45,7 @@ export const NavbarButton: FunctionComponent = React.forwardR if (tag === 'alpha') { content = ( <> - {content} + {content} ALPHA @@ -54,7 +54,7 @@ export const NavbarButton: FunctionComponent = React.forwardR } else if (tag === 'beta') { content = ( <> - {content} + {content} BETA @@ -75,6 +75,8 @@ export const NavbarButton: FunctionComponent = React.forwardR }} className={clsx('NavbarButton', isUsingNewNav && here && 'NavbarButton--here')} fullWidth + type="secondary" + stealth={true} {...buttonProps} > {content} diff --git a/frontend/src/layout/navigation-3000/components/Sidebar.tsx b/frontend/src/layout/navigation-3000/components/Sidebar.tsx index e2b0ef1adbc7f..e48b2a8a45e08 100644 --- a/frontend/src/layout/navigation-3000/components/Sidebar.tsx +++ b/frontend/src/layout/navigation-3000/components/Sidebar.tsx @@ -6,7 +6,7 @@ import React, { useRef, useState } from 'react' import { navigation3000Logic } from '../navigationLogic' import { KeyboardShortcut } from './KeyboardShortcut' import { SidebarAccordion, pluralizeCategory } from './SidebarAccordion' -import { SidebarCategory, SidebarLogic, SidebarNavbarItem } from '../types' +import { SidebarLogic, SidebarNavbarItem } from '../types' import { Spinner } from 'lib/lemon-ui/Spinner' import { useDebouncedCallback } from 'use-debounce' import { SidebarList } from './SidebarList' @@ -177,7 +177,7 @@ function SidebarContent({ return contents.length !== 1 ? 
( <> - {(contents as SidebarCategory[]).map((accordion) => ( + {contents.map((accordion) => ( ))} diff --git a/frontend/src/layout/navigation-3000/components/SidebarList.tsx b/frontend/src/layout/navigation-3000/components/SidebarList.tsx index f6d006c32dc3b..c9c76efa0aa64 100644 --- a/frontend/src/layout/navigation-3000/components/SidebarList.tsx +++ b/frontend/src/layout/navigation-3000/components/SidebarList.tsx @@ -307,7 +307,7 @@ function SidebarListItem({ item, validateName, active, style }: SidebarListItemP navigation3000Logic.actions.focusPreviousItem() e.preventDefault() } else if (e.key === 'Enter') { - save(newName || '').then(() => { + void save(newName || '').then(() => { // In the keyboard nav experience, we need to refocus the item once it's a link again setTimeout(() => ref.current?.focus(), 0) }) @@ -327,7 +327,7 @@ function SidebarListItem({ item, validateName, active, style }: SidebarListItemP }} onBlur={(e) => { if (e.relatedTarget?.ariaLabel === 'Save name') { - save(newName || '') + void save(newName || '') } else { cancel() } diff --git a/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx b/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx index 13435fc8b1d6d..056a0ccb06cdc 100644 --- a/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx +++ b/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx @@ -78,7 +78,7 @@ export const featureFlagsSidebarLogic = kea([ items: [ { label: 'Edit', - to: urls.featureFlag(featureFlag.id as number), + to: urls.featureFlag(featureFlag.id), onClick: () => { featureFlagLogic({ id: featureFlag.id as number }).mount() featureFlagLogic({ @@ -106,8 +106,8 @@ export const featureFlagsSidebarLogic = kea([ }, { label: 'Copy flag key', - onClick: async () => { - await copyToClipboard(featureFlag.key, 'feature flag key') + onClick: () => { + void copyToClipboard(featureFlag.key, 'feature flag key') }, }, { @@ -128,7 +128,7 @@ export const featureFlagsSidebarLogic = kea([ { label: 
'Delete feature flag', onClick: () => { - deleteWithUndo({ + void deleteWithUndo({ endpoint: `projects/${currentTeamId}/feature_flags`, object: { name: featureFlag.key, id: featureFlag.id }, callback: actions.loadFeatureFlags, diff --git a/frontend/src/layout/navigation-3000/sidebars/insights.ts b/frontend/src/layout/navigation-3000/sidebars/insights.ts index d89d7e310a0a1..efc9766d0a7c6 100644 --- a/frontend/src/layout/navigation-3000/sidebars/insights.ts +++ b/frontend/src/layout/navigation-3000/sidebars/insights.ts @@ -86,7 +86,7 @@ export const insightsSidebarLogic = kea([ }, { onClick: () => { - deleteWithUndo({ + void deleteWithUndo({ object: insight, endpoint: `projects/${currentTeamId}/insights`, callback: actions.loadInsights, @@ -116,7 +116,7 @@ export const insightsSidebarLogic = kea([ for (let i = startIndex; i < startIndex + INSIGHTS_PER_PAGE; i++) { cache.requestedInsights[i] = true } - await savedInsightsLogic.actions.setSavedInsightsFilters( + await savedInsightsLogic.asyncActions.setSavedInsightsFilters( { page: Math.floor(startIndex / INSIGHTS_PER_PAGE) + 1 }, true, false diff --git a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx index d011a0a6561de..1f85b5bf928de 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx @@ -98,6 +98,8 @@ export function SidePanel(): JSX.Element | null { } data-attr={`sidepanel-tab-${tab}`} active={activeTab === tab} + type="secondary" + stealth={true} > {label} diff --git a/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss b/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss index 625535c49575e..6141582fea852 100644 --- a/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss +++ b/frontend/src/layout/navigation/Breadcrumbs/Breadcrumbs.scss @@ -22,7 +22,7 @@ &--actionable { cursor: pointer; - color: var(--primary); + color: 
var(--primary-3000); } } diff --git a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts index dd49842bc5fa7..ce74771427690 100644 --- a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts +++ b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.test.ts @@ -25,8 +25,8 @@ describe('breadcrumbsLogic', () => { // test with .delay because subscriptions happen async router.actions.push(urls.savedInsights()) - await expectLogic(logic).delay(1).toMatchValues({ documentTitle: 'Insights • PostHog' }) - expect(global.document.title).toEqual('Insights • PostHog') + await expectLogic(logic).delay(1).toMatchValues({ documentTitle: 'Product analytics • PostHog' }) + expect(global.document.title).toEqual('Product analytics • PostHog') router.actions.push(urls.dashboards()) await expectLogic(logic).delay(1).toMatchValues({ documentTitle: 'Dashboards • PostHog' }) diff --git a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx index 1d1739651cb09..d15aefbb51a12 100644 --- a/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx +++ b/frontend/src/layout/navigation/Breadcrumbs/breadcrumbsLogic.tsx @@ -1,4 +1,4 @@ -import { actions, connect, kea, path, props, reducers, selectors } from 'kea' +import { actions, connect, kea, listeners, path, props, reducers, selectors } from 'kea' import { organizationLogic } from 'scenes/organizationLogic' import { teamLogic } from 'scenes/teamLogic' import './Breadcrumbs.scss' @@ -11,7 +11,7 @@ import { Lettermark } from 'lib/lemon-ui/Lettermark' import { ProfilePicture } from 'lib/lemon-ui/ProfilePicture' import { ProjectSwitcherOverlay } from '~/layout/navigation/ProjectSwitcher' import { OrganizationSwitcherOverlay } from '~/layout/navigation/OrganizationSwitcher' -import { Breadcrumb } from '~/types' +import { Breadcrumb, FinalizedBreadcrumb } from 
'~/types' import { subscriptions } from 'kea-subscriptions' export const breadcrumbsLogic = kea([ @@ -37,6 +37,11 @@ export const breadcrumbsLogic = kea([ })), actions({ setActionsContainer: (element: HTMLElement | null) => ({ element }), + tentativelyRename: (breadcrumbGlobalKey: string, tentativeName: string) => ({ + breadcrumbGlobalKey, + tentativeName, + }), + finishRenaming: true, }), reducers({ actionsContainer: [ @@ -45,7 +50,17 @@ export const breadcrumbsLogic = kea([ setActionsContainer: (_, { element }) => element, }, ], + renameState: [ + null as [breadcrumbGlobalKey: string, tentativeName: string] | null, + { + tentativelyRename: (_, { breadcrumbGlobalKey, tentativeName }) => [breadcrumbGlobalKey, tentativeName], + finishRenaming: () => null, + }, + ], }), + listeners(({ actions }) => ({ + [sceneLogic.actionTypes.loadScene]: () => actions.finishRenaming(), // Cancel renaming on navigation away + })), selectors(() => ({ sceneBreadcrumbs: [ (s) => [ @@ -94,6 +109,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 'me', name: user.first_name, symbol: , }) @@ -104,6 +120,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 'instance', name: stripHTTP(preflight.site_url), symbol: , }) @@ -114,6 +131,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 'organization', name: currentOrganization.name, symbol: , popover: @@ -131,6 +149,7 @@ export const breadcrumbsLogic = kea([ return breadcrumbs } breadcrumbs.push({ + key: 'project', name: currentTeam.name, popover: { overlay: , @@ -144,8 +163,24 @@ export const breadcrumbsLogic = kea([ ], breadcrumbs: [ (s) => [s.appBreadcrumbs, s.sceneBreadcrumbs], - (appBreadcrumbs, sceneBreadcrumbs) => { - return [...appBreadcrumbs, ...sceneBreadcrumbs] + (appBreadcrumbs, sceneBreadcrumbs): FinalizedBreadcrumb[] => { + const breadcrumbs = Array(appBreadcrumbs.length + sceneBreadcrumbs.length) + const 
globalPathSoFar: string[] = [] + for (let i = 0; i < appBreadcrumbs.length; i++) { + globalPathSoFar.push(String(appBreadcrumbs[i].key)) + breadcrumbs[i] = { + ...appBreadcrumbs[i], + globalKey: globalPathSoFar.join('.'), + } + } + for (let i = 0; i < sceneBreadcrumbs.length; i++) { + globalPathSoFar.push(String(sceneBreadcrumbs[i].key)) + breadcrumbs[i + appBreadcrumbs.length] = { + ...sceneBreadcrumbs[i], + globalKey: globalPathSoFar.join('.'), + } + } + return breadcrumbs }, ], firstBreadcrumb: [(s) => [s.breadcrumbs], (breadcrumbs) => breadcrumbs[0]], diff --git a/frontend/src/layout/navigation/Navigation.tsx b/frontend/src/layout/navigation/Navigation.tsx index 57b29956bdde1..18ff8d9fed01f 100644 --- a/frontend/src/layout/navigation/Navigation.tsx +++ b/frontend/src/layout/navigation/Navigation.tsx @@ -1,6 +1,6 @@ import clsx from 'clsx' import { BillingAlertsV2 } from 'lib/components/BillingAlertsV2' -import { Scene, SceneConfig } from 'scenes/sceneTypes' +import { SceneConfig } from 'scenes/sceneTypes' import { Breadcrumbs } from './Breadcrumbs/Breadcrumbs' import { ProjectNotice } from './ProjectNotice' import { SideBar } from './SideBar/SideBar' @@ -9,16 +9,14 @@ import { ReactNode } from 'react' export function Navigation({ children, - scene, sceneConfig, }: { children: ReactNode - scene: Scene | null sceneConfig: SceneConfig | null }): JSX.Element { return (
- {scene !== Scene.Ingestion && } +
{ - updateCurrentTeam(altTeamForIngestion?.id, urls.ingestion()) + updateCurrentTeam(altTeamForIngestion?.id, urls.products()) }} data-attr="demo-project-alt-team-ingestion_link" > - ingestion wizard + onboarding wizard {' '} to get started with your own data. @@ -60,8 +61,11 @@ export function ProjectNotice(): JSX.Element | null { message: ( <> This project has no events yet. Go to the{' '} - - ingestion wizard + + onboarding wizard {' '} or grab your project API key/HTML snippet from{' '} @@ -71,7 +75,7 @@ export function ProjectNotice(): JSX.Element | null { ), action: { - to: '/ingestion', + to: urls.onboarding(ProductKey.PRODUCT_ANALYTICS), 'data-attr': 'demo-warning-cta', icon: , children: 'Go to wizard', diff --git a/frontend/src/layout/navigation/SideBar/SideBar.tsx b/frontend/src/layout/navigation/SideBar/SideBar.tsx index b946d96f6de83..b5f8c1e3f8ef3 100644 --- a/frontend/src/layout/navigation/SideBar/SideBar.tsx +++ b/frontend/src/layout/navigation/SideBar/SideBar.tsx @@ -201,7 +201,7 @@ function Pages(): JSX.Element { } identifier={Scene.EarlyAccessFeatures} - title={'Early Access Management'} + title={'Early access features'} to={urls.earlyAccessFeatures()} />
Data
@@ -210,7 +210,7 @@ function Pages(): JSX.Element { icon={} identifier={Scene.Events} to={urls.events()} - title={'Event Explorer'} + title={'Event explorer'} /> } @@ -230,7 +230,7 @@ function Pages(): JSX.Element { } identifier={Scene.DataWarehouse} - title={'Data Warehouse'} + title={'Data warehouse'} to={urls.dataWarehouse()} highlight="beta" /> @@ -240,7 +240,7 @@ function Pages(): JSX.Element {
Apps
{canViewPlugins(currentOrganization) && ( } identifier={Scene.Apps} to={urls.projectApps()} diff --git a/frontend/src/layout/navigation/TopBar/TopBar.scss b/frontend/src/layout/navigation/TopBar/TopBar.scss index 8e1732a7d2d34..79815df7875f5 100644 --- a/frontend/src/layout/navigation/TopBar/TopBar.scss +++ b/frontend/src/layout/navigation/TopBar/TopBar.scss @@ -130,7 +130,7 @@ } .SitePopover__side-link { - color: var(--primary); + color: var(--primary-3000); margin-left: 0.5rem; font-weight: 600; font-size: 0.8125rem; diff --git a/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts b/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts index b5f3e96fd31c8..ecef00b2cdfb0 100644 --- a/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts +++ b/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts @@ -31,7 +31,7 @@ describe('announcementLogic', () => { }) it('hides announcements during the ingestion phase', async () => { - router.actions.push(urls.ingestion()) + router.actions.push(urls.products()) await expectLogic(logic).toMatchValues({ cloudAnnouncement: DEFAULT_CLOUD_ANNOUNCEMENT, shownAnnouncementType: null, diff --git a/frontend/src/layout/navigation/TopBar/announcementLogic.ts b/frontend/src/layout/navigation/TopBar/announcementLogic.ts index 4a947e95107c7..60e0b5915b2f0 100644 --- a/frontend/src/layout/navigation/TopBar/announcementLogic.ts +++ b/frontend/src/layout/navigation/TopBar/announcementLogic.ts @@ -3,7 +3,6 @@ import { router } from 'kea-router' import { FEATURE_FLAGS, OrganizationMembershipLevel } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' -import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' import { navigationLogic } from '../navigationLogic' import posthog from 'posthog-js' @@ -87,7 +86,8 @@ export const announcementLogic = kea([ (closable && (closed || 
(relevantAnnouncementType && persistedClosedAnnouncements[relevantAnnouncementType]))) || // hide if already closed - pathname == urls.ingestion() // hide during the ingestion phase + pathname.includes('/onboarding') || + pathname.includes('/products') // hide during the onboarding phase ) { return null } diff --git a/frontend/src/layout/navigation/TopBar/notificationsLogic.tsx b/frontend/src/layout/navigation/TopBar/notificationsLogic.tsx index cc11a0c436539..4b25f0afdc43f 100644 --- a/frontend/src/layout/navigation/TopBar/notificationsLogic.tsx +++ b/frontend/src/layout/navigation/TopBar/notificationsLogic.tsx @@ -55,9 +55,9 @@ export const notificationsLogic = kea([ clearTimeout(values.pollTimeout) try { - const response = (await api.get( + const response = await api.get( `api/projects/${teamLogic.values.currentTeamId}/activity_log/important_changes` - )) as ChangesResponse + ) // we can't rely on automatic success action here because we swallow errors so always succeed actions.clearErrorCount() return response @@ -115,14 +115,17 @@ export const notificationsLogic = kea([ a.created_at.isAfter(b.created_at) ? 
a : b ).created_at actions.setMarkReadTimeout( - window.setTimeout(async () => { - await api.create( - `api/projects/${teamLogic.values.currentTeamId}/activity_log/bookmark_activity_notification`, - { - bookmark: bookmarkDate.toISOString(), - } - ) - actions.markAllAsRead(bookmarkDate.toISOString()) + window.setTimeout(() => { + void api + .create( + `api/projects/${teamLogic.values.currentTeamId}/activity_log/bookmark_activity_notification`, + { + bookmark: bookmarkDate.toISOString(), + } + ) + .then(() => { + actions.markAllAsRead(bookmarkDate.toISOString()) + }) }, MARK_READ_TIMEOUT) ) } diff --git a/frontend/src/lib/animations/animations.ts b/frontend/src/lib/animations/animations.ts index 7d30b6932498c..40551f4979cb1 100644 --- a/frontend/src/lib/animations/animations.ts +++ b/frontend/src/lib/animations/animations.ts @@ -33,7 +33,7 @@ async function fetchJson(url: string): Promise> { export async function getAnimationSource(animation: AnimationType): Promise> { if (!animationCache[animation]) { - if (!fetchCache[animation]) { + if (!(animation in fetchCache)) { fetchCache[animation] = fetchJson(animations[animation].url) } animationCache[animation] = await fetchCache[animation] diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index d1de3a313acb2..44eedef5bbb95 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -65,7 +65,7 @@ import { EVENT_PROPERTY_DEFINITIONS_PER_PAGE } from 'scenes/data-management/prop import { ActivityLogItem, ActivityScope } from 'lib/components/ActivityLog/humanizeActivity' import { ActivityLogProps } from 'lib/components/ActivityLog/ActivityLog' import { SavedSessionRecordingPlaylistsResult } from 'scenes/session-recordings/saved-playlists/savedSessionRecordingPlaylistsLogic' -import { QuerySchema } from '~/queries/schema' +import { QuerySchema, QueryStatus } from '~/queries/schema' import { decompressSync, strFromU8 } from 'fflate' import { getCurrentExporterData } from '~/exporter/exporterViewLogic' 
import { encodeParams } from 'kea-router' @@ -542,6 +542,10 @@ class ApiRequest { return this.projectsDetail(teamId).addPathComponent('query') } + public queryStatus(queryId: string, teamId?: TeamType['id']): ApiRequest { + return this.query(teamId).addPathComponent(queryId) + } + // Notebooks public notebooks(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('notebooks') @@ -1447,7 +1451,7 @@ const api = { }, async update( notebookId: NotebookType['short_id'], - data: Pick + data: Partial> ): Promise { return await new ApiRequest().notebook(notebookId).update({ data }) }, @@ -1722,6 +1726,12 @@ const api = { }, }, + queryStatus: { + async get(queryId: string): Promise { + return await new ApiRequest().queryStatus(queryId).get() + }, + }, + queryURL: (): string => { return new ApiRequest().query().assembleFullUrl(true) }, @@ -1730,7 +1740,8 @@ const api = { query: T, options?: ApiMethodOptions, queryId?: string, - refresh?: boolean + refresh?: boolean, + async?: boolean ): Promise< T extends { [response: string]: any } ? T['response'] extends infer P | undefined @@ -1740,7 +1751,7 @@ const api = { > { return await new ApiRequest() .query() - .create({ ...options, data: { query, client_query_id: queryId, refresh: refresh } }) + .create({ ...options, data: { query, client_query_id: queryId, refresh: refresh, async } }) }, /** Fetch data from specified URL. The result already is JSON-parsed. 
*/ diff --git a/frontend/src/lib/components/ActivationSidebar/activationLogic.ts b/frontend/src/lib/components/ActivationSidebar/activationLogic.ts index b2579b889df9e..16cf859f6aefc 100644 --- a/frontend/src/lib/components/ActivationSidebar/activationLogic.ts +++ b/frontend/src/lib/components/ActivationSidebar/activationLogic.ts @@ -8,7 +8,7 @@ import { membersLogic } from 'scenes/organization/membersLogic' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' import { teamLogic } from 'scenes/teamLogic' import { navigationLogic } from '~/layout/navigation/navigationLogic' -import { EventDefinitionType, TeamBasicType } from '~/types' +import { EventDefinitionType, ProductKey, TeamBasicType } from '~/types' import type { activationLogicType } from './activationLogicType' import { urls } from 'scenes/urls' import { savedInsightsLogic } from 'scenes/saved-insights/savedInsightsLogic' @@ -327,7 +327,7 @@ export const activationLogic = kea([ runTask: async ({ id }) => { switch (id) { case ActivationTasks.IngestFirstEvent: - router.actions.push(urls.ingestion()) + router.actions.push(urls.onboarding(ProductKey.PRODUCT_ANALYTICS)) break case ActivationTasks.InviteTeamMember: actions.showInviteModal() diff --git a/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx b/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx index 2287859a196f4..fcc633bb7dfe0 100644 --- a/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx +++ b/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx @@ -104,7 +104,7 @@ export const activityLogLogic = kea([ })), listeners(({ actions }) => ({ setPage: async (_, breakpoint) => { - await breakpoint() + breakpoint() actions.fetchActivity() }, })), diff --git a/frontend/src/lib/components/Animation/Animation.tsx b/frontend/src/lib/components/Animation/Animation.tsx index ddda67a0060e7..6852c4f57ce29 100644 --- a/frontend/src/lib/components/Animation/Animation.tsx +++ 
b/frontend/src/lib/components/Animation/Animation.tsx @@ -39,7 +39,7 @@ export function Animation({ // Actually fetch the animation. Uses a cache to avoid multiple requests for the same file. // Show a fallback spinner if failed to fetch. useEffect(() => { - let unmounted = false + let unmounted = false // Poor person's abort controller async function loadAnimation(): Promise { try { const source = await getAnimationSource(type) @@ -48,7 +48,7 @@ export function Animation({ !unmounted && setShowFallbackSpinner(true) } } - loadAnimation() + void loadAnimation() return () => { unmounted = true } diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss index 2dbb9042ea037..b0f5f5a471e2c 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss @@ -12,8 +12,8 @@ flex-direction: column; &--highlighted { - border-color: var(--primary); - outline: 1px solid var(--primary); + border-color: var(--primary-3000); + outline: 1px solid var(--primary-3000); } .ant-alert { @@ -160,7 +160,7 @@ width: 1rem; border-radius: 0.25rem; margin-right: 0.25rem; - background: var(--primary); + background: var(--primary-3000); color: var(--bg-light); line-height: 1rem; font-size: 0.625rem; diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx index 93a1c595298b1..a66f87cad040a 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx @@ -144,7 +144,7 @@ export interface InsightCardProps extends Resizeable, React.HTMLAttributes void removeFromDashboard?: () => void - deleteWithUndo?: () => void + deleteWithUndo?: () => Promise refresh?: () => void rename?: () => void duplicate?: () => void @@ -194,7 +194,6 @@ export function FilterBasedCardContent({
{ diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx index 129a365f6d84c..2b85413419f02 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx @@ -107,7 +107,7 @@ export function InsightMeta({ samplingNotice={ insight.filters.sampling_factor && insight.filters.sampling_factor < 1 ? ( - + ) : null } diff --git a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx index 746316e302c65..9e6dc2e07c71c 100644 --- a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx +++ b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx @@ -32,7 +32,6 @@ interface TextCardBodyProps extends Pick, ' export function TextContent({ text, closeDetails, className }: TextCardBodyProps): JSX.Element { return ( - // eslint-disable-next-line react/forbid-dom-props
closeDetails?.()}> {text}
diff --git a/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx b/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx index 180ac9e794838..9c9ca574f8194 100644 --- a/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx +++ b/frontend/src/lib/components/CodeSnippet/CodeSnippet.tsx @@ -132,8 +132,10 @@ export function CodeSnippet({ } - onClick={async () => { - text && (await copyToClipboard(text, thing)) + onClick={() => { + if (text) { + void copyToClipboard(text, thing) + } }} size={compact ? 'small' : 'medium'} /> diff --git a/frontend/src/lib/components/CommandBar/searchBarLogic.ts b/frontend/src/lib/components/CommandBar/searchBarLogic.ts index bf609bc9a2c94..91c649fc9eb89 100644 --- a/frontend/src/lib/components/CommandBar/searchBarLogic.ts +++ b/frontend/src/lib/components/CommandBar/searchBarLogic.ts @@ -93,7 +93,6 @@ export const searchBarLogic = kea([ }), listeners(({ values, actions }) => ({ openResult: ({ index }) => { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const result = values.searchResults![index] router.actions.push(urlForResult(result)) actions.hideCommandBar() diff --git a/frontend/src/lib/components/CommandPalette/CommandPalette.scss b/frontend/src/lib/components/CommandPalette/CommandPalette.scss index 0972ea918e81e..e2622169149a0 100644 --- a/frontend/src/lib/components/CommandPalette/CommandPalette.scss +++ b/frontend/src/lib/components/CommandPalette/CommandPalette.scss @@ -109,7 +109,7 @@ cursor: pointer; &::after { - background: var(--primary); + background: var(--primary-3000); } } diff --git a/frontend/src/lib/components/CommandPalette/CommandPalette.tsx b/frontend/src/lib/components/CommandPalette/CommandPalette.tsx index ce07fb425829c..e32c98f3175de 100644 --- a/frontend/src/lib/components/CommandPalette/CommandPalette.tsx +++ b/frontend/src/lib/components/CommandPalette/CommandPalette.tsx @@ -36,7 +36,7 @@ function _CommandPalette(): JSX.Element | null { useEventListener('keydown', (event) => { 
if (isSqueak && event.key === 'Enter') { - squeakAudio?.play() + void squeakAudio?.play() } else if (event.key === 'Escape') { event.preventDefault() // Return to previous flow diff --git a/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx b/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx index bef1adb4210d1..b6590a40ed8eb 100644 --- a/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx +++ b/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx @@ -10,7 +10,7 @@ import { loaders } from 'kea-loaders' import type { debugCHQueriesLogicType } from './DebugCHQueriesType' import { IconRefresh } from 'lib/lemon-ui/icons' -export async function debugCHQueries(): Promise { +export function openCHQueriesDebugModal(): void { LemonDialog.open({ title: 'ClickHouse queries recently executed for this user', content: , diff --git a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx index 6e83d77772135..1555276502b3a 100644 --- a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx +++ b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx @@ -11,7 +11,7 @@ import { userLogic } from 'scenes/userLogic' import { personalAPIKeysLogic } from '../../../scenes/settings/user/personalAPIKeysLogic' import { teamLogic } from 'scenes/teamLogic' import posthog from 'posthog-js' -import { debugCHQueries } from './DebugCHQueries' +import { openCHQueriesDebugModal } from './DebugCHQueries' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { urls } from 'scenes/urls' import { newDashboardLogic } from 'scenes/dashboard/newDashboardLogic' @@ -576,9 +576,7 @@ export const commandPaletteLogic = kea([ ? 
{ icon: IconTools, display: 'Debug ClickHouse Queries', - executor: () => { - debugCHQueries() - }, + executor: () => openCHQueriesDebugModal(), } : [], } diff --git a/frontend/src/lib/components/CopyToClipboard.tsx b/frontend/src/lib/components/CopyToClipboard.tsx index e1525cee04b23..4ffd1456cef3f 100644 --- a/frontend/src/lib/components/CopyToClipboard.tsx +++ b/frontend/src/lib/components/CopyToClipboard.tsx @@ -4,9 +4,7 @@ import { Tooltip } from 'lib/lemon-ui/Tooltip' import { IconCopy } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' -interface InlineProps extends HTMLProps { - children?: JSX.Element | string - explicitValue?: string +interface InlinePropsBase extends HTMLProps { description?: string /** Makes text selectable instead of copying on click anywhere */ selectable?: boolean @@ -16,6 +14,15 @@ interface InlineProps extends HTMLProps { iconPosition?: 'end' | 'start' style?: React.CSSProperties } +interface InlinePropsWithStringInside extends InlinePropsBase { + children: string + explicitValue?: string +} +interface InlinePropsWithJSXInside extends InlinePropsBase { + children?: JSX.Element + explicitValue: string +} +type InlineProps = InlinePropsWithStringInside | InlinePropsWithJSXInside export function CopyToClipboardInline({ children, @@ -29,8 +36,7 @@ export function CopyToClipboardInline({ style, ...props }: InlineProps): JSX.Element { - const copy = async (): Promise => - await copyToClipboard(explicitValue ?? (children ? children.toString() : ''), description) + const copy = async (): Promise => await copyToClipboard((explicitValue ?? 
children) as string, description) const content = ( - {children} + {children && {children}} } diff --git a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx index d4020f53d7292..c1371a66414fe 100644 --- a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx +++ b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.tsx @@ -83,7 +83,7 @@ export function RollingDateRangeFilter({ data-attr="rolling-date-range-date-options-selector" id="rolling-date-range-date-options-selector" value={dateOption} - onChange={(newValue): void => setDateOption(newValue as string)} + onChange={(newValue): void => setDateOption(newValue)} onClick={(e): void => { e.stopPropagation() toggleDateOptionsSelector() diff --git a/frontend/src/lib/components/DateFilter/dateFilterLogic.ts b/frontend/src/lib/components/DateFilter/dateFilterLogic.ts index 4e8e6bf25b60b..afe18d4e7b7e3 100644 --- a/frontend/src/lib/components/DateFilter/dateFilterLogic.ts +++ b/frontend/src/lib/components/DateFilter/dateFilterLogic.ts @@ -43,7 +43,7 @@ export const dateFilterLogic = kea([ }, ], rangeDateFrom: [ - (props.dateFrom && (dayjs.isDayjs(props.dateFrom) || isDate.test(props.dateFrom as string)) + (props.dateFrom && (dayjs.isDayjs(props.dateFrom) || isDate.test(props.dateFrom)) ? dayjs(props.dateFrom) : null) as Dayjs | null, { @@ -52,7 +52,7 @@ export const dateFilterLogic = kea([ }, ], rangeDateTo: [ - (props.dateTo && (dayjs.isDayjs(props.dateTo) || isDate.test(props.dateTo as string)) + (props.dateTo && (dayjs.isDayjs(props.dateTo) || isDate.test(props.dateTo)) ? 
dayjs(props.dateTo) : dayjs()) as Dayjs | null, { diff --git a/frontend/src/lib/components/DebugNotice.tsx b/frontend/src/lib/components/DebugNotice.tsx index 11d70ba34054d..4ceb631d56d77 100644 --- a/frontend/src/lib/components/DebugNotice.tsx +++ b/frontend/src/lib/components/DebugNotice.tsx @@ -29,7 +29,7 @@ export function DebugNotice(): JSX.Element | null { return (
setNoticeHidden(true)}> -
+
DEBUG mode { it('make local state dirty', async () => { await expectLogic(logic, async () => { - await logic.actions.setDefinition(mockEventDefinitions[0]) - await logic.actions.setPopoverState(DefinitionPopoverState.Edit) + logic.actions.setDefinition(mockEventDefinitions[0]) + logic.actions.setPopoverState(DefinitionPopoverState.Edit) }) .toDispatchActions(['setDefinition', 'setPopoverState']) .toMatchValues({ @@ -87,9 +87,9 @@ describe('definitionPopoverLogic', () => { it('cancel', async () => { await expectLogic(logic, async () => { - await logic.actions.setDefinition(mockEventDefinitions[0]) - await logic.actions.setPopoverState(DefinitionPopoverState.Edit) - await logic.actions.setLocalDefinition({ description: 'new description' }) + logic.actions.setDefinition(mockEventDefinitions[0]) + logic.actions.setPopoverState(DefinitionPopoverState.Edit) + logic.actions.setLocalDefinition({ description: 'new description' }) }) .toDispatchActions(['setLocalDefinition']) .toMatchValues({ @@ -159,7 +159,7 @@ describe('definitionPopoverLogic', () => { }, { type: TaxonomicFilterGroupType.Cohorts, - definition: mockCohort as CohortType, + definition: mockCohort, url: `api/projects/@current/cohorts/${mockCohort.id}`, dispatchActions: [cohortsModel, ['updateCohort']], }, @@ -178,10 +178,10 @@ describe('definitionPopoverLogic', () => { logic.mount() const expectChain = expectLogic(logic, async () => { - await logic.actions.setDefinition(group.definition) - await logic.actions.setPopoverState(DefinitionPopoverState.Edit) - await logic.actions.setLocalDefinition({ description: 'new and improved description' }) - await logic.actions.handleSave({}) + logic.actions.setDefinition(group.definition) + logic.actions.setPopoverState(DefinitionPopoverState.Edit) + logic.actions.setLocalDefinition({ description: 'new and improved description' }) + logic.actions.handleSave({}) }).toDispatchActions(['setDefinitionSuccess', 'setPopoverState', 'handleSave']) if (group.dispatchActions.length > 
0) { @@ -202,9 +202,9 @@ describe('definitionPopoverLogic', () => { it('add tags', async () => { await expectLogic(logic, async () => { - await logic.actions.setDefinition(mockEventDefinitions[0]) - await logic.actions.setPopoverState(DefinitionPopoverState.Edit) - await logic.actions.setLocalDefinition({ tags: ['ohhello', 'ohwow'] }) + logic.actions.setDefinition(mockEventDefinitions[0]) + logic.actions.setPopoverState(DefinitionPopoverState.Edit) + logic.actions.setLocalDefinition({ tags: ['ohhello', 'ohwow'] }) }) .toDispatchActions(['setDefinitionSuccess', 'setLocalDefinition']) .toMatchValues({ @@ -221,8 +221,8 @@ describe('definitionPopoverLogic', () => { logic.mount() await expectLogic(logic, async () => { - await logic.actions.setDefinition(mockEventDefinitions[0]) - await logic.actions.setDefinition(mockEventDefinitions[1]) + logic.actions.setDefinition(mockEventDefinitions[0]) + logic.actions.setDefinition(mockEventDefinitions[1]) }) .toDispatchActions(['setDefinitionSuccess']) .toMatchValues({ diff --git a/frontend/src/lib/components/EditableField/EditableField.scss b/frontend/src/lib/components/EditableField/EditableField.scss index c58f63e9d2114..3bd0610a9ff90 100644 --- a/frontend/src/lib/components/EditableField/EditableField.scss +++ b/frontend/src/lib/components/EditableField/EditableField.scss @@ -4,7 +4,7 @@ max-width: 100%; &:not(.EditableField--multiline) { - line-height: 2rem; + line-height: 1.15em; } i { @@ -23,14 +23,13 @@ align-items: center; width: fit-content; max-width: calc(100% + 0.5rem); - min-height: 2rem; padding: 0.25rem; // Some padding to give the focus outline more breathing space margin: -0.25rem; white-space: pre-wrap; overflow: auto; } - &--editing .EditableField__highlight { + &.EditableField--editing .EditableField__highlight { flex-grow: 1; align-items: flex-end; width: auto; @@ -39,6 +38,23 @@ border-radius: var(--radius); } + &.EditableField--underlined { + .EditableField__highlight { + padding: 0; + margin: 0; + } + + 
&.EditableField--editing .EditableField__highlight { + outline: none; + + input { + text-decoration: underline; + text-decoration-color: var(--muted); + text-underline-offset: 0.5em; + } + } + } + .EditableField__autosize { align-self: center; min-width: 0; diff --git a/frontend/src/lib/components/EditableField/EditableField.tsx b/frontend/src/lib/components/EditableField/EditableField.tsx index 40e61b3e57d08..2070ea7f1cbe3 100644 --- a/frontend/src/lib/components/EditableField/EditableField.tsx +++ b/frontend/src/lib/components/EditableField/EditableField.tsx @@ -22,11 +22,14 @@ export interface EditableFieldProps { multiline?: boolean /** Whether to render the content as Markdown in view mode. */ markdown?: boolean - compactButtons?: boolean + compactButtons?: boolean | 'xsmall' // The 'xsmall' is somewhat hacky, but necessary for 3000 breadcrumbs /** Whether this field should be gated behind a "paywall". */ paywall?: boolean /** Controlled mode. */ mode?: 'view' | 'edit' + onModeToggle?: (newMode: 'view' | 'edit') => void + /** @default 'outlined' */ + editingIndication?: 'outlined' | 'underlined' className?: string style?: React.CSSProperties 'data-attr'?: string @@ -53,6 +56,8 @@ export function EditableField({ compactButtons = false, paywall = false, mode, + onModeToggle, + editingIndication = 'outlined', className, style, 'data-attr': dataAttr, @@ -60,13 +65,16 @@ export function EditableField({ notice, }: EditableFieldProps): JSX.Element { const [localIsEditing, setLocalIsEditing] = useState(false) - const [tentativeValue, setTentativeValue] = useState(value) + const [localTentativeValue, setLocalTentativeValue] = useState(value) useEffect(() => { - setTentativeValue(value) + setLocalTentativeValue(value) }, [value]) + useEffect(() => { + setLocalIsEditing(mode === 'edit') + }, [mode]) - const isSaveable = !minLength || tentativeValue.length >= minLength + const isSaveable = !minLength || localTentativeValue.length >= minLength const mouseDownOnCancelButton 
= (e: React.MouseEvent): void => { // if saveOnBlur is set the onBlur handler of the input fires before the onClick event of the button @@ -76,12 +84,14 @@ export function EditableField({ const cancel = (): void => { setLocalIsEditing(false) - setTentativeValue(value) + setLocalTentativeValue(value) + onModeToggle?.('view') } const save = (): void => { - onSave?.(tentativeValue) + onSave?.(localTentativeValue) setLocalIsEditing(false) + onModeToggle?.('view') } const isEditing = !paywall && (mode === 'edit' || localIsEditing) @@ -107,6 +117,7 @@ export function EditableField({ 'EditableField', multiline && 'EditableField--multiline', isEditing && 'EditableField--editing', + editingIndication === 'underlined' && 'EditableField--underlined', className )} data-attr={dataAttr} @@ -127,12 +138,12 @@ export function EditableField({ {multiline ? ( { onChange?.(e.target.value) - setTentativeValue(e.target.value) + setLocalTentativeValue(e.target.value) }} - onBlur={saveOnBlur ? (tentativeValue !== value ? save : cancel) : undefined} + onBlur={saveOnBlur ? (localTentativeValue !== value ? save : cancel) : undefined} onKeyDown={handleKeyDown} placeholder={placeholder} minLength={minLength} @@ -142,12 +153,12 @@ export function EditableField({ ) : ( { onChange?.(e.target.value) - setTentativeValue(e.target.value) + setLocalTentativeValue(e.target.value) }} - onBlur={saveOnBlur ? (tentativeValue !== value ? save : cancel) : undefined} + onBlur={saveOnBlur ? (localTentativeValue !== value ? save : cancel) : undefined} onKeyDown={handleKeyDown} placeholder={placeholder} minLength={minLength} @@ -155,7 +166,7 @@ export function EditableField({ autoFocus={autoFocus} /> )} - {!mode && ( + {(!mode || !!onModeToggle) && (
{markdown && ( @@ -164,7 +175,7 @@ export function EditableField({ )} ) : ( <> - {tentativeValue && markdown ? ( - {tentativeValue} + {localTentativeValue && markdown ? ( + {localTentativeValue} ) : ( - tentativeValue || {placeholder} + localTentativeValue || {placeholder} )} - {!mode && ( + {(!mode || !!onModeToggle) && (
} - size={compactButtons ? 'small' : undefined} - onClick={() => setLocalIsEditing(true)} + size={ + typeof compactButtons === 'string' + ? compactButtons + : compactButtons + ? 'small' + : undefined + } + onClick={() => { + setLocalIsEditing(true) + onModeToggle?.('edit') + }} data-attr={`edit-prop-${name}`} disabled={paywall} noPadding diff --git a/frontend/src/lib/components/ExportButton/ExportButton.tsx b/frontend/src/lib/components/ExportButton/ExportButton.tsx index b4d6e7e052452..2468fc60c1992 100644 --- a/frontend/src/lib/components/ExportButton/ExportButton.tsx +++ b/frontend/src/lib/components/ExportButton/ExportButton.tsx @@ -51,7 +51,7 @@ export function ExportButton({ items, ...buttonProps }: ExportButtonProps): JSX. key={i} fullWidth status="stealth" - onClick={() => triggerExport(triggerExportProps)} + onClick={() => void triggerExport(triggerExportProps)} data-attr={`export-button-${exportFormatExtension}`} data-ph-capture-attribute-export-target={target} data-ph-capture-attribute-export-body={ diff --git a/frontend/src/lib/components/ExportButton/exporter.tsx b/frontend/src/lib/components/ExportButton/exporter.tsx index 84ebf2a5eecb9..3ff134f766ecb 100644 --- a/frontend/src/lib/components/ExportButton/exporter.tsx +++ b/frontend/src/lib/components/ExportButton/exporter.tsx @@ -48,8 +48,8 @@ export async function triggerExport(asset: TriggerExportProps): Promise { lemonToast.error('Export failed!') } } else { - // eslint-disable-next-line no-async-promise-executor - const poller = new Promise(async (resolve, reject) => { + // eslint-disable-next-line no-async-promise-executor,@typescript-eslint/no-misused-promises + const poller = new Promise(async (resolve, reject) => { const trackingProperties = { export_format: asset.export_format, dashboard: asset.dashboard, diff --git a/frontend/src/lib/components/FullScreen.tsx b/frontend/src/lib/components/FullScreen.tsx index 2ca3ec95706e0..95859d4efe88e 100644 --- 
a/frontend/src/lib/components/FullScreen.tsx +++ b/frontend/src/lib/components/FullScreen.tsx @@ -3,7 +3,7 @@ import { useEffect } from 'react' export function FullScreen({ onExit }: { onExit?: () => any }): null { const selector = 'aside.ant-layout-sider, .layout-top-content' useEffect(() => { - const myClasses = window.document.querySelectorAll(selector) as NodeListOf + const myClasses = window.document.querySelectorAll(selector) for (let i = 0; i < myClasses.length; i++) { myClasses[i].style.display = 'none' @@ -16,7 +16,7 @@ export function FullScreen({ onExit }: { onExit?: () => any }): null { } try { - window.document.body.requestFullscreen().then(() => { + void document.body.requestFullscreen().then(() => { window.addEventListener('fullscreenchange', handler, false) }) } catch { @@ -31,15 +31,15 @@ export function FullScreen({ onExit }: { onExit?: () => any }): null { } return () => { - const elements = window.document.querySelectorAll(selector) as NodeListOf + const elements = window.document.querySelectorAll(selector) for (let i = 0; i < elements.length; i++) { elements[i].style.display = 'block' } try { window.removeEventListener('fullscreenchange', handler, false) - if (window.document.fullscreenElement !== null) { - window.document.exitFullscreen() + if (document.fullscreenElement !== null) { + void document.exitFullscreen() } } catch { // will break on IE11 diff --git a/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss b/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss index 17b0dded01c1d..c0264a96288f6 100644 --- a/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss +++ b/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.scss @@ -4,14 +4,14 @@ border-radius: 4px; &.SelectableElement--selected { - background: var(--primary); + background: var(--primary-3000); } &:hover { - background: var(--primary-light); + background: var(--primary-3000-hover); } &:active { - background: 
var(--primary-dark); + background: var(--primary-3000-active); } } diff --git a/frontend/src/lib/components/Map/Maplibre.scss b/frontend/src/lib/components/Map/Maplibre.scss index f4c4cdeb2fda7..2a7688a3f23af 100644 --- a/frontend/src/lib/components/Map/Maplibre.scss +++ b/frontend/src/lib/components/Map/Maplibre.scss @@ -1,6 +1,6 @@ .maplibregl-ctrl-attrib-button:focus, .maplibregl-ctrl-group button:focus { - box-shadow: 0 0 2px 2px var(--primary); + box-shadow: 0 0 2px 2px var(--primary-3000); } @media screen { diff --git a/frontend/src/lib/components/ObjectTags/objectTagsLogic.test.ts b/frontend/src/lib/components/ObjectTags/objectTagsLogic.test.ts index 41ebccae03559..de0716d7bfbfa 100644 --- a/frontend/src/lib/components/ObjectTags/objectTagsLogic.test.ts +++ b/frontend/src/lib/components/ObjectTags/objectTagsLogic.test.ts @@ -28,7 +28,7 @@ describe('objectTagsLogic', () => { }) it('handle adding a new tag', async () => { await expectLogic(logic, async () => { - await logic.actions.setNewTag('Nigh') + logic.actions.setNewTag('Nigh') logic.actions.handleAdd('Nightly') }) .toDispatchActions(['setNewTag']) @@ -43,7 +43,7 @@ describe('objectTagsLogic', () => { newTag: '', }) // @ts-expect-error - const mockedOnChange = props.onChange?.mock as any + const mockedOnChange = props.onChange?.mock expect(mockedOnChange.calls.length).toBe(1) expect(mockedOnChange.calls[0][0]).toBe('nightly') expect(mockedOnChange.calls[0][1]).toEqual(['a', 'b', 'c', 'nightly']) @@ -69,7 +69,7 @@ describe('objectTagsLogic', () => { tags: ['b', 'c'], }) // @ts-expect-error - const mockedOnChange = props.onChange?.mock as any + const mockedOnChange = props.onChange?.mock expect(mockedOnChange.calls.length).toBe(1) expect(mockedOnChange.calls[0][0]).toBe('a') expect(mockedOnChange.calls[0][1]).toEqual(['b', 'c']) diff --git a/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx b/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx index 
125c31d54eb0d..0a0917a29280e 100644 --- a/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx +++ b/frontend/src/lib/components/ProductIntroduction/ProductIntroduction.tsx @@ -85,7 +85,7 @@ export const ProductIntroduction = ({ {action ? ( } + icon={} onClick={() => { updateHasSeenProductIntroFor(productKey, true) action && action() diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss b/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss index 2ec1fd268b87f..f530132d670a6 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss @@ -5,21 +5,31 @@ } .property-value-type { - display: flex; align-items: center; - width: fit-content; - height: 1.25rem; - padding: 0.125rem 0.25rem; - letter-spacing: 0.25px; + background: var(--mid); border-radius: var(--radius); border: 1px solid var(--border-light); - background: var(--mid); color: var(--muted-alt); + cursor: default; + display: flex; font-size: 0.625rem; font-weight: 500; + height: 1.25rem; + letter-spacing: 0.25px; + padding: 0.125rem 0.25rem; text-transform: uppercase; white-space: nowrap; - cursor: default; + width: fit-content; + + .posthog-3000 & { + background: none; + border-radius: calc(var(--radius) * 0.75); + border-style: solid; + border-width: 1px; + font-family: var(--font-mono); + font-size: 0.688rem; + padding: 0.075rem 0.25rem; + } &:not(:first-child) { margin-left: 0.25rem; @@ -27,10 +37,10 @@ } .properties-table-value { - min-width: 12rem; - max-width: fit-content; - display: flex; align-items: center; + display: flex; + max-width: fit-content; + min-width: 12rem; .value-link { > * { @@ -45,7 +55,20 @@ .editable { text-decoration: underline dotted; - text-decoration-color: var(--primary); + text-decoration-color: var(--primary-3000); cursor: pointer; + + .posthog-3000 & { + border: 1px solid transparent; + border-radius: calc(var(--radius) * 0.75); + 
margin-left: -0.25rem; + padding: 0.125rem 0.25rem; + text-decoration: none; + + &:hover { + background: var(--bg-light); + border: 1px solid var(--border-light); + } + } } } diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx new file mode 100644 index 0000000000000..4a577e8c10741 --- /dev/null +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx @@ -0,0 +1,25 @@ +import { Meta, StoryFn } from '@storybook/react' +import { PropertiesTable as PropertiesTableComponent } from '.' +import { PropertyDefinitionType } from '~/types' + +const meta: Meta = { + title: 'Components/Properties Table', + component: PropertiesTableComponent, +} +export default meta + +export const PropertiesTable: StoryFn = () => { + const properties = { + name: 'John Doe', + age: 30, + url: 'https://www.google.com', + is_good: true, + evil_level: null, + tags: ['best', 'cool', 'awesome'], + location: { + city: 'Prague', + country: 'Czechia', + }, + } + return +} diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx index c074b490fb35a..f96fb045c022f 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx @@ -15,6 +15,7 @@ import { NewProperty } from 'scenes/persons/NewProperty' import { LemonCheckbox, LemonInput, Link } from '@posthog/lemon-ui' import clsx from 'clsx' import { PropertyDefinitionType } from '~/types' +import { IconPencil } from '@posthog/icons' type HandledType = 'string' | 'number' | 'bigint' | 'boolean' | 'undefined' | 'null' type Type = HandledType | 'symbol' | 'object' | 'function' @@ -85,18 +86,19 @@ function ValueDisplay({ const valueComponent = ( canEdit && textBasedTypes.includes(valueType) && setEditing(true)} > {!isURL(value) ? 
( - valueString + {valueString} ) : ( {valueString} )} + {canEdit && } ) @@ -283,13 +285,10 @@ export function PropertiesTable({ title: '', width: 0, render: function Copy(_, item: any): JSX.Element | false { - if (Array.isArray(item[1]) || item[1] instanceof Object) { - return false - } return ( { diff --git a/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss b/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss index b30e128738345..fa5bdc421a424 100644 --- a/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss +++ b/frontend/src/lib/components/PropertyGroupFilters/PropertyGroupFilters.scss @@ -63,7 +63,7 @@ font-size: 12px; &.selected { - background-color: var(--primary); + background-color: var(--primary-3000); color: #fff; } } diff --git a/frontend/src/lib/components/Sharing/SharingModal.stories.tsx b/frontend/src/lib/components/Sharing/SharingModal.stories.tsx index e2fc7e134b254..39d2e4b48ad06 100644 --- a/frontend/src/lib/components/Sharing/SharingModal.stories.tsx +++ b/frontend/src/lib/components/Sharing/SharingModal.stories.tsx @@ -62,7 +62,7 @@ const Template = (args: Partial & { licensed?: boolean }): JS created_at: '2022-06-28T12:30:51.459746Z', enabled: true, access_token: '1AEQjQ2xNLGoiyI0UnNlLzOiBZWWMQ', - ...(req.body as any), + ...req.body, }, ] }, diff --git a/frontend/src/lib/components/Sharing/SharingModal.tsx b/frontend/src/lib/components/Sharing/SharingModal.tsx index 446ac5065df42..57ce2eb06d509 100644 --- a/frontend/src/lib/components/Sharing/SharingModal.tsx +++ b/frontend/src/lib/components/Sharing/SharingModal.tsx @@ -111,7 +111,7 @@ export function SharingModalContent({ await copyToClipboard(shareLink, 'link')} + onClick={() => void copyToClipboard(shareLink, 'link')} icon={} > Copy public link diff --git a/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts b/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts index 
87082bfbbc6d3..b1fc22ef02e39 100644 --- a/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts +++ b/frontend/src/lib/components/Subscriptions/subscriptionsLogic.ts @@ -48,8 +48,8 @@ export const subscriptionsLogic = kea([ }), listeners(({ actions }) => ({ - deleteSubscription: ({ id }) => { - deleteWithUndo({ + deleteSubscription: async ({ id }) => { + await deleteWithUndo({ endpoint: api.subscriptions.determineDeleteEndpoint(), object: { name: 'Subscription', id }, callback: () => actions.loadSubscriptions(), diff --git a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss index 227fbe2346059..97c2e739162ac 100644 --- a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss +++ b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.scss @@ -48,7 +48,7 @@ } &.taxonomy-icon-built-in { - color: var(--primary); + color: var(--primary-3000); } } } @@ -68,6 +68,10 @@ &.hover { background-color: var(--primary-bg-hover); border-radius: var(--radius); + + .posthog-3000 & { + background-color: var(--bg-3000); + } } &.selected { @@ -82,7 +86,7 @@ } &.expand-row { - color: var(--primary); + color: var(--primary-3000); } } } diff --git a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss index 98f01dd22f7c5..108cbcb552950 100644 --- a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss +++ b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.scss @@ -5,10 +5,6 @@ display: flex; flex-direction: column; - .posthog-3000 & { - background: var(--bg-3000); - } - &.force-minimum-width { min-width: 300px; } @@ -50,6 +46,19 @@ background: var(--side); border-color: var(--side); + .posthog-3000 & { + color: var(--default); + font-weight: 500; + + &:not(.taxonomic-pill-active) { + opacity: 0.7; + } + + &:hover { + opacity: 1; + } + } + &.taxonomic-count-zero { color: var(--muted); cursor: 
not-allowed; diff --git a/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss b/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss index d0ac9a6fe4220..80a1630463217 100644 --- a/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss +++ b/frontend/src/lib/components/TimelineSeekbar/TimelineSeekbar.scss @@ -23,14 +23,14 @@ width: fit-content; padding: 0 0.25rem; border-radius: var(--radius); - background: var(--primary); + background: var(--primary-3000); color: var(--bg-light); line-height: 1.25rem; font-size: 0.75rem; font-weight: 500; &::selection { - background: var(--primary-light); // Default selection background is invisible on primary + background: var(--primary-3000-hover); // Default selection background is invisible on primary } .Spinner { @@ -77,7 +77,7 @@ left: 0; height: var(--timeline-seekbar-thickness); width: calc(100% - var(--timeline-seekbar-arrow-width)); - background: var(--primary); + background: var(--primary-3000); } .TimelineSeekbar__line-start, @@ -91,7 +91,7 @@ display: block; margin: calc(var(--timeline-seekbar-thickness) + 0.125rem) 0; height: var(--timeline-seekbar-arrow-height); - background: var(--primary); + background: var(--primary-3000); } } diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 63ef80121bccc..f8b3e96456bbc 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -136,6 +136,7 @@ export const FEATURE_FLAGS = { ROLE_BASED_ACCESS: 'role-based-access', // owner: #team-experiments, @liyiy QUERY_RUNNING_TIME: 'query_running_time', // owner: @mariusandra QUERY_TIMINGS: 'query-timings', // owner: @mariusandra + QUERY_ASYNC: 'query-async', // owner: @webjunkie POSTHOG_3000: 'posthog-3000', // owner: @Twixes POSTHOG_3000_NAV: 'posthog-3000-nav', // owner: @Twixes ENABLE_PROMPTS: 'enable-prompts', // owner: @lharries diff --git a/frontend/src/lib/dayjs.ts b/frontend/src/lib/dayjs.ts index e4f4881dfd390..51bf082205b51 100644 --- 
a/frontend/src/lib/dayjs.ts +++ b/frontend/src/lib/dayjs.ts @@ -61,7 +61,6 @@ export function dayjsLocalToTimezone( // We could only use types like "dayjs.OpUnitType", causing errors such as: // error TS2312: An interface can only extend an object type or intersection of object types with statically known members. -// eslint-disable-next-line @typescript-eslint/no-empty-interface export interface Dayjs extends DayjsOriginal {} export type UnitTypeShort = 'd' | 'D' | 'M' | 'y' | 'h' | 'm' | 's' | 'ms' diff --git a/frontend/src/lib/hooks/useAsyncHandler.ts b/frontend/src/lib/hooks/useAsyncHandler.ts index 3f0f4717ea7d0..af962ad35a322 100644 --- a/frontend/src/lib/hooks/useAsyncHandler.ts +++ b/frontend/src/lib/hooks/useAsyncHandler.ts @@ -9,7 +9,7 @@ import { useState } from 'react' * return Click me */ export function useAsyncHandler( - onEvent: ((e: E) => any | Promise) | undefined + onEvent: ((e: E) => any) | undefined ): { loading: boolean; onEvent: ((e: E) => void) | undefined } { const [loading, setLoading] = useState(false) @@ -19,7 +19,7 @@ export function useAsyncHandler( const result = onEvent(e) if (result instanceof Promise) { setLoading(true) - result.finally(() => setLoading(false)) + void result.finally(() => setLoading(false)) } } } diff --git a/frontend/src/lib/internalMetrics.ts b/frontend/src/lib/internalMetrics.ts index 417fa56e82a2e..e137db77e41b8 100644 --- a/frontend/src/lib/internalMetrics.ts +++ b/frontend/src/lib/internalMetrics.ts @@ -58,7 +58,7 @@ export async function apiGetWithTimeToSeeDataTracking( error = e } const requestDurationMs = performance.now() - requestStartMs - captureTimeToSeeData(teamId, { + void captureTimeToSeeData(teamId, { ...timeToSeeDataPayload, api_url: url, status: error ? 
'failure' : 'success', diff --git a/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss b/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss index 7a5d1c1db4b74..b01a0c24d3f18 100644 --- a/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss +++ b/frontend/src/lib/lemon-ui/LemonActionableTooltip/LemonActionableTooltip.scss @@ -22,7 +22,7 @@ } .LemonActionableTooltip__icon { - color: var(--primary); + color: var(--primary-3000); display: flex; align-items: center; width: 1.5rem; diff --git a/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss b/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss index 186df996abe84..adc91cce935f8 100644 --- a/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss +++ b/frontend/src/lib/lemon-ui/LemonBadge/LemonBadge.scss @@ -1,5 +1,5 @@ .LemonBadge { - --lemon-badge-color: var(--primary); + --lemon-badge-color: var(--primary-3000); --lemon-badge-size: 1.5rem; --lemon-badge-font-size: 0.75rem; --lemon-badge-position-offset: 0.5rem; diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss index d683e23796f6f..5ce621e0bb385 100644 --- a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.scss @@ -1,6 +1,4 @@ .LemonButton { - --lemon-button-height: 2.5rem; - position: relative; transition: background-color 200ms ease, color 200ms ease, border 200ms ease, opacity 200ms ease, transform 100ms ease; @@ -9,7 +7,6 @@ flex-shrink: 0; align-items: center; justify-content: flex-start; - min-height: var(--lemon-button-height); padding: 0.25rem 0.75rem; gap: 0.5rem; background: none; @@ -23,6 +20,12 @@ user-select: none; appearance: none !important; // Important as this gets overridden by Ant styles... 
+ > span { + display: flex; + flex: 1; + gap: 0.5rem; + } + .LemonButton__content { flex: 1; line-height: initial; @@ -32,6 +35,11 @@ &[aria-disabled='true']:not(.LemonButton--loading) { cursor: not-allowed; + + > span { + cursor: not-allowed; + } + opacity: var(--opacity-disabled); } @@ -43,7 +51,6 @@ width: 100%; padding-left: 0.5rem; padding-right: 0.5rem; - overflow: hidden; .LemonButton__content { overflow: hidden; @@ -51,11 +58,13 @@ } &.LemonButton--centered { - justify-content: center; + > span { + justify-content: center !important; + } .LemonButton__content { - flex: initial; - text-align: center; + flex: initial !important; + text-align: center !important; } } @@ -63,78 +72,46 @@ padding-left: 0.5rem; } - &.LemonButton--has-side-icon { - padding-right: 0.5rem; - } - &.LemonButton--no-content { padding-left: 0.5rem; padding-right: 0.5rem; } &.LemonButton--xsmall { - --lemon-button-height: 1.5rem; - - padding: 0.125rem 0.375rem; gap: 0.25rem; font-size: 0.75rem; - .LemonButton__icon { - font-size: 0.875rem; - } - - &.LemonButton--has-icon:not(.LemonButton--no-padding), - &.LemonButton--no-content:not(.LemonButton--no-padding) { - padding-left: 0.25rem; + > span { + gap: 0.25rem; } - &.LemonButton--has-side-icon:not(.LemonButton--no-padding), - &.LemonButton--no-content:not(.LemonButton--no-padding) { - padding-right: 0.25rem; + .LemonButton__icon { + font-size: 0.875rem; } } &.LemonButton--small, .Breadcrumbs3000 & { - --lemon-button-height: 2rem; - - padding: 0.125rem 0.5rem; gap: 0.25rem; - .LemonButton__icon { - font-size: 1.25rem; - } - - &.LemonButton--has-icon:not(.LemonButton--no-padding), - &.LemonButton--no-content:not(.LemonButton--no-padding) { - padding-left: 0.375rem; + > span { + gap: 0.25rem; } - &.LemonButton--has-side-icon:not(.LemonButton--no-padding), - &.LemonButton--no-content:not(.LemonButton--no-padding) { - padding-right: 0.375rem; + .LemonButton__icon { + font-size: 1.25rem; } } &.LemonButton--large { - --lemon-button-height: 
3.5rem; - - padding: 0.5rem 1rem; - gap: 0.75rem; font-size: 1rem; - .LemonButton__icon { - font-size: 1.75rem; + > span { + gap: 0.75rem; } - &.LemonButton--has-icon:not(.LemonButton--no-padding), - &.LemonButton--no-content:not(.LemonButton--no-padding) { - padding-left: 0.75rem; - } - - &.LemonButton--has-side-icon:not(.LemonButton--no-padding), - &.LemonButton--no-content:not(.LemonButton--no-padding) { - padding-right: 0.75rem; + .LemonButton__icon { + font-size: 1.75rem; } } @@ -143,8 +120,6 @@ height: auto; width: auto; padding: 0; - padding-left: 0; - padding-right: 0; &.LemonButton--full-width { width: 100%; @@ -156,115 +131,29 @@ font-size: 1.5rem; flex-shrink: 0; transition: color 200ms ease; - justify-items: center; - } - - // LemonStealth has some specific styles - &.LemonButton--status-stealth { - font-weight: 400; - color: var(--default); - - &:not([aria-disabled='true']):hover, - &.LemonButton--active { - background: var(--primary-highlight); - color: inherit; // Avoid links being colored on hover - } - - &.LemonButton--active { - font-weight: 500; - - // These buttons keep their font-weight when actve - &.LemonButtonWithSideAction, - &[role='menuitem'], - &[aria-haspopup='true'] { - font-weight: 400; - } - } - - .LemonButton__icon { - color: var(--muted-alt); - } - - // Secondary - outlined color style - &.LemonButton--secondary { - background: var(--bg-light); - border: 1px solid var(--border); - - &:not([aria-disabled='true']):hover, - &.LemonButton--active { - background: var(--primary-highlight); - border-color: var(--primary); - } - - &:not([aria-disabled='true']):active { - border-color: var(--primary-dark); - } - } + place-items: center center; } @each $status in ('primary', 'danger', 'primary-alt', 'muted') { &.LemonButton--status-#{$status} { - color: var(--#{$status}, var(--primary)); - - &:not([aria-disabled='true']):hover, - &.LemonButton--active { - background: var(--#{$status}-highlight, var(--primary-highlight)); - } - - 
&:not([aria-disabled='true']):active { - color: var(--#{$status}-dark, var(--primary-dark)); - - .LemonButton__icon { - color: var(--#{$status}-dark, var(--primary-dark)); - } - } + color: var(--#{$status}-3000, var(--#{$status}, var(--primary))); .LemonButton__icon { - color: var(--#{$status}); + color: var(--#{$status}-3000, var(--#{$status})); } // Primary - blocked color style &.LemonButton--primary { color: #fff; - background: var(--#{$status}); - - .LemonButton__icon { - color: #fff; - } + background: var(--#{$status}-3000, var(--#{$status})); &:not([aria-disabled='true']):hover, &.LemonButton--active { color: #fff; - background: var(--#{$status}-light, var(--#{$status})); - - .LemonButton__icon { - color: #fff; - } } &:not([aria-disabled='true']):active { - background: var(--#{$status}-dark, var(--#{$status})); color: #fff; - - .LemonButton__icon { - color: #fff; - } - } - } - - // Secondary - outlined color style - &.LemonButton--secondary { - background: var(--bg-light); - border: 1px solid var(--border); - - &:not([aria-disabled='true']):hover, - &.LemonButton--active { - background: var(--#{$status}-highlight, var(--primary-highlight)); - border-color: var(--#{$status}); - } - - &:not([aria-disabled='true']):active { - border-color: var(--#{$status}-dark, var(--status)); } } } @@ -284,67 +173,6 @@ color: #fff !important; } } - - .posthog-3000 & { - font-size: 0.8125rem; - border: none !important; // 3000 buttons never have borders - - .LemonButton__icon { - color: var(--muted); - } - - &.LemonButton--status-primary { - color: var(--muted); - } - - &.LemonButton--status-stealth { - color: var(--default); - } - - &.LemonButton--primary { - color: #fff; - background: var(--primary-3000); - - &:not([aria-disabled='true']):hover, - &.LemonButton--active { - background: var(--primary-3000-hover); - color: #fff; - } - - .LemonButton__icon { - color: #fff; - } - } - - &.LemonButton--secondary { - color: var(--default); - background: var(--secondary-3000); - - 
&:not([aria-disabled='true']):hover, - &.LemonButton--active { - background: var(--secondary-3000-hover); - color: var(--default); - } - - .LemonButton__icon { - color: var(--default); - } - } - - &:not([aria-disabled='true']):hover, - &.LemonButton--active { - color: var(--default); - background: var(--border); - - .LemonButton__icon { - color: var(--default); - } - } - - &:not([aria-disabled='true']):active { - transform: scale(calc(35 / 36)); - } - } } .LemonButtonWithSideAction { @@ -352,19 +180,11 @@ } .LemonButtonWithSideAction__spacer { - height: 1.5rem; - width: 1.5rem; box-sizing: content-box; &.LemonButtonWithSideAction__spacer--divider { - opacity: 0.17; - padding-left: 0.375rem; border-left: 1px solid currentColor; } - - .LemonButton--small & { - margin-left: 0.25rem; - } } .LemonButtonWithSideAction__side-button { diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.tsx b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.tsx index 1d0f1cd90b8a9..d52f2e601200e 100644 --- a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.tsx +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.tsx @@ -5,6 +5,8 @@ import { Link } from '../Link' import { Spinner } from '../Spinner/Spinner' import { Tooltip, TooltipProps } from '../Tooltip' import './LemonButton.scss' +import './LemonButtonLegacy.scss' +import './LemonButton3000.scss' import { LemonDropdown, LemonDropdownProps } from '../LemonDropdown' import { PopoverReferenceContext } from '../Popover' @@ -63,6 +65,8 @@ export interface LemonButtonPropsBase /** Like plain `disabled`, except we enforce a reason to be shown in the tooltip. */ disabledReason?: string | null | false noPadding?: boolean + /** Hides the button chrome until hover. */ + stealth?: boolean size?: 'xsmall' | 'small' | 'medium' | 'large' 'data-attr'?: string 'aria-label'?: string @@ -92,6 +96,7 @@ export const LemonButton: React.FunctionComponent - {icon ? {icon} : null} - {children ? {children} : null} - {sideIcon ? 
{sideIcon} : null} + + {icon ? {icon} : null} + {children ? {children} : null} + {sideIcon ? {sideIcon} : null} + ) diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButton3000.scss b/frontend/src/lib/lemon-ui/LemonButton/LemonButton3000.scss new file mode 100644 index 0000000000000..e0067bc5406af --- /dev/null +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButton3000.scss @@ -0,0 +1,266 @@ +.posthog-3000 { + --transition: opacity 200ms ease, transform 200ms ease; + + .LemonButton { + border-width: 0; + border-style: solid; + border-color: transparent; + min-height: 2em; + padding: 0; + position: relative; + outline: none; + transition: var(--transition); + border-radius: 6px; + cursor: pointer; + + > span { + border-radius: 6px; + font-size: 0.875rem; + display: flex; + flex-direction: row; + flex-shrink: 0; + align-items: center; + justify-content: flex-start; + background: none; + border-width: 1px; + border-style: solid; + border-color: transparent; + font-weight: 500; + gap: 0.5rem; + line-height: 1.5rem; + min-height: 2em; + position: relative; + text-align: left; + transition: var(--transition); + padding: 0.25rem 0.75rem; + width: 100%; + + .LemonButton__icon:first-child { + transition: var(--transition); + color: var(--default); + opacity: 0.5; + } + } + + &.LemonButton--xsmall { + min-height: 1.5rem; + padding-left: 0; + + > span { + min-height: 1.5rem; + padding: 0.125rem 0.375rem; + } + + &.LemonButton--has-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + > span { + padding-left: 0.25rem; + } + } + + &.LemonButton--has-side-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + > span { + padding-right: 0.25rem; + } + } + } + + &.LemonButton--small { + min-height: 1.8rem; + + > span { + min-height: 1.8rem; + padding: 0.25rem 0.5rem; + } + + &.LemonButton--has-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + 
> span { + padding-left: 0.375rem; + } + } + + &.LemonButton--has-side-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + > span { + padding-right: 0.375rem; + } + } + } + + &.LemonButton--large { + min-height: 2.5rem; + + > span { + gap: 0.75rem; + min-height: 2.5rem; + padding: 0.5rem 1rem; + } + + &.LemonButton--has-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-left: 0; + + > span { + padding-left: 0.75rem; + } + } + + &.LemonButton--has-side-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-right: 0; + + > span { + padding-right: 0.75rem; + } + } + } + + &.LemonButton--no-padding { + padding: 0; + min-height: 0; + + > span { + padding: 0; + min-height: 0; + } + } + + &:not(.LemonButton--tertiary) { + border-width: 1px; + padding-bottom: 1px; + + > span { + margin: 0 -1px; + top: -1px; + } + + &:not([aria-disabled='true']):hover, + &.LemonButton--active { + > span { + top: -1.5px; + } + } + + &:not([aria-disabled='true']):active { + > span { + top: -0.5px; + } + } + } + + &.LemonButton--primary { + background: var(--primary-3000-frame-bg); + border-color: var(--primary-3000-frame-border); + + > span { + background: var(--primary-3000-button-bg); + border-color: var(--primary-3000-button-border); + color: #111; + font-weight: 600; + + &:not([aria-disabled='true']):hover, + &.LemonButton--active { + border-color: var(--primary-3000-button-border-hover); + } + } + } + + &.LemonButton--secondary { + background: var(--secondary-3000-frame-bg); + border-color: var(--secondary-3000-frame-border); + + &:not([aria-disabled='true']):hover > span { + border-color: var(--secondary-3000-button-border-hover); + } + + > span { + color: var(--default); + background: var(--accent-3000); + border-color: var(--secondary-3000-button-border); + } + + &.LemonButton--active { + > span { + color: var(--default); + background: 
var(--bg-light); + border-color: var(--secondary-3000-button-border-hover); + } + } + } + + &.LemonButton--is-stealth:not(.LemonButton--active) { + &:hover { + > span { + border-color: var(--secondary-3000-button-border); + } + } + + &:not(:hover) { + background-color: transparent; + border-color: transparent; + + > span { + background-color: transparent; + border-color: transparent; + color: var(--muted); + } + } + } + + &.LemonButton--tertiary { + color: var(--default); + + &.LemonButton--status-danger { + color: var(--danger); + } + + &:not([aria-disabled='true']):hover, + &.LemonButton--active { + background-color: var(--bg-3000); + } + } + } + + .LemonButtonWithSideAction__spacer { + color: var(--muted); + height: 1.25rem; + width: 1.25rem; + + &.LemonButtonWithSideAction__spacer--divider { + margin-left: 0.25rem; + padding: 0; + } + } + + // SideAction buttons are buttons next to other buttons in the DOM but layered on top. since they're on another button, we don't want them to look like buttons. 
+ .LemonButtonWithSideAction__side-button { + top: 1px; + right: 1px; + bottom: 4px; + width: 1.625rem; + transform: none; + border-top-right-radius: 5px; + border-bottom-right-radius: 5px; + + & .LemonButton { + background: none !important; + border: none !important; + padding-bottom: 0 !important; + margin: 0 auto !important; + height: 100%; + + > span { + margin: auto !important; + top: 0 !important; + background: none !important; + border: none !important; + } + } + + &:not([aria-disabled='true']):hover { + background: rgb(0 0 0 / 10%); + } + } +} diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButtonLegacy.scss b/frontend/src/lib/lemon-ui/LemonButton/LemonButtonLegacy.scss new file mode 100644 index 0000000000000..fc10519414fee --- /dev/null +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButtonLegacy.scss @@ -0,0 +1,193 @@ +body:not(.posthog-3000) { + .LemonButton { + --lemon-button-height: 2.5rem; + + min-height: var(--lemon-button-height); + + &.LemonButton--has-side-icon { + padding-right: 0.5rem; + } + + &.LemonButton--xsmall { + padding: 0.125rem 0.375rem; + + --lemon-button-height: 1.5rem; + + &.LemonButton--has-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-left: 0.25rem; + } + + &.LemonButton--has-side-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-right: 0.25rem; + } + } + + &.LemonButton--small, + .Breadcrumbs3000 & { + --lemon-button-height: 2rem; + + padding: 0.125rem 0.5rem; + + &.LemonButton--has-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-left: 0.375rem; + } + + &.LemonButton--has-side-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-right: 0.375rem; + } + } + + &.LemonButton--full-width { + overflow: hidden; + } + + &.LemonButton--large { + --lemon-button-height: 3.5rem; + + padding: 0.5rem 1rem; + + 
&.LemonButton--has-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-left: 0.75rem; + } + + &.LemonButton--has-side-icon:not(.LemonButton--no-padding), + &.LemonButton--no-content:not(.LemonButton--no-padding) { + padding-right: 0.75rem; + } + } + + &.LemonButton--no-padding { + min-height: 0; + height: auto; + width: auto; + padding: 0; + padding-left: 0; + padding-right: 0; + + &.LemonButton--full-width { + width: 100%; + } + } + + // LemonStealth has some specific styles + &.LemonButton--status-stealth { + font-weight: 400; + color: var(--default); + + &:not([aria-disabled='true']):hover, + &.LemonButton--active { + background: var(--primary-highlight); + color: inherit; // Avoid links being colored on hover + } + + &.LemonButton--active { + font-weight: 500; + + // These buttons keep their font-weight when actve + &.LemonButtonWithSideAction, + &[role='menuitem'], + &[aria-haspopup='true'] { + font-weight: 400; + } + } + + .LemonButton__icon { + color: var(--muted-alt); + } + + // Secondary - outlined color style + &.LemonButton--secondary { + background: var(--bg-light); + border: 1px solid var(--border); + + &:not([aria-disabled='true']):hover, + &.LemonButton--active { + background: var(--primary-highlight); + border-color: var(--primary); + } + + &:not([aria-disabled='true']):active { + border-color: var(--primary-dark); + } + } + } + + @each $status in ('primary', 'danger', 'primary-alt', 'muted') { + &.LemonButton--status-#{$status} { + &:not([aria-disabled='true']):hover, + &.LemonButton--active { + background: var(--#{$status}-highlight, var(--primary-highlight)); + } + + &:not([aria-disabled='true']):active { + color: var(--#{$status}-dark, var(--primary-dark)); + + .LemonButton__icon { + color: var(--#{$status}-dark, var(--primary-dark)); + } + } + + // Primary - blocked color style + &.LemonButton--primary { + .LemonButton__icon { + color: #fff; + } + + &:not([aria-disabled='true']):hover, + 
&.LemonButton--active { + color: #fff; + background: var(--#{$status}-light, var(--#{$status})); + + .LemonButton__icon { + color: #fff; + } + } + + &:not([aria-disabled='true']):active { + color: #fff; + background: var(--#{$status}-dark, var(--#{$status})); + + .LemonButton__icon { + color: #fff; + } + } + } + + // Secondary - outlined color style + &.LemonButton--secondary { + background: var(--bg-light); + border: 1px solid var(--border); + + &:not([aria-disabled='true']):hover, + &.LemonButton--active { + background: var(--#{$status}-highlight, var(--primary-highlight)); + border-color: var(--#{$status}); + } + + &:not([aria-disabled='true']):active { + border-color: var(--#{$status}-dark, var(--status)); + } + } + } + } + } + + .LemonButtonWithSideAction__spacer { + height: 1.5rem; + width: 1.5rem; + + &.LemonButtonWithSideAction__spacer--divider { + opacity: 0.17; + padding-left: 0.375rem; + } + + .LemonButton--small & { + margin-left: 0.25rem; + } + } +} diff --git a/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss b/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss index 8ffed36ab0d40..135c3c2a43e24 100644 --- a/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss +++ b/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss @@ -15,6 +15,10 @@ --tick-length: 12.73; // Approximation of tick length, which is (3 + 6) * sqrt(2) --box-color: var(--primary); + .posthog-3000 & { + --box-color: var(--primary-3000); + } + display: flex; align-items: center; cursor: pointer; @@ -59,7 +63,7 @@ &.LemonCheckbox:not(.LemonCheckbox--disabled):hover, &.LemonCheckbox:not(.LemonCheckbox--disabled):active { label { - --box-color: var(--primary-light); + --box-color: var(--primary-3000-hover); .LemonCheckbox__box { border-color: var(--box-color); @@ -68,7 +72,7 @@ } &.LemonCheckbox:not(.LemonCheckbox--disabled):active label { - --box-color: var(--primary-dark); + --box-color: var(--primary-3000-active); } &.LemonCheckbox--checked { diff --git 
a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss index 7d86dc8538262..6dd509e146f97 100644 --- a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss +++ b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss @@ -19,8 +19,8 @@ } .LemonCollapsePanel__header { - min-height: 2.875rem; - border-radius: 0; + min-height: 2.875rem !important; + border-radius: 0 !important; padding: 0.5rem 0.75rem !important; // Override reduced side padding font-weight: 500 !important; // Override status="stealth"'s font-weight @@ -30,11 +30,15 @@ } .LemonCollapsePanel__body { - transition: height 200ms ease; - height: 0; - overflow: hidden; border-top-width: 1px; box-sizing: content-box; + height: 0; + overflow: hidden; + transition: height 200ms ease; + + .posthog-3000 & { + background: var(--bg-light); + } } .LemonCollapsePanel__content { diff --git a/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss b/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss index 1a266d6ba5b08..731f84ed06b90 100644 --- a/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss +++ b/frontend/src/lib/lemon-ui/LemonFileInput/LemonFileInput.scss @@ -11,6 +11,6 @@ left: calc(-1 * var(--file-drop-target-padding)); height: calc(100% + var(--file-drop-target-padding) * 2); width: calc(100% + var(--file-drop-target-padding) * 2); - border: 3px dashed var(--primary); + border: 3px dashed var(--primary-3000); border-radius: var(--radius); } diff --git a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss index 03943ad9f9bf1..c00645bafc1e1 100644 --- a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss +++ b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss @@ -1,27 +1,26 @@ .LemonInput { - transition: background-color 200ms ease, color 200ms ease, border-color 200ms ease, opacity 200ms ease; - display: flex; - min-height: 2.5rem; - padding: 0.25rem 
0.5rem; + align-items: center; background: none; border-radius: var(--radius); + border: 1px solid var(--border); + color: var(--default); + cursor: text; + display: flex; font-size: 0.875rem; - text-align: left; + gap: 0.25rem; + justify-content: center; line-height: 1.25rem; - cursor: text; - color: var(--default); - border: 1px solid var(--border); + min-height: 2.5rem; + padding: 0.25rem 0.5rem; + text-align: left; background-color: var(--bg-light); - align-items: center; - justify-content: center; - gap: 0.25rem; &:hover:not([aria-disabled='true']) { - border-color: var(--primary-light); + border-color: var(--primary-3000-hover); } &.LemonInput--focused:not([aria-disabled='true']) { - border-color: var(--primary); + border-color: var(--primary-3000); } &.LemonInput--transparent-background { @@ -67,7 +66,7 @@ &.LemonInput--has-content { > .LemonIcon { - color: var(--primary); + color: var(--primary-3000); } } diff --git a/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss b/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss index 3f99c8ea0f9c5..50506eac65e68 100644 --- a/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss +++ b/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss @@ -32,7 +32,7 @@ margin: 1rem auto; border-radius: var(--radius); background-color: var(--bg-light); - border: 1px solid var(--border); + border: 1px solid var(--border-3000); box-shadow: var(--shadow-elevation); transition: opacity var(--modal-transition-time) ease-out, transform var(--modal-transition-time) ease-out; display: flex; diff --git a/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss b/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss index f847cc3127765..cdec3b871b333 100644 --- a/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss +++ b/frontend/src/lib/lemon-ui/LemonRow/LemonRow.scss @@ -20,7 +20,7 @@ font-weight: 600; .LemonRow__icon { - color: var(--primary); + color: var(--primary-3000); } } diff --git 
a/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.scss b/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.scss index 291b5ffa77646..fd66747c15565 100644 --- a/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.scss +++ b/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.scss @@ -2,9 +2,7 @@ position: relative; flex-shrink: 0; width: fit-content; - background: var(--bg-light); border-radius: var(--radius); - border: 1px solid var(--border); > ul { z-index: 1; // Place above slider @@ -16,83 +14,154 @@ &.LemonSegmentedButton--full-width { width: 100%; } -} -.LemonSegmentedButton__slider { - // This is a real element and not ::after to avoid initial transition from 0 width - position: absolute; - top: -1px; // 1px of border - left: -1px; // 1px of border - height: calc(100% + 2px); // 1px of border (top + bottom) - width: calc(var(--lemon-segmented-button-slider-width) + 2px); // 1px of border (left + right) - transform: translateX(var(--lemon-segmented-button-slider-offset)); - background: var(--primary); - - &.LemonSegmentedButton__slider--first { - border-top-left-radius: var(--radius); - border-bottom-left-radius: var(--radius); + .LemonSegmentedButton__option { + display: flex; + flex: 1; + + .LemonButton__content { + text-wrap: nowrap; + } } +} - &.LemonSegmentedButton__slider--last { - border-top-right-radius: var(--radius); - border-bottom-right-radius: var(--radius); +body:not(.posthog-3000) { + .LemonSegmentedButton { + background: var(--bg-light); + border: 1px solid var(--border); } - .LemonSegmentedButton--transitioning & { + .LemonSegmentedButton__slider { + // This is a real element and not ::after to avoid initial transition from 0 width transition: width 200ms ease, transform 200ms ease, border-radius 200ms ease; will-change: width, transform, border-radius; - } -} + position: absolute; + top: -1px; // 1px of border + left: -1px; // 1px of border + height: calc(100% + 2px); // 
1px of border (top + bottom) + width: calc(var(--lemon-segmented-button-slider-width) + 2px); // 1px of border (left + right) + transform: translateX(var(--lemon-segmented-button-slider-offset)); + background: var(--primary); -.LemonSegmentedButton__option { - display: flex; - flex: 1; - - .LemonButton { - // Original transition with outline added - transition: background-color 200ms ease, color 200ms ease, border 200ms ease, opacity 200ms ease, - outline 200ms ease; - outline: 1px solid transparent; - border-radius: 0; - min-height: calc(var(--lemon-button-height) - 2px); - } + &.LemonSegmentedButton__slider--first { + border-top-left-radius: var(--radius); + border-bottom-left-radius: var(--radius); + } - &:first-child, - &:first-child .LemonButton { - border-top-left-radius: var(--radius); - border-bottom-left-radius: var(--radius); + &.LemonSegmentedButton__slider--last { + border-top-right-radius: var(--radius); + border-bottom-right-radius: var(--radius); + } } - &:last-child, - &:last-child .LemonButton { - border-top-right-radius: var(--radius); - border-bottom-right-radius: var(--radius); - } + .LemonSegmentedButton__option { + .LemonButton { + // Original transition with outline added + transition: background-color 200ms ease, color 200ms ease, border 200ms ease, opacity 200ms ease, + outline 200ms ease; + outline: 1px solid transparent; + border-radius: 0; + min-height: calc(var(--lemon-button-height) - 2px); - &:not(:last-child) { - border-right: 1px solid var(--border); - } + &:hover { + > span { + border-color: none !important; + } + } + } + + &:first-child, + &:first-child .LemonButton { + border-top-left-radius: var(--radius); + border-bottom-left-radius: var(--radius); + } + + &:last-child, + &:last-child .LemonButton { + border-top-right-radius: var(--radius); + border-bottom-right-radius: var(--radius); + } + + &:not(:last-child) { + border-right: 1px solid var(--border); + } + + &:not(.LemonSegmentedButton__option--disabled, 
.LemonSegmentedButton__option--selected) { + &:hover .LemonButton { + outline-color: var(--primary); + } - &:not(.LemonSegmentedButton__option--disabled, .LemonSegmentedButton__option--selected) { - &:hover .LemonButton { - outline-color: var(--primary); + &:active .LemonButton { + outline-color: var(--primary-dark); + } } - &:active .LemonButton { - outline-color: var(--primary-dark); + &.LemonSegmentedButton__option--selected { + .LemonButton, + .LemonButton__icon { + color: #fff; + } + + .LemonButton { + &:hover, + &:active { + background: none; // Disable LemonButton's hover styles for the selected option + } + } } } +} - &.LemonSegmentedButton__option--selected { - .LemonButton, - .LemonButton__icon { - color: #fff; +.posthog-3000 { + .LemonSegmentedButton__option { + & .LemonButton, + & .LemonButton > span { + border-radius: 0; } - .LemonButton { - &:hover, - &:active { - background: none; // Disable LemonButton's hover styles for the selected option + .LemonButton > span { + background: var(--bg-3000); + + .LemonButton__icon, + .LemonButton__content { + opacity: 0.4; + } + } + + .LemonButton.LemonButton--secondary:not([aria-disabled='true']):hover { + > span { + border-color: var(--secondary-3000-button-border); + } + } + + &:first-child, + &:first-child .LemonButton, + &:first-child .LemonButton > span { + border-top-left-radius: var(--radius); + border-bottom-left-radius: var(--radius); + } + + &:last-child, + &:last-child .LemonButton, + &:last-child .LemonButton > span { + border-top-right-radius: var(--radius); + border-bottom-right-radius: var(--radius); + } + + &:not(:last-child) { + .LemonButton { + border-right: none; + } + } + + &.LemonSegmentedButton__option--selected { + .LemonButton > span { + background-color: var(--bg-light); + + .LemonButton__icon, + .LemonButton__content { + opacity: 1; + } } } } diff --git a/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.tsx 
b/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.tsx index a925394115938..69f4bea414371 100644 --- a/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.tsx +++ b/frontend/src/lib/lemon-ui/LemonSegmentedButton/LemonSegmentedButton.tsx @@ -1,8 +1,11 @@ import clsx from 'clsx' import React from 'react' -import { LemonButton } from '../LemonButton' +import { LemonButton, LemonButtonProps } from '../LemonButton' import { useSliderPositioning } from '../hooks' import './LemonSegmentedButton.scss' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' export interface LemonSegmentedButtonOption { value: T @@ -18,7 +21,7 @@ export interface LemonSegmentedButtonProps { value?: T onChange?: (newValue: T) => void options: LemonSegmentedButtonOption[] - size?: 'small' | 'medium' + size?: LemonButtonProps['size'] className?: string fullWidth?: boolean } @@ -41,6 +44,15 @@ export function LemonSegmentedButton({ HTMLDivElement, HTMLLIElement >(value, 200) + const { featureFlags } = useValues(featureFlagLogic) + + const has3000 = featureFlags[FEATURE_FLAGS.POSTHOG_3000] + + let buttonProps = {} + + if (has3000) { + buttonProps = { status: 'stealth', type: 'secondary', motion: false } + } return (
({ icon={option.icon} data-attr={option['data-attr']} center + {...buttonProps} > {option.label} diff --git a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx index 5686b6af19412..51093dcb54c11 100644 --- a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx +++ b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx @@ -140,7 +140,7 @@ export function LemonSelect({ className={clsx(className, isClearButtonShown && 'LemonSelect--clearable')} icon={activeLeaf?.icon} // so that the pop-up isn't shown along with the close button - sideIcon={isClearButtonShown ?
: undefined} + sideIcon={isClearButtonShown ? <> : undefined} type="secondary" status="stealth" {...buttonProps} diff --git a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss index a382503f15b58..59b8e3bd4d44a 100644 --- a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss +++ b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss @@ -23,7 +23,7 @@ &:not(.ant-select-disabled):active { .ant-select-selector { background: var(--bg-light); - border-color: var(--primary); + border-color: var(--primary-3000); box-shadow: none; } } @@ -65,10 +65,14 @@ .LemonSelectMultipleDropdown { background: var(--bg-light); - padding: 0.5rem; border-radius: var(--radius); border: 1px solid var(--primary); margin: -4px 0; // Counteract antd wrapper + padding: 0.5rem; + + .posthog-3000 & { + border: 1px solid var(--primary-3000); + } .ant-select-item { padding: 0; diff --git a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx index 49bee6c0f3589..dc324b2af77d6 100644 --- a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx +++ b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx @@ -88,7 +88,7 @@ export function LemonSelectMultiple({ const typedOnChange = onChange as (newValue: string | null) => void typedOnChange(typedValues) } else { - const typedValues = v.map((token) => token.toString().trim()) as string[] + const typedValues = v.map((token) => token.toString().trim()) const typedOnChange = onChange as (newValue: string[]) => void typedOnChange(typedValues) } diff --git a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss index 6ce695c71a252..50b5e18e475d6 100644 --- a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss +++ 
b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss @@ -96,9 +96,9 @@ justify-content: center; .LemonSwitch--checked & { - transform: translateX(1rem); background-color: var(--primary); border-color: var(--primary); + transform: translateX(1rem); } .LemonSwitch--active & { diff --git a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss index 2a911f8ef6bb0..6e37c6cd5eecc 100644 --- a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss +++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss @@ -1,13 +1,20 @@ .LemonTable { position: relative; width: 100%; - background: var(--bg-light); + background: var(--bg-table); border-radius: var(--radius); border: 1px solid var(--border); overflow: hidden; flex: 1; --row-base-height: 3rem; + + .posthog-3000 & { + --row-base-height: auto; + + font-size: 13px; + } + --row-horizontal-padding: 1rem; &.LemonTable--with-ribbon { @@ -90,6 +97,16 @@ max-width: 30rem; font-size: 0.75rem; } + + a.Link { + .posthog-3000 & { + color: var(--default); + + &:not(:disabled):hover { + color: var(--primary-3000-hover); + } + } + } } .LemonTable__content > table { @@ -105,10 +122,25 @@ letter-spacing: 0.03125rem; text-transform: uppercase; + .posthog-3000 & { + background: none; + } + > tr { > th { font-weight: 700; text-align: left; + + .posthog-3000 & { + padding-top: 0.5rem; + padding-bottom: 0.5rem; + } + + .LemonButton { + .posthog-3000 & { + margin: -0.5rem 0; + } + } } &.LemonTable__row--grouping { @@ -136,8 +168,21 @@ } > td { + color: var(--text-secondary); padding-top: 0.5rem; padding-bottom: 0.5rem; + + .posthog-3000 & { + padding-top: 0.3rem; + padding-bottom: 0.3rem; + } + + .LemonButton { + .posthog-3000 & { + margin-top: -0.2rem; + margin-bottom: -0.2rem; + } + } } } } @@ -212,8 +257,22 @@ .LemonTable__header { cursor: default; + .posthog-3000 & { + opacity: 0.4; + } + &.LemonTable__header--actionable { cursor: pointer; + + .posthog-3000 & { + &:hover { + opacity: 
0.7; + } + + &:active { + opacity: 0.9; + } + } } } @@ -222,6 +281,12 @@ align-items: center; justify-content: space-between; line-height: 1.5; + + div { + .posthog-3000 & { + white-space: nowrap; + } + } } .LemonTable__footer { diff --git a/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss b/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss index 314f0e2806528..4e30b5ddec332 100644 --- a/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss +++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss @@ -1,15 +1,15 @@ .LemonTableLoader { - transition: height 200ms ease, top 200ms ease; - z-index: 10; - position: absolute; - left: 0; - padding: 0; - bottom: -1px; - width: 100%; - height: 0; background: var(--primary-bg-active); border: none !important; + bottom: -1px; + height: 0; + left: 0; overflow: hidden; + padding: 0.05rem !important; + position: absolute; + transition: height 200ms ease, top 200ms ease; + width: 100%; + z-index: 10; &::after { content: ''; @@ -20,11 +20,16 @@ height: 100%; animation: LemonTableLoader__swooping 1.5s linear infinite; background: var(--primary); + + .posthog-3000 & { + animation: loading-bar 1.5s linear infinite; + background: var(--primary-3000); + } } &.LemonTableLoader--enter-active, &.LemonTableLoader--enter-done { - height: 0.25rem; + height: 0.125rem; } } diff --git a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss index 4ff742148785c..967a0add69417 100644 --- a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss +++ b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss @@ -62,7 +62,7 @@ } &:active { - color: var(--primary-dark); + color: var(--primary-3000-active); } &.LemonTabs__tab--active { @@ -79,8 +79,9 @@ } .LemonTabs__tab-content { - display: flex; align-items: center; - padding: 0.75rem 0; cursor: pointer; + display: flex; + padding: 0.75rem 0; + white-space: nowrap; } diff --git a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss 
b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss index 6b8dce12a4432..348d596a1cec3 100644 --- a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss +++ b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss @@ -1,48 +1,99 @@ .LemonTag { - font-size: 0.75rem; - font-weight: var(--font-medium); + align-items: center; background: var(--border); - padding: 0.125rem 0.25rem; border-radius: var(--radius); - display: inline-flex; - align-items: center; color: var(--default); + display: inline-flex; + font-size: 0.75rem; + font-weight: var(--font-medium); line-height: 1rem; + padding: 0.125rem 0.25rem; white-space: nowrap; + .posthog-3000 & { + background: none; + border-radius: calc(var(--radius) * 0.75); + border-style: solid; + border-width: 1px; + font-size: 0.688rem; + padding: 0.075rem 0.25rem; + } + &.primary { - background-color: var(--primary); + background-color: var(--primary-3000); color: #fff; + + .posthog-3000 & { + background: none; + border-color: var(--primary-3000); + color: var(--primary-3000); + } } &.highlight { background-color: var(--mark); color: var(--bg-charcoal); + + .posthog-3000 & { + background: none; + border-color: var(--mark); + color: var(--mark); + } } &.warning { background-color: var(--warning); color: var(--bg-charcoal); + + .posthog-3000 & { + background: none; + border-color: var(--warning); + color: var(--warning); + } } &.danger { background-color: var(--danger); color: #fff; + + .posthog-3000 & { + background: none; + border-color: var(--danger); + color: var(--danger); + } } &.success { background-color: var(--success); color: #fff; + + .posthog-3000 & { + background: none; + border-color: var(--success); + color: var(--success); + } } &.completion { background-color: var(--purple-light); color: var(--bg-charcoal); + + .posthog-3000 & { + background: none; + border-color: var(--purple-light); + color: var(--purple-light); + } } &.caution { background-color: var(--danger-lighter); color: var(--bg-charcoal); + + .posthog-3000 & { + 
background: none; + border-color: var(--danger-lighter); + color: var(--danger-lighter); + } } &.none { @@ -62,7 +113,7 @@ .LemonTag__right-button { margin-left: 0.25rem; - min-height: 1.5rem; + min-height: 1.5rem !important; padding: 0.125rem !important; } } diff --git a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss index 389975e57915a..ccc45dc5f36a2 100644 --- a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss +++ b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss @@ -15,7 +15,7 @@ display: block; &:not(:disabled):hover { - border: 1px solid var(--primary-light); + border: 1px solid var(--primary-3000-hover); } width: 100%; @@ -27,7 +27,7 @@ } &:focus:not(:disabled) { - border: 1px solid var(--primary); + border: 1px solid var(--primary-3000); } .Field--error & { diff --git a/frontend/src/lib/lemon-ui/Link/Link.scss b/frontend/src/lib/lemon-ui/Link/Link.scss index 7b4702499c601..2500c9b62debe 100644 --- a/frontend/src/lib/lemon-ui/Link/Link.scss +++ b/frontend/src/lib/lemon-ui/Link/Link.scss @@ -1,20 +1,20 @@ .Link { - transition: color 200ms ease, opacity 200ms ease; background: none; - color: var(--link); border: none; + color: var(--link); + cursor: pointer; + line-height: inherit; outline: none; padding: 0; - line-height: inherit; - cursor: pointer; + transition: none; &:not(:disabled) { &:hover { - color: var(--primary-light); + color: var(--primary-3000-hover); } &:active { - color: var(--primary-dark); + color: var(--primary-3000-active); } } diff --git a/frontend/src/lib/lemon-ui/Popover/Popover.scss b/frontend/src/lib/lemon-ui/Popover/Popover.scss index f989f61f3f5bb..5dbb894d99043 100644 --- a/frontend/src/lib/lemon-ui/Popover/Popover.scss +++ b/frontend/src/lib/lemon-ui/Popover/Popover.scss @@ -38,7 +38,7 @@ opacity: 0; .Popover--actionable & { - border-color: var(--primary); + border-color: var(--primary-3000); } // We set the offset below instead of using 
floating-ui's offset(), because we need there to be no gap between @@ -95,7 +95,7 @@ } .posthog-3000 & { - background: var(--bg-3000); + background: var(--bg-light); padding: 0.25rem; } @@ -136,7 +136,7 @@ } .Popover--actionable & { - border-color: var(--primary); + border-color: var(--primary-3000); } } diff --git a/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss b/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss index 9e5a327867146..d3f5896baebca 100644 --- a/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss +++ b/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.scss @@ -93,7 +93,7 @@ height: 1.5rem; width: 1.5rem; border-radius: 50%; - background: var(--primary); + background: var(--primary-3000); color: #fff; font-size: 0.625rem; font-weight: 600; diff --git a/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.tsx b/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.tsx index 4e855d4ba00b7..80f308b960191 100644 --- a/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.tsx +++ b/frontend/src/lib/lemon-ui/ProfilePicture/ProfilePicture.tsx @@ -47,7 +47,7 @@ export function ProfilePicture({ const emailHash = md5(emailOrNameWithEmail.trim().toLowerCase()) const tentativeUrl = `https://www.gravatar.com/avatar/${emailHash}?s=96&d=404` // The image will be cached, so it's best to do GET request check before trying to render it - fetch(tentativeUrl).then((response) => { + void fetch(tentativeUrl).then((response) => { if (response.status === 200) { setGravatarUrl(tentativeUrl) } diff --git a/frontend/src/lib/lemon-ui/lemonToast.tsx b/frontend/src/lib/lemon-ui/lemonToast.tsx index b829ea33d0746..5332a95b0599a 100644 --- a/frontend/src/lib/lemon-ui/lemonToast.tsx +++ b/frontend/src/lib/lemon-ui/lemonToast.tsx @@ -18,7 +18,7 @@ export function ToastCloseButton({ closeToast }: { closeToast?: () => void }): J interface ToastButton { label: string - action: () => void + action: (() => void) | (() => Promise) dataAttr?: 
string } @@ -47,7 +47,7 @@ export function ToastContent({ type, message, button, id }: ToastContentProps): {button && ( { - button.action() + void button.action() toast.dismiss(id) }} type="secondary" @@ -84,7 +84,7 @@ export const lemonToast = { }, warning(message: string | JSX.Element, { button, ...toastOptions }: ToastOptionsWithButton = {}): void { posthog.capture('toast warning', { - message: message.toString(), + message: String(message), button: button?.label, toastId: toastOptions.toastId, }) @@ -96,7 +96,7 @@ export const lemonToast = { }, error(message: string | JSX.Element, { button, ...toastOptions }: ToastOptionsWithButton = {}): void { posthog.capture('toast error', { - message: message.toString(), + message: String(message), button: button?.label, toastId: toastOptions.toastId, }) diff --git a/frontend/src/lib/logic/inAppPrompt/inAppPromptLogic.test.ts b/frontend/src/lib/logic/inAppPrompt/inAppPromptLogic.test.ts index b270ad2621b1e..16d849e169079 100644 --- a/frontend/src/lib/logic/inAppPrompt/inAppPromptLogic.test.ts +++ b/frontend/src/lib/logic/inAppPrompt/inAppPromptLogic.test.ts @@ -1,6 +1,6 @@ import { expectLogic } from 'kea-test-utils' import { initKeaTests } from '~/test/init' -import { inAppPromptLogic, PromptConfig, PromptSequence, PromptUserState } from './inAppPromptLogic' +import { inAppPromptLogic, PromptConfig, PromptUserState } from './inAppPromptLogic' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { router } from 'kea-router' import { urls } from 'scenes/urls' @@ -289,7 +289,7 @@ describe('inAppPromptLogic', () => { }) .toDispatchActions([ 'closePrompts', - logic.actionCreators.runSequence(configProductTours.sequences[1] as PromptSequence, 0), + logic.actionCreators.runSequence(configProductTours.sequences[1], 0), inAppPromptEventCaptureLogic.actionCreators.reportPromptShown( 'tooltip', configProductTours.sequences[1].key, @@ -333,7 +333,7 @@ describe('inAppPromptLogic', () => { logic.actions.nextPrompt() }) 
.toDispatchActions([ - logic.actionCreators.runSequence(configProductTours.sequences[1] as PromptSequence, 1), + logic.actionCreators.runSequence(configProductTours.sequences[1], 1), inAppPromptEventCaptureLogic.actionCreators.reportPromptForward( configProductTours.sequences[1].key, 1, @@ -359,7 +359,7 @@ describe('inAppPromptLogic', () => { logic.actions.previousPrompt() }) .toDispatchActions([ - logic.actionCreators.runSequence(configProductTours.sequences[1] as PromptSequence, 0), + logic.actionCreators.runSequence(configProductTours.sequences[1], 0), inAppPromptEventCaptureLogic.actionCreators.reportPromptBackward( configProductTours.sequences[1].key, 0, diff --git a/frontend/src/lib/taxonomy.tsx b/frontend/src/lib/taxonomy.tsx index e841ec72c7323..b66cbb3358bcb 100644 --- a/frontend/src/lib/taxonomy.tsx +++ b/frontend/src/lib/taxonomy.tsx @@ -835,7 +835,7 @@ export function getKeyMapping( data = { ...KEY_MAPPING[type][value.replace(/^\$initial_/, '$')] } if (data.description) { data.label = `Initial ${data.label}` - data.description = `${data.description} Data from the first time this user was seen.` + data.description = `${String(data.description)} Data from the first time this user was seen.` } return data } else if (value.startsWith('$survey_responded/')) { diff --git a/frontend/src/lib/utils.tsx b/frontend/src/lib/utils.tsx index be48d5618e0e5..4daea66d2c7dc 100644 --- a/frontend/src/lib/utils.tsx +++ b/frontend/src/lib/utils.tsx @@ -1385,7 +1385,7 @@ export function humanTzOffset(timezone?: string): string { /** Join array of string into a list ("a, b, and c"). Uses the Oxford comma, but only if there are at least 3 items. */ export function humanList(arr: readonly string[]): string { - return arr.length > 2 ? arr.slice(0, -1).join(', ') + ', and ' + arr.slice(-1) : arr.join(' and ') + return arr.length > 2 ? 
arr.slice(0, -1).join(', ') + ', and ' + arr.at(-1) : arr.join(' and ') } export function resolveWebhookService(webhookUrl: string): string { @@ -1447,7 +1447,7 @@ export function lightenDarkenColor(hex: string, pct: number): string { return `rgb(${[r, g, b].join(',')})` } -export function toString(input?: any | null): string { +export function toString(input?: any): string { return input?.toString() || '' } diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index 2ec59bf991d2d..5ce1faefa6d13 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -33,7 +33,6 @@ import type { Dayjs } from 'lib/dayjs' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { convertPropertyGroupToProperties } from 'lib/utils' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import { PlatformType, Framework } from 'scenes/ingestion/types' import { now } from 'lib/dayjs' import { isFilterWithDisplay, @@ -332,7 +331,6 @@ export const eventUsageLogic = kea([ ) => ({ attribute, originalLength, newLength }), reportDashboardShareToggled: (isShared: boolean) => ({ isShared }), reportUpgradeModalShown: (featureName: string) => ({ featureName }), - reportIngestionLandingSeen: true, reportTimezoneComponentViewed: ( component: 'label' | 'indicator', project_timezone?: string, @@ -439,27 +437,11 @@ export const eventUsageLogic = kea([ reportInsightOpenedFromRecentInsightList: true, reportRecordingOpenedFromRecentRecordingList: true, reportPersonOpenedFromNewlySeenPersonsList: true, - reportIngestionSelectPlatformType: (platform: PlatformType) => ({ platform }), - reportIngestionSelectFrameworkType: (framework: Framework) => ({ framework }), - reportIngestionRecordingsTurnedOff: ( - session_recording_opt_in: boolean, - capture_console_log_opt_in: boolean, - capture_performance_opt_in: boolean - ) => ({ session_recording_opt_in, 
capture_console_log_opt_in, capture_performance_opt_in }), - reportIngestionAutocaptureToggled: (autocapture_opt_out: boolean) => ({ autocapture_opt_out }), - reportIngestionAutocaptureExceptionsToggled: (autocapture_opt_in: boolean) => ({ autocapture_opt_in }), - reportIngestionHelpClicked: (type: string) => ({ type }), - reportIngestionTryWithBookmarkletClicked: true, - reportIngestionTryWithDemoDataClicked: true, reportIngestionContinueWithoutVerifying: true, - reportIngestionContinueWithoutBilling: true, - reportIngestionBillingCancelled: true, - reportIngestionThirdPartyAboutClicked: (name: string) => ({ name }), - reportIngestionThirdPartyConfigureClicked: (name: string) => ({ name }), - reportIngestionThirdPartyPluginInstalled: (name: string) => ({ name }), + reportAutocaptureToggled: (autocapture_opt_out: boolean) => ({ autocapture_opt_out }), + reportAutocaptureExceptionsToggled: (autocapture_opt_in: boolean) => ({ autocapture_opt_in }), reportFailedToCreateFeatureFlagWithCohort: (code: string, detail: string) => ({ code, detail }), reportInviteMembersButtonClicked: true, - reportIngestionSidebarButtonClicked: (name: string) => ({ name }), reportDashboardLoadingTime: (loadingMilliseconds: number, dashboardId: number) => ({ loadingMilliseconds, dashboardId, @@ -794,9 +776,6 @@ export const eventUsageLogic = kea([ } posthog.capture('test account filters updated', payload) }, - reportIngestionLandingSeen: async () => { - posthog.capture('ingestion landing seen') - }, reportInsightFilterRemoved: async ({ index }) => { posthog.capture('local filter removed', { index }) @@ -1049,70 +1028,17 @@ export const eventUsageLogic = kea([ reportPersonOpenedFromNewlySeenPersonsList: () => { posthog.capture('person opened from newly seen persons list') }, - reportIngestionSelectPlatformType: ({ platform }) => { - posthog.capture('ingestion select platform type', { - platform: platform, - }) - }, - reportIngestionSelectFrameworkType: ({ framework }) => { - 
posthog.capture('ingestion select framework type', { - framework: framework, - }) - }, - reportIngestionRecordingsTurnedOff: ({ - session_recording_opt_in, - capture_console_log_opt_in, - capture_performance_opt_in, - }) => { - posthog.capture('ingestion recordings turned off', { - session_recording_opt_in, - capture_console_log_opt_in, - capture_performance_opt_in, - }) - }, - reportIngestionAutocaptureToggled: ({ autocapture_opt_out }) => { - posthog.capture('ingestion autocapture toggled', { - autocapture_opt_out, - }) - }, - reportIngestionAutocaptureExceptionsToggled: ({ autocapture_opt_in }) => { - posthog.capture('ingestion autocapture exceptions toggled', { - autocapture_opt_in, - }) - }, - reportIngestionHelpClicked: ({ type }) => { - posthog.capture('ingestion help clicked', { - type: type, - }) - }, - reportIngestionTryWithBookmarkletClicked: () => { - posthog.capture('ingestion try posthog with bookmarklet clicked') - }, - reportIngestionTryWithDemoDataClicked: () => { - posthog.capture('ingestion try posthog with demo data clicked') - }, reportIngestionContinueWithoutVerifying: () => { posthog.capture('ingestion continue without verifying') }, - reportIngestionContinueWithoutBilling: () => { - posthog.capture('ingestion continue without adding billing details') - }, - reportIngestionBillingCancelled: () => { - posthog.capture('ingestion billing cancelled') - }, - reportIngestionThirdPartyAboutClicked: ({ name }) => { - posthog.capture('ingestion third party about clicked', { - name: name, - }) - }, - reportIngestionThirdPartyConfigureClicked: ({ name }) => { - posthog.capture('ingestion third party configure clicked', { - name: name, + reportAutocaptureToggled: ({ autocapture_opt_out }) => { + posthog.capture('autocapture toggled', { + autocapture_opt_out, }) }, - reportIngestionThirdPartyPluginInstalled: ({ name }) => { - posthog.capture('report ingestion third party plugin installed', { - name: name, + reportAutocaptureExceptionsToggled: ({ 
autocapture_opt_in }) => { + posthog.capture('autocapture exceptions toggled', { + autocapture_opt_in, }) }, reportFailedToCreateFeatureFlagWithCohort: ({ detail, code }) => { @@ -1121,11 +1047,6 @@ export const eventUsageLogic = kea([ reportInviteMembersButtonClicked: () => { posthog.capture('invite members button clicked') }, - reportIngestionSidebarButtonClicked: ({ name }) => { - posthog.capture('ingestion sidebar button clicked', { - name: name, - }) - }, reportTeamSettingChange: ({ name, value }) => { posthog.capture(`${name} team setting updated`, { setting: name, diff --git a/frontend/src/models/cohortsModel.ts b/frontend/src/models/cohortsModel.ts index 6e1d00525a766..970458bb44be9 100644 --- a/frontend/src/models/cohortsModel.ts +++ b/frontend/src/models/cohortsModel.ts @@ -103,8 +103,8 @@ export const cohortsModel = kea([ } await triggerExport(exportCommand) }, - deleteCohort: ({ cohort }) => { - deleteWithUndo({ + deleteCohort: async ({ cohort }) => { + await deleteWithUndo({ endpoint: api.cohorts.determineDeleteEndpoint(), object: cohort, callback: actions.loadCohorts, diff --git a/frontend/src/models/notebooksModel.ts b/frontend/src/models/notebooksModel.ts index 057d896c0ce41..f59f697b991d2 100644 --- a/frontend/src/models/notebooksModel.ts +++ b/frontend/src/models/notebooksModel.ts @@ -1,7 +1,7 @@ import { actions, BuiltLogic, connect, kea, listeners, path, reducers } from 'kea' import { loaders } from 'kea-loaders' -import { DashboardType, NotebookListItemType, NotebookNodeType, NotebookTarget, NotebookType } from '~/types' +import { DashboardType, NotebookListItemType, NotebookNodeType, NotebookTarget } from '~/types' import api from 'lib/api' import posthog from 'posthog-js' @@ -81,7 +81,7 @@ export const notebooksModel = kea([ reducers({ scratchpadNotebook: [ - SCRATCHPAD_NOTEBOOK as NotebookListItemType, + SCRATCHPAD_NOTEBOOK, { setScratchpadNotebook: (_, { notebook }) => notebook, }, @@ -106,7 +106,7 @@ export const notebooksModel = kea([ 
content: defaultNotebookContent(title, content), }) - openNotebook(notebook.short_id, location, 'end', (logic) => { + await openNotebook(notebook.short_id, location, 'end', (logic) => { onCreate?.(logic) }) @@ -118,7 +118,7 @@ export const notebooksModel = kea([ }, deleteNotebook: async ({ shortId, title }) => { - deleteWithUndo({ + await deleteWithUndo({ endpoint: `projects/${values.currentTeamId}/notebooks`, object: { name: title || shortId, id: shortId }, callback: actions.loadNotebooks, @@ -138,14 +138,14 @@ export const notebooksModel = kea([ }, ], notebookTemplates: [ - LOCAL_NOTEBOOK_TEMPLATES as NotebookType[], + LOCAL_NOTEBOOK_TEMPLATES, { // In the future we can load these from remote }, ], })), - listeners(({ actions }) => ({ + listeners(({ asyncActions }) => ({ createNotebookFromDashboard: async ({ dashboard }) => { const queries = dashboard.tiles.reduce((acc, tile) => { if (!tile.insight) { @@ -186,7 +186,7 @@ export const notebooksModel = kea([ }, })) - await actions.createNotebook(NotebookTarget.Scene, dashboard.name + ' (copied)', resources) + await asyncActions.createNotebook(NotebookTarget.Scene, dashboard.name + ' (copied)', resources) }, })), ]) diff --git a/frontend/src/queries/nodes/DataNode/DateRange.tsx b/frontend/src/queries/nodes/DataNode/DateRange.tsx index ce48a8ec9d892..f28699cb34146 100644 --- a/frontend/src/queries/nodes/DataNode/DateRange.tsx +++ b/frontend/src/queries/nodes/DataNode/DateRange.tsx @@ -10,7 +10,6 @@ export function DateRange({ query, setQuery }: DateRangeProps): JSX.Element | nu if (isEventsQuery(query)) { return ( { diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index a83d0398a49d4..94c2651343b31 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -477,7 +477,7 @@ export const dataNodeLogic = kea([ abortQuery: async ({ queryId }) => { try { const { currentTeamId } = 
values - await api.create(`api/projects/${currentTeamId}/insights/cancel`, { client_query_id: queryId }) + await api.delete(`api/projects/${currentTeamId}/query/${queryId}/`) } catch (e) { console.warn('Failed cancelling query', e) } diff --git a/frontend/src/queries/nodes/DataTable/DataTable.tsx b/frontend/src/queries/nodes/DataTable/DataTable.tsx index 8154d03fd04aa..a3942c2c5c459 100644 --- a/frontend/src/queries/nodes/DataTable/DataTable.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTable.tsx @@ -470,7 +470,7 @@ export function DataTable({ uniqueKey, query, setQuery, context, cachedResults } '::' ) /* Bust the LemonTable cache when columns change */ } - dataSource={(dataTableRows ?? []) as DataTableRow[]} + dataSource={dataTableRows ?? []} rowKey={({ result }: DataTableRow, rowIndex) => { if (result) { if ( diff --git a/frontend/src/queries/nodes/DataTable/DataTableExport.tsx b/frontend/src/queries/nodes/DataTable/DataTableExport.tsx index 4970f0fd8e429..9c9cead143b70 100644 --- a/frontend/src/queries/nodes/DataTable/DataTableExport.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTableExport.tsx @@ -17,10 +17,11 @@ import { useValues } from 'kea' import { LemonDivider, lemonToast } from '@posthog/lemon-ui' import { asDisplay } from 'scenes/persons/person-utils' import { urls } from 'scenes/urls' +import { copyToClipboard } from 'lib/utils' const EXPORT_MAX_LIMIT = 10000 -function startDownload(query: DataTableNode, onlySelectedColumns: boolean): void { +async function startDownload(query: DataTableNode, onlySelectedColumns: boolean): Promise { const exportContext = isPersonsNode(query.source) ? 
{ path: getPersonsEndpoint(query.source) } : { source: query.source } @@ -45,7 +46,7 @@ function startDownload(query: DataTableNode, onlySelectedColumns: boolean): void ) } } - triggerExport({ + await triggerExport({ export_format: ExporterFormat.CSV, export_context: exportContext, }) @@ -156,9 +157,7 @@ function copyTableToCsv(dataTableRows: DataTableRow[], columns: string[], query: const csv = Papa.unparse(tableData) - navigator.clipboard.writeText(csv).then(() => { - lemonToast.success('Table copied to clipboard!') - }) + void copyToClipboard(csv, 'table') } catch { lemonToast.error('Copy failed!') } @@ -170,9 +169,7 @@ function copyTableToJson(dataTableRows: DataTableRow[], columns: string[], query const json = JSON.stringify(tableData, null, 4) - navigator.clipboard.writeText(json).then(() => { - lemonToast.success('Table copied to clipboard!') - }) + void copyToClipboard(json, 'table') } catch { lemonToast.error('Copy failed!') } @@ -204,7 +201,7 @@ export function DataTableExport({ query }: DataTableExportProps): JSX.Element | key={1} placement={'topRight'} onConfirm={() => { - startDownload(query, true) + void startDownload(query, true) }} actor={isPersonsNode(query.source) ? 'persons' : 'events'} limit={EXPORT_MAX_LIMIT} @@ -220,7 +217,7 @@ export function DataTableExport({ query }: DataTableExportProps): JSX.Element | startDownload(query, false)} + onConfirm={() => void startDownload(query, false)} actor={isPersonsNode(query.source) ? 
'persons' : 'events'} limit={EXPORT_MAX_LIMIT} > diff --git a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx index 0ebb345ac4c6a..4b74e14e09d6a 100644 --- a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx +++ b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx @@ -27,7 +27,7 @@ export function EventRowActions({ event }: EventActionProps): JSX.Element { - createActionFromEvent( + void createActionFromEvent( getCurrentTeamId(), event, 0, @@ -46,8 +46,8 @@ export function EventRowActions({ event }: EventActionProps): JSX.Element { fullWidth sideIcon={} data-attr="events-table-event-link" - onClick={async () => - await copyToClipboard( + onClick={() => + void copyToClipboard( `${window.location.origin}${urls.event(String(event.uuid), event.timestamp)}`, 'link to event' ) diff --git a/frontend/src/queries/nodes/DataTable/SavedQueries.tsx b/frontend/src/queries/nodes/DataTable/SavedQueries.tsx index fc088e7edc040..15dc608ebf9ad 100644 --- a/frontend/src/queries/nodes/DataTable/SavedQueries.tsx +++ b/frontend/src/queries/nodes/DataTable/SavedQueries.tsx @@ -1,5 +1,4 @@ import { LemonButton, LemonButtonWithDropdown } from 'lib/lemon-ui/LemonButton' -import { IconBookmarkBorder } from 'lib/lemon-ui/icons' import { DataTableNode } from '~/queries/schema' import equal from 'fast-deep-equal' import { useValues } from 'kea' @@ -48,7 +47,6 @@ export function SavedQueries({ query, setQuery }: SavedQueriesProps): JSX.Elemen }} type="secondary" status="primary-alt" - icon={} > {selectedTitle} diff --git a/frontend/src/queries/nodes/DataTable/dataTableLogic.ts b/frontend/src/queries/nodes/DataTable/dataTableLogic.ts index 86d41a46e0fd9..cb4463a1ee400 100644 --- a/frontend/src/queries/nodes/DataTable/dataTableLogic.ts +++ b/frontend/src/queries/nodes/DataTable/dataTableLogic.ts @@ -110,7 +110,7 @@ export const dataTableLogic = kea([ // Add a label between results if the day changed if (orderKey === 
'timestamp' && orderKeyIndex !== -1) { - let lastResult: any | null = null + let lastResult: any = null const newResults: DataTableRow[] = [] for (const result of results) { if ( diff --git a/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.ts b/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.ts index c77502435e944..31d49a241a5e5 100644 --- a/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.ts +++ b/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.ts @@ -83,7 +83,7 @@ export const hogQLQueryEditorLogic = kea([ ], aiAvailable: [() => [preflightLogic.selectors.preflight], (preflight) => preflight?.openai_available], }), - listeners(({ actions, props, values }) => ({ + listeners(({ actions, asyncActions, props, values }) => ({ saveQuery: () => { const query = values.queryInput // TODO: Is below line necessary if the only way for queryInput to change is already through setQueryInput? @@ -179,7 +179,7 @@ export const hogQLQueryEditorLogic = kea([ kind: NodeKind.HogQLQuery, query: values.queryInput, } - await actions.createDataWarehouseSavedQuery({ name, query }) + await asyncActions.createDataWarehouseSavedQuery({ name, query }) }, })), ]) diff --git a/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts b/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts index 53432d3c1009f..03d904476be9a 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts @@ -86,7 +86,6 @@ export const queryNodeToFilter = (query: InsightQueryNode): Partial date_from: query.dateRange?.date_from, entity_type: 'events', sampling_factor: query.samplingFactor, - aggregation_group_type_index: query.aggregation_group_type_index, }) if (!isRetentionQuery(query) && !isPathsQuery(query)) { @@ -107,6 +106,15 @@ export const queryNodeToFilter = (query: InsightQueryNode): Partial Object.assign(filters, objectClean>>(query.breakdown)) } + if 
(!isLifecycleQuery(query) && !isStickinessQuery(query)) { + Object.assign( + filters, + objectClean({ + aggregation_group_type_index: query.aggregation_group_type_index, + }) + ) + } + if (isTrendsQuery(query) || isStickinessQuery(query) || isLifecycleQuery(query) || isFunnelsQuery(query)) { filters.interval = query.interval } diff --git a/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx b/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx index 5e365c4fcc1eb..5c726071e38a5 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx +++ b/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx @@ -74,6 +74,7 @@ export function InsightVizDisplay({ insightDataLoading, erroredQueryId, timedOutQueryId, + vizSpecificOptions, } = useValues(insightVizDataLogic(insightProps)) const { exportContext } = useValues(insightDataLogic(insightProps)) @@ -131,7 +132,12 @@ export function InsightVizDisplay({ case InsightType.FUNNELS: return case InsightType.RETENTION: - return + return ( + + ) case InsightType.PATHS: return default: diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index df71c0b1cef4b..01c218a290e6c 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -26,13 +26,16 @@ import { isStickinessFilter, isTrendsFilter, } from 'scenes/insights/sharedUtils' -import { flattenObject, toParams } from 'lib/utils' +import { flattenObject, delay, toParams } from 'lib/utils' import { queryNodeToFilter } from './nodes/InsightQuery/utils/queryNodeToFilter' import { now } from 'lib/dayjs' import { currentSessionId } from 'lib/internalMetrics' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' +const QUERY_ASYNC_MAX_INTERVAL_SECONDS = 10 +const QUERY_ASYNC_TOTAL_POLL_SECONDS = 300 + //get export context for a given query export function queryExportContext( query: N, @@ -91,6 +94,43 @@ export function queryExportContext( throw new 
Error(`Unsupported query: ${query.kind}`) } +async function executeQuery( + queryNode: N, + methodOptions?: ApiMethodOptions, + refresh?: boolean, + queryId?: string +): Promise> { + const queryAsyncEnabled = Boolean(featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.QUERY_ASYNC]) + const excludedKinds = ['HogQLMetadata'] + const queryAsync = queryAsyncEnabled && !excludedKinds.includes(queryNode.kind) + const response = await api.query(queryNode, methodOptions, queryId, refresh, queryAsync) + + if (!queryAsync || !response.query_async) { + return response + } + + const pollStart = performance.now() + let currentDelay = 300 // start low, because all queries will take at minimum this + + while (performance.now() - pollStart < QUERY_ASYNC_TOTAL_POLL_SECONDS * 1000) { + await delay(currentDelay) + currentDelay = Math.min(currentDelay * 2, QUERY_ASYNC_MAX_INTERVAL_SECONDS * 1000) + + if (methodOptions?.signal?.aborted) { + const customAbortError = new Error('Query aborted') + customAbortError.name = 'AbortError' + throw customAbortError + } + + const statusResponse = await api.queryStatus.get(response.id) + + if (statusResponse.complete || statusResponse.error) { + return statusResponse.results + } + } + throw new Error('Query timed out') +} + // Return data for a given query export async function query( queryNode: N, @@ -216,7 +256,7 @@ export async function query( response = await fetchLegacyInsights() } } else { - response = await api.query(queryNode, methodOptions, queryId, refresh) + response = await executeQuery(queryNode, methodOptions, refresh, queryId) if (isHogQLQuery(queryNode) && response && typeof response === 'object') { logParams.clickhouse_sql = (response as HogQLQueryResponse)?.clickhouse } diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index d1eed070437ff..d7b3b2ca0da1d 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -1526,6 +1526,10 @@ "enum": ["leftjoin", 
"subquery"], "type": "string" }, + "materializationMode": { + "enum": ["auto", "legacy_null_as_string", "legacy_null_as_null", "disabled"], + "type": "string" + }, "personsArgMaxVersion": { "enum": ["auto", "v1", "v2"], "type": "string" @@ -1732,6 +1736,9 @@ }, "suppressSessionAnalysisWarning": { "type": "boolean" + }, + "vizSpecificOptions": { + "$ref": "#/definitions/VizSpecificOptions" } }, "required": ["kind", "source"], @@ -1801,10 +1808,6 @@ "LifecycleQuery": { "additionalProperties": false, "properties": { - "aggregation_group_type_index": { - "description": "Groups aggregation", - "type": "number" - }, "dateRange": { "$ref": "#/definitions/DateRange", "description": "Date range for the query" @@ -2410,6 +2413,51 @@ } ] }, + "QueryStatus": { + "additionalProperties": false, + "properties": { + "complete": { + "default": false, + "type": "boolean" + }, + "end_time": { + "format": "date-time", + "type": "string" + }, + "error": { + "default": false, + "type": "boolean" + }, + "error_message": { + "default": "", + "type": "string" + }, + "expiration_time": { + "format": "date-time", + "type": "string" + }, + "id": { + "type": "string" + }, + "query_async": { + "default": true, + "type": "boolean" + }, + "results": {}, + "start_time": { + "format": "date-time", + "type": "string" + }, + "task_id": { + "type": "string" + }, + "team_id": { + "type": "integer" + } + }, + "required": ["id", "query_async", "team_id", "error", "complete", "error_message"], + "type": "object" + }, "QueryTiming": { "additionalProperties": false, "properties": { @@ -2644,6 +2692,9 @@ }, "suppressSessionAnalysisWarning": { "type": "boolean" + }, + "vizSpecificOptions": { + "$ref": "#/definitions/VizSpecificOptions" } }, "required": ["kind", "shortId"], @@ -2757,10 +2808,6 @@ "StickinessQuery": { "additionalProperties": false, "properties": { - "aggregation_group_type_index": { - "description": "Groups aggregation", - "type": "number" - }, "dateRange": { "$ref": "#/definitions/DateRange", 
"description": "Date range for the query" @@ -3089,6 +3136,28 @@ "required": ["results"], "type": "object" }, + "VizSpecificOptions": { + "additionalProperties": false, + "description": "Chart specific rendering options. Use ChartRenderingMetadata for non-serializable values, e.g. onClick handlers", + "properties": { + "RETENTION": { + "additionalProperties": false, + "properties": { + "hideLineGraph": { + "type": "boolean" + }, + "hideSizeColumn": { + "type": "boolean" + }, + "useSmallLayout": { + "type": "boolean" + } + }, + "type": "object" + } + }, + "type": "object" + }, "WebAnalyticsPropertyFilter": { "anyOf": [ { diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index fdba8908a3a71..5e0f19452d438 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -12,6 +12,7 @@ import { GroupMathType, HogQLMathType, InsightShortId, + InsightType, IntervalType, LifecycleFilterType, LifecycleToggle, @@ -136,6 +137,7 @@ export interface HogQLQueryModifiers { personsOnEventsMode?: 'disabled' | 'v1_enabled' | 'v1_mixed' | 'v2_enabled' personsArgMaxVersion?: 'auto' | 'v1' | 'v2' inCohortVia?: 'leftjoin' | 'subquery' + materializationMode?: 'auto' | 'legacy_null_as_string' | 'legacy_null_as_null' | 'disabled' } export interface HogQLQueryResponse { @@ -391,6 +393,18 @@ export interface SavedInsightNode extends Node, InsightVizNodeViewProps, DataTab // Insight viz node +/** Chart specific rendering options. + * Use ChartRenderingMetadata for non-serializable values, e.g. 
onClick handlers + * @see ChartRenderingMetadata + * **/ +export interface VizSpecificOptions { + [InsightType.RETENTION]?: { + hideLineGraph?: boolean + hideSizeColumn?: boolean + useSmallLayout?: boolean + } +} + export interface InsightVizNode extends Node, InsightVizNodeViewProps { kind: NodeKind.InsightVizNode source: InsightQueryNode @@ -410,6 +424,8 @@ interface InsightVizNodeViewProps { embedded?: boolean suppressSessionAnalysisWarning?: boolean hidePersonsModal?: boolean + + vizSpecificOptions?: VizSpecificOptions } /** Base class for insight query nodes. Should not be used directly. */ @@ -501,7 +517,7 @@ export type StickinessFilter = Omit< StickinessFilterType & { hidden_legend_indexes?: number[] }, keyof FilterType | 'hidden_legend_keys' | 'stickiness_days' | 'shown_as' > -export interface StickinessQuery extends InsightsQueryBase { +export interface StickinessQuery extends Omit { kind: NodeKind.StickinessQuery /** Granularity of the response. Can be one of `hour`, `day`, `week` or `month` */ interval?: IntervalType @@ -526,11 +542,33 @@ export interface QueryResponse { next_allowed_client_refresh?: string } +export type QueryStatus = { + id: string + /** @default true */ + query_async: boolean + /** @asType integer */ + team_id: number + /** @default false */ + error: boolean + /** @default false */ + complete: boolean + /** @default "" */ + error_message: string + results?: any + /** @format date-time */ + start_time?: string + /** @format date-time */ + end_time?: string + /** @format date-time */ + expiration_time?: string + task_id?: string +} + export interface LifecycleQueryResponse extends QueryResponse { results: Record[] } -export interface LifecycleQuery extends InsightsQueryBase { +export interface LifecycleQuery extends Omit { kind: NodeKind.LifecycleQuery /** Granularity of the response. 
Can be one of `hour`, `day`, `week` or `month` */ interval?: IntervalType diff --git a/frontend/src/scenes/App.tsx b/frontend/src/scenes/App.tsx index bfeb6836c9ef0..225efc7dffeaa 100644 --- a/frontend/src/scenes/App.tsx +++ b/frontend/src/scenes/App.tsx @@ -165,9 +165,7 @@ function AppScene(): JSX.Element | null { return ( <> - - {wrappedSceneElement} - + {wrappedSceneElement} {toastContainer} diff --git a/frontend/src/scenes/PreflightCheck/PreflightCheck.scss b/frontend/src/scenes/PreflightCheck/PreflightCheck.scss index 2b83263873c2e..0628c2ac5d99f 100644 --- a/frontend/src/scenes/PreflightCheck/PreflightCheck.scss +++ b/frontend/src/scenes/PreflightCheck/PreflightCheck.scss @@ -81,7 +81,7 @@ svg, .Preflight__status-text { - color: var(--primary); + color: var(--primary-3000); } } diff --git a/frontend/src/scenes/PreflightCheck/preflightLogic.tsx b/frontend/src/scenes/PreflightCheck/preflightLogic.tsx index ae958b00c996b..2c71e29c70192 100644 --- a/frontend/src/scenes/PreflightCheck/preflightLogic.tsx +++ b/frontend/src/scenes/PreflightCheck/preflightLogic.tsx @@ -37,7 +37,7 @@ export const preflightLogic = kea([ null as PreflightStatus | null, { loadPreflight: async () => { - const response = (await api.get('_preflight/')) as PreflightStatus + const response = await api.get('_preflight/') return response }, }, diff --git a/frontend/src/scenes/actions/ActionStep.tsx b/frontend/src/scenes/actions/ActionStep.tsx index 9f41316c52ab1..c51064e66b171 100644 --- a/frontend/src/scenes/actions/ActionStep.tsx +++ b/frontend/src/scenes/actions/ActionStep.tsx @@ -1,9 +1,8 @@ import { LemonEventName } from './EventName' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' import { URL_MATCHING_HINTS } from 'scenes/actions/hints' -import { Radio, RadioChangeEvent } from 'antd' import { ActionStepType, StringMatching } from '~/types' -import { LemonButton, LemonInput, Link } from '@posthog/lemon-ui' +import { LemonButton, LemonInput, 
LemonSegmentedButton, Link } from '@posthog/lemon-ui' import { IconClose, IconOpenInApp } from 'lib/lemon-ui/icons' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' import { AuthorizedUrlList } from 'lib/components/AuthorizedUrlList/AuthorizedUrlList' @@ -166,9 +165,10 @@ function Option({ return (
- - {label} {extra_options} - +
+ {label} + {extra_options} +
{caption &&
{caption}
} void }): JSX.Element { - const handleChange = (e: RadioChangeEvent): void => { - const type = e.target.value + const handleChange = (type: string): void => { if (type === '$autocapture') { sendStep({ ...step, event: '$autocapture' }) } else if (type === 'event') { @@ -301,19 +300,30 @@ function TypeSwitcher({ return (
- - Autocapture - Custom event - Page view - + options={[ + { + value: '$autocapture', + label: 'Autocapture', + }, + { + value: 'event', + label: 'Custom event', + }, + { + value: '$pageview', + label: 'Page view', + }, + ]} + fullWidth + size="small" + />
) } @@ -328,15 +338,32 @@ function StringMatchingSelection({ sendStep: (stepToSend: ActionStepType) => void }): JSX.Element { const key = `${field}_matching` - const handleURLMatchChange = (e: RadioChangeEvent): void => { - sendStep({ ...step, [key]: e.target.value }) + const handleURLMatchChange = (value: string): void => { + sendStep({ ...step, [key]: value }) } const defaultValue = field === 'url' ? StringMatching.Contains : StringMatching.Exact return ( - - matches exactly - matches regex - contains - +
+ +
) } diff --git a/frontend/src/scenes/actions/EventName.tsx b/frontend/src/scenes/actions/EventName.tsx index aadaf08a4cee1..b467e2d044028 100644 --- a/frontend/src/scenes/actions/EventName.tsx +++ b/frontend/src/scenes/actions/EventName.tsx @@ -38,6 +38,7 @@ export function LemonEventName({ renderValue={(v) => (v !== null ? : null)} allowClear={allEventsOption === 'clear'} excludedProperties={allEventsOption !== 'explicit' ? { events: [null] } : undefined} + size="small" /> ) } diff --git a/frontend/src/scenes/actions/actionEditLogic.tsx b/frontend/src/scenes/actions/actionEditLogic.tsx index 396678d57014a..e1763cdaa0d29 100644 --- a/frontend/src/scenes/actions/actionEditLogic.tsx +++ b/frontend/src/scenes/actions/actionEditLogic.tsx @@ -129,8 +129,8 @@ export const actionEditLogic = kea([ })), listeners(({ values, actions }) => ({ - deleteAction: () => { - deleteWithUndo({ + deleteAction: async () => { + await deleteWithUndo({ endpoint: api.actions.determineDeleteEndpoint(), object: values.action, callback: () => { diff --git a/frontend/src/scenes/actions/actionLogic.ts b/frontend/src/scenes/actions/actionLogic.ts index e3781d37902f0..6b47df87c05a7 100644 --- a/frontend/src/scenes/actions/actionLogic.ts +++ b/frontend/src/scenes/actions/actionLogic.ts @@ -4,6 +4,8 @@ import api from 'lib/api' import type { actionLogicType } from './actionLogicType' import { ActionType, Breadcrumb } from '~/types' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' +import { DataManagementTab } from 'scenes/data-management/DataManagementScene' export interface ActionLogicProps { id?: ActionType['id'] @@ -50,14 +52,17 @@ export const actionLogic = kea([ (s) => [s.action], (action): Breadcrumb[] => [ { + key: Scene.DataManagement, name: `Data Management`, path: urls.eventDefinitions(), }, { + key: DataManagementTab.Actions, name: 'Actions', path: urls.actions(), }, { + key: action?.id || 'new', name: action?.name || 'Unnamed', path: action ? 
urls.action(action.id) : undefined, }, diff --git a/frontend/src/scenes/actions/actionsLogic.ts b/frontend/src/scenes/actions/actionsLogic.ts index bd4d667ae8450..f150c85111a64 100644 --- a/frontend/src/scenes/actions/actionsLogic.ts +++ b/frontend/src/scenes/actions/actionsLogic.ts @@ -9,6 +9,10 @@ import { userLogic } from 'scenes/userLogic' import { subscriptions } from 'kea-subscriptions' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' +import { Scene } from 'scenes/sceneTypes' +import { DataManagementTab } from 'scenes/data-management/DataManagementScene' + +export type ActionsFilterType = 'all' | 'me' export const actionsFuse = new Fuse([], { keys: [{ name: 'name', weight: 2 }, 'description', 'tags'], @@ -30,15 +34,15 @@ export const actionsLogic = kea([ ], })), actions({ - setFilterByMe: (filterByMe: boolean) => ({ filterByMe }), + setFilterType: (filterType: ActionsFilterType) => ({ filterType }), setSearchTerm: (searchTerm: string) => ({ searchTerm }), }), reducers({ - filterByMe: [ - false, + filterType: [ + 'all' as ActionsFilterType, { persist: true }, { - setFilterByMe: (_, { filterByMe }) => filterByMe, + setFilterType: (_, { filterType }) => filterType, }, ], searchTerm: [ @@ -50,13 +54,13 @@ export const actionsLogic = kea([ }), selectors({ actionsFiltered: [ - (s) => [s.actions, s.filterByMe, s.searchTerm, s.user], - (actions, filterByMe, searchTerm, user) => { + (s) => [s.actions, s.filterType, s.searchTerm, s.user], + (actions, filterType, searchTerm, user) => { let data = actions if (searchTerm) { data = actionsFuse.search(searchTerm).map((result) => result.item) } - if (filterByMe) { + if (filterType === 'me') { data = data.filter((item) => item.created_by?.uuid === user?.uuid) } return data @@ -66,10 +70,12 @@ export const actionsLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: `Data Management`, + key: Scene.DataManagement, + name: `Data management`, path: 
urls.eventDefinitions(), }, { + key: DataManagementTab.Actions, name: 'Actions', path: urls.actions(), }, diff --git a/frontend/src/scenes/appContextLogic.ts b/frontend/src/scenes/appContextLogic.ts index 30a005f3c3808..4386021d2e927 100644 --- a/frontend/src/scenes/appContextLogic.ts +++ b/frontend/src/scenes/appContextLogic.ts @@ -27,7 +27,7 @@ export const appContextLogic = kea([ const preloadedUser = appContext?.current_user if (appContext && preloadedUser) { - api.get('api/users/@me/').then((remoteUser: UserType) => { + void api.get('api/users/@me/').then((remoteUser: UserType) => { if (remoteUser.uuid !== preloadedUser.uuid) { console.error(`Preloaded user ${preloadedUser.uuid} does not match remote user ${remoteUser.uuid}`) Sentry.captureException( diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index 38bf3e1dedd7e..ad997bb2635a3 100644 --- a/frontend/src/scenes/appScenes.ts +++ b/frontend/src/scenes/appScenes.ts @@ -23,6 +23,7 @@ export const appScenes: Record any> = { [Scene.PersonsManagement]: () => import('./persons-management/PersonsManagementScene'), [Scene.Person]: () => import('./persons/PersonScene'), [Scene.Pipeline]: () => import('./pipeline/Pipeline'), + [Scene.PipelineApp]: () => import('./pipeline/PipelineApp'), [Scene.Group]: () => import('./groups/Group'), [Scene.Action]: () => import('./actions/Action'), [Scene.Experiments]: () => import('./experiments/Experiments'), @@ -52,7 +53,6 @@ export const appScenes: Record any> = { [Scene.PreflightCheck]: () => import('./PreflightCheck/PreflightCheck'), [Scene.Signup]: () => import('./authentication/signup/SignupContainer'), [Scene.InviteSignup]: () => import('./authentication/InviteSignup'), - [Scene.Ingestion]: () => import('./ingestion/IngestionWizard'), [Scene.Billing]: () => import('./billing/Billing'), [Scene.Apps]: () => import('./plugins/AppsScene'), [Scene.FrontendAppScene]: () => import('./apps/FrontendAppScene'), diff --git 
a/frontend/src/scenes/apps/AppMetricsScene.tsx b/frontend/src/scenes/apps/AppMetricsScene.tsx index d79c0d160d51e..74b847872f39e 100644 --- a/frontend/src/scenes/apps/AppMetricsScene.tsx +++ b/frontend/src/scenes/apps/AppMetricsScene.tsx @@ -80,6 +80,11 @@ export function AppMetrics(): JSX.Element { label: <>onEvent metrics, content: , }, + showTab(AppMetricsTab.ComposeWebhook) && { + key: AppMetricsTab.ComposeWebhook, + label: <>composeWebhook metrics, + content: , + }, showTab(AppMetricsTab.ExportEvents) && { key: AppMetricsTab.ExportEvents, label: <>exportEvents metrics, diff --git a/frontend/src/scenes/apps/MetricsTab.tsx b/frontend/src/scenes/apps/MetricsTab.tsx index b5761df9d9815..0af2e7984c814 100644 --- a/frontend/src/scenes/apps/MetricsTab.tsx +++ b/frontend/src/scenes/apps/MetricsTab.tsx @@ -39,7 +39,7 @@ export function MetricsTab({ tab }: MetricsTabProps): JSX.Element { setDateFrom(newValue as string)} + onChange={(newValue) => setDateFrom(newValue)} options={[ { label: 'Last 30 days', value: '-30d' }, { label: 'Last 7 days', value: '-7d' }, diff --git a/frontend/src/scenes/apps/appMetricsSceneLogic.ts b/frontend/src/scenes/apps/appMetricsSceneLogic.ts index 9122d93b48db0..1f68e22236849 100644 --- a/frontend/src/scenes/apps/appMetricsSceneLogic.ts +++ b/frontend/src/scenes/apps/appMetricsSceneLogic.ts @@ -12,6 +12,7 @@ import { HISTORICAL_EXPORT_JOB_NAME_V2 } from 'scenes/plugins/edit/interface-job import { interfaceJobsLogic, InterfaceJobsProps } from '../plugins/edit/interface-jobs/interfaceJobsLogic' import { dayjs } from 'lib/dayjs' import { router } from 'kea-router' +import { Scene } from 'scenes/sceneTypes' export interface AppMetricsLogicProps { /** Used as the logic's key */ @@ -28,6 +29,7 @@ export enum AppMetricsTab { Logs = 'logs', ProcessEvent = 'processEvent', OnEvent = 'onEvent', + ComposeWebhook = 'composeWebhook', ExportEvents = 'exportEvents', ScheduledTask = 'scheduledTask', HistoricalExports = 'historical_exports', @@ -36,6 +38,7 @@ 
export enum AppMetricsTab { export const TabsWithMetrics = [ AppMetricsTab.ProcessEvent, AppMetricsTab.OnEvent, + AppMetricsTab.ComposeWebhook, AppMetricsTab.ExportEvents, AppMetricsTab.ScheduledTask, AppMetricsTab.HistoricalExports, @@ -95,6 +98,7 @@ const DEFAULT_DATE_FROM = '-30d' const INITIAL_TABS: Array = [ AppMetricsTab.ProcessEvent, AppMetricsTab.OnEvent, + AppMetricsTab.ComposeWebhook, AppMetricsTab.ExportEvents, AppMetricsTab.ScheduledTask, ] @@ -194,10 +198,12 @@ export const appMetricsSceneLogic = kea([ (s, p) => [s.pluginConfig, p.pluginConfigId], (pluginConfig, pluginConfigId: number): Breadcrumb[] => [ { + key: Scene.Apps, name: 'Apps', path: urls.projectApps(), }, { + key: pluginConfigId, name: pluginConfig?.plugin_info?.name, path: urls.appMetrics(pluginConfigId), }, diff --git a/frontend/src/scenes/apps/constants.tsx b/frontend/src/scenes/apps/constants.tsx index 4d89a8202eae1..9870936b5cfc5 100644 --- a/frontend/src/scenes/apps/constants.tsx +++ b/frontend/src/scenes/apps/constants.tsx @@ -47,6 +47,26 @@ export const DescriptionColumns: Record = { ), }, + [AppMetricsTab.ComposeWebhook]: { + successes: 'Events processed', + successes_tooltip: ( + <> + These events were successfully processed by the composeWebhook app method on the first try. + + ), + successes_on_retry: 'Events processed on retry', + successes_on_retry_tooltip: ( + <> + These events were successfully processed by the composeWebhook app method after being retried. + + ), + failures: 'Failed events', + failures_tooltip: ( + <> + These events had errors when being processed by the composeWebhook app method. 
+ + ), + }, [AppMetricsTab.ExportEvents]: { successes: 'Events delivered', successes_tooltip: ( diff --git a/frontend/src/scenes/authentication/inviteSignupLogic.ts b/frontend/src/scenes/authentication/inviteSignupLogic.ts index 7632e74ba1373..23f70644c1ef6 100644 --- a/frontend/src/scenes/authentication/inviteSignupLogic.ts +++ b/frontend/src/scenes/authentication/inviteSignupLogic.ts @@ -88,7 +88,7 @@ export const inviteSignupLogic = kea([ first_name: !first_name ? 'Please enter your name' : undefined, }), submit: async (payload, breakpoint) => { - await breakpoint() + breakpoint() if (!values.invite) { return diff --git a/frontend/src/scenes/authentication/login2FALogic.ts b/frontend/src/scenes/authentication/login2FALogic.ts index ed76c57f79e76..796dfc764f764 100644 --- a/frontend/src/scenes/authentication/login2FALogic.ts +++ b/frontend/src/scenes/authentication/login2FALogic.ts @@ -66,7 +66,7 @@ export const login2FALogic = kea([ : null, }), submit: async ({ token }, breakpoint) => { - await breakpoint() + breakpoint() try { return await api.create('api/login/token', { token }) } catch (e) { diff --git a/frontend/src/scenes/authentication/loginLogic.ts b/frontend/src/scenes/authentication/loginLogic.ts index 1b574472e2afb..ca159812bc771 100644 --- a/frontend/src/scenes/authentication/loginLogic.ts +++ b/frontend/src/scenes/authentication/loginLogic.ts @@ -82,7 +82,7 @@ export const loginLogic = kea([ return { status: 'pending' } } - await breakpoint() + breakpoint() const response = await api.create('api/login/precheck', { email }) return { status: 'completed', ...response } }, @@ -101,7 +101,7 @@ export const loginLogic = kea([ : undefined, }), submit: async ({ email, password }, breakpoint) => { - await breakpoint() + breakpoint() try { return await api.create('api/login', { email, password }) } catch (e) { diff --git a/frontend/src/scenes/authentication/passwordResetLogic.ts b/frontend/src/scenes/authentication/passwordResetLogic.ts index 
b9df684b9cfa9..b4835fcce495e 100644 --- a/frontend/src/scenes/authentication/passwordResetLogic.ts +++ b/frontend/src/scenes/authentication/passwordResetLogic.ts @@ -63,7 +63,7 @@ export const passwordResetLogic = kea([ email: !email ? 'Please enter your email to continue' : undefined, }), submit: async ({ email }, breakpoint) => { - await breakpoint() + breakpoint() try { await api.create('api/reset/', { email }) diff --git a/frontend/src/scenes/authentication/setup2FALogic.ts b/frontend/src/scenes/authentication/setup2FALogic.ts index 22d6acb5b0ec5..b76de829f2cdb 100644 --- a/frontend/src/scenes/authentication/setup2FALogic.ts +++ b/frontend/src/scenes/authentication/setup2FALogic.ts @@ -42,7 +42,7 @@ export const setup2FALogic = kea([ {}, { setup: async (_, breakpoint) => { - await breakpoint() + breakpoint() await api.get('api/users/@me/start_2fa_setup/') return { status: 'completed' } }, @@ -56,7 +56,7 @@ export const setup2FALogic = kea([ token: !token ? 'Please enter a token to continue' : undefined, }), submit: async ({ token }, breakpoint) => { - await breakpoint() + breakpoint() try { return await api.create('api/users/@me/validate_2fa/', { token }) } catch (e) { diff --git a/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts b/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts index a7a6b798d82d7..3806235fe9939 100644 --- a/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts +++ b/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts @@ -87,7 +87,7 @@ export const signupLogic = kea([ organization_name: !organization_name ? 
'Please enter your organization name' : undefined, }), submit: async (payload, breakpoint) => { - await breakpoint() + breakpoint() try { const res = await api.create('api/signup/', { ...values.signupPanel1, ...payload }) location.href = res.redirect_url || '/' diff --git a/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx b/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx index 7f5a66fd4798d..3f6dfcf661597 100644 --- a/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx +++ b/frontend/src/scenes/batch_exports/BatchExportEditScene.tsx @@ -19,7 +19,7 @@ export function BatchExportsEditScene(): JSX.Element { return ( <> - +
diff --git a/frontend/src/scenes/batch_exports/BatchExports.scss b/frontend/src/scenes/batch_exports/BatchExports.scss index 835ed19de7ec9..507ce76135072 100644 --- a/frontend/src/scenes/batch_exports/BatchExports.scss +++ b/frontend/src/scenes/batch_exports/BatchExports.scss @@ -7,7 +7,7 @@ @keyframes BatchExportRunIcon__pulse { 0% { outline-offset: 0; - outline-color: var(--primary-light); + outline-color: var(--primary-3000-hover); } 80% { diff --git a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts index 5931b444d080d..0fa15c3651b99 100644 --- a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts @@ -19,9 +19,10 @@ import { beforeUnload, router } from 'kea-router' import type { batchExportsEditLogicType } from './batchExportEditLogicType' import { dayjs, Dayjs } from 'lib/dayjs' import { batchExportLogic } from './batchExportLogic' +import { Scene } from 'scenes/sceneTypes' export type BatchExportsEditLogicProps = { - id: string | 'new' + id: string } export type BatchExportConfigurationForm = Omit< @@ -267,22 +268,25 @@ export const batchExportsEditLogic = kea([ (s) => [s.batchExportConfig, s.isNew], (config, isNew): Breadcrumb[] => [ { + key: Scene.BatchExports, name: 'Batch Exports', path: urls.batchExports(), }, ...(isNew ? [ { + key: 'new', name: 'New', }, ] : [ { - name: config?.name ?? 'Loading', + key: config?.id ?? 'loading', + name: config?.name, path: config?.id ? 
urls.batchExport(config.id) : undefined, }, - { + key: 'edit', name: 'Edit', }, ]), diff --git a/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts b/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts index b944ad32f546a..9d9825dea2865 100644 --- a/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportEditSceneLogic.ts @@ -8,6 +8,7 @@ import { batchExportLogic } from './batchExportLogic' import { BatchExportsEditLogicProps } from './batchExportEditLogic' import type { batchExportsEditSceneLogicType } from './batchExportEditSceneLogicType' +import { Scene } from 'scenes/sceneTypes' export const batchExportsEditSceneLogic = kea([ props({} as BatchExportsEditLogicProps), @@ -23,22 +24,25 @@ export const batchExportsEditSceneLogic = kea([ (s) => [s.batchExportConfig, s.id], (config, id): Breadcrumb[] => [ { + key: Scene.BatchExports, name: 'Batch Exports', path: urls.batchExports(), }, ...(id === 'new' ? [ { + key: 'new', name: 'New', }, ] : [ { - name: config?.name ?? 'Loading', + key: config?.id || 'loading', + name: config?.name, path: config?.id ? 
urls.batchExport(config.id) : undefined, }, - { + key: 'edit', name: 'Edit', }, ]), diff --git a/frontend/src/scenes/batch_exports/batchExportLogic.ts b/frontend/src/scenes/batch_exports/batchExportLogic.ts index 66cf9de3e2bfa..6c0c5ca8fcec1 100644 --- a/frontend/src/scenes/batch_exports/batchExportLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportLogic.ts @@ -11,6 +11,7 @@ import { dayjs, Dayjs } from 'lib/dayjs' import { urls } from 'scenes/urls' import type { batchExportLogicType } from './batchExportLogicType' import { router } from 'kea-router' +import { Scene } from 'scenes/sceneTypes' export type BatchExportLogicProps = { id: string @@ -228,11 +229,13 @@ export const batchExportLogic = kea([ (s) => [s.batchExportConfig], (config): Breadcrumb[] => [ { + key: Scene.BatchExports, name: 'Batch Exports', path: urls.batchExports(), }, { - name: config?.name ?? 'Loading', + key: config?.id || 'loading', + name: config?.name, }, ], ], diff --git a/frontend/src/scenes/batch_exports/utils.ts b/frontend/src/scenes/batch_exports/utils.ts index 16ebf9d3176ef..56c07e26a7ccf 100644 --- a/frontend/src/scenes/batch_exports/utils.ts +++ b/frontend/src/scenes/batch_exports/utils.ts @@ -25,6 +25,10 @@ export function humanizeDestination(destination: BatchExportDestination): string return `postgresql://${destination.config.user}:***@${destination.config.host}:${destination.config.port}/${destination.config.database}` } + if (destination.type === 'Redshift') { + return `redshift://${destination.config.user}:***@${destination.config.host}:${destination.config.port}/${destination.config.database}` + } + if (destination.type === 'BigQuery') { return `bigquery:${destination.config.project_id}:${destination.config.dataset_id}:${destination.config.table_id}` } diff --git a/frontend/src/scenes/billing/PlanTable.tsx b/frontend/src/scenes/billing/PlanTable.tsx index d83611f0609f6..d406171b954e9 100644 --- a/frontend/src/scenes/billing/PlanTable.tsx +++ 
b/frontend/src/scenes/billing/PlanTable.tsx @@ -201,7 +201,9 @@ export function PlanTable({ redirectPath }: { redirectPath: string }): JSX.Eleme

{plans?.map((plan) => ( - {getProductTiers(plan, product.type)} + + {getProductTiers(plan, product.type)} + ))} )) diff --git a/frontend/src/scenes/billing/billingLogic.ts b/frontend/src/scenes/billing/billingLogic.ts index 4e694d0d115b2..60aa5bf9dc27b 100644 --- a/frontend/src/scenes/billing/billingLogic.ts +++ b/frontend/src/scenes/billing/billingLogic.ts @@ -12,7 +12,6 @@ import { userLogic } from 'scenes/userLogic' import { pluralize } from 'lib/utils' import type { billingLogicType } from './billingLogicType' import { forms } from 'kea-forms' -import { urls } from 'scenes/urls' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' export const ALLOCATION_THRESHOLD_ALERT = 0.85 // Threshold to show warning of event usage near limit @@ -77,9 +76,7 @@ export const billingLogic = kea([ '' as string, { setRedirectPath: () => { - return window.location.pathname.includes('/ingestion') - ? urls.ingestion() + '/billing' - : window.location.pathname.includes('/onboarding') + return window.location.pathname.includes('/onboarding') ? 
window.location.pathname + window.location.search : '' }, @@ -88,7 +85,7 @@ export const billingLogic = kea([ isOnboarding: [ false, { - setIsOnboarding: () => window.location.pathname.includes('/ingestion'), + setIsOnboarding: () => window.location.pathname.includes('/onboarding'), }, ], }), diff --git a/frontend/src/scenes/cohorts/Cohorts.scss b/frontend/src/scenes/cohorts/Cohorts.scss index 8dd6ee18eeee8..ba03ee05269f4 100644 --- a/frontend/src/scenes/cohorts/Cohorts.scss +++ b/frontend/src/scenes/cohorts/Cohorts.scss @@ -22,7 +22,7 @@ border-radius: 4px !important; &:hover { - border-color: var(--primary-light) !important; + border-color: var(--primary-3000-hover) !important; } .ant-upload-drag-container { diff --git a/frontend/src/scenes/cohorts/cohortEditLogic.test.ts b/frontend/src/scenes/cohorts/cohortEditLogic.test.ts index 37061f8217024..abab2aacfdae5 100644 --- a/frontend/src/scenes/cohorts/cohortEditLogic.test.ts +++ b/frontend/src/scenes/cohorts/cohortEditLogic.test.ts @@ -77,8 +77,8 @@ describe('cohortEditLogic', () => { it('delete cohort', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort(mockCohort) - await logic.actions.deleteCohort() + logic.actions.setCohort(mockCohort) + logic.actions.deleteCohort() }) .toFinishAllListeners() .toDispatchActions(['setCohort', 'deleteCohort', router.actionCreators.push(urls.cohorts())]) @@ -92,7 +92,7 @@ describe('cohortEditLogic', () => { it('save with valid cohort', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, filters: { properties: { @@ -116,7 +116,7 @@ describe('cohortEditLogic', () => { }, }, }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }).toDispatchActions(['setCohort', 'submitCohort', 'submitCohortSuccess']) expect(api.update).toBeCalledTimes(1) }) @@ -124,11 +124,11 @@ describe('cohortEditLogic', 
() => { it('do not save with invalid name', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, name: '', }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }).toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) expect(api.update).toBeCalledTimes(0) }) @@ -137,7 +137,7 @@ describe('cohortEditLogic', () => { it('do not save on OR operator', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, filters: { properties: { @@ -171,7 +171,7 @@ describe('cohortEditLogic', () => { }, }, }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }) .toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) .toMatchValues({ @@ -199,7 +199,7 @@ describe('cohortEditLogic', () => { it('do not save on less than one positive matching criteria', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, filters: { properties: { @@ -225,7 +225,7 @@ describe('cohortEditLogic', () => { }, }, }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }) .toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) .toMatchValues({ @@ -252,7 +252,7 @@ describe('cohortEditLogic', () => { it('do not save on criteria cancelling each other out', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, filters: { properties: { @@ -286,7 +286,7 @@ describe('cohortEditLogic', () => { }, }, }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }) .toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) .toMatchValues({ @@ -317,7 +317,7 @@ 
describe('cohortEditLogic', () => { it('do not save on invalid lower and upper bound period values - perform event regularly', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, filters: { properties: { @@ -346,7 +346,7 @@ describe('cohortEditLogic', () => { }, }, }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }) .toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) .toMatchValues({ @@ -376,7 +376,7 @@ describe('cohortEditLogic', () => { it('do not save on invalid lower and upper bound period values - perform events in sequence', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, filters: { properties: { @@ -403,7 +403,7 @@ describe('cohortEditLogic', () => { }, }, }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }) .toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) .toMatchValues({ @@ -435,7 +435,7 @@ describe('cohortEditLogic', () => { it(`${key} row missing all required fields`, async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, filters: { properties: { @@ -461,7 +461,7 @@ describe('cohortEditLogic', () => { }, }, }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }) .toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) .toMatchValues({ @@ -496,13 +496,13 @@ describe('cohortEditLogic', () => { it('can save existing static cohort with empty csv', async () => { await initCohortLogic({ id: 1 }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, is_static: true, groups: [], csv: undefined, }) - await logic.actions.submitCohort() + 
logic.actions.submitCohort() }).toDispatchActions(['setCohort', 'submitCohort', 'submitCohortSuccess']) expect(api.update).toBeCalledTimes(1) }) @@ -510,14 +510,14 @@ describe('cohortEditLogic', () => { it('do not save static cohort with empty csv', async () => { await initCohortLogic({ id: 'new' }) await expectLogic(logic, async () => { - await logic.actions.setCohort({ + logic.actions.setCohort({ ...mockCohort, is_static: true, groups: [], csv: undefined, id: 'new', }) - await logic.actions.submitCohort() + logic.actions.submitCohort() }).toDispatchActions(['setCohort', 'submitCohort', 'submitCohortFailure']) expect(api.update).toBeCalledTimes(0) }) diff --git a/frontend/src/scenes/cohorts/cohortEditLogic.ts b/frontend/src/scenes/cohorts/cohortEditLogic.ts index 0b53d5ce25e6a..6c836e0a32887 100644 --- a/frontend/src/scenes/cohorts/cohortEditLogic.ts +++ b/frontend/src/scenes/cohorts/cohortEditLogic.ts @@ -74,7 +74,7 @@ export const cohortEditLogic = kea([ reducers(({ props, selectors }) => ({ cohort: [ - NEW_COHORT as CohortType, + NEW_COHORT, { setOuterGroupsType: (state, { type }) => ({ ...state, @@ -212,7 +212,7 @@ export const cohortEditLogic = kea([ loaders(({ actions, values, key }) => ({ cohort: [ - NEW_COHORT as CohortType, + NEW_COHORT, { setCohort: ({ cohort }) => processCohort(cohort), fetchCohort: async ({ id }, breakpoint) => { @@ -316,9 +316,15 @@ export const cohortEditLogic = kea([ cohortsModel.findMounted()?.actions.deleteCohort({ id: values.cohort.id, name: values.cohort.name }) router.actions.push(urls.cohorts()) }, + submitCohort: () => { + if (values.cohortHasErrors) { + lemonToast.error('There was an error submitting this cohort.
Make sure the cohort filters are correct.') + } + }, checkIfFinishedCalculating: async ({ cohort }, breakpoint) => { if (cohort.is_calculating) { actions.setPollTimeout( + // eslint-disable-next-line @typescript-eslint/no-misused-promises window.setTimeout(async () => { const newCohort = await api.cohorts.get(cohort.id) breakpoint() diff --git a/frontend/src/scenes/cohorts/cohortSceneLogic.ts b/frontend/src/scenes/cohorts/cohortSceneLogic.ts index 52ea69b500b72..1af9d5cf7adfc 100644 --- a/frontend/src/scenes/cohorts/cohortSceneLogic.ts +++ b/frontend/src/scenes/cohorts/cohortSceneLogic.ts @@ -5,6 +5,7 @@ import { cohortsModel } from '~/models/cohortsModel' import { CohortLogicProps } from './cohortEditLogic' import type { cohortSceneLogicType } from './cohortSceneLogicType' +import { Scene } from 'scenes/sceneTypes' export const cohortSceneLogic = kea([ props({} as CohortLogicProps), @@ -13,15 +14,22 @@ export const cohortSceneLogic = kea([ selectors({ breadcrumbs: [ - () => [cohortsModel.selectors.cohortsById, (_, props) => props.id], + () => [cohortsModel.selectors.cohortsById, (_, props) => props.id as CohortLogicProps['id']], (cohortsById, cohortId): Breadcrumb[] => { return [ { + key: Scene.PersonsManagement, + name: 'People', + path: urls.persons(), + }, + { + key: 'cohorts', name: 'Cohorts', path: urls.cohorts(), }, { - name: cohortId !== 'new' ? cohortsById[cohortId]?.name || 'Untitled' : 'Untitled', + key: cohortId || 'loading', + name: cohortId && cohortId !== 'new' ? 
cohortsById[cohortId]?.name || 'Untitled' : 'Untitled', }, ] }, diff --git a/frontend/src/scenes/dashboard/DashboardHeader.tsx b/frontend/src/scenes/dashboard/DashboardHeader.tsx index 806b6a3ada25d..113479b97d1ee 100644 --- a/frontend/src/scenes/dashboard/DashboardHeader.tsx +++ b/frontend/src/scenes/dashboard/DashboardHeader.tsx @@ -364,14 +364,14 @@ export function DashboardHeader(): JSX.Element | null { onChange={(_, tags) => triggerDashboardUpdate({ tags })} saving={dashboardLoading} tagsAvailable={tags.filter((tag) => !dashboard.tags?.includes(tag))} - className="insight-metadata-tags" + className="mt-2" /> ) : dashboard.tags.length ? ( ) : null} diff --git a/frontend/src/scenes/dashboard/DashboardItems.scss b/frontend/src/scenes/dashboard/DashboardItems.scss index 7cf51ba05a33f..8bd848c3b2705 100644 --- a/frontend/src/scenes/dashboard/DashboardItems.scss +++ b/frontend/src/scenes/dashboard/DashboardItems.scss @@ -41,8 +41,8 @@ transition: 100ms ease; max-width: 100%; position: relative; - border: 1px solid var(--primary); - outline: 1px solid var(--primary); + border: 1px solid var(--primary-3000); + outline: 1px solid var(--primary-3000); border-radius: var(--radius); z-index: 2; user-select: none; diff --git a/frontend/src/scenes/dashboard/dashboardLogic.test.ts b/frontend/src/scenes/dashboard/dashboardLogic.test.ts index eb04138096c8f..af0243b011440 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.test.ts +++ b/frontend/src/scenes/dashboard/dashboardLogic.test.ts @@ -1,4 +1,3 @@ -/* eslint-disable @typescript-eslint/no-non-null-assertion */ // let tiles assert an insight is present in tests i.e. 
`tile!.insight` when it must be present for tests to pass import { expectLogic, truth } from 'kea-test-utils' import { initKeaTests } from '~/test/init' @@ -33,9 +32,9 @@ export function insightOnDashboard( insight: Partial = {} ): InsightModel { const tiles = dashboardJson.tiles.filter((tile) => !!tile.insight && tile.insight?.id === insightId) - let tile = dashboardJson.tiles[0] as DashboardTile + let tile = dashboardJson.tiles[0] if (tiles.length) { - tile = tiles[0] as DashboardTile + tile = tiles[0] } if (!tile.insight) { throw new Error('tile has no insight') @@ -218,7 +217,7 @@ describe('dashboardLogic', () => { const fromIndex = from.tiles.findIndex( (tile) => !!tile.insight && tile.insight.id === tileToUpdate.insight.id ) - const removedTile = from.tiles.splice(fromIndex, 1)[0] as DashboardTile + const removedTile = from.tiles.splice(fromIndex, 1)[0] // update the insight const insightId = tileToUpdate.insight.id @@ -354,7 +353,7 @@ describe('dashboardLogic', () => { const startingDashboard = dashboards['9'] const tiles = startingDashboard.tiles - const sourceTile = tiles[0] as DashboardTile + const sourceTile = tiles[0] await expectLogic(logic) .toFinishAllListeners() @@ -530,11 +529,11 @@ describe('dashboardLogic', () => { ]) .toMatchValues({ refreshStatus: { - [(dashboards['5'].tiles[0] as DashboardTile).insight!.short_id]: { + [dashboards['5'].tiles[0].insight!.short_id]: { loading: true, timer: expect.anything(), }, - [(dashboards['5'].tiles[1] as DashboardTile).insight!.short_id]: { + [dashboards['5'].tiles[1].insight!.short_id]: { loading: true, timer: expect.anything(), }, @@ -548,29 +547,21 @@ describe('dashboardLogic', () => { // and updates the action in the model (a) => a.type === dashboardsModel.actionTypes.updateDashboardInsight && - a.payload.insight.short_id === - (dashboards['5'].tiles[1] as DashboardTile).insight!.short_id, + a.payload.insight.short_id === dashboards['5'].tiles[1].insight!.short_id, (a) => a.type === 
dashboardsModel.actionTypes.updateDashboardInsight && - a.payload.insight.short_id === - (dashboards['5'].tiles[0] as DashboardTile).insight!.short_id, + a.payload.insight.short_id === dashboards['5'].tiles[0].insight!.short_id, // no longer reloading - logic.actionCreators.setRefreshStatus( - (dashboards['5'].tiles[0] as DashboardTile).insight!.short_id, - false - ), - logic.actionCreators.setRefreshStatus( - (dashboards['5'].tiles[1] as DashboardTile).insight!.short_id, - false - ), + logic.actionCreators.setRefreshStatus(dashboards['5'].tiles[0].insight!.short_id, false), + logic.actionCreators.setRefreshStatus(dashboards['5'].tiles[1].insight!.short_id, false), ]) .toMatchValues({ refreshStatus: { - [(dashboards['5'].tiles[0] as DashboardTile).insight!.short_id]: { + [dashboards['5'].tiles[0].insight!.short_id]: { refreshed: true, timer: expect.anything(), }, - [(dashboards['5'].tiles[1] as DashboardTile).insight!.short_id]: { + [dashboards['5'].tiles[1].insight!.short_id]: { refreshed: true, timer: expect.anything(), }, @@ -585,21 +576,18 @@ describe('dashboardLogic', () => { it('reloads selected items', async () => { await expectLogic(logic, () => { logic.actions.refreshAllDashboardItems({ - tiles: [dashboards['5'].tiles[0] as DashboardTile], + tiles: [dashboards['5'].tiles[0]], action: 'refresh_manual', }) }) .toFinishAllListeners() .toDispatchActions([ 'refreshAllDashboardItems', - logic.actionCreators.setRefreshStatuses( - [(dashboards['5'].tiles[0] as DashboardTile).insight!.short_id], - true - ), + logic.actionCreators.setRefreshStatuses([dashboards['5'].tiles[0].insight!.short_id], true), ]) .toMatchValues({ refreshStatus: { - [(dashboards['5'].tiles[0] as DashboardTile).insight!.short_id]: { + [dashboards['5'].tiles[0].insight!.short_id]: { loading: true, timer: expect.anything(), }, @@ -612,16 +600,12 @@ describe('dashboardLogic', () => { .toDispatchActionsInAnyOrder([ (a) => a.type === dashboardsModel.actionTypes.updateDashboardInsight && - 
a.payload.insight.short_id === - (dashboards['5'].tiles[0] as DashboardTile).insight!.short_id, - logic.actionCreators.setRefreshStatus( - (dashboards['5'].tiles[0] as DashboardTile).insight!.short_id, - false - ), + a.payload.insight.short_id === dashboards['5'].tiles[0].insight!.short_id, + logic.actionCreators.setRefreshStatus(dashboards['5'].tiles[0].insight!.short_id, false), ]) .toMatchValues({ refreshStatus: { - [(dashboards['5'].tiles[0] as DashboardTile).insight!.short_id]: { + [dashboards['5'].tiles[0].insight!.short_id]: { refreshed: true, timer: expect.anything(), }, @@ -859,4 +843,3 @@ describe('dashboardLogic', () => { ).toEqual([]) }) }) -/* eslint-enable @typescript-eslint/no-non-null-assertion */ diff --git a/frontend/src/scenes/dashboard/dashboardLogic.tsx b/frontend/src/scenes/dashboard/dashboardLogic.tsx index 5b05ba3edf664..0ceb6c33ee07a 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.tsx +++ b/frontend/src/scenes/dashboard/dashboardLogic.tsx @@ -53,6 +53,7 @@ import { getResponseBytes, sortDates } from '../insights/utils' import { loaders } from 'kea-loaders' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { calculateLayouts } from 'scenes/dashboard/tileLayouts' +import { Scene } from 'scenes/sceneTypes' export const BREAKPOINTS: Record = { sm: 1024, @@ -231,7 +232,7 @@ export const dashboardLogic = kea([ filters: values.filters, }) } catch (e) { - lemonToast.error('Could not update dashboardFilters: ' + e) + lemonToast.error('Could not update dashboardFilters: ' + String(e)) return values.dashboard } }, @@ -265,7 +266,7 @@ export const dashboardLogic = kea([ tiles: values.tiles.filter((t) => t.id !== tile.id), } as DashboardType } catch (e) { - lemonToast.error('Could not remove tile from dashboard: ' + e) + lemonToast.error('Could not remove tile from dashboard: ' + String(e)) return values.dashboard } }, @@ -280,7 +281,7 @@ export const dashboardLogic = kea([ tiles: [newTile], } as Partial) } catch (e) { - 
lemonToast.error('Could not duplicate tile: ' + e) + lemonToast.error('Could not duplicate tile: ' + String(e)) return values.dashboard } }, @@ -464,7 +465,7 @@ export const dashboardLogic = kea([ tiles[tileIndex] = { ...tiles[tileIndex], insight: { - ...((tiles[tileIndex] as DashboardTile).insight as InsightModel), + ...(tiles[tileIndex].insight as InsightModel), name: item.name, last_modified_at: item.last_modified_at, }, @@ -734,11 +735,22 @@ export const dashboardLogic = kea([ (s) => [s.dashboard], (dashboard): Breadcrumb[] => [ { + key: Scene.Dashboards, name: 'Dashboards', path: urls.dashboards(), }, { + key: dashboard?.id || 'new', name: dashboard?.id ? dashboard.name || 'Unnamed' : null, + onRename: async (name) => { + if (dashboard) { + await dashboardsModel.asyncActions.updateDashboard({ + id: dashboard.id, + name, + allowUndo: true, + }) + } + }, }, ], ], @@ -961,7 +973,7 @@ export const dashboardLogic = kea([ ) actions.setRefreshStatus(insight.short_id) - captureTimeToSeeData(values.currentTeamId, { + void captureTimeToSeeData(values.currentTeamId, { type: 'insight_load', context: 'dashboard', primary_interaction_id: dashboardQueryId, @@ -1000,13 +1012,13 @@ export const dashboardLogic = kea([ insights_fetched: insights.length, insights_fetched_cached: 0, } - captureTimeToSeeData(values.currentTeamId, { + void captureTimeToSeeData(values.currentTeamId, { ...payload, is_primary_interaction: !initialLoad, }) if (initialLoad) { const { startTime, responseBytes } = values.dashboardLoadTimerData - captureTimeToSeeData(values.currentTeamId, { + void captureTimeToSeeData(values.currentTeamId, { ...payload, action: 'initial_load_full', time_to_see_data_ms: Math.floor(performance.now() - startTime), @@ -1017,9 +1029,13 @@ export const dashboardLogic = kea([ } }) - function loadNextPromise(): void { + async function loadNextPromise(): Promise { if (!cancelled && fetchItemFunctions.length > 0) { - fetchItemFunctions.shift()?.().then(loadNextPromise) + const 
nextPromise = fetchItemFunctions.shift() + if (nextPromise) { + await nextPromise() + await loadNextPromise() + } } } @@ -1066,7 +1082,7 @@ export const dashboardLogic = kea([ } }, loadDashboardItemsSuccess: function (...args) { - sharedListeners.reportLoadTiming(...args) + void sharedListeners.reportLoadTiming(...args) const dashboard = values.dashboard as DashboardType const { action, dashboardQueryId, startTime, responseBytes } = values.dashboardLoadTimerData @@ -1123,9 +1139,9 @@ export const dashboardLogic = kea([ is_primary_interaction: !initialLoad, } - captureTimeToSeeData(values.currentTeamId, payload) + void captureTimeToSeeData(values.currentTeamId, payload) if (initialLoad && allLoaded) { - captureTimeToSeeData(values.currentTeamId, { + void captureTimeToSeeData(values.currentTeamId, { ...payload, action: 'initial_load_full', is_primary_interaction: true, diff --git a/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.ts b/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.ts index 015a792e1b3af..321d7f9a5cd4d 100644 --- a/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.ts +++ b/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.ts @@ -47,7 +47,7 @@ export const dashboardsLogic = kea([ ], filters: [ - DEFAULT_FILTERS as DashboardsFilters, + DEFAULT_FILTERS, { setFilters: (state, { filters }) => objectClean({ diff --git a/frontend/src/scenes/data-management/DataManagementScene.tsx b/frontend/src/scenes/data-management/DataManagementScene.tsx index 0513de6e11e5e..5a3bf879ea225 100644 --- a/frontend/src/scenes/data-management/DataManagementScene.tsx +++ b/frontend/src/scenes/data-management/DataManagementScene.tsx @@ -9,7 +9,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { LemonTag } from 'lib/lemon-ui/LemonTag/LemonTag' import { LemonTab, LemonTabs } from 'lib/lemon-ui/LemonTabs' import React from 'react' -import { SceneExport } from 'scenes/sceneTypes' +import { Scene, SceneExport } from 
'scenes/sceneTypes' import { PageHeader } from 'lib/components/PageHeader' import { NewActionButton } from 'scenes/actions/NewActionButton' import { Annotations } from 'scenes/annotations' @@ -96,7 +96,7 @@ const tabs: Record< }, [DataManagementTab.IngestionWarnings]: { url: urls.ingestionWarnings(), - label: 'Ingestion Warnings', + label: 'Ingestion warnings', content: , }, [DataManagementTab.Database]: { @@ -135,10 +135,12 @@ const dataManagementSceneLogic = kea([ (tab): Breadcrumb[] => { return [ { + key: Scene.DataManagement, name: `Data Management`, path: tabs.events.url, }, { + key: tab, name: capitalizeFirstLetter(tab), path: tabs[tab].url, }, diff --git a/frontend/src/scenes/data-management/actions/ActionsTable.tsx b/frontend/src/scenes/data-management/actions/ActionsTable.tsx index aa9bf6aad9876..d7255f347ba9f 100644 --- a/frontend/src/scenes/data-management/actions/ActionsTable.tsx +++ b/frontend/src/scenes/data-management/actions/ActionsTable.tsx @@ -1,5 +1,4 @@ import { Link } from 'lib/lemon-ui/Link' -import { Radio } from 'antd' import { deleteWithUndo, stripHTTP } from 'lib/utils' import { useActions, useValues } from 'kea' import { actionsModel } from '~/models/actionsModel' @@ -17,7 +16,7 @@ import { LemonDivider } from 'lib/lemon-ui/LemonDivider' import { More } from 'lib/lemon-ui/LemonButton/More' import { combineUrl } from 'kea-router' import { ObjectTags } from 'lib/components/ObjectTags/ObjectTags' -import { LemonInput } from '@posthog/lemon-ui' +import { LemonInput, LemonSegmentedButton } from '@posthog/lemon-ui' import { actionsLogic } from 'scenes/actions/actionsLogic' import { IconCheckmark, IconPlayCircle } from 'lib/lemon-ui/icons' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' @@ -28,9 +27,9 @@ export function ActionsTable(): JSX.Element { const { actionsLoading } = useValues(actionsModel({ params: 'include_count=1' })) const { loadActions } = useActions(actionsModel) - const { filterByMe, 
searchTerm, actionsFiltered, shouldShowProductIntroduction, shouldShowEmptyState } = + const { filterType, searchTerm, actionsFiltered, shouldShowProductIntroduction, shouldShowEmptyState } = useValues(actionsLogic) - const { setFilterByMe, setSearchTerm } = useActions(actionsLogic) + const { setFilterType, setSearchTerm } = useActions(actionsLogic) const { hasAvailableFeature } = useValues(userLogic) const { updateHasSeenProductIntroFor } = useActions(userLogic) @@ -202,7 +201,7 @@ export function ActionsTable(): JSX.Element { - deleteWithUndo({ + void deleteWithUndo({ endpoint: api.actions.determineDeleteEndpoint(), object: action, callback: loadActions, @@ -237,7 +236,7 @@ export function ActionsTable(): JSX.Element { } /> )} - {(shouldShowEmptyState && filterByMe) || !shouldShowEmptyState ? ( + {(shouldShowEmptyState && filterType === 'me') || !shouldShowEmptyState ? (
- setFilterByMe(e.target.value)}> - All actions - My actions - +
) : null} - {(!shouldShowEmptyState || filterByMe) && ( + {(!shouldShowEmptyState || filterType === 'me') && ( <> ([ (definition, isEvent): Breadcrumb[] => { return [ { + key: Scene.DataManagement, name: `Data Management`, path: isEvent ? urls.eventDefinitions() : urls.propertyDefinitions(), }, { + key: isEvent ? DataManagementTab.EventDefinitions : DataManagementTab.PropertyDefinitions, name: isEvent ? 'Events' : 'Properties', path: isEvent ? urls.eventDefinitions() : urls.propertyDefinitions(), }, { + key: definition?.id || 'new', name: definition?.id !== 'new' ? getPropertyLabel(definition?.name) || 'Untitled' : 'Untitled', }, ] diff --git a/frontend/src/scenes/data-management/events/DefinitionHeader.tsx b/frontend/src/scenes/data-management/events/DefinitionHeader.tsx index cc34e2f364f9e..8edb6f208faab 100644 --- a/frontend/src/scenes/data-management/events/DefinitionHeader.tsx +++ b/frontend/src/scenes/data-management/events/DefinitionHeader.tsx @@ -1,19 +1,10 @@ import { EventDefinition, PropertyDefinition } from '~/types' -import { - IconAutocapture, - IconPageleave, - IconPreview, - PropertyIcon, - IconUnverifiedEvent, - IconVerifiedEvent, - VerifiedPropertyIcon, - IconSelectAll, -} from 'lib/lemon-ui/icons' +import { IconSelectAll } from 'lib/lemon-ui/icons' +import { IconBadge, IconBolt, IconCursor, IconEye, IconLeave, IconList, IconLogomark } from '@posthog/icons' import { getKeyMapping, KEY_MAPPING } from 'lib/taxonomy' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { TaxonomicFilterGroup, TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import clsx from 'clsx' import { Link } from 'lib/lemon-ui/Link' import { urls } from 'scenes/urls' import { @@ -25,20 +16,20 @@ export function getPropertyDefinitionIcon(definition: PropertyDefinition): JSX.E if (KEY_MAPPING.event[definition.name]) { return ( - + ) } if (definition.verified) { return ( - + ) } return ( - + ) 
} @@ -47,29 +38,32 @@ export function getEventDefinitionIcon(definition: EventDefinition & { value: st // Rest are events if (definition.name === '$pageview' || definition.name === '$screen') { return ( - - + + ) } if (definition.name === '$pageleave') { return ( - + ) } if (definition.name === '$autocapture') { + return + } + if (definition.name && definition.verified) { return ( - - + + ) } - if (definition.name && (definition.verified || !!KEY_MAPPING.event[definition.name])) { + if (definition.name && !!KEY_MAPPING.event[definition.name]) { return ( - - + + ) } @@ -81,8 +75,8 @@ export function getEventDefinitionIcon(definition: EventDefinition & { value: st ) } return ( - - + + ) } @@ -108,7 +102,7 @@ function RawDefinitionHeader({ const isLink = asLink && fullDetailUrl const innerContent = ( - + ) @@ -127,7 +121,27 @@ function RawDefinitionHeader({ {!hideIcon && icon &&
{icon}
} {!hideText && (
-
{linkedInnerContent}
+
+ {linkedInnerContent} + {definition.verified && ( + <> + + + + + )} + {!!KEY_MAPPING.event[definition.name] && ( + + + + )} +
{description ?
{description}
: null}
)} diff --git a/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss b/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss index 6f510e589a9c7..9cc22ad4acc2a 100644 --- a/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss +++ b/frontend/src/scenes/data-management/events/EventDefinitionsTable.scss @@ -1,6 +1,7 @@ .events-definition-table { .LemonTable__content > table > tbody { td.definition-column-icon { + padding-right: 0.5rem; width: 36px; .definition-column-name-icon { @@ -8,7 +9,7 @@ align-items: center; justify-content: center; width: 30px; - font-size: 1.5rem; + font-size: 1.2rem; svg.taxonomy-icon { flex-shrink: 0; @@ -37,16 +38,15 @@ justify-content: center; .definition-column-name-content-title { + align-items: center; + display: flex; font-weight: 600; - cursor: pointer; - position: relative; + gap: 0.25rem; overflow: visible; + position: relative; - &::before { - content: ''; - position: absolute; - inset: -5px -50px -5px -10px; - height: 22px; + svg { + color: var(--success); } } } diff --git a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts index 58ce1a766852c..af7c766e1889f 100644 --- a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts +++ b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.ts @@ -103,7 +103,7 @@ export const eventDefinitionsTableLogic = kea([ }), reducers({ filters: [ - cleanFilters({}) as Filters, + cleanFilters({}), { setFilters: (state, { filters }) => ({ ...state, diff --git a/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts b/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts index c3509561e1a42..c5ff0132e7a13 100644 --- a/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts +++ 
b/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts @@ -8,6 +8,8 @@ import type { ingestionWarningsLogicType } from './ingestionWarningsLogicType' import { teamLogic } from '../../teamLogic' import { range } from 'lib/utils' import { dayjs, dayjsUtcToTimezone } from 'lib/dayjs' +import { Scene } from 'scenes/sceneTypes' +import { DataManagementTab } from '../DataManagementScene' export interface IngestionWarningSummary { type: string @@ -47,11 +49,13 @@ export const ingestionWarningsLogic = kea([ (): Breadcrumb[] => { return [ { - name: `Data Management`, + key: Scene.DataManagement, + name: `Data management`, path: urls.eventDefinitions(), }, { - name: 'Ingestion Warnings', + key: DataManagementTab.IngestionWarnings, + name: 'Ingestion warnings', path: urls.ingestionWarnings(), }, ] diff --git a/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss b/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss index 50b3d7bfb9394..68dfef351b3c0 100644 --- a/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss +++ b/frontend/src/scenes/data-management/properties/PropertyDefinitionsTable.scss @@ -1,6 +1,7 @@ .event-properties-definition-table { .LemonTable__content > table > tbody { td.definition-column-icon { + padding-right: 0.5rem; width: 36px; .definition-column-name-icon { @@ -8,6 +9,7 @@ align-items: center; justify-content: center; width: 30px; + font-size: 1.2rem; svg.taxonomy-icon { flex-shrink: 0; @@ -32,16 +34,14 @@ justify-content: center; .definition-column-name-content-title { - font-weight: 600; + align-items: center; cursor: pointer; + display: flex; + font-weight: 600; + gap: 0.25rem; - &::before { - content: ''; - position: absolute; - top: -5px; - left: -10px; - right: -50px; - height: 22px; + svg { + color: var(--success); } } } diff --git a/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts 
b/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts index f83419217e84b..3aa493789acb8 100644 --- a/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts +++ b/frontend/src/scenes/data-management/properties/propertyDefinitionsTableLogic.ts @@ -193,7 +193,7 @@ export const propertyDefinitionsTableLogic = kea { const [type, index] = propertyType.split('::') actions.setFilters({ - type: type as string, + type: type, group_type_index: index ? +index : null, }) }, diff --git a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx index 7f0a7044b150c..168b307e29935 100644 --- a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx +++ b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx @@ -45,7 +45,7 @@ export function DataWarehouseTablesContainer(): JSX.Element { { - deleteWithUndo({ + void deleteWithUndo({ endpoint: `projects/${currentTeamId}/warehouse_tables`, object: { name: warehouseTable.name, id: warehouseTable.id }, callback: loadDataWarehouse, diff --git a/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx b/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx index 910336cef0de6..7ca0685b35192 100644 --- a/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx +++ b/frontend/src/scenes/data-warehouse/new_table/dataWarehouseTableLogic.tsx @@ -10,9 +10,11 @@ import { DataTableNode } from '~/queries/schema' import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' import type { dataWarehouseTableLogicType } from './dataWarehouseTableLogicType' import { dataWarehouseSceneLogic } from '../external/dataWarehouseSceneLogic' +import { Scene } from 'scenes/sceneTypes' export interface TableLogicProps { - id: string | 'new' + /** A UUID or 'new'. 
*/ + id: string } const NEW_WAREHOUSE_TABLE: DataWarehouseTable = { @@ -99,10 +101,12 @@ export const dataWarehouseTableLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: `Data Warehouse`, + key: Scene.DataWarehouse, + name: `Data warehouse`, path: urls.dataWarehouseExternal(), }, { + key: 'new', name: 'New', }, ], diff --git a/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx b/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx index 236486c58dd6d..c45ec66c3431b 100644 --- a/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx +++ b/frontend/src/scenes/data-warehouse/saved_queries/DataWarehouseSavedQueriesContainer.tsx @@ -67,7 +67,7 @@ export function DataWarehouseSavedQueriesContainer(): JSX.Element { { - deleteWithUndo({ + void deleteWithUndo({ endpoint: `projects/${currentTeamId}/warehouse_saved_queries`, object: { name: warehouseView.name, id: warehouseView.id }, callback: loadDataWarehouseSavedQueries, diff --git a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts index a7c40b36401b3..2941effa9d151 100644 --- a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts @@ -5,6 +5,7 @@ import { loaders } from 'kea-loaders' import api, { PaginatedResponse } from 'lib/api' import { ExternalDataStripeSource, Breadcrumb } from '~/types' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' export interface DataWarehouseSource {} @@ -49,10 +50,12 @@ export const dataWarehouseSettingsLogic = kea([ () => [], (): Breadcrumb[] => [ { + key: Scene.DataWarehouse, name: `Data Warehouse`, path: urls.dataWarehouseExternal(), }, { + key: Scene.DataWarehouseSettings, name: 'Data Warehouse Settings', path: urls.dataWarehouseSettings(), }, diff 
--git a/frontend/src/scenes/debug/HogQLDebug.tsx b/frontend/src/scenes/debug/HogQLDebug.tsx index 21cc7b0b13104..4ef2a6656bede 100644 --- a/frontend/src/scenes/debug/HogQLDebug.tsx +++ b/frontend/src/scenes/debug/HogQLDebug.tsx @@ -80,7 +80,25 @@ export function HogQLDebug({ query, setQuery, queryKey }: HogQLDebugProps): JSX. } value={query.modifiers?.inCohortVia ?? response?.modifiers?.inCohortVia} /> - {' '} + + + Materialization Mode: + + setQuery({ + ...query, + modifiers: { ...query.modifiers, materializationMode: value }, + } as HogQLQuery) + } + value={query.modifiers?.materializationMode ?? response?.modifiers?.materializationMode} + /> +
{dataLoading ? ( <> diff --git a/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts b/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts index df5aabcc583b9..0ea2885e60c3f 100644 --- a/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts +++ b/frontend/src/scenes/early-access-features/earlyAccessFeatureLogic.ts @@ -15,6 +15,7 @@ import type { earlyAccessFeatureLogicType } from './earlyAccessFeatureLogicType' import { earlyAccessFeaturesLogic } from './earlyAccessFeaturesLogic' import { teamLogic } from 'scenes/teamLogic' import { lemonToast } from '@posthog/lemon-ui' +import { Scene } from 'scenes/sceneTypes' export const NEW_EARLY_ACCESS_FEATURE: NewEarlyAccessFeatureType = { name: '', @@ -121,10 +122,14 @@ export const earlyAccessFeatureLogic = kea([ (s) => [s.earlyAccessFeature], (earlyAccessFeature: EarlyAccessFeatureType): Breadcrumb[] => [ { + key: Scene.EarlyAccessFeatures, name: 'Early Access Management', path: urls.earlyAccessFeatures(), }, - ...(earlyAccessFeature?.name ? 
[{ name: earlyAccessFeature.name }] : []), + { + key: earlyAccessFeature.id || 'new', + name: earlyAccessFeature.name, + }, ], ], }), @@ -170,9 +175,9 @@ export const earlyAccessFeatureLogic = kea([ } }, })), - afterMount(async ({ props, actions }) => { + afterMount(({ props, actions }) => { if (props.id !== 'new') { - await actions.loadEarlyAccessFeature() + actions.loadEarlyAccessFeature() } }), ]) diff --git a/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts b/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts index 707a5df337acc..b7032217494b4 100644 --- a/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts +++ b/frontend/src/scenes/early-access-features/earlyAccessFeaturesLogic.ts @@ -5,6 +5,7 @@ import { Breadcrumb, EarlyAccessFeatureType } from '~/types' import type { earlyAccessFeaturesLogicType } from './earlyAccessFeaturesLogicType' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' export const earlyAccessFeaturesLogic = kea([ path(['scenes', 'features', 'featuresLogic']), @@ -22,13 +23,14 @@ export const earlyAccessFeaturesLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: 'Early Access Management', + key: Scene.EarlyAccessFeatures, + name: 'Early access features', path: urls.earlyAccessFeatures(), }, ], ], }), - afterMount(async ({ actions }) => { - await actions.loadEarlyAccessFeatures() + afterMount(({ actions }) => { + actions.loadEarlyAccessFeatures() }), ]) diff --git a/frontend/src/scenes/events/Events.tsx b/frontend/src/scenes/events/Events.tsx index 164363a7b35cc..aa30ee553c381 100644 --- a/frontend/src/scenes/events/Events.tsx +++ b/frontend/src/scenes/events/Events.tsx @@ -14,7 +14,7 @@ export const scene: SceneExport = { export function Events(): JSX.Element { return ( <> - +
diff --git a/frontend/src/scenes/experiments/Experiment.tsx b/frontend/src/scenes/experiments/Experiment.tsx index 381aa91a8e51d..ae2a525b7d99e 100644 --- a/frontend/src/scenes/experiments/Experiment.tsx +++ b/frontend/src/scenes/experiments/Experiment.tsx @@ -250,7 +250,7 @@ export function Experiment(): JSX.Element { name={['parameters', 'feature_flag_variants', index]} >
- - {experiment.feature_flag?.key} - + {experiment.feature_flag && ( + + {experiment.feature_flag.key} + + )} diff --git a/frontend/src/scenes/experiments/Experiments.tsx b/frontend/src/scenes/experiments/Experiments.tsx index 5595ba60f5bca..ee702a4b77451 100644 --- a/frontend/src/scenes/experiments/Experiments.tsx +++ b/frontend/src/scenes/experiments/Experiments.tsx @@ -41,7 +41,7 @@ export function Experiments(): JSX.Element { const { hasAvailableFeature } = useValues(userLogic) const EXPERIMENTS_PRODUCT_DESCRIPTION = - 'Experiments help you test changes to your product to see which changes will lead to optimal results. Automatic statistical calculations let you see if the results are valid or if they are likely just a chance occurrence.' + 'A/B testing help you test changes to your product to see which changes will lead to optimal results. Automatic statistical calculations let you see if the results are valid or if they are likely just a chance occurrence.' const getExperimentDuration = (experiment: Experiment): number | undefined => { return experiment.end_date @@ -144,7 +144,7 @@ export function Experiments(): JSX.Element { return (
Experiments
} + title={
A/B testing
} buttons={ hasAvailableFeature(AvailableFeature.EXPERIMENTATION) ? ( @@ -154,14 +154,13 @@ export function Experiments(): JSX.Element { } caption={ <> - Check out our {' '} - Experimentation user guide + Visit the guide {' '} to learn more. @@ -182,7 +181,7 @@ export function Experiments(): JSX.Element { {(shouldShowEmptyState || shouldShowProductIntroduction) && (tab === ExperimentsTabs.Archived ? ( ) : ( diff --git a/frontend/src/scenes/experiments/SecondaryMetrics.tsx b/frontend/src/scenes/experiments/SecondaryMetrics.tsx index 75d22a10d19ec..9c22022c5efa4 100644 --- a/frontend/src/scenes/experiments/SecondaryMetrics.tsx +++ b/frontend/src/scenes/experiments/SecondaryMetrics.tsx @@ -1,4 +1,3 @@ -import { Col, Row } from 'antd' import { useActions, useValues } from 'kea' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { Form } from 'kea-forms' @@ -163,11 +162,11 @@ export function SecondaryMetrics({ {experimentId == 'new' || editingExistingExperiment ? ( - - +
+
{metrics.map((metric, idx) => ( - - +
+
{metric.name}
@@ -185,7 +184,7 @@ export function SecondaryMetrics({ onClick={() => deleteMetric(idx)} />
- +
{metric.filters.insight === InsightType.FUNNELS && ( )} -
+
))} {metrics && !(metrics.length > 2) && ( - -
- - Add metric - -
- +
+ + Add metric + +
)} - - +
+
) : ( <>
Secondary metrics
diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 6f28ea14006b9..7b314e42f979a 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -41,6 +41,7 @@ import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' import { InsightVizNode } from '~/queries/schema' import { groupsModel } from '~/models/groupsModel' +import { Scene } from 'scenes/sceneTypes' export const DEFAULT_DURATION = 14 // days @@ -634,10 +635,12 @@ export const experimentLogic = kea([ (s) => [s.experiment, s.experimentId], (experiment, experimentId): Breadcrumb[] => [ { + key: Scene.Experiments, name: 'Experiments', path: urls.experiments(), }, { + key: experimentId, name: experiment?.name || 'New', path: urls.experiment(experimentId || 'new'), }, diff --git a/frontend/src/scenes/experiments/secondaryMetricsLogic.ts b/frontend/src/scenes/experiments/secondaryMetricsLogic.ts index 9687b9e3b7627..a746107873357 100644 --- a/frontend/src/scenes/experiments/secondaryMetricsLogic.ts +++ b/frontend/src/scenes/experiments/secondaryMetricsLogic.ts @@ -121,7 +121,7 @@ export const secondaryMetricsLogic = kea([ })), forms(({ props }) => ({ secondaryMetricModal: { - defaults: defaultFormValuesGenerator(props.defaultAggregationType) as SecondaryMetricForm, + defaults: defaultFormValuesGenerator(props.defaultAggregationType), errors: () => ({}), submit: async () => { // We don't use the form submit anymore diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.scss b/frontend/src/scenes/feature-flags/FeatureFlag.scss index 730181e4ad083..0b72506c172cb 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.scss +++ b/frontend/src/scenes/feature-flags/FeatureFlag.scss @@ -1,4 +1,4 @@ -.variant-form-list { +.VariantFormList { font-size: 13px; border: 1px solid 
var(--border); border-radius: var(--radius); @@ -19,6 +19,10 @@ align-items: center; } } + + .VariantFormList__row { + grid-template-columns: repeat(24, minmax(0, 1fr)); + } } .feature-flag-property-display { @@ -61,6 +65,6 @@ .FeatureConditionCard { .FeatureConditionCard--border--highlight { - border-color: var(--primary); + border-color: var(--primary-3000); } } diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index 1118c87a2b31d..9944499e472fa 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -1,6 +1,6 @@ import { useEffect, useState } from 'react' import { Form, Group } from 'kea-forms' -import { Row, Col, Radio, Popconfirm, Skeleton, Card } from 'antd' +import { Radio, Popconfirm, Skeleton, Card } from 'antd' import { useActions, useValues } from 'kea' import { alphabet, capitalizeFirstLetter } from 'lib/utils' import { featureFlagLogic } from './featureFlagLogic' @@ -78,7 +78,7 @@ export const scene: SceneExport = { function focusVariantKeyField(index: number): void { setTimeout( - () => document.querySelector(`.variant-form-list input[data-key-index="${index}"]`)?.focus(), + () => document.querySelector(`.VariantFormList input[data-key-index="${index}"]`)?.focus(), 50 ) } @@ -332,7 +332,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { tagsAvailable={tags.filter( (tag) => !featureFlag.tags?.includes(tag) )} - className="insight-metadata-tags" + className="mt-2" /> ) }} @@ -510,14 +510,14 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { tagsAvailable={tags.filter( (tag) => !featureFlag.tags?.includes(tag) )} - className="insight-metadata-tags" + className="mt-2" /> ) : featureFlag.tags.length ? ( ) : null} @@ -732,17 +732,17 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { <>

Variant keys

- - Key - Description - Payload - Rollout - +
+
Key
+
Description
+
Payload
+
Rollout
+
{variants.map((variant, index) => (
- - +
+
{variant.key} - - +
+
{variant.name || 'There is no description for this variant key'} - - +
+
{featureFlag.filters.payloads?.[index] ? ( )} - - {variant.rollout_percentage}% - +
+
{variant.rollout_percentage}%
+
{index !== variants.length - 1 && }
))} @@ -874,24 +874,22 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { No payload associated with this flag ) ) : ( - - -
- Specify a payload to be returned when the served value is{' '} - - true - -
- - - - - - -
+
+
+ Specify a payload to be returned when the served value is{' '} + + true + +
+ + + + + +
)}
)} @@ -899,33 +897,33 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element {

Variant keys

The rollout percentage of feature flag variants must add up to 100% -
- - - Variant key - Description - -
+
+
+
+
Variant key
+
Description
+
+
Payload - + Specify return payload when the variant key matches
- - - Rollout +
+
+ Rollout (Redistribute) - - +
+
{variants.map((variant, index) => ( - - +
+
- - +
+
- - +
+
- - +
+
{({ value, onChange }) => { return ( @@ -966,8 +964,8 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { ) }} - - +
+
{({ value, onChange }) => (
@@ -1012,28 +1010,25 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element {
)}
- - - - {variants.length > 1 && ( - } - status="primary-alt" - data-attr={`delete-prop-filter-${index}`} - noPadding - onClick={() => removeVariant(index)} - disabledReason={ - featureFlag.experiment_set && - featureFlag.experiment_set?.length > 0 - ? 'Cannot delete variants from a feature flag that is part of an experiment' - : undefined - } - tooltipPlacement="topRight" - /> - )} - - - +
+
+ {variants.length > 1 && ( + } + status="primary-alt" + data-attr={`delete-prop-filter-${index}`} + noPadding + onClick={() => removeVariant(index)} + disabledReason={ + featureFlag.experiment_set && featureFlag.experiment_set?.length > 0 + ? 'Cannot delete variants from a feature flag that is part of an experiment' + : undefined + } + tooltipPlacement="topRight" + /> + )} +
+
))} {variants.length > 0 && !areVariantRolloutsValid && ( diff --git a/frontend/src/scenes/feature-flags/FeatureFlagAutoRollout.tsx b/frontend/src/scenes/feature-flags/FeatureFlagAutoRollout.tsx index 93d251e7e4aa4..256e75c53dd60 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagAutoRollout.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagAutoRollout.tsx @@ -168,8 +168,8 @@ export function FeatureFlagAutoRollback({ readOnly }: FeatureFlagAutoRollbackPro
{sentryErrorCount ? ( - {humanFriendlyNumber(sentryErrorCount as number)} sentry - errors in the past 24 hours.{' '} + {humanFriendlyNumber(sentryErrorCount)} Sentry errors in the + past 24 hours.{' '} ) : ( @@ -204,7 +204,7 @@ export function FeatureFlagAutoRollback({ readOnly }: FeatureFlagAutoRollbackPro {humanFriendlyNumber( Math.round( - (sentryErrorCount as number) * + sentryErrorCount * (1 + (featureFlag.rollback_conditions[index] .threshold || 0) / diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx index 2178b250c9dbc..b7f21572aca70 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx @@ -1,4 +1,4 @@ -import { Row, Col, InputNumber, Select } from 'antd' +import { InputNumber, Select } from 'antd' import { useActions, useValues } from 'kea' import { capitalizeFirstLetter, humanFriendlyNumber } from 'lib/utils' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' @@ -75,7 +75,7 @@ export function FeatureFlagReleaseConditions({ const renderReleaseConditionGroup = (group: FeatureFlagGroupType, index: number): JSX.Element => { return ( - +
{index > 0 &&
OR
}
@@ -244,7 +244,7 @@ export function FeatureFlagReleaseConditions({ { - updateConditionSet(index, value as number) + updateConditionSet(index, value) }} value={group.rollout_percentage != null ? group.rollout_percentage : 100} min={0} @@ -314,7 +314,7 @@ export function FeatureFlagReleaseConditions({ )}
- +
) } @@ -327,7 +327,7 @@ export function FeatureFlagReleaseConditions({ const hasMatchingEarlyAccessFeature = featureFlag.features?.find((f: any) => f.flagKey === featureFlag.key) return ( - +
{index > 0 &&
OR
}
@@ -386,7 +386,7 @@ export function FeatureFlagReleaseConditions({
- +
) } @@ -456,11 +456,11 @@ export function FeatureFlagReleaseConditions({
)}
- +
{filterGroups.map((group, index) => isSuper ? renderSuperReleaseConditionGroup(group, index) : renderReleaseConditionGroup(group, index) )} - +
{!readOnly && ( }> Add condition set diff --git a/frontend/src/scenes/feature-flags/FeatureFlags.tsx b/frontend/src/scenes/feature-flags/FeatureFlags.tsx index b021107a84379..55ed1d2af5636 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlags.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlags.tsx @@ -158,8 +158,8 @@ export function OverViewTab({ <> { - await copyToClipboard(featureFlag.key, 'feature flag key') + onClick={() => { + void copyToClipboard(featureFlag.key, 'feature flag key') }} fullWidth > @@ -210,7 +210,7 @@ export function OverViewTab({ { - deleteWithUndo({ + void deleteWithUndo({ endpoint: `projects/${currentTeamId}/feature_flags`, object: { name: featureFlag.key, id: featureFlag.id }, callback: loadFeatureFlags, diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index aa331b35cbc12..3a232a5a25947 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -45,6 +45,7 @@ import { dashboardsLogic } from 'scenes/dashboard/dashboards/dashboardsLogic' import { organizationLogic } from '../organizationLogic' import { NEW_EARLY_ACCESS_FEATURE } from 'scenes/early-access-features/earlyAccessFeatureLogic' import { NEW_SURVEY, NewSurvey } from 'scenes/surveys/constants' +import { Scene } from 'scenes/sceneTypes' const getDefaultRollbackCondition = (): FeatureFlagRollbackConditions => ({ operator: 'gt', @@ -638,7 +639,7 @@ export const featureFlagLogic = kea([ actions.editFeatureFlag(false) }, deleteFeatureFlag: async ({ featureFlag }) => { - deleteWithUndo({ + await deleteWithUndo({ endpoint: `projects/${values.currentTeamId}/feature_flags`, object: { name: featureFlag.key, id: featureFlag.id }, callback: () => { @@ -779,7 +780,9 @@ export const featureFlagLogic = kea([ : 'copied' lemonToast.success(`Feature flag ${operation} successfully!`) } else { - lemonToast.error(`Error while saving feature flag: 
${featureFlagCopy?.failed || featureFlagCopy}`) + lemonToast.error( + `Error while saving feature flag: ${JSON.stringify(featureFlagCopy?.failed) || featureFlagCopy}` + ) } actions.loadProjectsWithCurrentFlag() @@ -834,10 +837,11 @@ export const featureFlagLogic = kea([ (s) => [s.featureFlag], (featureFlag): Breadcrumb[] => [ { + key: Scene.FeatureFlags, name: 'Feature Flags', path: urls.featureFlags(), }, - ...(featureFlag ? [{ name: featureFlag.key || 'Unnamed' }] : []), + { key: featureFlag.id || 'unknown', name: featureFlag.key || 'Unnamed' }, ], ], propertySelectErrors: [ diff --git a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts index e88e418635a2c..da81fca11890e 100644 --- a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts @@ -8,6 +8,7 @@ import { teamLogic } from '../teamLogic' import { urls } from 'scenes/urls' import { router, actionToUrl, urlToAction } from 'kea-router' import { LemonSelectOption } from 'lib/lemon-ui/LemonSelect' +import { Scene } from 'scenes/sceneTypes' export enum FeatureFlagsTab { OVERVIEW = 'overview', @@ -154,7 +155,8 @@ export const featureFlagsLogic = kea([ () => [], (): Breadcrumb[] => [ { - name: 'Feature Flags', + key: Scene.FeatureFlags, + name: 'Feature flags', path: urls.featureFlags(), }, ], diff --git a/frontend/src/scenes/feedback/inAppFeedbackLogic.ts b/frontend/src/scenes/feedback/inAppFeedbackLogic.ts index 8af2d525e9433..d6183c4b98d27 100644 --- a/frontend/src/scenes/feedback/inAppFeedbackLogic.ts +++ b/frontend/src/scenes/feedback/inAppFeedbackLogic.ts @@ -61,7 +61,7 @@ export const inAppFeedbackLogic = kea([ }, ], dataTableQuery: [ - DEFAULT_DATATABLE_QUERY as DataTableNode, + DEFAULT_DATATABLE_QUERY, { setDataTableQuery: (_, { query }) => { if (query.kind === NodeKind.DataTableNode) { @@ -74,7 +74,7 @@ export const inAppFeedbackLogic = kea([ }, ], trendQuery: [ - 
DEFAULT_TREND_INSIGHT_VIZ_NODE as InsightVizNode, + DEFAULT_TREND_INSIGHT_VIZ_NODE, { setDataTableQuery: (_, { query }) => { if (query.kind === NodeKind.DataTableNode) { @@ -114,7 +114,7 @@ export const inAppFeedbackLogic = kea([ event: eventName, orderBy: ['-timestamp'], }) - return response.results as EventType[] + return response.results }, }, ], diff --git a/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss b/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss index 7f2e0024ca851..ad4a376864a61 100644 --- a/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss +++ b/frontend/src/scenes/funnels/FunnelBarGraph/FunnelBarGraph.scss @@ -120,7 +120,7 @@ $glyph_height: 23px; // Based on .funnel-step-glyph .funnel-bar { position: relative; height: 100%; - background: var(--funnel-default); + background: var(--primary-3000); transition: width 0.2s ease, height 0.2s ease; &.first { @@ -148,7 +148,7 @@ $glyph_height: 23px; // Based on .funnel-step-glyph &.outside { left: calc(100% + #{$label_position_offset}); - color: var(--funnel-default); + color: var(--primary-3000); } } } diff --git a/frontend/src/scenes/funnels/FunnelStepMore.tsx b/frontend/src/scenes/funnels/FunnelStepMore.tsx index 722f1d54aeb14..0381afb70ad7c 100644 --- a/frontend/src/scenes/funnels/FunnelStepMore.tsx +++ b/frontend/src/scenes/funnels/FunnelStepMore.tsx @@ -15,7 +15,7 @@ type FunnelStepMoreProps = { export function FunnelStepMore({ stepIndex }: FunnelStepMoreProps): JSX.Element | null { const { insightProps } = useValues(insightLogic) const { querySource } = useValues(funnelDataLogic(insightProps)) - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const filterProps = cleanFilters(queryNodeToFilter(querySource!)) const aggregationGroupTypeIndex = querySource?.aggregation_group_type_index diff --git a/frontend/src/scenes/funnels/funnelDataLogic.ts b/frontend/src/scenes/funnels/funnelDataLogic.ts index 024c41ac1e0d1..16b0c8e35924d 100644 --- 
a/frontend/src/scenes/funnels/funnelDataLogic.ts +++ b/frontend/src/scenes/funnels/funnelDataLogic.ts @@ -2,7 +2,6 @@ import { kea, path, props, key, connect, selectors, actions, reducers } from 'ke import { FunnelResultType, FunnelVizType, - FunnelStep, FunnelStepReference, FunnelStepWithNestedBreakdown, InsightLogicProps, @@ -164,7 +163,7 @@ export const funnelDataLogic = kea([ : breakdown?.breakdown ?? undefined return aggregateBreakdownResult(results, breakdownProperty).sort((a, b) => a.order - b.order) } - return (results as FunnelStep[]).sort((a, b) => a.order - b.order) + return results.sort((a, b) => a.order - b.order) } else { return [] } diff --git a/frontend/src/scenes/funnels/funnelUtils.ts b/frontend/src/scenes/funnels/funnelUtils.ts index 8dfc6a0539e73..dcfd68f91470f 100644 --- a/frontend/src/scenes/funnels/funnelUtils.ts +++ b/frontend/src/scenes/funnels/funnelUtils.ts @@ -600,7 +600,7 @@ export const parseDisplayNameForCorrelation = ( first_value = autoCaptureEventToDescription({ ...record.event, event: '$autocapture', - }) as string + }) return { first_value, second_value } } else { // FunnelCorrelationResultsType.EventWithProperties diff --git a/frontend/src/scenes/groups/groupLogic.ts b/frontend/src/scenes/groups/groupLogic.ts index dce2cfdc6e04b..bbdc071690c73 100644 --- a/frontend/src/scenes/groups/groupLogic.ts +++ b/frontend/src/scenes/groups/groupLogic.ts @@ -15,6 +15,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' import { loaders } from 'kea-loaders' import { urlToAction } from 'kea-router' +import { Scene } from 'scenes/sceneTypes' function getGroupEventsQuery(groupTypeIndex: number, groupKey: string): DataTableNode { return { @@ -104,10 +105,17 @@ export const groupLogic = kea([ (s, p) => [s.groupTypeName, p.groupTypeIndex, p.groupKey, s.groupData], (groupTypeName, groupTypeIndex, groupKey, groupData): Breadcrumb[] => [ { + key: Scene.DataManagement, + name: 'People', + 
path: urls.persons(), + }, + { + key: groupTypeIndex, name: capitalizeFirstLetter(groupTypeName), path: urls.groups(String(groupTypeIndex)), }, { + key: `${groupTypeIndex}-${groupKey}`, name: groupDisplayId(groupKey, groupData?.group_properties || {}), path: urls.group(String(groupTypeIndex), groupKey), }, diff --git a/frontend/src/scenes/ingestion/CardContainer.tsx b/frontend/src/scenes/ingestion/CardContainer.tsx deleted file mode 100644 index ed26c428d8101..0000000000000 --- a/frontend/src/scenes/ingestion/CardContainer.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import { PanelFooter } from './panels/PanelComponents' -import './panels/Panels.scss' -import { IngestionState } from 'scenes/ingestion/ingestionLogic' - -export function CardContainer({ - children, - nextProps, - onContinue, - finalStep = false, - showInviteTeamMembers = true, -}: { - children: React.ReactNode - nextProps?: Partial - onContinue?: () => void - finalStep?: boolean - showInviteTeamMembers?: boolean -}): JSX.Element { - return ( - // We want a forced width for this view only - // eslint-disable-next-line react/forbid-dom-props -
- {children} -
- {nextProps && ( - - )} -
-
- ) -} diff --git a/frontend/src/scenes/ingestion/IngestionInviteMembersButton.tsx b/frontend/src/scenes/ingestion/IngestionInviteMembersButton.tsx deleted file mode 100644 index 439390bd83164..0000000000000 --- a/frontend/src/scenes/ingestion/IngestionInviteMembersButton.tsx +++ /dev/null @@ -1,26 +0,0 @@ -import { LemonButton } from '@posthog/lemon-ui' -import { useActions } from 'kea' -import { IconArrowRight } from 'lib/lemon-ui/icons' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { inviteLogic } from 'scenes/settings/organization/inviteLogic' - -export function IngestionInviteMembersButton(): JSX.Element { - const { showInviteModal } = useActions(inviteLogic) - const { reportInviteMembersButtonClicked } = useActions(eventUsageLogic) - - return ( - } - className="mt-6" - onClick={() => { - showInviteModal() - reportInviteMembersButtonClicked() - }} - > - Invite a team member to help with this step - - ) -} diff --git a/frontend/src/scenes/ingestion/IngestionWizard.scss b/frontend/src/scenes/ingestion/IngestionWizard.scss deleted file mode 100644 index e1d2b422bdf7c..0000000000000 --- a/frontend/src/scenes/ingestion/IngestionWizard.scss +++ /dev/null @@ -1,77 +0,0 @@ -.IngestionContainer { - display: flex; - height: 100%; - align-items: center; - justify-content: center; - padding: 2rem; - flex-direction: column; - width: 100%; -} - -.IngestionContent { - .BridgePage__content { - max-width: 700px; - } -} - -.IngestionTopbar { - border-bottom: 1px solid var(--border); - padding: 0.25rem 1rem; - display: flex; - justify-content: space-between; - position: sticky; - top: 0; - background-color: white; - width: 100%; - z-index: 10; - - .help-button { - margin-right: 1rem; - } -} - -.platform-item { - margin-right: 10px; - padding: 10px; - padding-left: 20px; - padding-right: 20px; - border: 1px solid gray; - border-radius: 2px; - cursor: pointer; -} - -.platform-item:hover { - background-color: gainsboro; -} - -.selectable-item:hover { - 
background-color: gainsboro; - cursor: pointer; -} - -.IngestionSidebar__bottom { - margin-top: auto; - - .popover { - padding-left: 0.5rem; - padding-right: 0.5rem; - } -} - -.IngestionSidebar__help { - display: flex; - flex-direction: column; - font-weight: 500; - color: var(--primary); - margin-top: 1rem; -} - -.IngestionSidebar__steps { - color: var(--muted-alt); - font-size: 14px; - - .LemonButton { - font-weight: 600; - margin-bottom: 0.5rem; - } -} diff --git a/frontend/src/scenes/ingestion/IngestionWizard.tsx b/frontend/src/scenes/ingestion/IngestionWizard.tsx deleted file mode 100644 index 80c8d8c8690a2..0000000000000 --- a/frontend/src/scenes/ingestion/IngestionWizard.tsx +++ /dev/null @@ -1,94 +0,0 @@ -import { useEffect } from 'react' -import './IngestionWizard.scss' - -import { VerificationPanel } from 'scenes/ingestion/panels/VerificationPanel' -import { InstructionsPanel } from 'scenes/ingestion/panels/InstructionsPanel' -import { useValues, useActions } from 'kea' -import { ingestionLogic, INGESTION_VIEWS } from 'scenes/ingestion/ingestionLogic' -import { FrameworkPanel } from 'scenes/ingestion/panels/FrameworkPanel' -import { PlatformPanel } from 'scenes/ingestion/panels/PlatformPanel' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { GeneratingDemoDataPanel } from './panels/GeneratingDemoDataPanel' -import { ThirdPartyPanel } from './panels/ThirdPartyPanel' -import { BillingPanel } from './panels/BillingPanel' -import { Sidebar } from './Sidebar' -import { InviteModal } from 'scenes/settings/organization/InviteModal' -import { inviteLogic } from 'scenes/settings/organization/inviteLogic' -import { Logo } from '~/toolbar/assets/Logo' -import { SitePopover } from '~/layout/navigation/TopBar/SitePopover' -import { HelpButton } from 'lib/components/HelpButton/HelpButton' -import { BridgePage } from 'lib/components/BridgePage/BridgePage' -import { PanelHeader } from './panels/PanelComponents' -import { InviteTeamPanel } from 
'./panels/InviteTeamPanel' -import { TeamInvitedPanel } from './panels/TeamInvitedPanel' -import { NoDemoIngestionPanel } from './panels/NoDemoIngestionPanel' -import { SuperpowersPanel } from 'scenes/ingestion/panels/SuperpowersPanel' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { FEATURE_FLAGS } from 'lib/constants' -import { router } from 'kea-router' -import { urls } from 'scenes/urls' - -export function IngestionWizard(): JSX.Element { - const { currentView, platform } = useValues(ingestionLogic) - const { reportIngestionLandingSeen } = useActions(eventUsageLogic) - const { featureFlags } = useValues(featureFlagLogic) - - useEffect(() => { - if (!platform) { - reportIngestionLandingSeen() - } - }, [platform]) - - if (featureFlags[FEATURE_FLAGS.PRODUCT_SPECIFIC_ONBOARDING] === 'test') { - router.actions.replace(urls.products()) - } - - return ( - - {currentView === INGESTION_VIEWS.BILLING && } - {currentView === INGESTION_VIEWS.SUPERPOWERS && } - {currentView === INGESTION_VIEWS.INVITE_TEAM && } - {currentView === INGESTION_VIEWS.TEAM_INVITED && } - {currentView === INGESTION_VIEWS.CHOOSE_PLATFORM && } - {currentView === INGESTION_VIEWS.CHOOSE_FRAMEWORK && } - {currentView === INGESTION_VIEWS.WEB_INSTRUCTIONS && } - {currentView === INGESTION_VIEWS.VERIFICATION && } - {currentView === INGESTION_VIEWS.GENERATING_DEMO_DATA && } - {currentView === INGESTION_VIEWS.CHOOSE_THIRD_PARTY && } - {currentView === INGESTION_VIEWS.NO_DEMO_INGESTION && } - - ) -} - -function IngestionContainer({ children }: { children: React.ReactNode }): JSX.Element { - const { isInviteModalShown } = useValues(inviteLogic) - const { hideInviteModal } = useActions(inviteLogic) - const { isSmallScreen } = useValues(ingestionLogic) - - return ( -
-
- -
- - -
-
-
- {!isSmallScreen && } - {/*
} - className="IngestionContent h-full" - fullScreen={false} - > - {children} - -
- -
- ) -} diff --git a/frontend/src/scenes/ingestion/Sidebar.tsx b/frontend/src/scenes/ingestion/Sidebar.tsx deleted file mode 100644 index b3f656cee12e7..0000000000000 --- a/frontend/src/scenes/ingestion/Sidebar.tsx +++ /dev/null @@ -1,95 +0,0 @@ -import { ingestionLogic } from './ingestionLogic' -import { useActions, useValues } from 'kea' -import './IngestionWizard.scss' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { LemonButton, LemonButtonWithDropdown } from 'lib/lemon-ui/LemonButton' -import { IconArticle, IconQuestionAnswer } from 'lib/lemon-ui/icons' -import { HelpType } from '~/types' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { ProjectSwitcherOverlay } from '~/layout/navigation/ProjectSwitcher' -import { Lettermark } from 'lib/lemon-ui/Lettermark' -import { organizationLogic } from 'scenes/organizationLogic' -import { Link } from '@posthog/lemon-ui' - -const HELP_UTM_TAGS = '?utm_medium=in-product-onboarding&utm_campaign=help-button-sidebar' - -export function Sidebar(): JSX.Element { - const { currentStep, sidebarSteps, isProjectSwitcherShown } = useValues(ingestionLogic) - const { sidebarStepClick, toggleProjectSwitcher, hideProjectSwitcher } = useActions(ingestionLogic) - const { reportIngestionHelpClicked, reportIngestionSidebarButtonClicked } = useActions(eventUsageLogic) - const { currentOrganization } = useValues(organizationLogic) - - const currentIndex = sidebarSteps.findIndex((x) => x === currentStep) - - return ( -
-
-
- {sidebarSteps.map((step: string, index: number) => ( - currentIndex} - onClick={() => { - sidebarStepClick(step) - reportIngestionSidebarButtonClicked(step) - }} - > - {step} - - ))} -
-
- {currentOrganization?.teams && currentOrganization.teams.length > 1 && ( - <> - } - onClick={() => toggleProjectSwitcher()} - dropdown={{ - visible: isProjectSwitcherShown, - onClickOutside: hideProjectSwitcher, - overlay: , - actionable: true, - placement: 'top-end', - }} - type="secondary" - fullWidth - > - Switch project - - - - )} -
- - } - fullWidth - onClick={() => { - reportIngestionHelpClicked(HelpType.Slack) - }} - > - Get support on Slack - - - - } - fullWidth - onClick={() => { - reportIngestionHelpClicked(HelpType.Docs) - }} - > - Read our documentation - - -
-
-
-
- ) -} diff --git a/frontend/src/scenes/ingestion/constants.tsx b/frontend/src/scenes/ingestion/constants.tsx deleted file mode 100644 index 50fefffadbf4a..0000000000000 --- a/frontend/src/scenes/ingestion/constants.tsx +++ /dev/null @@ -1,87 +0,0 @@ -import { PlatformType } from 'scenes/ingestion/types' -import { Segment, RSS } from './panels/ThirdPartyIcons' - -export const TECHNICAL = 'TECHNICAL' -export const PLATFORM_TYPE = 'PLATFORM_TYPE' -export const FRAMEWORK = 'FRAMEWORK' -export const INSTRUCTIONS = 'INSTRUCTIONS' -export const VERIFICATION = 'VERIFICATION' - -export const WEB = 'web' -export const MOBILE = 'mobile' -export const BACKEND = 'backend' -export const THIRD_PARTY = 'third-party' -export const platforms: PlatformType[] = [WEB, MOBILE, BACKEND] - -export const NODEJS = 'NODEJS' -export const GO = 'GO' -export const RUBY = 'RUBY' -export const PYTHON = 'PYTHON' -export const PHP = 'PHP' -export const ELIXIR = 'ELIXIR' -export const API = 'API' - -export const ANDROID = 'ANDROID' -export const IOS = 'IOS' -export const REACT_NATIVE = 'REACT_NATIVE' -export const FLUTTER = 'FLUTTER' - -export const httpFrameworks = { - [API]: 'HTTP API', -} -export const webFrameworks = { - [NODEJS]: 'Node.js', - [GO]: 'Go', - [RUBY]: 'Ruby', - [PYTHON]: 'Python', - [PHP]: 'PHP', - [ELIXIR]: 'Elixir', -} - -export const mobileFrameworks = { - [ANDROID]: 'Android', - [IOS]: 'iOS', - [REACT_NATIVE]: 'React Native', - [FLUTTER]: 'Flutter', -} - -export const allFrameworks = { - ...webFrameworks, - ...mobileFrameworks, - ...httpFrameworks, -} -export interface ThirdPartySource { - name: string - icon: JSX.Element - docsLink: string - aboutLink: string - labels?: string[] - description?: string -} - -export const thirdPartySources: ThirdPartySource[] = [ - { - name: 'Segment', - icon: , - docsLink: 'https://posthog.com/docs/integrate/third-party/segment', - aboutLink: 'https://segment.com', - }, - { - name: 'Rudderstack', - icon: ( - - ), - docsLink: 
'https://posthog.com/docs/integrate/third-party/rudderstack', - aboutLink: 'https://rudderstack.com', - }, - { - name: 'RSS items', - description: 'Send events from releases, blog posts, status pages, or any other RSS feed into PostHog', - icon: , - docsLink: 'https://posthog.com/tutorials/rss-item-capture', - aboutLink: 'https://en.wikipedia.org/wiki/RSS', - }, -] diff --git a/frontend/src/scenes/ingestion/frameworks/APIInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/APIInstructions.tsx deleted file mode 100644 index 3ac66f7281952..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/APIInstructions.tsx +++ /dev/null @@ -1,27 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function APISnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - const url = window.location.origin - - return ( - - {'POST ' + - url + - '/capture/\nContent-Type: application/json\n\n{\n\t"api_key": "' + - currentTeam?.api_token + - '",\n\t"event": "[event name]",\n\t"properties": {\n\t\t"distinct_id": "[your users\' distinct id]",\n\t\t"key1": "value1",\n\t\t"key2": "value2"\n\t},\n\t"timestamp": "[optional timestamp in ISO 8601 format]"\n}'} - - ) -} - -export function APIInstructions(): JSX.Element { - return ( - <> -

Usage

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/AndroidInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/AndroidInstructions.tsx deleted file mode 100644 index 1e7d262f36640..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/AndroidInstructions.tsx +++ /dev/null @@ -1,57 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function AndroidInstallSnippet(): JSX.Element { - return ( - - {`dependencies { - implementation 'com.posthog.android:posthog:1.+' -}`} - - ) -} - -function AndroidSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`public class SampleApp extends Application { - private static final String POSTHOG_API_KEY = "${currentTeam?.api_token}"; - private static final String POSTHOG_HOST = "${window.location.origin}"; - - @Override - public void onCreate() { - // Create a PostHog client with the given context, API key and host - PostHog posthog = new PostHog.Builder(this, POSTHOG_API_KEY, POSTHOG_HOST) - .captureApplicationLifecycleEvents() // Record certain application events automatically! - .recordScreenViews() // Record screen views automatically! - .build(); - - // Set the initialized instance as a globally accessible instance - PostHog.setSingletonInstance(posthog); - - // Now any time you call PostHog.with, the custom instance will be returned - PostHog posthog = PostHog.with(this); - }`} - - ) -} - -function AndroidCaptureSnippet(): JSX.Element { - return PostHog.with(this).capture("test-event"); -} - -export function AndroidInstructions(): JSX.Element { - return ( - <> -

Install

- -

Configure

- -

Send an Event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/ElixirInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/ElixirInstructions.tsx deleted file mode 100644 index 7d476a6630db0..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/ElixirInstructions.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function ElixirInstallSnippet(): JSX.Element { - return ( - - {'def deps do\n [\n {:posthog, "~> 0.1"}\n ]\nend'} - - ) -} - -function ElixirSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - const url = window.location.origin - - return ( - - {'config :posthog,\n api_url: "' + url + '",\n api_key: "' + currentTeam?.api_token + '"'} - - ) -} - -export function ElixirInstructions(): JSX.Element { - return ( - <> -

Install

- -

Configure

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/FlutterInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/FlutterInstructions.tsx deleted file mode 100644 index 46d496917a9b9..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/FlutterInstructions.tsx +++ /dev/null @@ -1,64 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function FlutterInstallSnippet(): JSX.Element { - return {'posthog_flutter: # insert version number'} -} - -function FlutterCaptureSnippet(): JSX.Element { - return ( - - { - "import 'package:posthog_flutter/posthog_flutter.dart';\n\nPosthog().screen(\n\tscreenName: 'Example Screen',\n);" - } - - ) -} - -function FlutterAndroidSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - const url = window.location.origin - - return ( - - {'\n\t\n\t\t[...]\n\t\n\t\n\t\n\t\n\t\n'} - - ) -} - -function FlutterIOSSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - const url = window.location.origin - - return ( - - {'\n\t[...]\n\tcom.posthog.posthog.API_KEY\n\t' + - currentTeam?.api_token + - '\n\tcom.posthog.posthog.POSTHOG_HOST\n\t' + - url + - '\n\tcom.posthog.posthog.TRACK_APPLICATION_LIFECYCLE_EVENTS\n\t\n\t[...]\n'} - - ) -} - -export function FlutterInstructions(): JSX.Element { - return ( - <> -

Install

- -

Android Setup

-

{'Add these values in AndroidManifest.xml'}

- -

iOS Setup

-

{'Add these values in Info.plist'}

- -

Send an Event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/GoInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/GoInstructions.tsx deleted file mode 100644 index 5dc33e57499c4..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/GoInstructions.tsx +++ /dev/null @@ -1,45 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function GoInstallSnippet(): JSX.Element { - return {'go get "github.com/posthog/posthog-go"'} -} - -function GoSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`package main -import ( - "github.com/posthog/posthog-go" -) -func main() { - client, _ := posthog.NewWithConfig("${currentTeam?.api_token}", posthog.Config{Endpoint: "${window.location.origin}"}) - defer client.Close() -}`} - - ) -} - -function GoCaptureSnippet(): JSX.Element { - return ( - - {'client.Enqueue(posthog.Capture{\n DistinctId: "test-user",\n Event: "test-snippet",\n})'} - - ) -} - -export function GoInstructions(): JSX.Element { - return ( - <> -

Install

- -

Configure

- -

Send an Event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/NodeInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/NodeInstructions.tsx deleted file mode 100644 index 46551b2627d32..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/NodeInstructions.tsx +++ /dev/null @@ -1,58 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function NodeInstallSnippet(): JSX.Element { - return ( - - {`npm install posthog-node -# OR -yarn add posthog-node -# OR -pnpm add posthog-node`} - - ) -} - -function NodeSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`import { PostHog } from 'posthog-node' - -const client = new PostHog( - '${currentTeam?.api_token}', - { host: '${window.location.origin}' } -)`} - - ) -} - -function NodeCaptureSnippet(): JSX.Element { - return ( - - {`client.capture({ - distinctId: 'test-id', - event: 'test-event' -}) - -// Send queued events immediately. Use for example in a serverless environment -// where the program may terminate before everything is sent -client.flush()`} - - ) -} - -export function NodeInstructions(): JSX.Element { - return ( - <> -

Install

- -

Configure

- -

Send an Event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/PHPInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/PHPInstructions.tsx deleted file mode 100644 index c9dc1665ab977..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/PHPInstructions.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function PHPConfigSnippet(): JSX.Element { - return ( - - {`{ - "require": { - "posthog/posthog-php": "1.0.*" - } -}`} - - ) -} - -function PHPInstallSnippet(): JSX.Element { - return {'php composer.phar install'} -} - -function PHPSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`PostHog::init('${currentTeam?.api_token}', - array('host' => '${window.location.origin}') -);`} - - ) -} - -function PHPCaptureSnippet(): JSX.Element { - return ( - - {"PostHog::capture(array(\n 'distinctId' => 'test-user',\n 'event' => 'test-event'\n));"} - - ) -} - -export function PHPInstructions(): JSX.Element { - return ( - <> -

Dependency Setup

- -

Install

- -

Configure

- -

Send an Event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/PythonInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/PythonInstructions.tsx deleted file mode 100644 index 0d596f57e50e1..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/PythonInstructions.tsx +++ /dev/null @@ -1,38 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function PythonInstallSnippet(): JSX.Element { - return {'pip install posthog'} -} - -function PythonSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`from posthog import Posthog - -posthog = Posthog(project_api_key='${currentTeam?.api_token}', host='${window.location.origin}') - - `} - - ) -} - -function PythonCaptureSnippet(): JSX.Element { - return {"posthog.capture('test-id', 'test-event')"} -} - -export function PythonInstructions(): JSX.Element { - return ( - <> -

Install

- -

Configure

- -

Send an Event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/ReactNativeInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/ReactNativeInstructions.tsx deleted file mode 100644 index 76298cc842821..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/ReactNativeInstructions.tsx +++ /dev/null @@ -1,64 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' -import { Link } from '@posthog/lemon-ui' - -export function RNInstructions(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - const url = window.location.origin - - return ( - <> -

Install

- - {`# Expo apps -expo install posthog-react-native expo-file-system expo-application expo-device expo-localization - -# Standard React Native apps -yarn add posthog-react-native @react-native-async-storage/async-storage react-native-device-info -# or -npm i -s posthog-react-native @react-native-async-storage/async-storage react-native-device-info - -# for iOS -cd ios -pod install`} - -

Configure

-

- PostHog is most easily used via the PostHogProvider component but if you need to - instantiate it directly,{' '} - - check out the docs - {' '} - which explain how to do this correctly. -

- - {`// App.(js|ts) -import { PostHogProvider } from 'posthog-react-native' -... - -export function MyApp() { - return ( - - - - ) -}`} - -

Send an Event

- {`// With hooks -import { usePostHog } from 'posthog-react-native' - -const MyComponent = () => { - const posthog = usePostHog() - - useEffect(() => { - posthog.capture("MyComponent loaded", { foo: "bar" }) - }, []) -} - `} - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/RubyInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/RubyInstructions.tsx deleted file mode 100644 index b3a944a8ff11f..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/RubyInstructions.tsx +++ /dev/null @@ -1,42 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function RubyInstallSnippet(): JSX.Element { - return {'gem "posthog-ruby"'} -} - -function RubySetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`posthog = PostHog::Client.new({ - api_key: "${currentTeam?.api_token}", - host: "${window.location.origin}", - on_error: Proc.new { |status, msg| print msg } -})`} - - ) -} - -function RubyCaptureSnippet(): JSX.Element { - return ( - - {"posthog.capture({\n distinct_id: 'test-id',\n event: 'test-event'})"} - - ) -} - -export function RubyInstructions(): JSX.Element { - return ( - <> -

Install

- -

Configure

- -

Send an Event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/WebInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/WebInstructions.tsx deleted file mode 100644 index 4decaf1051d98..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/WebInstructions.tsx +++ /dev/null @@ -1,105 +0,0 @@ -import { Link } from 'lib/lemon-ui/Link' -import { JSSnippet } from 'lib/components/JSSnippet' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function JSInstallSnippet(): JSX.Element { - return ( - - {['npm install posthog-js', '# OR', 'yarn add posthog-js', '# OR', 'pnpm add posthog-js'].join('\n')} - - ) -} - -function JSSetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {[ - "import posthog from 'posthog-js'", - '', - `posthog.init('${currentTeam?.api_token}', { api_host: '${window.location.origin}' })`, - ].join('\n')} - - ) -} - -function JSEventSnippet(): JSX.Element { - return ( - {`posthog.capture('my event', { property: 'value' })`} - ) -} - -export function WebInstructions(): JSX.Element { - return ( - <> -

Connect your web app or product

-
-

Option 1. Code snippet

-
- Recommended -
-
-

- Just add this snippet to your website and we'll automatically capture page views, sessions and all - relevant interactions within your website.{' '} - - Learn more - - . -

-

Install the snippet

-

- Insert this snippet in your website within the <head> tag. -

-

Send events

-

Visit your site and click around to generate some initial events.

- -
-

Option 2. Javascript Library

-
-

- Use this option if you want more granular control of how PostHog runs in your website and the events you - capture. Recommended for teams with more stable products and more defined analytics requirements.{' '} - - Learn more - - . -

-

Install the package

- -

- Configure & initialize (see more{' '} - - configuration options - - ) -

- -

Send your first event

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/iOSInstructions.tsx b/frontend/src/scenes/ingestion/frameworks/iOSInstructions.tsx deleted file mode 100644 index 7ccc1ae487fc3..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/iOSInstructions.tsx +++ /dev/null @@ -1,60 +0,0 @@ -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { useValues } from 'kea' -import { teamLogic } from 'scenes/teamLogic' - -function IOSInstallSnippet(): JSX.Element { - return ( - - {'pod "PostHog", "~> 1.0" # Cocoapods \n# OR \ngithub "posthog/posthog-ios" # Carthage'} - - ) -} - -function IOS_OBJ_C_SetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`#import \n#import \n\nPHGPostHogConfiguration *configuration = [PHGPostHogConfiguration configurationWithApiKey:@"${currentTeam?.api_token}" host:@"${window.location.origin}"];\n\nconfiguration.captureApplicationLifecycleEvents = YES; // Record certain application events automatically!\nconfiguration.recordScreenViews = YES; // Record screen views automatically!\n\n[PHGPostHog setupWithConfiguration:configuration];`} - - ) -} - -function IOS_SWIFT_SetupSnippet(): JSX.Element { - const { currentTeam } = useValues(teamLogic) - - return ( - - {`import PostHog\n\nlet configuration = PHGPostHogConfiguration(apiKey: "${currentTeam?.api_token}", host: "${window.location.origin}")\n\nconfiguration.captureApplicationLifecycleEvents = true; // Record certain application events automatically!\nconfiguration.recordScreenViews = true; // Record screen views automatically!\n\nPHGPostHog.setup(with: configuration)\nlet posthog = PHGPostHog.shared()`} - - ) -} - -function IOS_OBJ_C_CaptureSnippet(): JSX.Element { - return ( - - {'[[PHGPostHog sharedPostHog] capture:@"Test Event"];'} - - ) -} - -function IOS_SWIFT_CaptureSnippet(): JSX.Element { - return {'posthog.capture("Test Event")'} -} - -export function IOSInstructions(): JSX.Element { - return ( - <> -

Install

- -

Configure Swift

- -

Or configure Objective-C

- -

Send an event with swift

- -

Send an event with Objective-C

- - - ) -} diff --git a/frontend/src/scenes/ingestion/frameworks/index.tsx b/frontend/src/scenes/ingestion/frameworks/index.tsx deleted file mode 100644 index 71597b3648c6b..0000000000000 --- a/frontend/src/scenes/ingestion/frameworks/index.tsx +++ /dev/null @@ -1,11 +0,0 @@ -export * from './AndroidInstructions' -export * from './GoInstructions' -export * from './NodeInstructions' -export * from './iOSInstructions' -export * from './PHPInstructions' -export * from './PythonInstructions' -export * from './ReactNativeInstructions' -export * from './RubyInstructions' -export * from './APIInstructions' -export * from './ElixirInstructions' -export * from './FlutterInstructions' diff --git a/frontend/src/scenes/ingestion/ingestionLogic.ts b/frontend/src/scenes/ingestion/ingestionLogic.ts deleted file mode 100644 index 500821474d69b..0000000000000 --- a/frontend/src/scenes/ingestion/ingestionLogic.ts +++ /dev/null @@ -1,717 +0,0 @@ -import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea' -import { Framework, PlatformType } from 'scenes/ingestion/types' -import { API, MOBILE, BACKEND, WEB, thirdPartySources, THIRD_PARTY, ThirdPartySource } from './constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { teamLogic } from 'scenes/teamLogic' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { urls } from 'scenes/urls' -import { actionToUrl, combineUrl, router, urlToAction } from 'kea-router' -import { getBreakpoint } from 'lib/utils/responsiveUtils' -import { windowValues } from 'kea-window-values' -import { subscriptions } from 'kea-subscriptions' -import { TeamType } from '~/types' -import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' -import { inviteLogic } from 'scenes/settings/organization/inviteLogic' -import api from 'lib/api' -import { loaders } from 'kea-loaders' -import type { ingestionLogicType } from './ingestionLogicType' - -export enum INGESTION_STEPS { - START = 'Get 
started', - PLATFORM = 'Select your platform', - CONNECT_PRODUCT = 'Connect your product', - VERIFY = 'Listen for events', - SUPERPOWERS = 'Enable superpowers', - BILLING = 'Add payment method', - DONE = 'Done!', -} - -export enum INGESTION_STEPS_WITHOUT_BILLING { - START = 'Get started', - PLATFORM = 'Select your platform', - CONNECT_PRODUCT = 'Connect your product', - VERIFY = 'Listen for events', - SUPERPOWERS = 'Enable superpowers', - DONE = 'Done!', -} - -export enum INGESTION_VIEWS { - BILLING = 'billing', - SUPERPOWERS = 'superpowers', - INVITE_TEAM = 'invite-team', - TEAM_INVITED = 'post-invite-team', - CHOOSE_PLATFORM = 'choose-platform', - VERIFICATION = 'verification', - WEB_INSTRUCTIONS = 'web-instructions', - CHOOSE_FRAMEWORK = 'choose-framework', - GENERATING_DEMO_DATA = 'generating-demo-data', - CHOOSE_THIRD_PARTY = 'choose-third-party', - NO_DEMO_INGESTION = 'no-demo-ingestion', -} - -export const INGESTION_VIEW_TO_STEP = { - [INGESTION_VIEWS.BILLING]: INGESTION_STEPS.BILLING, - [INGESTION_VIEWS.SUPERPOWERS]: INGESTION_STEPS.SUPERPOWERS, - [INGESTION_VIEWS.INVITE_TEAM]: INGESTION_STEPS.START, - [INGESTION_VIEWS.TEAM_INVITED]: INGESTION_STEPS.START, - [INGESTION_VIEWS.NO_DEMO_INGESTION]: INGESTION_STEPS.START, - [INGESTION_VIEWS.CHOOSE_PLATFORM]: INGESTION_STEPS.PLATFORM, - [INGESTION_VIEWS.VERIFICATION]: INGESTION_STEPS.VERIFY, - [INGESTION_VIEWS.WEB_INSTRUCTIONS]: INGESTION_STEPS.CONNECT_PRODUCT, - [INGESTION_VIEWS.CHOOSE_FRAMEWORK]: INGESTION_STEPS.CONNECT_PRODUCT, - [INGESTION_VIEWS.GENERATING_DEMO_DATA]: INGESTION_STEPS.CONNECT_PRODUCT, - [INGESTION_VIEWS.CHOOSE_THIRD_PARTY]: INGESTION_STEPS.CONNECT_PRODUCT, -} - -export type IngestionState = { - platform: PlatformType - framework: Framework - readyToVerify: boolean - showSuperpowers: boolean - showBilling: boolean - hasInvitedMembers: boolean | null - isTechnicalUser: boolean | null - isDemoProject: boolean | null - generatingDemoData: boolean | null -} - -const viewToState = (view: string, 
props: IngestionState): IngestionState => { - switch (view) { - case INGESTION_VIEWS.INVITE_TEAM: - return { - isTechnicalUser: null, - hasInvitedMembers: null, - platform: null, - framework: null, - readyToVerify: false, - showSuperpowers: false, - showBilling: false, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } - case INGESTION_VIEWS.TEAM_INVITED: - return { - isTechnicalUser: false, - hasInvitedMembers: true, - platform: null, - framework: null, - readyToVerify: false, - showSuperpowers: false, - showBilling: false, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } - case INGESTION_VIEWS.BILLING: - return { - isTechnicalUser: null, - hasInvitedMembers: null, - platform: props.platform, - framework: props.framework, - readyToVerify: false, - showSuperpowers: false, - showBilling: true, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } - case INGESTION_VIEWS.VERIFICATION: - return { - isTechnicalUser: true, - hasInvitedMembers: null, - platform: props.platform, - framework: props.framework, - readyToVerify: true, - showSuperpowers: false, - showBilling: false, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } - case INGESTION_VIEWS.SUPERPOWERS: - return { - isTechnicalUser: null, - hasInvitedMembers: null, - platform: props.platform, - framework: props.framework, - readyToVerify: false, - showSuperpowers: true, - showBilling: false, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } - case INGESTION_VIEWS.CHOOSE_PLATFORM: - return { - isTechnicalUser: true, - hasInvitedMembers: null, - platform: null, - framework: null, - readyToVerify: false, - showSuperpowers: false, - showBilling: false, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } - - case INGESTION_VIEWS.CHOOSE_FRAMEWORK: - return { - isTechnicalUser: true, - hasInvitedMembers: null, - platform: props.platform, - framework: null, - readyToVerify: false, - showSuperpowers: 
false, - showBilling: false, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } - } - return { - isTechnicalUser: null, - hasInvitedMembers: null, - platform: null, - framework: null, - readyToVerify: false, - showSuperpowers: false, - showBilling: false, - isDemoProject: props.isDemoProject, - generatingDemoData: false, - } -} - -export const ingestionLogic = kea([ - path(['scenes', 'ingestion', 'ingestionLogic']), - connect({ - values: [ - featureFlagLogic, - ['featureFlags'], - teamLogic, - ['currentTeam'], - preflightLogic, - ['preflight'], - inviteLogic, - ['isInviteModalShown'], - ], - actions: [ - teamLogic, - ['updateCurrentTeamSuccess', 'createTeamSuccess'], - inviteLogic, - ['inviteTeamMembersSuccess'], - ], - }), - actions({ - setState: ({ - isTechnicalUser, - hasInvitedMembers, - platform, - framework, - readyToVerify, - showSuperpowers, - showBilling, - isDemoProject, - generatingDemoData, - }: IngestionState) => ({ - isTechnicalUser, - hasInvitedMembers, - platform, - framework, - readyToVerify, - showSuperpowers, - showBilling, - isDemoProject, - generatingDemoData, - }), - setInstructionsModal: (isOpen: boolean) => ({ isOpen }), - setThirdPartySource: (sourceIndex: number) => ({ sourceIndex }), - completeOnboarding: true, - setCurrentStep: (currentStep: string) => ({ currentStep }), - sidebarStepClick: (step: string) => ({ step }), - next: (props: Partial) => props, - onBack: true, - goToView: (view: INGESTION_VIEWS) => ({ view }), - setSidebarSteps: (steps: string[]) => ({ steps }), - setPollTimeout: (pollTimeout: number) => ({ pollTimeout }), - toggleProjectSwitcher: true, - hideProjectSwitcher: true, - }), - windowValues({ - isSmallScreen: (window: Window) => window.innerWidth < getBreakpoint('md'), - }), - reducers({ - isTechnicalUser: [ - null as null | boolean, - { - setState: (_, { isTechnicalUser }) => isTechnicalUser, - }, - ], - hasInvitedMembers: [ - null as null | boolean, - { - setState: (_, { hasInvitedMembers }) => 
hasInvitedMembers, - }, - ], - platform: [ - null as null | PlatformType, - { - setState: (_, { platform }) => platform, - }, - ], - framework: [ - null as null | Framework, - { - setState: (_, { framework }) => (framework ? (framework.toUpperCase() as Framework) : null), - }, - ], - readyToVerify: [ - false, - { - setState: (_, { readyToVerify }) => readyToVerify, - }, - ], - showSuperpowers: [ - false, - { - setState: (_, { showSuperpowers }) => showSuperpowers, - }, - ], - showBilling: [ - false, - { - setState: (_, { showBilling }) => showBilling, - }, - ], - instructionsModalOpen: [ - false as boolean, - { - setInstructionsModal: (_, { isOpen }) => isOpen, - }, - ], - thirdPartyIntegrationSource: [ - null as ThirdPartySource | null, - { - setThirdPartySource: (_, { sourceIndex }) => thirdPartySources[sourceIndex], - }, - ], - sidebarSteps: [ - Object.values(INGESTION_STEPS_WITHOUT_BILLING) as string[], - { - setSidebarSteps: (_, { steps }) => steps, - }, - ], - isDemoProject: [ - false as null | boolean, - { - setState: (_, { isDemoProject }) => isDemoProject, - }, - ], - generatingDemoData: [ - false as boolean | null, - { - setState: (_, { generatingDemoData }) => generatingDemoData, - }, - ], - pollTimeout: [ - 0, - { - setPollTimeout: (_, payload) => payload.pollTimeout, - }, - ], - isProjectSwitcherShown: [ - false, - { - toggleProjectSwitcher: (state) => !state, - hideProjectSwitcher: () => false, - }, - ], - }), - loaders(({ actions, values }) => ({ - isDemoDataReady: [ - false as boolean, - { - checkIfDemoDataIsReady: async (_, breakpoint) => { - await breakpoint(1) - - clearTimeout(values.pollTimeout) - - try { - const res = await api.get('api/projects/@current/is_generating_demo_data') - if (!res.is_generating_demo_data) { - return true - } - const pollTimeoutMilliseconds = 1000 - const timeout = window.setTimeout(actions.checkIfDemoDataIsReady, pollTimeoutMilliseconds) - actions.setPollTimeout(timeout) - return false - } catch (e) { - return false - 
} - }, - }, - ], - })), - selectors(() => ({ - currentState: [ - (s) => [ - s.platform, - s.framework, - s.readyToVerify, - s.showSuperpowers, - s.showBilling, - s.isTechnicalUser, - s.hasInvitedMembers, - s.isDemoProject, - s.generatingDemoData, - ], - ( - platform, - framework, - readyToVerify, - showSuperpowers, - showBilling, - isTechnicalUser, - hasInvitedMembers, - isDemoProject, - generatingDemoData - ) => ({ - platform, - framework, - readyToVerify, - showSuperpowers, - showBilling, - isTechnicalUser, - hasInvitedMembers, - isDemoProject, - generatingDemoData, - }), - ], - currentView: [ - (s) => [s.currentState], - ({ - isTechnicalUser, - platform, - framework, - readyToVerify, - showSuperpowers, - showBilling, - hasInvitedMembers, - isDemoProject, - generatingDemoData, - }) => { - if (isDemoProject) { - return INGESTION_VIEWS.NO_DEMO_INGESTION - } - if (showBilling) { - return INGESTION_VIEWS.BILLING - } - if (showSuperpowers) { - return INGESTION_VIEWS.SUPERPOWERS - } - if (readyToVerify) { - return INGESTION_VIEWS.VERIFICATION - } - if (isTechnicalUser) { - if (!platform) { - return INGESTION_VIEWS.CHOOSE_PLATFORM - } - if (framework || platform === WEB) { - return INGESTION_VIEWS.WEB_INSTRUCTIONS - } - if (platform === MOBILE || platform === BACKEND) { - return INGESTION_VIEWS.CHOOSE_FRAMEWORK - } - if (platform === THIRD_PARTY) { - return INGESTION_VIEWS.CHOOSE_THIRD_PARTY - } - // could be null, so we check that it's set to false - } else if (isTechnicalUser === false) { - if (generatingDemoData) { - return INGESTION_VIEWS.GENERATING_DEMO_DATA - } - if (hasInvitedMembers) { - return INGESTION_VIEWS.TEAM_INVITED - } - if (!platform && !readyToVerify) { - return INGESTION_VIEWS.INVITE_TEAM - } - } - return INGESTION_VIEWS.INVITE_TEAM - }, - ], - currentStep: [ - (s) => [s.currentView], - (currentView) => { - return INGESTION_VIEW_TO_STEP[currentView] - }, - ], - previousStep: [ - (s) => [s.currentStep], - (currentStep) => { - const currentStepIndex = 
Object.values(INGESTION_STEPS).indexOf(currentStep) - return Object.values(INGESTION_STEPS)[currentStepIndex - 1] - }, - ], - frameworkString: [ - (s) => [s.framework], - (framework): string => { - if (framework) { - const frameworkStrings = { - NODEJS: 'Node.js', - GO: 'Go', - RUBY: 'Ruby', - PYTHON: 'Python', - PHP: 'PHP', - ELIXIR: 'Elixir', - ANDROID: 'Android', - IOS: 'iOS', - REACT_NATIVE: 'React Native', - FLUTTER: 'Flutter', - API: 'HTTP API', - } - return frameworkStrings[framework] || framework - } - return '' - }, - ], - showBillingStep: [ - (s) => [s.preflight], - (preflight): boolean => { - return !!preflight?.cloud && !preflight?.demo - }, - ], - })), - - actionToUrl(({ values }) => ({ - setState: () => getUrl(values), - updateCurrentTeamSuccess: (val) => { - if ( - (router.values.location.pathname.includes( - values.showBillingStep ? '/ingestion/billing' : '/ingestion/superpowers' - ) || - router.values.location.pathname.includes('/ingestion/invites-sent')) && - val.payload?.completed_snippet_onboarding - ) { - return combineUrl(urls.events(), { onboarding_completed: true }).url - } - }, - })), - - urlToAction(({ actions, values }) => ({ - '/ingestion': () => actions.goToView(INGESTION_VIEWS.INVITE_TEAM), - '/ingestion/invites-sent': () => actions.goToView(INGESTION_VIEWS.TEAM_INVITED), - '/ingestion/superpowers': () => actions.goToView(INGESTION_VIEWS.SUPERPOWERS), - '/ingestion/billing': () => actions.goToView(INGESTION_VIEWS.BILLING), - '/ingestion/verify': () => actions.goToView(INGESTION_VIEWS.VERIFICATION), - '/ingestion/platform': () => actions.goToView(INGESTION_VIEWS.CHOOSE_FRAMEWORK), - '/ingestion(/:platform)(/:framework)': (pathParams, searchParams) => { - const platform = pathParams.platform || searchParams.platform || null - const framework = pathParams.framework || searchParams.framework || null - actions.setState({ - isTechnicalUser: true, - hasInvitedMembers: null, - platform: platform, - framework: framework, - readyToVerify: false, 
- showBilling: false, - showSuperpowers: false, - isDemoProject: values.isDemoProject, - generatingDemoData: false, - }) - }, - })), - listeners(({ actions, values }) => ({ - next: (props) => { - actions.setState({ ...values.currentState, ...props } as IngestionState) - }, - goToView: ({ view }) => { - actions.setState(viewToState(view, values.currentState as IngestionState)) - }, - completeOnboarding: () => { - teamLogic.actions.updateCurrentTeam({ - completed_snippet_onboarding: true, - }) - if ( - !values.currentTeam?.session_recording_opt_in || - !values.currentTeam?.capture_console_log_opt_in || - !values.currentTeam?.capture_performance_opt_in - ) { - eventUsageLogic.actions.reportIngestionRecordingsTurnedOff( - !!values.currentTeam?.session_recording_opt_in, - !!values.currentTeam?.capture_console_log_opt_in, - !!values.currentTeam?.capture_performance_opt_in - ) - } - if (values.currentTeam?.autocapture_opt_out) { - eventUsageLogic.actions.reportIngestionAutocaptureToggled(!!values.currentTeam?.autocapture_opt_out) - } - }, - setPlatform: ({ platform }) => { - eventUsageLogic.actions.reportIngestionSelectPlatformType(platform) - }, - setFramework: ({ framework }) => { - eventUsageLogic.actions.reportIngestionSelectFrameworkType(framework) - }, - sidebarStepClick: ({ step }) => { - switch (step) { - case INGESTION_STEPS.START: - actions.goToView(INGESTION_VIEWS.INVITE_TEAM) - return - case INGESTION_STEPS.PLATFORM: - actions.goToView(INGESTION_VIEWS.CHOOSE_PLATFORM) - return - case INGESTION_STEPS.CONNECT_PRODUCT: - actions.goToView(INGESTION_VIEWS.CHOOSE_FRAMEWORK) - return - case INGESTION_STEPS.VERIFY: - actions.goToView(INGESTION_VIEWS.VERIFICATION) - return - case INGESTION_STEPS.BILLING: - actions.goToView(INGESTION_VIEWS.BILLING) - return - case INGESTION_STEPS.SUPERPOWERS: - actions.goToView(INGESTION_VIEWS.SUPERPOWERS) - return - default: - return - } - }, - onBack: () => { - switch (values.currentView) { - case INGESTION_VIEWS.BILLING: - return 
actions.goToView(INGESTION_VIEWS.VERIFICATION) - case INGESTION_VIEWS.SUPERPOWERS: - return actions.goToView(INGESTION_VIEWS.CHOOSE_FRAMEWORK) - case INGESTION_VIEWS.TEAM_INVITED: - return actions.goToView(INGESTION_VIEWS.INVITE_TEAM) - case INGESTION_VIEWS.CHOOSE_PLATFORM: - return actions.goToView(INGESTION_VIEWS.INVITE_TEAM) - case INGESTION_VIEWS.VERIFICATION: - return actions.goToView(INGESTION_VIEWS.SUPERPOWERS) - case INGESTION_VIEWS.WEB_INSTRUCTIONS: - return actions.goToView(INGESTION_VIEWS.CHOOSE_PLATFORM) - case INGESTION_VIEWS.CHOOSE_FRAMEWORK: - return actions.goToView(INGESTION_VIEWS.CHOOSE_PLATFORM) - // If they're on the InviteTeam step, but on the Team Invited panel, - // we still want them to be able to go back to the previous step. - // So this resets the state for that panel so they can go back. - case INGESTION_VIEWS.INVITE_TEAM: - return actions.goToView(INGESTION_VIEWS.INVITE_TEAM) - case INGESTION_VIEWS.CHOOSE_THIRD_PARTY: - return actions.goToView(INGESTION_VIEWS.CHOOSE_PLATFORM) - default: - return actions.goToView(INGESTION_VIEWS.INVITE_TEAM) - } - }, - inviteTeamMembersSuccess: () => { - if (router.values.location.pathname.includes(urls.ingestion())) { - actions.setState(viewToState(INGESTION_VIEWS.TEAM_INVITED, values.currentState as IngestionState)) - } - }, - createTeamSuccess: ({ currentTeam }) => { - if (window.location.href.includes(urls.ingestion()) && currentTeam.is_demo) { - actions.checkIfDemoDataIsReady(null) - } else { - window.location.href = urls.ingestion() - } - }, - checkIfDemoDataIsReadySuccess: ({ isDemoDataReady }) => { - if (isDemoDataReady) { - window.location.href = urls.default() - } - }, - })), - subscriptions(({ actions, values }) => ({ - showBillingStep: (value) => { - const steps = value ? 
INGESTION_STEPS : INGESTION_STEPS_WITHOUT_BILLING - actions.setSidebarSteps(Object.values(steps)) - }, - currentTeam: (currentTeam: TeamType) => { - if (currentTeam?.ingested_event && values.readyToVerify && !values.showBillingStep) { - actions.setCurrentStep(INGESTION_STEPS.DONE) - } - }, - })), -]) - -function getUrl(values: ingestionLogicType['values']): string | [string, Record] { - const { - isTechnicalUser, - platform, - framework, - readyToVerify, - showBilling, - showSuperpowers, - hasInvitedMembers, - generatingDemoData, - } = values - - let url = '/ingestion' - - if (showBilling) { - return url + '/billing' - } - - if (showSuperpowers) { - url += '/superpowers' - return [ - url, - { - platform: platform || undefined, - framework: framework?.toLowerCase() || undefined, - }, - ] - } - - if (readyToVerify) { - url += '/verify' - return [ - url, - { - platform: platform || undefined, - framework: framework?.toLowerCase() || undefined, - }, - ] - } - - if (isTechnicalUser) { - if (framework === API) { - url += '/api' - return [ - url, - { - platform: platform || undefined, - }, - ] - } - - if (platform === MOBILE) { - url += '/mobile' - } - - if (platform === WEB) { - url += '/web' - } - - if (platform === BACKEND) { - url += '/backend' - } - - if (generatingDemoData) { - url += '/just-exploring' - } - - if (platform === THIRD_PARTY) { - url += '/third-party' - } - - if (!platform) { - url += '/platform' - } - - if (framework) { - url += `/${framework.toLowerCase()}` - } - } else { - if (!platform && hasInvitedMembers) { - url += '/invites-sent' - } - } - - return url -} diff --git a/frontend/src/scenes/ingestion/panels/BillingPanel.tsx b/frontend/src/scenes/ingestion/panels/BillingPanel.tsx deleted file mode 100644 index b42fef3eb9de0..0000000000000 --- a/frontend/src/scenes/ingestion/panels/BillingPanel.tsx +++ /dev/null @@ -1,83 +0,0 @@ -import { useActions, useValues } from 'kea' -import { CardContainer } from 'scenes/ingestion/CardContainer' -import { 
ingestionLogic } from 'scenes/ingestion/ingestionLogic' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import './Panels.scss' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { LemonDivider } from '@posthog/lemon-ui' -import { billingLogic } from 'scenes/billing/billingLogic' -import { Billing } from 'scenes/billing/Billing' -import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' - -export function BillingPanel(): JSX.Element { - const { completeOnboarding } = useActions(ingestionLogic) - const { reportIngestionContinueWithoutBilling } = useActions(eventUsageLogic) - const { billing } = useValues(billingLogic) - - if (!billing) { - return ( - -
- - - -
-
- - -
- - ) - } - - const hasSubscribedToAllProducts = billing.products - .filter((product) => !product.contact_support) - .every((product) => product.subscribed) - const hasSubscribedToAnyProduct = billing.products.some((product) => product.subscribed) - - return ( - - {hasSubscribedToAllProducts ? ( -
-

You're good to go!

- -

- Your organisation is setup for billing with premium features and the increased free tiers - enabled. -

- { - completeOnboarding() - }} - > - Complete - -
- ) : ( -
-

Subscribe for access to all features

- - - - - { - completeOnboarding() - !hasSubscribedToAnyProduct && reportIngestionContinueWithoutBilling() - }} - > - {hasSubscribedToAnyProduct ? 'Continue' : 'Skip for now'} - -
- )} -
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/FrameworkPanel.tsx b/frontend/src/scenes/ingestion/panels/FrameworkPanel.tsx deleted file mode 100644 index 598012915577d..0000000000000 --- a/frontend/src/scenes/ingestion/panels/FrameworkPanel.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import { useActions, useValues } from 'kea' -import { CardContainer } from 'scenes/ingestion/CardContainer' -import { ingestionLogic } from '../ingestionLogic' -import { API, mobileFrameworks, BACKEND, webFrameworks } from 'scenes/ingestion/constants' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import './Panels.scss' -import { IngestionInviteMembersButton } from '../IngestionInviteMembersButton' - -export function FrameworkPanel(): JSX.Element { - const { next } = useActions(ingestionLogic) - const { platform } = useValues(ingestionLogic) - const frameworks = platform === BACKEND ? webFrameworks : mobileFrameworks - - return ( - -
-

- {platform === BACKEND ? 'Choose the framework your app is built in' : 'Pick a mobile platform'} -

-

- We'll provide you with snippets that you can easily add to your codebase to get started! -

-
- {(Object.keys(frameworks) as (keyof typeof frameworks)[]).map((item) => ( - next({ framework: item })} - > - {frameworks[item]} - - ))} - next({ framework: API })} - > - Other - - -
-
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/GeneratingDemoDataPanel.tsx b/frontend/src/scenes/ingestion/panels/GeneratingDemoDataPanel.tsx deleted file mode 100644 index a568dd9a82340..0000000000000 --- a/frontend/src/scenes/ingestion/panels/GeneratingDemoDataPanel.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import { useValues } from 'kea' -import { LemonBanner } from 'lib/lemon-ui/LemonBanner' -import { Spinner } from 'lib/lemon-ui/Spinner/Spinner' -import { organizationLogic } from 'scenes/organizationLogic' -import { CardContainer } from '../CardContainer' -import './Panels.scss' - -export function GeneratingDemoDataPanel(): JSX.Element { - const { currentOrganization } = useValues(organizationLogic) - return ( - -
-
-
- -
-

Generating demo data...

-

- Your demo data is on the way! This can take up to one minute - we'll redirect you when your demo - data is ready. -

- - We're using a demo project. Your other {currentOrganization?.name} projects won't be - affected. - -
-
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/InstructionsPanel.scss b/frontend/src/scenes/ingestion/panels/InstructionsPanel.scss deleted file mode 100644 index 5d7b1c7ce4408..0000000000000 --- a/frontend/src/scenes/ingestion/panels/InstructionsPanel.scss +++ /dev/null @@ -1,30 +0,0 @@ -.InstructionsPanel { - max-width: 50rem; - - h1 { - font-size: 28px; - font-weight: 800; - line-height: 40px; - letter-spacing: -0.02em; - } - - h2 { - font-size: 20px; - font-weight: 800; - line-height: 24px; - letter-spacing: -0.02em; - margin-top: 0.5rem; - } - - h3 { - font-size: 16px; - font-weight: 700; - line-height: 24px; - letter-spacing: 0; - margin-top: 0.5rem; - } - - ol { - padding-left: 1rem; - } -} diff --git a/frontend/src/scenes/ingestion/panels/InstructionsPanel.tsx b/frontend/src/scenes/ingestion/panels/InstructionsPanel.tsx deleted file mode 100644 index 390620eaf2493..0000000000000 --- a/frontend/src/scenes/ingestion/panels/InstructionsPanel.tsx +++ /dev/null @@ -1,78 +0,0 @@ -import './InstructionsPanel.scss' -import { CardContainer } from 'scenes/ingestion/CardContainer' -import { - AndroidInstructions, - APIInstructions, - ElixirInstructions, - FlutterInstructions, - GoInstructions, - IOSInstructions, - NodeInstructions, - PHPInstructions, - PythonInstructions, - RNInstructions, - RubyInstructions, -} from 'scenes/ingestion/frameworks' -import { API, MOBILE, BACKEND, WEB } from '../constants' -import { useValues } from 'kea' -import { ingestionLogic } from '../ingestionLogic' -import { WebInstructions } from '../frameworks/WebInstructions' -import { Link } from '@posthog/lemon-ui' - -const frameworksSnippet: Record = { - NODEJS: NodeInstructions, - GO: GoInstructions, - RUBY: RubyInstructions, - PYTHON: PythonInstructions, - PHP: PHPInstructions, - ELIXIR: ElixirInstructions, - ANDROID: AndroidInstructions, - IOS: IOSInstructions, - REACT_NATIVE: RNInstructions, - FLUTTER: FlutterInstructions, - API: APIInstructions, -} - -export function 
InstructionsPanel(): JSX.Element { - const { platform, framework, frameworkString } = useValues(ingestionLogic) - - if (platform !== WEB && !framework) { - return <> - } - - const FrameworkSnippet: React.ComponentType = frameworksSnippet[framework as string] as React.ComponentType - - return ( -
- {platform === WEB ? ( - - - - ) : framework === API ? ( - -

{frameworkString}

-

- Need a different framework? Our HTTP API is a flexible way to use PostHog anywhere. Try the - endpoint below to send your first event, and view our API docs{' '} - here. -

- -
- ) : ( - -

{`Setup ${frameworkString}`}

- - {platform === BACKEND ? ( - <> -

- Follow the instructions below to send custom events from your {frameworkString} backend. -

- - - ) : null} - {platform === MOBILE ? : null} -
- )} -
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/InviteTeamPanel.tsx b/frontend/src/scenes/ingestion/panels/InviteTeamPanel.tsx deleted file mode 100644 index c6a30c5cf484f..0000000000000 --- a/frontend/src/scenes/ingestion/panels/InviteTeamPanel.tsx +++ /dev/null @@ -1,66 +0,0 @@ -import { useActions } from 'kea' -import { ingestionLogic } from 'scenes/ingestion/ingestionLogic' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import './Panels.scss' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { IconChevronRight } from 'lib/lemon-ui/icons' -import { inviteLogic } from 'scenes/settings/organization/inviteLogic' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { DemoProjectButton } from './PanelComponents' - -export function InviteTeamPanel(): JSX.Element { - const { next } = useActions(ingestionLogic) - const { showInviteModal } = useActions(inviteLogic) - const { reportInviteMembersButtonClicked } = useActions(eventUsageLogic) - - return ( -
-

Welcome to PostHog

-

- PostHog enables you to understand your customers, answer product questions, and test new features{' '} - - all in our comprehensive product suite. To get started, we'll need to add a code snippet to your - product. -

- -
- next({ isTechnicalUser: true })} - fullWidth - size="large" - className="mb-4" - type="primary" - sideIcon={} - > -
-

I can add a code snippet to my product.

-

- Available for JavaScript, Android, iOS, React Native, Node.js, Ruby, Go, and more. -

-
-
- { - showInviteModal() - reportInviteMembersButtonClicked() - }} - fullWidth - size="large" - className="mb-4" - type="secondary" - sideIcon={} - > -
-

I'll need a team member to add the code snippet to our product.

-

- We'll send an invite and instructions for getting the code snippet added. -

-
-
- -
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/NoDemoIngestionPanel.tsx b/frontend/src/scenes/ingestion/panels/NoDemoIngestionPanel.tsx deleted file mode 100644 index 722ba20e9e603..0000000000000 --- a/frontend/src/scenes/ingestion/panels/NoDemoIngestionPanel.tsx +++ /dev/null @@ -1,40 +0,0 @@ -import { LemonButton } from '@posthog/lemon-ui' -import { useActions, useValues } from 'kea' -import { IconArrowRight } from 'lib/lemon-ui/icons' -import { organizationLogic } from 'scenes/organizationLogic' -import { userLogic } from 'scenes/userLogic' -import { CardContainer } from '../CardContainer' -import './Panels.scss' - -export function NoDemoIngestionPanel(): JSX.Element { - const { currentOrganization } = useValues(organizationLogic) - const { updateCurrentTeam } = useActions(userLogic) - - return ( - -
-

Whoops!

-

- New events can't be ingested into a demo project. But, you can switch to another project if you'd - like: -

-
- {currentOrganization?.teams - ?.filter((team) => !team.is_demo) - .map((team) => ( -

- } - fullWidth - onClick={() => updateCurrentTeam(team.id)} - > - {team.name} - -

- ))} -
-
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/PanelComponents.tsx b/frontend/src/scenes/ingestion/panels/PanelComponents.tsx deleted file mode 100644 index 4d817149a280b..0000000000000 --- a/frontend/src/scenes/ingestion/panels/PanelComponents.tsx +++ /dev/null @@ -1,123 +0,0 @@ -import { useActions, useValues } from 'kea' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { ingestionLogic, INGESTION_STEPS, IngestionState } from '../ingestionLogic' -import './Panels.scss' -import { IconArrowLeft, IconChevronRight } from 'lib/lemon-ui/icons' -import { IngestionInviteMembersButton } from '../IngestionInviteMembersButton' -import { teamLogic } from 'scenes/teamLogic' -import { organizationLogic } from 'scenes/organizationLogic' -import { userLogic } from 'scenes/userLogic' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' - -const DEMO_TEAM_NAME: string = 'Hedgebox' - -export function PanelFooter({ - nextProps, - onContinue, - finalStep = false, - showInviteTeamMembers = true, -}: { - nextProps: Partial - onContinue?: () => void - finalStep?: boolean - showInviteTeamMembers?: boolean -}): JSX.Element { - const { next } = useActions(ingestionLogic) - - return ( -
- -
- { - onContinue && onContinue() - next(nextProps) - }} - > - {finalStep ? 'Complete' : 'Continue'} - - {showInviteTeamMembers && } -
-
- ) -} - -export function PanelHeader(): JSX.Element | null { - const { isSmallScreen, previousStep, currentStep, hasInvitedMembers } = useValues(ingestionLogic) - const { onBack } = useActions(ingestionLogic) - - // no back buttons on the Getting Started step - // but only if it's not the MembersInvited panel - // (since they'd want to be able to go back from there) - if (currentStep === INGESTION_STEPS.START && !hasInvitedMembers) { - return null - } - - return ( -
- } size="small"> - {isSmallScreen - ? '' - : // If we're on the MembersInvited panel, they "go back" to - // the Get Started step, even though it's technically the same step - currentStep === INGESTION_STEPS.START && hasInvitedMembers - ? currentStep - : previousStep} - -
- ) -} - -export function DemoProjectButton({ text, subtext }: { text: string; subtext?: string }): JSX.Element { - const { next } = useActions(ingestionLogic) - const { createTeam } = useActions(teamLogic) - const { currentOrganization } = useValues(organizationLogic) - const { updateCurrentTeam } = useActions(userLogic) - const { reportIngestionTryWithDemoDataClicked, reportProjectCreationSubmitted } = useActions(eventUsageLogic) - const { featureFlags } = useValues(featureFlagLogic) - - if (featureFlags[FEATURE_FLAGS.ONBOARDING_V2_DEMO] !== 'test') { - return <> - } - return ( - { - // If the current org has a demo team, just navigate there - if (currentOrganization?.teams && currentOrganization.teams.filter((team) => team.is_demo).length > 0) { - updateCurrentTeam(currentOrganization.teams.filter((team) => team.is_demo)[0].id) - } else { - // Create a new demo team - createTeam({ name: DEMO_TEAM_NAME, is_demo: true }) - next({ isTechnicalUser: false, generatingDemoData: true }) - reportProjectCreationSubmitted( - currentOrganization?.teams ? currentOrganization.teams.length : 0, - DEMO_TEAM_NAME.length - ) - } - reportIngestionTryWithDemoDataClicked() - }} - fullWidth - size="large" - className="ingestion-view-demo-data mb-4" - type="secondary" - sideIcon={} - > -
-

- {currentOrganization?.teams && currentOrganization.teams.filter((team) => team.is_demo).length > 0 - ? 'Explore the demo project' - : text} -

- {subtext ?

{subtext}

: null} -
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/Panels.scss b/frontend/src/scenes/ingestion/panels/Panels.scss deleted file mode 100644 index ca98aa806c405..0000000000000 --- a/frontend/src/scenes/ingestion/panels/Panels.scss +++ /dev/null @@ -1,37 +0,0 @@ -.FrameworkPanel { - max-width: 400px; -} - -.panel-footer { - background-color: white; - margin-bottom: 1rem; - bottom: 0; -} - -.ingestion-title { - font-size: 28px; - font-weight: 700; - line-height: 40px; - display: flex; - align-items: center; - gap: 0.5rem; - margin: 0; -} - -.IngestionSubtitle { - font-size: 20px; - font-weight: 800; - margin: 1rem 0; -} - -.prompt-text { - margin-top: 1rem; -} - -.ingestion-listening-for-events { - display: flex; - flex-direction: column; - align-items: center; - text-align: center; - margin-bottom: 1rem; -} diff --git a/frontend/src/scenes/ingestion/panels/PlatformPanel.tsx b/frontend/src/scenes/ingestion/panels/PlatformPanel.tsx deleted file mode 100644 index 5ee33d73597c5..0000000000000 --- a/frontend/src/scenes/ingestion/panels/PlatformPanel.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import { useActions } from 'kea' -import { ingestionLogic } from '../ingestionLogic' -import { THIRD_PARTY, platforms } from '../constants' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import './Panels.scss' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { IngestionInviteMembersButton } from '../IngestionInviteMembersButton' - -export function PlatformPanel(): JSX.Element { - const { next } = useActions(ingestionLogic) - - return ( -
-

Where do you want to send events from?

-

- With PostHog, you can collect events from nearly anywhere. Select one to start, and you can always add - more sources later. -

- -
- {platforms.map((platform) => ( - next({ platform })} - > - {platform} - - ))} - next({ platform: THIRD_PARTY })} - fullWidth - center - size="large" - className="mb-2" - type="primary" - > - Import events from a third party - - -
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/SuperpowersPanel.tsx b/frontend/src/scenes/ingestion/panels/SuperpowersPanel.tsx deleted file mode 100644 index 5ec5cad19e87e..0000000000000 --- a/frontend/src/scenes/ingestion/panels/SuperpowersPanel.tsx +++ /dev/null @@ -1,87 +0,0 @@ -import { LemonSwitch, Link } from '@posthog/lemon-ui' -import { CardContainer } from 'scenes/ingestion/CardContainer' -import { useActions, useValues } from 'kea' -import { SupportHeroHog } from 'lib/components/hedgehogs' -import { useState } from 'react' -import { teamLogic } from 'scenes/teamLogic' -import { ingestionLogic } from '../ingestionLogic' - -export function SuperpowersPanel(): JSX.Element { - const { updateCurrentTeam } = useActions(teamLogic) - const { showBillingStep } = useValues(ingestionLogic) - const { completeOnboarding } = useActions(ingestionLogic) - const [sessionRecordingsChecked, setSessionRecordingsChecked] = useState(true) - const [autocaptureChecked, setAutocaptureChecked] = useState(true) - - return ( - { - updateCurrentTeam({ - session_recording_opt_in: sessionRecordingsChecked, - capture_console_log_opt_in: sessionRecordingsChecked, - capture_performance_opt_in: sessionRecordingsChecked, - autocapture_opt_out: !autocaptureChecked, - }) - if (!showBillingStep) { - completeOnboarding() - } - }} - finalStep={!showBillingStep} - > -
-
-

Enable your product superpowers

-

- Collecting events from your app is just the first step toward building great products. PostHog - gives you other superpowers, too, like recording user sessions and automagically capturing - frontend interactions. -

-
-
- -
-
-
- { - setSessionRecordingsChecked(checked) - }} - label="Record user sessions" - fullWidth={true} - labelClassName={'text-base font-semibold'} - checked={sessionRecordingsChecked} - /> -

- See recordings of how your users are really using your product with powerful features like error - tracking, filtering, and analytics.{' '} - - Learn more - {' '} - about Session recordings. -

-
-
- { - setAutocaptureChecked(checked) - }} - label="Autocapture frontend interactions" - fullWidth={true} - labelClassName={'text-base font-semibold'} - checked={autocaptureChecked} - /> -

- If you use our JavaScript or React Native libraries, we'll automagically capture frontend - interactions like pageviews, clicks, and more.{' '} - - Fine-tune what you capture - {' '} - directly in your code snippet. -

-
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/TeamInvitedPanel.tsx b/frontend/src/scenes/ingestion/panels/TeamInvitedPanel.tsx deleted file mode 100644 index 5d6365d22c335..0000000000000 --- a/frontend/src/scenes/ingestion/panels/TeamInvitedPanel.tsx +++ /dev/null @@ -1,45 +0,0 @@ -import { useActions } from 'kea' -import { ingestionLogic } from 'scenes/ingestion/ingestionLogic' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import './Panels.scss' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { IconChevronRight } from 'lib/lemon-ui/icons' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { DemoProjectButton } from './PanelComponents' - -export function TeamInvitedPanel(): JSX.Element { - const { completeOnboarding } = useActions(ingestionLogic) - const { reportIngestionContinueWithoutVerifying } = useActions(eventUsageLogic) - - return ( -
-

Help is on the way!

-

You can still explore PostHog while you wait for your team members to join.

- -
- - { - completeOnboarding() - reportIngestionContinueWithoutVerifying() - }} - fullWidth - size="large" - className="mb-4" - type="secondary" - sideIcon={} - > -
-

Continue without any events.

-

- It might look a little empty in there, but we'll do our best. -

-
-
-
-
- ) -} diff --git a/frontend/src/scenes/ingestion/panels/ThirdPartyIcons.tsx b/frontend/src/scenes/ingestion/panels/ThirdPartyIcons.tsx deleted file mode 100644 index 1ebb0e6545346..0000000000000 --- a/frontend/src/scenes/ingestion/panels/ThirdPartyIcons.tsx +++ /dev/null @@ -1,58 +0,0 @@ -export const Segment = (props: React.SVGProps): JSX.Element => { - return ( - - - - - - - - - - - - - ) -} - -export const RSS = (props: React.SVGProps): JSX.Element => { - return ( - - - - - - - - - - - - - - - - - - - - ) -} diff --git a/frontend/src/scenes/ingestion/panels/ThirdPartyPanel.tsx b/frontend/src/scenes/ingestion/panels/ThirdPartyPanel.tsx deleted file mode 100644 index 8fd51654aafb9..0000000000000 --- a/frontend/src/scenes/ingestion/panels/ThirdPartyPanel.tsx +++ /dev/null @@ -1,156 +0,0 @@ -import { useValues, useActions } from 'kea' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import { CardContainer } from '../CardContainer' -import { ingestionLogic } from '../ingestionLogic' -import './Panels.scss' -import { LemonModal } from 'lib/lemon-ui/LemonModal' -import { thirdPartySources } from '../constants' -import { IconOpenInNew } from 'lib/lemon-ui/icons' -import { CodeSnippet } from 'lib/components/CodeSnippet' -import { teamLogic } from 'scenes/teamLogic' -import { LemonTag } from 'lib/lemon-ui/LemonTag/LemonTag' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { Link } from '@posthog/lemon-ui' - -export function ThirdPartyPanel(): JSX.Element { - const { setInstructionsModal, setThirdPartySource } = useActions(ingestionLogic) - const { reportIngestionThirdPartyAboutClicked, reportIngestionThirdPartyConfigureClicked } = - useActions(eventUsageLogic) - - return ( - -
-

Set up third-party integrations

- {thirdPartySources.map((source, idx) => { - return ( -
-
-
-
{source.icon}
-
-

- {source.name} Import - {source.labels?.map((label, labelIdx) => ( - - {label} - - ))} -

-

- {source.description - ? source.description - : `Send events from ${source.name} into PostHog`} -

-
-
-
- { - reportIngestionThirdPartyAboutClicked(source.name) - }} - > - About - - { - setThirdPartySource(idx) - setInstructionsModal(true) - reportIngestionThirdPartyConfigureClicked(source.name) - }} - > - Configure - -
-
-
- ) - })} -
- -
- ) -} - -export function IntegrationInstructionsModal(): JSX.Element { - const { instructionsModalOpen, thirdPartyIntegrationSource } = useValues(ingestionLogic) - const { setInstructionsModal } = useActions(ingestionLogic) - const { currentTeam } = useValues(teamLogic) - - return ( - <> - {thirdPartyIntegrationSource?.name && ( - setInstructionsModal(false)} - title="Configure integration" - footer={ - setInstructionsModal(false)}> - Done - - } - > -
-

- {thirdPartyIntegrationSource.icon} - Integrate with {thirdPartyIntegrationSource.name} -

-
-
-

- The{' '} - - {thirdPartyIntegrationSource.name} docs page for the PostHog integration - {' '} - provides a detailed overview of how to set up this integration. -

- PostHog Project API Key - {currentTeam?.api_token || ''} -
-
- window.open(thirdPartyIntegrationSource.aboutLink)} - sideIcon={} - > - Take me to the {thirdPartyIntegrationSource.name} docs - -
-

Steps:

-
    -
  1. Complete the steps for the {thirdPartyIntegrationSource.name} integration.
  2. -
  3. - Close this step and click continue to begin listening for events. -
  4. -
-
-
-
- )} - - ) -} diff --git a/frontend/src/scenes/ingestion/panels/VerificationPanel.tsx b/frontend/src/scenes/ingestion/panels/VerificationPanel.tsx deleted file mode 100644 index a6138f22fa438..0000000000000 --- a/frontend/src/scenes/ingestion/panels/VerificationPanel.tsx +++ /dev/null @@ -1,62 +0,0 @@ -import { useActions, useValues } from 'kea' -import { useInterval } from 'lib/hooks/useInterval' -import { CardContainer } from '../CardContainer' -import { ingestionLogic } from '../ingestionLogic' -import { teamLogic } from 'scenes/teamLogic' -import { Spinner } from 'lib/lemon-ui/Spinner/Spinner' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import './Panels.scss' -import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { IngestionInviteMembersButton } from '../IngestionInviteMembersButton' - -export function VerificationPanel(): JSX.Element { - const { loadCurrentTeam } = useActions(teamLogic) - const { currentTeam } = useValues(teamLogic) - const { next } = useActions(ingestionLogic) - const { reportIngestionContinueWithoutVerifying } = useActions(eventUsageLogic) - - useInterval(() => { - if (!currentTeam?.ingested_event) { - loadCurrentTeam() - } - }, 2000) - - return !currentTeam?.ingested_event ? ( - -
-
- -

Listening for events...

-

- Once you have integrated the snippet and sent an event, we will verify it was properly received - and continue. -

- - { - next({ showSuperpowers: true }) - reportIngestionContinueWithoutVerifying() - }} - > - or continue without verifying - -
-
-
- ) : ( - -
-
-

Successfully sent events!

-

- You will now be able to explore PostHog and take advantage of all its features to understand - your users. -

-
-
-
- ) -} diff --git a/frontend/src/scenes/ingestion/types.ts b/frontend/src/scenes/ingestion/types.ts deleted file mode 100644 index 7d64652d8cdb5..0000000000000 --- a/frontend/src/scenes/ingestion/types.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { API, MOBILE, mobileFrameworks, BACKEND, WEB, webFrameworks, THIRD_PARTY } from 'scenes/ingestion/constants' - -export type Framework = keyof typeof webFrameworks | keyof typeof mobileFrameworks | typeof API | null - -export type PlatformType = typeof WEB | typeof MOBILE | typeof BACKEND | typeof THIRD_PARTY | null diff --git a/frontend/src/scenes/insights/Insight.scss b/frontend/src/scenes/insights/Insight.scss index 72c588db18842..e0c6b1dd0f13b 100644 --- a/frontend/src/scenes/insights/Insight.scss +++ b/frontend/src/scenes/insights/Insight.scss @@ -10,11 +10,3 @@ } } } - -.insight-metadata-tags { - margin-top: 0.5rem; - - .ant-tag { - margin-top: 0; - } -} diff --git a/frontend/src/scenes/insights/InsightPageHeader.tsx b/frontend/src/scenes/insights/InsightPageHeader.tsx index e6ba4ea5b9f2f..c123af4ac782e 100644 --- a/frontend/src/scenes/insights/InsightPageHeader.tsx +++ b/frontend/src/scenes/insights/InsightPageHeader.tsx @@ -256,7 +256,7 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In - deleteWithUndo({ + void deleteWithUndo({ object: insight, endpoint: `projects/${currentTeamId}/insights`, callback: () => { @@ -345,14 +345,14 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In saving={insightSaving} onChange={(_, tags) => setInsightMetadata({ tags: tags ?? [] })} tagsAvailable={tags} - className="insight-metadata-tags" + className="mt-2" data-attr="insight-tags" /> ) : insight.tags?.length ? 
( diff --git a/frontend/src/scenes/insights/aggregationAxisFormats.test.ts b/frontend/src/scenes/insights/aggregationAxisFormats.test.ts index 7989f851c3396..95582e5b81aa6 100644 --- a/frontend/src/scenes/insights/aggregationAxisFormats.test.ts +++ b/frontend/src/scenes/insights/aggregationAxisFormats.test.ts @@ -25,7 +25,9 @@ describe('formatAggregationAxisValue', () => { }, ] formatTestcases.forEach((testcase) => { - it(`correctly formats "${testcase.candidate}" as ${testcase.expected} when filters are ${testcase.filters}`, () => { + it(`correctly formats "${testcase.candidate}" as ${testcase.expected} when filters are ${JSON.stringify( + testcase.filters + )}`, () => { expect(formatAggregationAxisValue(testcase.filters as Partial, testcase.candidate)).toEqual( testcase.expected ) diff --git a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts index 9c59d413c9a21..6e6cfc856b639 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts +++ b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts @@ -113,7 +113,7 @@ export const entityFilterLogic = kea([ }, ], localFilters: [ - toLocalFilters(props.filters ?? {}) as LocalFilter[], + toLocalFilters(props.filters ?? {}), { setLocalFilters: (_, { filters }) => toLocalFilters(filters), }, @@ -176,9 +176,7 @@ export const entityFilterLogic = kea([ }, updateFilterProperty: async ({ properties, index }) => { actions.setFilters( - values.localFilters.map( - (filter, i) => (i === index ? { ...filter, properties } : filter) as LocalFilter - ) + values.localFilters.map((filter, i) => (i === index ? 
{ ...filter, properties } : filter)) ) }, updateFilterMath: async ({ index, ...mathProperties }) => { diff --git a/frontend/src/scenes/insights/filters/AggregationSelect.tsx b/frontend/src/scenes/insights/filters/AggregationSelect.tsx index 766860da847a8..97104d7f22315 100644 --- a/frontend/src/scenes/insights/filters/AggregationSelect.tsx +++ b/frontend/src/scenes/insights/filters/AggregationSelect.tsx @@ -4,7 +4,7 @@ import { LemonSelect, LemonSelectSection } from '@posthog/lemon-ui' import { groupsAccessLogic } from 'lib/introductions/groupsAccessLogic' import { GroupIntroductionFooter } from 'scenes/groups/GroupsIntroduction' import { InsightLogicProps } from '~/types' -import { isFunnelsQuery, isInsightQueryNode } from '~/queries/utils' +import { isFunnelsQuery, isInsightQueryNode, isLifecycleQuery, isStickinessQuery } from '~/queries/utils' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import { FunnelsQuery } from '~/queries/schema' import { HogQLEditor } from 'lib/components/HogQLEditor/HogQLEditor' @@ -51,7 +51,9 @@ export function AggregationSelect({ } const value = getHogQLValue( - querySource.aggregation_group_type_index, + isLifecycleQuery(querySource) || isStickinessQuery(querySource) + ? undefined + : querySource.aggregation_group_type_index, isFunnelsQuery(querySource) ? 
querySource.funnelsFilter?.funnel_aggregate_by_hogql : undefined ) const onChange = (value: string): void => { diff --git a/frontend/src/scenes/insights/insightDataTimingLogic.ts b/frontend/src/scenes/insights/insightDataTimingLogic.ts index d84ce7b8e240f..688c0bf4a7fba 100644 --- a/frontend/src/scenes/insights/insightDataTimingLogic.ts +++ b/frontend/src/scenes/insights/insightDataTimingLogic.ts @@ -51,8 +51,7 @@ export const insightDataTimingLogic = kea([ } const duration = performance.now() - values.queryStartTimes[payload.queryId] - - captureTimeToSeeData(values.currentTeamId, { + void captureTimeToSeeData(values.currentTeamId, { type: 'insight_load', context: 'insight', primary_interaction_id: payload.queryId, @@ -66,6 +65,7 @@ export const insightDataTimingLogic = kea([ insight: values.query.kind, is_primary_interaction: true, }) + actions.removeQuery(payload.queryId) }, loadDataFailure: ({ errorObject }) => { @@ -75,8 +75,7 @@ export const insightDataTimingLogic = kea([ } const duration = performance.now() - values.queryStartTimes[errorObject.queryId] - - captureTimeToSeeData(values.currentTeamId, { + void captureTimeToSeeData(values.currentTeamId, { type: 'insight_load', context: 'insight', primary_interaction_id: errorObject.queryId, @@ -90,12 +89,12 @@ export const insightDataTimingLogic = kea([ insight: values.query.kind, is_primary_interaction: true, }) + actions.removeQuery(errorObject.queryId) }, loadDataCancellation: (payload) => { const duration = performance.now() - values.queryStartTimes[payload.queryId] - - captureTimeToSeeData(values.currentTeamId, { + void captureTimeToSeeData(values.currentTeamId, { type: 'insight_load', context: 'insight', primary_interaction_id: payload.queryId, @@ -107,6 +106,7 @@ export const insightDataTimingLogic = kea([ api_response_bytes: 0, insight: values.query.kind, }) + actions.removeQuery(payload.queryId) }, })), diff --git a/frontend/src/scenes/insights/insightSceneLogic.test.ts 
b/frontend/src/scenes/insights/insightSceneLogic.test.ts index 15749a0d4caf4..1d8b623a087c2 100644 --- a/frontend/src/scenes/insights/insightSceneLogic.test.ts +++ b/frontend/src/scenes/insights/insightSceneLogic.test.ts @@ -49,7 +49,7 @@ describe('insightSceneLogic', () => { location: partial({ pathname: urls.insightNew(), search: '', hash: '' }), }) - await expect(logic.values.insightLogicRef?.logic.values.filters.insight).toEqual(InsightType.FUNNELS) + expect(logic.values.insightLogicRef?.logic.values.filters.insight).toEqual(InsightType.FUNNELS) }) it('persists edit mode in the url', async () => { diff --git a/frontend/src/scenes/insights/insightSceneLogic.tsx b/frontend/src/scenes/insights/insightSceneLogic.tsx index 3ff71b3e53720..e626919fe5c95 100644 --- a/frontend/src/scenes/insights/insightSceneLogic.tsx +++ b/frontend/src/scenes/insights/insightSceneLogic.tsx @@ -85,14 +85,19 @@ export const insightSceneLogic = kea([ insightSelector: [(s) => [s.insightLogicRef], (insightLogicRef) => insightLogicRef?.logic.selectors.insight], insight: [(s) => [(state, props) => s.insightSelector?.(state, props)?.(state, props)], (insight) => insight], breadcrumbs: [ - (s) => [s.insight], - (insight): Breadcrumb[] => [ + (s) => [s.insight, s.insightLogicRef], + (insight, insightLogicRef): Breadcrumb[] => [ { - name: 'Insights', + key: Scene.SavedInsights, + name: 'Product analytics', path: urls.savedInsights(), }, { + key: insight?.short_id || 'new', name: insight?.name || insight?.derived_name || 'Unnamed', + onRename: async (name: string) => { + await insightLogicRef?.logic.asyncActions.setInsightMetadata({ name }) + }, }, ], ], diff --git a/frontend/src/scenes/insights/insightVizDataLogic.ts b/frontend/src/scenes/insights/insightVizDataLogic.ts index 62fd9dddc8c5a..4b9c09fb8bc00 100644 --- a/frontend/src/scenes/insights/insightVizDataLogic.ts +++ b/frontend/src/scenes/insights/insightVizDataLogic.ts @@ -16,7 +16,6 @@ import { FunnelsQuery, InsightFilter, 
InsightQueryNode, - InsightVizNode, Node, NodeKind, TrendsFilter, @@ -169,6 +168,7 @@ export const insightVizDataLogic = kea([ showLegend: [(s) => [s.querySource], (q) => (q ? getShowLegend(q) : null)], showValueOnSeries: [(s) => [s.querySource], (q) => (q ? getShowValueOnSeries(q) : null)], showPercentStackView: [(s) => [s.querySource], (q) => (q ? getShowPercentStackView(q) : null)], + vizSpecificOptions: [(s) => [s.query], (q: Node) => (isInsightVizNode(q) ? q.vizSpecificOptions : null)], insightFilter: [(s) => [s.querySource], (q) => (q ? filterForQuery(q) : null)], trendsFilter: [(s) => [s.querySource], (q) => (isTrendsQuery(q) ? q.trendsFilter : null)], @@ -343,7 +343,7 @@ export const insightVizDataLogic = kea([ setQuery: ({ query }) => { if (isInsightVizNode(query)) { if (props.setQuery) { - props.setQuery(query as InsightVizNode) + props.setQuery(query) } const querySource = query.source diff --git a/frontend/src/scenes/insights/sharedUtils.ts b/frontend/src/scenes/insights/sharedUtils.ts index 109a77e398aed..fa0c33b799745 100644 --- a/frontend/src/scenes/insights/sharedUtils.ts +++ b/frontend/src/scenes/insights/sharedUtils.ts @@ -45,7 +45,7 @@ export function filterTrendsClientSideParams( return newFilters } -export function isTrendsInsight(insight?: InsightType | InsightType): boolean { +export function isTrendsInsight(insight?: InsightType): boolean { return insight === InsightType.TRENDS || insight === InsightType.LIFECYCLE || insight === InsightType.STICKINESS } diff --git a/frontend/src/scenes/insights/summarizeInsight.ts b/frontend/src/scenes/insights/summarizeInsight.ts index 2ab9196c52862..2efd49cf79677 100644 --- a/frontend/src/scenes/insights/summarizeInsight.ts +++ b/frontend/src/scenes/insights/summarizeInsight.ts @@ -1,5 +1,5 @@ import { AnyPartialFilterType, EntityFilter, FilterType, FunnelVizType, StepOrderValue } from '~/types' -import { BreakdownFilter, InsightQueryNode, Node, StickinessQuery } from '~/queries/schema' +import { 
BreakdownFilter, InsightQueryNode, Node } from '~/queries/schema' import { KEY_MAPPING } from 'lib/taxonomy' import { toLocalFilters } from 'scenes/insights/filters/ActionFilter/entityFilterLogic' import { @@ -272,7 +272,7 @@ function summarizeInsightQuery(query: InsightQueryNode, context: SummaryContext) return summary } else if (isStickinessQuery(query)) { return capitalizeFirstLetter( - (query as StickinessQuery).series + query.series .map((s) => { const actor = context.aggregationLabel(s.math_group_type_index, true).singular return `${actor} stickiness based on ${getDisplayNameFromEntityNode(s)}` diff --git a/frontend/src/scenes/insights/views/Histogram/Histogram.scss b/frontend/src/scenes/insights/views/Histogram/Histogram.scss index e6d3138958889..2742347145d09 100644 --- a/frontend/src/scenes/insights/views/Histogram/Histogram.scss +++ b/frontend/src/scenes/insights/views/Histogram/Histogram.scss @@ -61,7 +61,7 @@ * Bars */ g#bars { - fill: var(--funnel-default); + fill: var(--primary-3000); } g#labels { diff --git a/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss b/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss index 1ce975beb6fd1..4158e729c4915 100644 --- a/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss +++ b/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.scss @@ -3,7 +3,7 @@ align-items: center; .edit-icon { - color: var(--primary); + color: var(--primary-3000); cursor: pointer; font-size: 1rem; } @@ -13,7 +13,7 @@ cursor: pointer; .EntityFilterInfo { - color: var(--primary); + color: var(--primary-3000); font-weight: 500; } } diff --git a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx index dd8cb8a418aa7..568ecadebd755 100644 --- a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx +++ b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx @@ -37,6 +37,7 @@ import { SeriesLetter } from 
'lib/components/SeriesGlyph' import { TrendsFilter } from '~/queries/schema' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import ChartjsPluginStacked100, { ExtendedChartData } from 'chartjs-plugin-stacked100' +import clsx from 'clsx' let tooltipRoot: Root @@ -189,7 +190,7 @@ function createPinstripePattern(color: string): CanvasPattern { const canvas = document.createElement('canvas') canvas.width = 1 canvas.height = stripeWidth * 2 - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const ctx = canvas.getContext('2d')! // fill the canvas with given color @@ -201,7 +202,7 @@ function createPinstripePattern(color: string): CanvasPattern { ctx.fillRect(0, stripeWidth, 1, 2 * stripeWidth) // create a canvas pattern and rotate it - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const pattern = ctx.createPattern(canvas, 'repeat')! const xAx = Math.cos(stripeAngle) const xAy = Math.sin(stripeAngle) @@ -697,11 +698,19 @@ export function LineGraph_({ precision, autoSkip: true, callback: function _renderYLabel(_, i) { - const labelDescriptors = [ - datasets?.[0]?.actions?.[i]?.custom_name ?? datasets?.[0]?.actions?.[i]?.name, // action name - datasets?.[0]?.breakdownValues?.[i], // breakdown value - datasets?.[0]?.compareLabels?.[i], // compare value - ].filter((l) => !!l) + const labelDescriptors = ( + datasets?.[0]?.labels?.[i] + ? [ + // prefer to use the label over the action name if it exists + datasets?.[0]?.labels?.[i], + datasets?.[0]?.compareLabels?.[i], + ] + : [ + datasets?.[0]?.actions?.[i]?.custom_name ?? 
datasets?.[0]?.actions?.[i]?.name, // action name + datasets?.[0]?.breakdownValues?.[i], // breakdown value + datasets?.[0]?.compareLabels?.[i], // compare value + ] + ).filter((l) => !!l) return labelDescriptors.join(' - ') }, }, @@ -725,7 +734,10 @@ export function LineGraph_({ }, [datasets, hiddenLegendKeys, isDarkModeOn, trendsFilter, formula, showValueOnSeries, showPercentStackView]) return ( -
+
{showAnnotations && myLineChart && chartWidth && chartHeight ? ( ([ ], }), events(({ actions, values }) => ({ - afterMount: async () => { + afterMount: () => { if (values.featureFlags[FEATURE_FLAGS.FUNNELS_CUE_OPT_OUT]) { actions.setPermanentOptOut() } diff --git a/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss b/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss index d8f9bffe605e7..e4ae155aede2f 100644 --- a/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss +++ b/frontend/src/scenes/insights/views/WorldMap/WorldMap.scss @@ -10,7 +10,7 @@ fill-rule: evenodd; &:hover { - color: var(--primary-dark) !important; + color: var(--primary-3000-hover) !important; } } } diff --git a/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss b/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss index 2ee530fe2d87a..8f90c06671cf0 100644 --- a/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss +++ b/frontend/src/scenes/notebooks/AddToNotebook/DraggableToNotebook.scss @@ -5,7 +5,7 @@ // Weird hack - this fixes chrome from not correctly identifying the bounds of the component for the drag preview // https://github.com/react-dnd/react-dnd/issues/832#issuecomment-442071628 transform: translate3d(0, 0, 0); - outline: 1px solid var(--primary); + outline: 1px solid var(--primary-3000); background-color: var(--bg-light); } diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx index 9dad747b777d3..2490bbea971c9 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx @@ -43,7 +43,7 @@ const Component = (props: NodeViewProps): JSX.Element => { > openNotebook(shortId, NotebookTarget.Popover)} + onClick={() => void openNotebook(shortId, NotebookTarget.Popover)} target={undefined} className="space-x-1" > diff --git 
a/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx index 60fa028e0814a..fd5aad3420b5e 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeCohort.tsx @@ -9,7 +9,7 @@ import { useEffect, useMemo } from 'react' import clsx from 'clsx' import { NotFound } from 'lib/components/NotFound' import { cohortEditLogic } from 'scenes/cohorts/cohortEditLogic' -import { IconCohort, IconPerson, InsightsTrendsIcon } from 'lib/lemon-ui/icons' +import { IconPeople, IconPerson, IconTrends } from '@posthog/icons' import { Query } from '~/queries/Query/Query' import { LemonDivider, LemonTag } from '@posthog/lemon-ui' import { DataTableNode, NodeKind } from '~/queries/schema' @@ -71,7 +71,7 @@ const Component = ({ attributes }: NotebookNodeProps, + icon: , onClick: () => { setExpanded(false) insertAfter({ @@ -130,7 +130,7 @@ const Component = ({ attributes }: NotebookNodeProps ) : (
- + {cohort.name} ({cohort.count} persons) {cohort.is_static ? 'Static' : 'Dynamic'} diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx index 94304f7f7e2f4..1d220f64c2214 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx @@ -131,7 +131,7 @@ export const NotebookNodeEarlyAccessFeature = createPostHogWidgetNode { - return { id: match[1] as EarlyAccessFeatureLogicProps['id'] } + return { id: match[1] } }, }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts index e7b397f5e9d50..9fa2e7013196e 100644 --- a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts +++ b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts @@ -265,7 +265,7 @@ export const notebookNodeLogic = kea([ }, })), - afterMount(async (logic) => { + afterMount((logic) => { const { props, actions, values } = logic props.notebookLogic.actions.registerNodeLogic(values.nodeId, logic as any) diff --git a/frontend/src/scenes/notebooks/Nodes/utils.tsx b/frontend/src/scenes/notebooks/Nodes/utils.tsx index bae565e55ce49..ede8239000f4d 100644 --- a/frontend/src/scenes/notebooks/Nodes/utils.tsx +++ b/frontend/src/scenes/notebooks/Nodes/utils.tsx @@ -28,14 +28,13 @@ export function posthogNodePasteRule(options: { handler: ({ match, chain, range }) => { if (match.input) { chain().deleteRange(range).run() - Promise.resolve(options.getAttributes(match)).then((attributes) => { - if (attributes) { - options.editor.commands.insertContent({ - type: options.type.name, - attrs: attributes, - }) - } - }) + const attributes = options.getAttributes(match) + if (attributes) { + options.editor.commands.insertContent({ + type: options.type.name, + attrs: attributes, + }) + } } }, }) diff --git 
a/frontend/src/scenes/notebooks/Notebook/MentionsExtension.tsx b/frontend/src/scenes/notebooks/Notebook/MentionsExtension.tsx index c6b90ee6a781b..f18450d3b3525 100644 --- a/frontend/src/scenes/notebooks/Notebook/MentionsExtension.tsx +++ b/frontend/src/scenes/notebooks/Notebook/MentionsExtension.tsx @@ -122,7 +122,7 @@ export const Mentions = forwardRef(function SlashCom status="primary-alt" icon={} active={index === selectedIndex} - onClick={async () => await execute(member)} + onClick={() => void execute(member)} > {`${member.user.first_name} <${member.user.email}>`} diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.scss b/frontend/src/scenes/notebooks/Notebook/Notebook.scss index 1ba202f5a0348..ed584c90842d8 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.scss +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.scss @@ -262,7 +262,7 @@ // overriding ::selection is necessary here because // antd makes it invisible otherwise span::selection { - color: var(--primary); + color: var(--primary-3000); } // Overrides for insight controls diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx index c40031ad20dfc..8eda1fc7d11da 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx @@ -31,7 +31,7 @@ export function NotebookShare({ shortId }: NotebookShareProps): JSX.Element { fullWidth center sideIcon={} - onClick={async () => await copyToClipboard(url, 'notebook link')} + onClick={() => void copyToClipboard(url, 'notebook link')} title={url} > {url} diff --git a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx index 878364f0d8abc..6fb6e14c67654 100644 --- a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx +++ b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx @@ -3,21 +3,19 @@ import Suggestion from 
'@tiptap/suggestion' import { ReactRenderer } from '@tiptap/react' import { LemonButton, LemonDivider, lemonToast } from '@posthog/lemon-ui' +import { IconBold, IconCohort, IconItalic } from 'lib/lemon-ui/icons' import { - IconBold, - IconCohort, - IconItalic, - IconRecording, - IconTableChart, - IconUploadFile, - InsightSQLIcon, - InsightsFunnelsIcon, - InsightsLifecycleIcon, - InsightsPathsIcon, - InsightsRetentionIcon, - InsightsStickinessIcon, - InsightsTrendsIcon, -} from 'lib/lemon-ui/icons' + IconCursor, + IconFunnels, + IconHogQL, + IconLifecycle, + IconRetention, + IconRewindPlay, + IconStickiness, + IconTrends, + IconUpload, + IconUserPaths, +} from '@posthog/icons' import { forwardRef, useCallback, useEffect, useImperativeHandle, useMemo, useState } from 'react' import { EditorCommands, EditorRange } from './utils' import { BaseMathType, ChartDisplayType, FunnelVizType, NotebookNodeType, PathType, RetentionPeriod } from '~/types' @@ -100,7 +98,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Trend', search: 'trend insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -125,7 +123,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Funnel', search: 'funnel insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -152,7 +150,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Retention', search: 'retention insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -179,7 +177,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Paths', search: 'paths insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -194,7 +192,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Stickiness', search: 'stickiness insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -215,7 +213,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'Lifecycle', search: 'lifecycle 
insight', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -235,7 +233,7 @@ const SLASH_COMMANDS: SlashCommandsItem[] = [ { title: 'HogQL', search: 'sql', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -267,7 +265,7 @@ order by count() desc { title: 'Events', search: 'data explore', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt( pos, @@ -303,13 +301,13 @@ order by count() desc { title: 'Session Replays', search: 'recordings video', - icon: , + icon: , command: (chain, pos) => chain.insertContentAt(pos, { type: NotebookNodeType.RecordingPlaylist, attrs: {} }), }, { title: 'Image', search: 'picture', - icon: , + icon: , command: async (chain, pos) => { // Trigger upload followed by insert try { @@ -464,7 +462,7 @@ export const SlashCommands = forwardRef(fu status="primary-alt" size="small" active={selectedIndex === -1 && selectedHorizontalIndex === index} - onClick={async () => await execute(item)} + onClick={() => void execute(item)} icon={item.icon} /> ))} @@ -479,7 +477,7 @@ export const SlashCommands = forwardRef(fu status="primary-alt" icon={item.icon} active={index === selectedIndex} - onClick={async () => await execute(item)} + onClick={() => void execute(item)} > {item.title} diff --git a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts index e87521c26ad87..0ffea49bab2d3 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts @@ -80,6 +80,7 @@ export const notebookLogic = kea([ clearPreviewContent: true, loadNotebook: true, saveNotebook: (notebook: Pick) => ({ notebook }), + renameNotebook: (title: string) => ({ title }), setEditingNodeId: (editingNodeId: string | null) => ({ editingNodeId }), exportJSON: true, showConflictWarning: true, @@ -265,6 +266,13 @@ export const notebookLogic = kea([ } } }, + renameNotebook: async ({ title }) => { + if 
(!values.notebook) { + return values.notebook + } + const response = await api.notebooks.update(values.notebook.short_id, { title }) + return response + }, }, ], diff --git a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss index d4a2460692e24..5cd1a52a8ca02 100644 --- a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss +++ b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPanel.scss @@ -40,7 +40,7 @@ } &--active { - border-color: var(--primary); + border-color: var(--primary-3000); height: 8rem; .NotebookPanelDropzone__message { diff --git a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss index ba0ab7a664877..4534f030b9bf5 100644 --- a/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss +++ b/frontend/src/scenes/notebooks/NotebookPanel/NotebookPopover.scss @@ -121,7 +121,7 @@ } &--active { - border-color: var(--primary); + border-color: var(--primary-3000); height: 8rem; .NotebookPanelDropzone__message { @@ -131,7 +131,7 @@ &--dropped { padding: 1rem; - border-color: var(--primary); + border-color: var(--primary-3000); background-color: var(--bg-light); height: 100%; justify-content: flex-start; diff --git a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx index d240cf1179a53..a22692be85990 100644 --- a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx +++ b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx @@ -85,9 +85,9 @@ export function NotebookSelectList(props: NotebookSelectProps): JSX.Element { const { setShowPopover, setSearchQuery, loadNotebooksContainingResource, loadAllNotebooks } = useActions(logic) const { createNotebook } = useActions(notebooksModel) - const openAndAddToNotebook = async (notebookShortId: 
string, exists: boolean): Promise => { + const openAndAddToNotebook = (notebookShortId: string, exists: boolean): void => { const position = props.resource ? 'end' : 'start' - await openNotebook(notebookShortId, NotebookTarget.Popover, position, (theNotebookLogic) => { + void openNotebook(notebookShortId, NotebookTarget.Popover, position, (theNotebookLogic) => { if (!exists && props.resource) { theNotebookLogic.actions.insertAfterLastNode([props.resource]) } @@ -168,9 +168,9 @@ export function NotebookSelectList(props: NotebookSelectProps): JSX.Element { emptyState={ searchQuery.length ? 'No matching notebooks' : 'Not already in any notebooks' } - onClick={async (notebookShortId) => { + onClick={(notebookShortId) => { setShowPopover(false) - await openAndAddToNotebook(notebookShortId, true) + openAndAddToNotebook(notebookShortId, true) }} /> @@ -180,9 +180,9 @@ export function NotebookSelectList(props: NotebookSelectProps): JSX.Element { { + onClick={(notebookShortId) => { setShowPopover(false) - await openAndAddToNotebook(notebookShortId, false) + openAndAddToNotebook(notebookShortId, false) }} /> diff --git a/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx b/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx index 169fb46aeced8..1a2a5ddba8919 100644 --- a/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx +++ b/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx @@ -115,7 +115,7 @@ export function NotebooksTable(): JSX.Element { Created by: ({ value: x.user.uuid, label: x.user.first_name, diff --git a/frontend/src/scenes/notebooks/NotebooksTable/notebooksTableLogic.ts b/frontend/src/scenes/notebooks/NotebooksTable/notebooksTableLogic.ts index d65d055617987..8bc07b7f2bee8 100644 --- a/frontend/src/scenes/notebooks/NotebooksTable/notebooksTableLogic.ts +++ b/frontend/src/scenes/notebooks/NotebooksTable/notebooksTableLogic.ts @@ -32,7 +32,7 @@ export const notebooksTableLogic = kea([ }), reducers({ filters: [ - 
DEFAULT_FILTERS as NotebooksListFilters, + DEFAULT_FILTERS, { setFilters: (state, { filters }) => objectClean({ diff --git a/frontend/src/scenes/notebooks/notebookSceneLogic.ts b/frontend/src/scenes/notebooks/notebookSceneLogic.ts index 2d8656ddd5447..073dabbabe899 100644 --- a/frontend/src/scenes/notebooks/notebookSceneLogic.ts +++ b/frontend/src/scenes/notebooks/notebookSceneLogic.ts @@ -5,6 +5,7 @@ import type { notebookSceneLogicType } from './notebookSceneLogicType' import { notebookLogic } from './Notebook/notebookLogic' import { urls } from 'scenes/urls' import { notebooksModel } from '~/models/notebooksModel' +import { Scene } from 'scenes/sceneTypes' export type NotebookSceneLogicProps = { shortId: string @@ -17,7 +18,7 @@ export const notebookSceneLogic = kea([ values: [notebookLogic(props), ['notebook', 'notebookLoading'], notebooksModel, ['notebooksLoading']], actions: [notebookLogic(props), ['loadNotebook'], notebooksModel, ['createNotebook']], })), - selectors(() => ({ + selectors(({ props }) => ({ notebookId: [() => [(_, props) => props], (props): string => props.shortId], loading: [ @@ -29,11 +30,18 @@ export const notebookSceneLogic = kea([ (s) => [s.notebook, s.loading], (notebook, loading): Breadcrumb[] => [ { + key: Scene.Notebooks, name: 'Notebooks', path: urls.notebooks(), }, { - name: notebook ? notebook?.title || 'Unnamed' : loading ? 'Loading...' : 'Notebook not found', + key: notebook?.short_id || 'new', + name: notebook ? notebook?.title || 'Unnamed' : loading ? null : 'Notebook not found', + onRename: !notebook?.is_template + ? 
async (title: string) => { + await notebookLogic(props).asyncActions.renameNotebook(title) + } + : undefined, }, ], ], diff --git a/frontend/src/scenes/onboarding/Onboarding.tsx b/frontend/src/scenes/onboarding/Onboarding.tsx index 50a1ff9f25956..96badaa09ce95 100644 --- a/frontend/src/scenes/onboarding/Onboarding.tsx +++ b/frontend/src/scenes/onboarding/Onboarding.tsx @@ -1,9 +1,6 @@ import { SceneExport } from 'scenes/sceneTypes' import { useActions, useValues } from 'kea' import { useEffect, useState } from 'react' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { FEATURE_FLAGS } from 'lib/constants' -import { urls } from 'scenes/urls' import { OnboardingStepKey, onboardingLogic } from './onboardingLogic' import { SDKs } from './sdks/SDKs' import { ProductKey } from '~/types' @@ -120,15 +117,8 @@ const SurveysOnboarding = (): JSX.Element => { } export function Onboarding(): JSX.Element | null { - const { featureFlags } = useValues(featureFlagLogic) const { product } = useValues(onboardingLogic) - useEffect(() => { - if (featureFlags[FEATURE_FLAGS.PRODUCT_SPECIFIC_ONBOARDING] !== 'test') { - location.href = urls.ingestion() - } - }, []) - if (!product) { return <> } diff --git a/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx b/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx index 552a490688887..70e7852a70e44 100644 --- a/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx +++ b/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx @@ -6,7 +6,7 @@ import { OnboardingStepKey, onboardingLogic } from './onboardingLogic' import { BillingProductV2Type } from '~/types' import { Spinner } from 'lib/lemon-ui/Spinner' import { BillingHero } from 'scenes/billing/BillingHero' -import { LemonButton } from '@posthog/lemon-ui' +import { LemonBanner, LemonButton } from '@posthog/lemon-ui' import { getUpgradeProductLink } from 'scenes/billing/billing-utils' import { billingProductLogic } from 'scenes/billing/billingProductLogic' 
import { eventUsageLogic } from 'lib/utils/eventUsageLogic' @@ -25,6 +25,7 @@ export const OnboardingBillingStep = ({ const { currentAndUpgradePlans } = useValues(billingProductLogic({ product })) const { reportBillingUpgradeClicked } = useActions(eventUsageLogic) const plan = currentAndUpgradePlans?.upgradePlan + const currentPlan = currentAndUpgradePlans?.currentPlan return ( {product.subscribed ? (
-
+
@@ -64,6 +65,15 @@ export const OnboardingBillingStep = ({
+ {currentPlan?.initial_billing_limit && ( +
+ + To protect your costs and ours, this product has an initial billing limit of $ + {currentPlan.initial_billing_limit}. You can change or remove this limit on the + Billing page. + +
+ )}
) : ( <> diff --git a/frontend/src/scenes/onboarding/onboardingLogic.tsx b/frontend/src/scenes/onboarding/onboardingLogic.tsx index c484105b60e7a..c5c02e1db0c60 100644 --- a/frontend/src/scenes/onboarding/onboardingLogic.tsx +++ b/frontend/src/scenes/onboarding/onboardingLogic.tsx @@ -74,7 +74,7 @@ export const onboardingLogic = kea([ allOnboardingSteps: [ [] as AllOnboardingSteps, { - setAllOnboardingSteps: (_, { allOnboardingSteps }) => allOnboardingSteps as AllOnboardingSteps, + setAllOnboardingSteps: (_, { allOnboardingSteps }) => allOnboardingSteps, }, ], stepKey: [ @@ -84,7 +84,7 @@ export const onboardingLogic = kea([ }, ], onCompleteOnboardingRedirectUrl: [ - urls.default() as string, + urls.default(), { setProductKey: (_, { productKey }) => { return productKey ? getProductUri(productKey as ProductKey) : urls.default() @@ -153,7 +153,9 @@ export const onboardingLogic = kea([ }), listeners(({ actions, values }) => ({ loadBillingSuccess: () => { - actions.setProduct(values.billing?.products.find((p) => p.type === values.productKey) || null) + if (window.location.pathname.startsWith('/onboarding')) { + actions.setProduct(values.billing?.products.find((p) => p.type === values.productKey) || null) + } }, setProduct: ({ product }) => { if (!product) { @@ -205,7 +207,7 @@ export const onboardingLogic = kea([ } }, resetStepKey: () => { - actions.setStepKey(values.allOnboardingSteps[0].props.stepKey) + values.allOnboardingSteps[0] && actions.setStepKey(values.allOnboardingSteps[0]?.props.stepKey) }, })), actionToUrl(({ values }) => ({ diff --git a/frontend/src/scenes/paths/PathNodeCardButton.tsx b/frontend/src/scenes/paths/PathNodeCardButton.tsx index 9923b6f2eac19..0bb86e403890e 100644 --- a/frontend/src/scenes/paths/PathNodeCardButton.tsx +++ b/frontend/src/scenes/paths/PathNodeCardButton.tsx @@ -9,6 +9,7 @@ import { copyToClipboard } from 'lib/utils' import { pageUrl, PathNodeData } from './pathUtils' import { pathsDataLogicType } from './pathsDataLogicType' 
+import { captureException } from '@sentry/react' type PathNodeCardButton = { name: string @@ -40,8 +41,8 @@ export function PathNodeCardButton({ const viewFunnel = (): void => { viewPathToFunnel(node) } - const copyName = async (): Promise => { - await copyToClipboard(pageUrl(node)) + const copyName = (): void => { + void copyToClipboard(pageUrl(node)).then(captureException) } const openModal = (): void => openPersonsModal({ path_end_key: name }) diff --git a/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx b/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx index 0e1202affc14c..c2e1fac031681 100644 --- a/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx +++ b/frontend/src/scenes/persons-management/personsManagementSceneLogic.tsx @@ -13,6 +13,7 @@ import { LemonButton } from '@posthog/lemon-ui' import type { personsManagementSceneLogicType } from './personsManagementSceneLogicType' import { Groups } from 'scenes/groups/Groups' +import { Scene } from 'scenes/sceneTypes' export type PersonsManagementTab = { key: string @@ -51,7 +52,7 @@ export const personsManagementSceneLogic = kea( { key: 'persons', url: urls.persons(), - label: 'Persons', + label: 'People', content: , }, { @@ -116,15 +117,18 @@ export const personsManagementSceneLogic = kea( (tabs, activeTab): Breadcrumb[] => { return [ { + key: Scene.PersonsManagement, name: `People`, path: tabs[0].url, }, activeTab ? 
{ + key: activeTab.key, name: activeTab.label, path: activeTab.url, } : { + key: 'loading', name: 'Loading...', }, ] diff --git a/frontend/src/scenes/persons/PersonScene.scss b/frontend/src/scenes/persons/PersonScene.scss index b22eea07f7361..e555d6434aecb 100644 --- a/frontend/src/scenes/persons/PersonScene.scss +++ b/frontend/src/scenes/persons/PersonScene.scss @@ -5,7 +5,7 @@ line-height: 1.125rem; margin: 0 0 0 0.25rem; padding: 0 0.25rem 0 0.375rem; - color: var(--primary); + color: var(--primary-3000); cursor: pointer; svg { diff --git a/frontend/src/scenes/persons/personsLogic.tsx b/frontend/src/scenes/persons/personsLogic.tsx index 136d69f317baf..f5b6d0e6b333f 100644 --- a/frontend/src/scenes/persons/personsLogic.tsx +++ b/frontend/src/scenes/persons/personsLogic.tsx @@ -24,6 +24,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' import { asDisplay } from './person-utils' import { hogqlQuery } from '~/queries/query' +import { Scene } from 'scenes/sceneTypes' export interface PersonsLogicProps { cohort?: number | 'new' @@ -246,12 +247,14 @@ export const personsLogic = kea([ const showPerson = person && location.pathname.match(/\/person\/.+/) const breadcrumbs: Breadcrumb[] = [ { - name: 'Persons', + key: Scene.PersonsManagement, + name: 'People', path: urls.persons(), }, ] if (showPerson) { breadcrumbs.push({ + key: person.id || 'unknown', name: asDisplay(person), }) } diff --git a/frontend/src/scenes/pipeline/Pipeline.stories.tsx b/frontend/src/scenes/pipeline/Pipeline.stories.tsx index 639cf3d1d8cea..97b8f668fa4f7 100644 --- a/frontend/src/scenes/pipeline/Pipeline.stories.tsx +++ b/frontend/src/scenes/pipeline/Pipeline.stories.tsx @@ -3,7 +3,7 @@ import { Meta } from '@storybook/react' import { App } from 'scenes/App' import { router } from 'kea-router' import { urls } from 'scenes/urls' -import { PipelineTabs } from '~/types' +import { PipelineAppTabs, PipelineTabs } from '~/types' import { 
pipelineLogic } from './pipelineLogic' import { mswDecorator, useStorybookMocks } from '~/mocks/browser' @@ -58,3 +58,29 @@ export function PipelineTransformationsPage(): JSX.Element { }, []) return } + +export function PipelineAppConfiguration(): JSX.Element { + useEffect(() => { + router.actions.push(urls.pipelineApp(1, PipelineAppTabs.Configuration)) + }, []) + return +} + +export function PipelineAppMetrics(): JSX.Element { + useEffect(() => { + router.actions.push(urls.pipelineApp(1, PipelineAppTabs.Metrics)) + }, []) + return +} + +export function PipelineAppLogs(): JSX.Element { + useStorybookMocks({ + get: { + 'api/projects/:team_id/plugin_configs/1/logs': require('./__mocks__/pluginLogs.json'), + }, + }) + useEffect(() => { + router.actions.push(urls.pipelineApp(1, PipelineAppTabs.Logs)) + }, []) + return +} diff --git a/frontend/src/scenes/pipeline/PipelineApp.tsx b/frontend/src/scenes/pipeline/PipelineApp.tsx new file mode 100644 index 0000000000000..e60a2277377e3 --- /dev/null +++ b/frontend/src/scenes/pipeline/PipelineApp.tsx @@ -0,0 +1,48 @@ +import { SceneExport } from 'scenes/sceneTypes' +import { useValues } from 'kea' +import { pipelineAppLogic } from './pipelineAppLogic' +import { PageHeader } from 'lib/components/PageHeader' +import { LemonTabs } from 'lib/lemon-ui/LemonTabs/LemonTabs' +import { router } from 'kea-router' +import { PipelineAppTabs } from '~/types' +import { urls } from 'scenes/urls' +import { PluginLogs } from 'scenes/plugins/plugin/PluginLogs' +import { Spinner } from '@posthog/lemon-ui' +import { capitalizeFirstLetter } from 'lib/utils' + +export const scene: SceneExport = { + component: PipelineApp, + logic: pipelineAppLogic, + paramsToProps: ({ params: { id } }: { params: { id?: string } }) => ({ id: id ? parseInt(id) : 'new' }), +} + +export function PipelineApp({ id }: { id?: string } = {}): JSX.Element { + const { currentTab } = useValues(pipelineAppLogic) + + const confId = id ? 
parseInt(id) : undefined + + if (!confId) { + return + } + + const tab_to_content: Record = { + [PipelineAppTabs.Configuration]:
Configuration editing
, + [PipelineAppTabs.Metrics]:
Metrics page
, + [PipelineAppTabs.Logs]: , + } + + return ( +
+ + router.actions.push(urls.pipelineApp(confId, tab as PipelineAppTabs))} + tabs={Object.values(PipelineAppTabs).map((tab) => ({ + label: capitalizeFirstLetter(tab), + key: tab, + content: tab_to_content[tab], + }))} + /> +
+ ) +} diff --git a/frontend/src/scenes/pipeline/Transformations.tsx b/frontend/src/scenes/pipeline/Transformations.tsx index 39768644c6462..1a627f0439bc4 100644 --- a/frontend/src/scenes/pipeline/Transformations.tsx +++ b/frontend/src/scenes/pipeline/Transformations.tsx @@ -12,7 +12,7 @@ import { import { useActions, useValues } from 'kea' import { pipelineTransformationsLogic } from './transformationsLogic' import { PluginImage } from 'scenes/plugins/plugin/PluginImage' -import { PipelineTabs, PluginConfigTypeNew, PluginType, ProductKey } from '~/types' +import { PipelineAppTabs, PipelineTabs, PluginConfigTypeNew, PluginType, ProductKey } from '~/types' import { urls } from 'scenes/urls' import { SortableContext, arrayMove, useSortable, verticalListSortingStrategy } from '@dnd-kit/sortable' import { DndContext, DragEndEvent } from '@dnd-kit/core' @@ -100,7 +100,12 @@ export function Transformations(): JSX.Element { return ( <> - + {pluginConfig.name} @@ -153,7 +158,9 @@ export function Transformations(): JSX.Element { } > - + Error @@ -207,7 +214,10 @@ export function Transformations(): JSX.Element { )} @@ -215,7 +225,7 @@ export function Transformations(): JSX.Element { @@ -223,7 +233,7 @@ export function Transformations(): JSX.Element { @@ -244,7 +254,7 @@ export function Transformations(): JSX.Element { { - deleteWithUndo({ + void deleteWithUndo({ endpoint: `plugin_config`, object: { id: pluginConfig.id, diff --git a/frontend/src/scenes/pipeline/__mocks__/pluginLogs.json b/frontend/src/scenes/pipeline/__mocks__/pluginLogs.json new file mode 100644 index 0000000000000..09fa96824bf74 --- /dev/null +++ b/frontend/src/scenes/pipeline/__mocks__/pluginLogs.json @@ -0,0 +1,29 @@ +{ + "count": 2, + "next": null, + "previous": null, + "results": [ + { + "id": "018bb51f-0f9f-0000-34ae-d3aa1d9a5770", + "team_id": 1, + "plugin_id": 1, + "plugin_config_id": 11, + "timestamp": "2023-11-09T17:26:33.626000Z", + "source": "PLUGIN", + "type": "ERROR", + "message": "Error: 
Received an unexpected error from the endpoint API. Response 400: {\"meta\":{\"errors\":[\"value for attribute '$current_url' cannot be longer than 1000 bytes\"]}}\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)", + "instance_id": "12345678-1234-1234-1234-123456789012" + }, + { + "id": "018bb51e-262a-0000-eb34-39afd4691d56", + "team_id": 1, + "plugin_id": 1, + "plugin_config_id": 11, + "timestamp": "2023-11-09T17:25:33.790000Z", + "source": "PLUGIN", + "type": "INFO", + "message": "Successfully sent event to endpoint", + "instance_id": "12345678-1234-1234-1234-123456789012" + } + ] +} diff --git a/frontend/src/scenes/pipeline/pipelineAppLogic.tsx b/frontend/src/scenes/pipeline/pipelineAppLogic.tsx new file mode 100644 index 0000000000000..260d214df3785 --- /dev/null +++ b/frontend/src/scenes/pipeline/pipelineAppLogic.tsx @@ -0,0 +1,56 @@ +import { kea, reducers, path, props, key, actions, selectors } from 'kea' + +import type { pipelineAppLogicType } from './pipelineAppLogicType' +import { Breadcrumb, PipelineAppTabs } from '~/types' +import { urls } from 'scenes/urls' +import { actionToUrl, urlToAction } from 'kea-router' +import { Scene } from 'scenes/sceneTypes' + +export interface PipelineAppLogicProps { + id: number +} + +export const pipelineAppLogic = kea([ + props({} as PipelineAppLogicProps), + key(({ id }) => id), + path((id) => ['scenes', 'pipeline', 'pipelineAppLogic', id]), + actions({ + setCurrentTab: (tab: PipelineAppTabs = PipelineAppTabs.Configuration) => ({ tab }), + }), + reducers({ + currentTab: [ + PipelineAppTabs.Configuration as PipelineAppTabs, + { + setCurrentTab: (_, { tab }) => tab, + }, + ], + }), + selectors({ + breadcrumbs: [ + () => [], + (): Breadcrumb[] => [ + { + key: Scene.Pipeline, + name: 'Pipeline', + path: urls.pipeline(), + }, + { + key: 'todo', + name: 'App name', + }, + ], + ], + }), + actionToUrl(({ values, props }) => { + return { + setCurrentTab: () => [urls.pipelineApp(props.id, 
values.currentTab)], + } + }), + urlToAction(({ actions, values }) => ({ + '/pipeline/:id/:tab': ({ tab }) => { + if (tab !== values.currentTab) { + actions.setCurrentTab(tab as PipelineAppTabs) + } + }, + })), +]) diff --git a/frontend/src/scenes/pipeline/pipelineLogic.tsx b/frontend/src/scenes/pipeline/pipelineLogic.tsx index 017e1966745b7..cf99abf943bfc 100644 --- a/frontend/src/scenes/pipeline/pipelineLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineLogic.tsx @@ -3,6 +3,7 @@ import type { pipelineLogicType } from './pipelineLogicType' import { actionToUrl, urlToAction } from 'kea-router' import { urls } from 'scenes/urls' import { Breadcrumb, PipelineTabs } from '~/types' +import { Scene } from 'scenes/sceneTypes' export const singularName = (tab: PipelineTabs): string => { switch (tab) { @@ -43,12 +44,13 @@ export const pipelineLogic = kea([ breadcrumbs: [ (s) => [s.currentTab], (tab): Breadcrumb[] => { - const breadcrumbs: Breadcrumb[] = [{ name: 'Pipeline' }] - breadcrumbs.push({ - name: humanFriendlyTabName(tab), - }) - - return breadcrumbs + return [ + { key: Scene.Pipeline, name: 'Data pipeline' }, + { + key: tab, + name: humanFriendlyTabName(tab), + }, + ] }, ], })), diff --git a/frontend/src/scenes/plugins/edit/interface-jobs/PluginJobConfiguration.tsx b/frontend/src/scenes/plugins/edit/interface-jobs/PluginJobConfiguration.tsx index 497b9b667eaa9..2f4a3a222fc9a 100644 --- a/frontend/src/scenes/plugins/edit/interface-jobs/PluginJobConfiguration.tsx +++ b/frontend/src/scenes/plugins/edit/interface-jobs/PluginJobConfiguration.tsx @@ -1,5 +1,4 @@ import { useMemo } from 'react' -import { Radio, InputNumber } from 'antd' import { ChildFunctionProps, Form } from 'kea-forms' import { Field } from 'lib/forms/Field' import { useValues, useActions } from 'kea' @@ -16,7 +15,7 @@ import { DatePicker } from 'lib/components/DatePicker' import { CodeEditor } from 'lib/components/CodeEditors' import { IconClose, IconPlayCircle, IconSettings } from 
'lib/lemon-ui/icons' import { IconCheck } from '@posthog/icons' -import { Tooltip } from '@posthog/lemon-ui' +import { LemonSegmentedButton, Tooltip } from '@posthog/lemon-ui' // keep in sync with plugin-server's export-historical-events.ts export const HISTORICAL_EXPORT_JOB_NAME = 'Export historical events' @@ -106,7 +105,7 @@ function FieldInput({ case 'string': return case 'number': - return + return case 'json': return ( onChange(e.target.value)} - > - - True - - - False - - + options={[ + { + value: true, + label: 'True', + icon: , + }, + { + value: false, + label: 'False', + icon: , + }, + ]} + /> ) case 'date': return ( diff --git a/frontend/src/scenes/plugins/plugin/PluginLogs.tsx b/frontend/src/scenes/plugins/plugin/PluginLogs.tsx index 11275bcab2da5..20438edc4a136 100644 --- a/frontend/src/scenes/plugins/plugin/PluginLogs.tsx +++ b/frontend/src/scenes/plugins/plugin/PluginLogs.tsx @@ -51,6 +51,7 @@ const columns: LemonTableColumns> = [ title: 'Message', key: 'message', dataIndex: 'message', + render: (message: string) => {message}, }, ] diff --git a/frontend/src/scenes/plugins/pluginsLogic.ts b/frontend/src/scenes/plugins/pluginsLogic.ts index dd03bed0a3f6b..3e6954c3cf081 100644 --- a/frontend/src/scenes/plugins/pluginsLogic.ts +++ b/frontend/src/scenes/plugins/pluginsLogic.ts @@ -623,13 +623,13 @@ export const pluginsLogic = kea([ (s) => [s.repository, s.plugins], (repository, plugins) => { const allPossiblePlugins: PluginSelectionType[] = [] - for (const plugin of Object.values(plugins) as PluginType[]) { + for (const plugin of Object.values(plugins)) { allPossiblePlugins.push({ name: plugin.name, url: plugin.url }) } const installedUrls = new Set(Object.values(plugins).map((plugin) => plugin.url)) - for (const plugin of Object.values(repository) as PluginRepositoryEntry[]) { + for (const plugin of Object.values(repository)) { if (!installedUrls.has(plugin.url)) { allPossiblePlugins.push({ name: plugin.name, url: plugin.url }) } diff --git 
a/frontend/src/scenes/plugins/source/PluginSource.tsx b/frontend/src/scenes/plugins/source/PluginSource.tsx index 0d016a06a7ba1..ca59aea7988aa 100644 --- a/frontend/src/scenes/plugins/source/PluginSource.tsx +++ b/frontend/src/scenes/plugins/source/PluginSource.tsx @@ -57,7 +57,7 @@ export function PluginSource({ if (!monaco) { return } - import('./types/packages.json').then((files) => { + void import('./types/packages.json').then((files) => { for (const [fileName, fileContents] of Object.entries(files).filter( ([fileName]) => fileName !== 'default' )) { diff --git a/frontend/src/scenes/plugins/tabs/apps/components.tsx b/frontend/src/scenes/plugins/tabs/apps/components.tsx index 4a0ecb9426d35..4033a7f86a166 100644 --- a/frontend/src/scenes/plugins/tabs/apps/components.tsx +++ b/frontend/src/scenes/plugins/tabs/apps/components.tsx @@ -23,7 +23,7 @@ export function RepositoryTag({ plugin }: { plugin: PluginType | PluginRepositor if (plugin.plugin_type === 'local' && plugin.url) { return ( - await copyToClipboard(plugin.url?.substring(5) || '')}> + void copyToClipboard(plugin.url?.substring(5) || '')}> Installed Locally ) diff --git a/frontend/src/scenes/products/Products.tsx b/frontend/src/scenes/products/Products.tsx index 66994f4ab0f72..cba24e76c8a36 100644 --- a/frontend/src/scenes/products/Products.tsx +++ b/frontend/src/scenes/products/Products.tsx @@ -3,9 +3,6 @@ import { SceneExport } from 'scenes/sceneTypes' import { BillingProductV2Type, ProductKey } from '~/types' import { useActions, useValues } from 'kea' import { teamLogic } from 'scenes/teamLogic' -import { useEffect } from 'react' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { FEATURE_FLAGS } from 'lib/constants' import { urls } from 'scenes/urls' import { billingLogic } from 'scenes/billing/billingLogic' import { Spinner } from 'lib/lemon-ui/Spinner' @@ -131,18 +128,11 @@ export function ProductCard({ } export function Products(): JSX.Element { - const { featureFlags } = 
useValues(featureFlagLogic) const { billing } = useValues(billingLogic) const { currentTeam } = useValues(teamLogic) const isFirstProduct = Object.keys(currentTeam?.has_completed_onboarding_for || {}).length === 0 const products = billing?.products || [] - useEffect(() => { - if (featureFlags[FEATURE_FLAGS.PRODUCT_SPECIFIC_ONBOARDING] !== 'test') { - location.href = urls.ingestion() - } - }, []) - return (
diff --git a/frontend/src/scenes/products/productsLogic.tsx b/frontend/src/scenes/products/productsLogic.tsx index 48a17171bdc8a..29c2678b63fe7 100644 --- a/frontend/src/scenes/products/productsLogic.tsx +++ b/frontend/src/scenes/products/productsLogic.tsx @@ -1,16 +1,20 @@ -import { kea, path, actions, listeners } from 'kea' +import { kea, path, actions, listeners, connect } from 'kea' import { teamLogic } from 'scenes/teamLogic' import { ProductKey } from '~/types' import type { productsLogicType } from './productsLogicType' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' +import { onboardingLogic } from 'scenes/onboarding/onboardingLogic' export const productsLogic = kea([ path(() => ['scenes', 'products', 'productsLogic']), + connect({ + actions: [teamLogic, ['updateCurrentTeam'], onboardingLogic, ['setProduct']], + }), actions(() => ({ onSelectProduct: (product: ProductKey) => ({ product }), })), - listeners(() => ({ + listeners(({ actions }) => ({ onSelectProduct: ({ product }) => { eventUsageLogic.actions.reportOnboardingProductSelected(product) @@ -18,7 +22,7 @@ export const productsLogic = kea([ case ProductKey.PRODUCT_ANALYTICS: return case ProductKey.SESSION_REPLAY: - teamLogic.actions.updateCurrentTeam({ + actions.updateCurrentTeam({ session_recording_opt_in: true, capture_console_log_opt_in: true, capture_performance_opt_in: true, diff --git a/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx b/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx index 0aea88331c5e3..64a4181460b29 100644 --- a/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx +++ b/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx @@ -33,14 +33,14 @@ export function NewlySeenPersons(): JSX.Element { return ( } diff --git a/frontend/src/scenes/project-homepage/ProjectHomepage.scss b/frontend/src/scenes/project-homepage/ProjectHomepage.scss index 3066991290387..f905c0241e4a4 100644 --- a/frontend/src/scenes/project-homepage/ProjectHomepage.scss 
+++ b/frontend/src/scenes/project-homepage/ProjectHomepage.scss @@ -1,11 +1,11 @@ .project-homepage { - .homepage-dashboard-header { + .HomepageDashboardHeader { margin-top: 1rem; display: flex; justify-content: space-between; align-items: center; - .dashboard-title-container { + .HomepageDashboardHeader__title { display: flex; flex-direction: row; align-items: center; @@ -17,6 +17,16 @@ margin: 0; } } + + .posthog-3000 & { + a { + color: var(--default); + + &:hover { + color: var(--primary-3000); + } + } + } } } diff --git a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx index 80cc8e5b02b9f..b26ec70c514d9 100644 --- a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx +++ b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx @@ -8,7 +8,7 @@ import { Scene, SceneExport } from 'scenes/sceneTypes' import { DashboardPlacement } from '~/types' import { inviteLogic } from 'scenes/settings/organization/inviteLogic' import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { IconCottage } from 'lib/lemon-ui/icons' +import { IconHome } from '@posthog/icons' import { projectHomepageLogic } from 'scenes/project-homepage/projectHomepageLogic' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { RecentRecordings } from './RecentRecordings' @@ -81,12 +81,12 @@ export function ProjectHomepage(): JSX.Element {
{currentTeam?.primary_dashboard ? ( <> -
-
+
+
{!dashboard && } {dashboard?.name && ( <> - + - {inCardView ? ( + {hideLineGraph ? ( ) : ( <> diff --git a/frontend/src/scenes/retention/RetentionModal.tsx b/frontend/src/scenes/retention/RetentionModal.tsx index a47a895d40fff..f1d8f7152a995 100644 --- a/frontend/src/scenes/retention/RetentionModal.tsx +++ b/frontend/src/scenes/retention/RetentionModal.tsx @@ -43,7 +43,7 @@ export function RetentionModal(): JSX.Element | null { - triggerExport({ + void triggerExport({ export_format: ExporterFormat.CSV, export_context: { path: row?.people_url, diff --git a/frontend/src/scenes/retention/RetentionTable.scss b/frontend/src/scenes/retention/RetentionTable.scss index 7e10b09fb8173..cb796874edeed 100644 --- a/frontend/src/scenes/retention/RetentionTable.scss +++ b/frontend/src/scenes/retention/RetentionTable.scss @@ -1,5 +1,5 @@ .RetentionTable { - --retention-table-color: var(--primary); + --retention-table-color: var(--primary-3000); font-weight: 500; width: 100%; @@ -70,4 +70,24 @@ } } } + + &.RetentionTable--small-layout { + font-size: 0.75rem; + line-height: 1rem; + + th { + padding-left: 0.25rem; + padding-right: 0.25rem; + } + + .RetentionTable__TextTab { + padding-left: 0.25rem; + padding-right: 0.25rem; + } + + .RetentionTable__Tab { + margin: 0; + padding: 0.5rem 0.25rem; + } + } } diff --git a/frontend/src/scenes/retention/RetentionTable.tsx b/frontend/src/scenes/retention/RetentionTable.tsx index 97d124baae554..967997abe6245 100644 --- a/frontend/src/scenes/retention/RetentionTable.tsx +++ b/frontend/src/scenes/retention/RetentionTable.tsx @@ -1,4 +1,4 @@ -import { useValues, useActions } from 'kea' +import { useActions, useValues } from 'kea' import clsx from 'clsx' import { insightLogic } from 'scenes/insights/insightLogic' @@ -11,11 +11,16 @@ import { BRAND_BLUE_HSL, gradateColor } from 'lib/colors' export function RetentionTable({ inCardView = false }: { inCardView?: boolean }): JSX.Element | null { const { insightProps } = useValues(insightLogic) - const 
{ tableHeaders, tableRows, isLatestPeriod } = useValues(retentionTableLogic(insightProps)) + const { tableHeaders, tableRows, isLatestPeriod, hideSizeColumn, retentionVizOptions } = useValues( + retentionTableLogic(insightProps) + ) const { openModal } = useActions(retentionModalLogic(insightProps)) return ( - +
{tableHeaders.map((heading) => ( @@ -34,7 +39,7 @@ export function RetentionTable({ inCardView = false }: { inCardView?: boolean }) > {row.map((column, columnIndex) => (
- {columnIndex <= 1 ? ( + {columnIndex <= (hideSizeColumn ? 0 : 1) ? ( {column} diff --git a/frontend/src/scenes/retention/retentionLineGraphLogic.ts b/frontend/src/scenes/retention/retentionLineGraphLogic.ts index 127ff04440385..656307fb41d38 100644 --- a/frontend/src/scenes/retention/retentionLineGraphLogic.ts +++ b/frontend/src/scenes/retention/retentionLineGraphLogic.ts @@ -9,6 +9,7 @@ import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import { retentionLogic } from './retentionLogic' import type { retentionLineGraphLogicType } from './retentionLineGraphLogicType' +import { isLifecycleQuery, isStickinessQuery } from '~/queries/utils' const DEFAULT_RETENTION_LOGIC_KEY = 'default_retention_key' @@ -117,7 +118,11 @@ export const retentionLineGraphLogic = kea([ aggregationGroupTypeIndex: [ (s) => [s.querySource], (querySource) => { - return querySource?.aggregation_group_type_index ?? 'people' + return ( + (isLifecycleQuery(querySource) || isStickinessQuery(querySource) + ? null + : querySource?.aggregation_group_type_index) ?? 
'people' + ) }, ], }), diff --git a/frontend/src/scenes/retention/retentionModalLogic.ts b/frontend/src/scenes/retention/retentionModalLogic.ts index ebd464a94f4b0..5d5c4043a8112 100644 --- a/frontend/src/scenes/retention/retentionModalLogic.ts +++ b/frontend/src/scenes/retention/retentionModalLogic.ts @@ -7,6 +7,7 @@ import { retentionPeopleLogic } from './retentionPeopleLogic' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import type { retentionModalLogicType } from './retentionModalLogicType' +import { isLifecycleQuery, isStickinessQuery } from '~/queries/utils' const DEFAULT_RETENTION_LOGIC_KEY = 'default_retention_key' @@ -35,7 +36,10 @@ export const retentionModalLogic = kea([ aggregationTargetLabel: [ (s) => [s.querySource, s.aggregationLabel], (querySource, aggregationLabel): Noun => { - const { aggregation_group_type_index } = querySource || {} + const aggregation_group_type_index = + isLifecycleQuery(querySource) || isStickinessQuery(querySource) + ? 
undefined + : querySource?.aggregation_group_type_index return aggregationLabel(aggregation_group_type_index) }, ], diff --git a/frontend/src/scenes/retention/retentionPeopleLogic.ts b/frontend/src/scenes/retention/retentionPeopleLogic.ts index 3268a44c228fd..72024de2a8589 100644 --- a/frontend/src/scenes/retention/retentionPeopleLogic.ts +++ b/frontend/src/scenes/retention/retentionPeopleLogic.ts @@ -31,7 +31,7 @@ export const retentionPeopleLogic = kea([ __default: {} as RetentionTablePeoplePayload, loadPeople: async (rowIndex: number) => { const urlParams = toParams({ ...values.apiFilters, selected_interval: rowIndex }) - return (await api.get(`api/person/retention/?${urlParams}`)) as RetentionTablePeoplePayload + return await api.get(`api/person/retention/?${urlParams}`) }, }, })), diff --git a/frontend/src/scenes/retention/retentionTableLogic.ts b/frontend/src/scenes/retention/retentionTableLogic.ts index 52e442b0d125e..aa495e9d3a68c 100644 --- a/frontend/src/scenes/retention/retentionTableLogic.ts +++ b/frontend/src/scenes/retention/retentionTableLogic.ts @@ -2,7 +2,7 @@ import { dayjs } from 'lib/dayjs' import { kea, props, key, path, connect, selectors } from 'kea' import { range } from 'lib/utils' import { keyForInsightLogicProps } from 'scenes/insights/sharedUtils' -import { InsightLogicProps } from '~/types' +import { InsightLogicProps, InsightType } from '~/types' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import { retentionLogic } from './retentionLogic' @@ -36,7 +36,7 @@ export const retentionTableLogic = kea([ connect((props: InsightLogicProps) => ({ values: [ insightVizDataLogic(props), - ['dateRange', 'retentionFilter', 'breakdown'], + ['dateRange', 'retentionFilter', 'breakdown', 'vizSpecificOptions'], retentionLogic(props), ['results'], ], @@ -47,6 +47,12 @@ export const retentionTableLogic = kea([ (dateRange, retentionFilter) => periodIsLatest(dateRange?.date_to || null, retentionFilter?.period || null), ], + 
retentionVizOptions: [ + (s) => [s.vizSpecificOptions], + (vizSpecificOptions) => vizSpecificOptions?.[InsightType.RETENTION], + ], + hideSizeColumn: [(s) => [s.retentionVizOptions], (retentionVizOptions) => retentionVizOptions?.hideSizeColumn], + maxIntervalsCount: [ (s) => [s.results], (results) => { @@ -55,15 +61,15 @@ export const retentionTableLogic = kea([ ], tableHeaders: [ - (s) => [s.results], - (results) => { - return ['Cohort', 'Size', ...results.map((x) => x.label)] + (s) => [s.results, s.hideSizeColumn], + (results, hideSizeColumn) => { + return ['Cohort', ...(hideSizeColumn ? [] : ['Size']), ...results.map((x) => x.label)] }, ], tableRows: [ - (s) => [s.results, s.maxIntervalsCount, s.retentionFilter, s.breakdown], - (results, maxIntervalsCount, retentionFilter, breakdown) => { + (s) => [s.results, s.maxIntervalsCount, s.retentionFilter, s.breakdown, s.hideSizeColumn], + (results, maxIntervalsCount, retentionFilter, breakdown, hideSizeColumn) => { const { period } = retentionFilter || {} const { breakdowns } = breakdown || {} @@ -75,7 +81,7 @@ export const retentionTableLogic = kea([ ? dayjs(results[rowIndex].date).format('MMM D, h A') : dayjs(results[rowIndex].date).format('MMM D'), // Second column is the first value (which is essentially the total) - results[rowIndex].values[0].count, + ...(hideSizeColumn ? 
[] : [results[rowIndex].values[0].count]), // All other columns are rendered as percentage ...results[rowIndex].values.map((row) => { const percentage = diff --git a/frontend/src/scenes/saved-insights/SavedInsights.scss b/frontend/src/scenes/saved-insights/SavedInsights.scss index a9e79b54a7ba8..84e02c0b92476 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.scss +++ b/frontend/src/scenes/saved-insights/SavedInsights.scss @@ -14,7 +14,7 @@ .new-insight-dropdown-btn { cursor: pointer; height: 40px; - background-color: var(--primary); + background-color: var(--primary-3000); padding: 8px 12px 8px 16px; border: 1px solid var(--border); border-radius: 4px; diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index 229ebc9b0c613..d8c87972cdf00 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -9,6 +9,16 @@ import { organizationLogic } from 'scenes/organizationLogic' import { PageHeader } from 'lib/components/PageHeader' import { SavedInsightsEmptyState } from 'scenes/insights/EmptyStates' import { teamLogic } from '../teamLogic' +import { + IconBrackets, + IconFunnels, + IconHogQL, + IconLifecycle, + IconRetention, + IconStickiness, + IconTrends, + IconUserPaths, +} from '@posthog/icons' import { IconAction, IconBarChart, @@ -22,13 +32,6 @@ import { IconStarFilled, IconStarOutline, IconTableChart, - InsightsFunnelsIcon, - InsightsLifecycleIcon, - InsightsPathsIcon, - InsightSQLIcon, - InsightsRetentionIcon, - InsightsStickinessIcon, - InsightsTrendsIcon, } from 'lib/lemon-ui/icons' import { SceneExport } from 'scenes/sceneTypes' import { TZLabel } from 'lib/components/TZLabel' @@ -74,49 +77,49 @@ export const INSIGHT_TYPES_METADATA: Record = [InsightType.TRENDS]: { name: 'Trends', description: 'Visualize and break down how actions or events vary over time.', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: 
true, }, [InsightType.FUNNELS]: { name: 'Funnel', description: 'Discover how many users complete or drop out of a sequence of actions.', - icon: InsightsFunnelsIcon, + icon: IconFunnels, inMenu: true, }, [InsightType.RETENTION]: { name: 'Retention', description: 'See how many users return on subsequent days after an intial action.', - icon: InsightsRetentionIcon, + icon: IconRetention, inMenu: true, }, [InsightType.PATHS]: { name: 'Paths', description: 'Trace the journeys users take within your product and where they drop off.', - icon: InsightsPathsIcon, + icon: IconUserPaths, inMenu: true, }, [InsightType.STICKINESS]: { name: 'Stickiness', description: 'See what keeps users coming back by viewing the interval between repeated actions.', - icon: InsightsStickinessIcon, + icon: IconStickiness, inMenu: true, }, [InsightType.LIFECYCLE]: { name: 'Lifecycle', description: 'Understand growth by breaking down new, resurrected, returning and dormant users.', - icon: InsightsLifecycleIcon, + icon: IconLifecycle, inMenu: true, }, [InsightType.SQL]: { name: 'SQL', description: 'Use HogQL to query your data.', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [InsightType.JSON]: { name: 'Custom', description: 'Save components powered by our JSON query language.', - icon: InsightSQLIcon, + icon: IconBrackets, inMenu: true, }, } @@ -125,37 +128,37 @@ export const QUERY_TYPES_METADATA: Record = { [NodeKind.TrendsQuery]: { name: 'Trends', description: 'Visualize and break down how actions or events vary over time', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.FunnelsQuery]: { name: 'Funnel', description: 'Discover how many users complete or drop out of a sequence of actions', - icon: InsightsFunnelsIcon, + icon: IconFunnels, inMenu: true, }, [NodeKind.RetentionQuery]: { name: 'Retention', description: 'See how many users return on subsequent days after an intial action', - icon: InsightsRetentionIcon, + icon: IconRetention, inMenu: true, }, 
[NodeKind.PathsQuery]: { name: 'Paths', description: 'Trace the journeys users take within your product and where they drop off', - icon: InsightsPathsIcon, + icon: IconUserPaths, inMenu: true, }, [NodeKind.StickinessQuery]: { name: 'Stickiness', description: 'See what keeps users coming back by viewing the interval between repeated actions', - icon: InsightsStickinessIcon, + icon: IconStickiness, inMenu: true, }, [NodeKind.LifecycleQuery]: { name: 'Lifecycle', description: 'Understand growth by breaking down new, resurrected, returning and dormant users', - icon: InsightsLifecycleIcon, + icon: IconLifecycle, inMenu: true, }, [NodeKind.EventsNode]: { @@ -239,43 +242,43 @@ export const QUERY_TYPES_METADATA: Record = { [NodeKind.SessionsTimelineQuery]: { name: 'Sessions', description: 'Sessions timeline query', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.HogQLQuery]: { name: 'HogQL', description: 'Direct HogQL query', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [NodeKind.HogQLMetadata]: { name: 'HogQL Metadata', description: 'Metadata for a HogQL query', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [NodeKind.DatabaseSchemaQuery]: { name: 'Database Schema', description: 'Introspect the PostHog database schema', - icon: InsightSQLIcon, + icon: IconHogQL, inMenu: true, }, [NodeKind.WebOverviewQuery]: { name: 'Overview Stats', description: 'View overview stats for a website', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.WebStatsTableQuery]: { name: 'Web Table', description: 'A table of results from web analytics, with a breakdown', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, [NodeKind.WebTopClicksQuery]: { name: 'Top Clicks', description: 'View top clicks for a website', - icon: InsightsTrendsIcon, + icon: IconTrends, inMenu: true, }, } @@ -301,7 +304,7 @@ export function InsightIcon({ insight }: { insight: InsightModel }): JSX.Element } const insightMetadata 
= INSIGHT_TYPES_METADATA[insightType] if (insightMetadata && insightMetadata.icon) { - return + return } return null } @@ -353,8 +356,8 @@ function SavedInsightsGrid(): JSX.Element { insight={{ ...insight }} rename={() => renameInsight(insight)} duplicate={() => duplicateInsight(insight)} - deleteWithUndo={() => - deleteWithUndo({ + deleteWithUndo={async () => + await deleteWithUndo({ object: insight, endpoint: `projects/${currentTeamId}/insights`, callback: loadInsights, @@ -501,7 +504,7 @@ export function SavedInsights(): JSX.Element { - deleteWithUndo({ + void deleteWithUndo({ object: insight, endpoint: `projects/${currentTeamId}/insights`, callback: loadInsights, @@ -522,7 +525,10 @@ export function SavedInsights(): JSX.Element { return (
- } /> + } + /> setSavedInsightsFilters({ tab })} diff --git a/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts b/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts index 34475f73408c7..b85a15facf92f 100644 --- a/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts +++ b/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts @@ -193,7 +193,7 @@ describe('savedInsightsLogic', () => { const sourceInsight = createInsight(123, 'hello') sourceInsight.name = '' sourceInsight.derived_name = 'should be copied' - await logic.actions.duplicateInsight(sourceInsight) + await logic.asyncActions.duplicateInsight(sourceInsight) expect(api.create).toHaveBeenCalledWith( `api/projects/${MOCK_TEAM_ID}/insights`, expect.objectContaining({ name: '' }) @@ -204,7 +204,7 @@ describe('savedInsightsLogic', () => { const sourceInsight = createInsight(123, 'hello') sourceInsight.name = 'should be copied' sourceInsight.derived_name = '' - await logic.actions.duplicateInsight(sourceInsight) + await logic.asyncActions.duplicateInsight(sourceInsight) expect(api.create).toHaveBeenCalledWith( `api/projects/${MOCK_TEAM_ID}/insights`, expect.objectContaining({ name: 'should be copied (copy)' }) diff --git a/frontend/src/scenes/saved-insights/savedInsightsLogic.ts b/frontend/src/scenes/saved-insights/savedInsightsLogic.ts index c1bb0bfdb91a5..349ea9e43e0f7 100644 --- a/frontend/src/scenes/saved-insights/savedInsightsLogic.ts +++ b/frontend/src/scenes/saved-insights/savedInsightsLogic.ts @@ -38,7 +38,7 @@ export interface SavedInsightFilters { search: string insightType: string createdBy: number | 'All users' - dateFrom: string | dayjs.Dayjs | undefined | 'all' | null + dateFrom: string | dayjs.Dayjs | undefined | null dateTo: string | dayjs.Dayjs | undefined | null page: number dashboardId: number | undefined | null diff --git a/frontend/src/scenes/sceneLogic.ts b/frontend/src/scenes/sceneLogic.ts index bd5c46206d001..8e87f1029fd0b 100644 --- 
a/frontend/src/scenes/sceneLogic.ts +++ b/frontend/src/scenes/sceneLogic.ts @@ -13,8 +13,6 @@ import { LoadedScene, Params, Scene, SceneConfig, SceneExport, SceneParams } fro import { emptySceneParams, preloadedScenes, redirects, routes, sceneConfigurations } from 'scenes/scenes' import { organizationLogic } from './organizationLogic' import { appContextLogic } from './appContextLogic' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { FEATURE_FLAGS } from 'lib/constants' /** Mapping of some scenes that aren't directly accessible from the sidebar to ones that are - for the sidebar. */ const sceneNavAlias: Partial> = { @@ -256,21 +254,12 @@ export const sceneLogic = kea([ !location.pathname.startsWith('/settings') ) { if ( - featureFlagLogic.values.featureFlags[FEATURE_FLAGS.PRODUCT_SPECIFIC_ONBOARDING] === - 'test' && + !teamLogic.values.currentTeam.completed_snippet_onboarding && !Object.keys(teamLogic.values.currentTeam.has_completed_onboarding_for || {}).length ) { - console.warn('No onboarding completed, redirecting to products') + console.warn('No onboarding completed, redirecting to /products') router.actions.replace(urls.products()) return - } else if ( - featureFlagLogic.values.featureFlags[FEATURE_FLAGS.PRODUCT_SPECIFIC_ONBOARDING] !== - 'test' && - !teamLogic.values.currentTeam.completed_snippet_onboarding - ) { - console.warn('Ingestion tutorial not completed, redirecting to it') - router.actions.replace(urls.ingestion()) - return } } } diff --git a/frontend/src/scenes/sceneTypes.ts b/frontend/src/scenes/sceneTypes.ts index 5d5ed7a89c3ff..3f41023e13f63 100644 --- a/frontend/src/scenes/sceneTypes.ts +++ b/frontend/src/scenes/sceneTypes.ts @@ -22,6 +22,7 @@ export enum Scene { PersonsManagement = 'PersonsManagement', Person = 'Person', Pipeline = 'Pipeline', + PipelineApp = 'PipelineApp', Group = 'Group', Action = 'Action', Experiments = 'Experiments', @@ -64,7 +65,6 @@ export enum Scene { PasswordReset = 'PasswordReset', 
PasswordResetComplete = 'PasswordResetComplete', PreflightCheck = 'PreflightCheck', - Ingestion = 'IngestionWizard', OrganizationCreationConfirm = 'OrganizationCreationConfirm', Unsubscribe = 'Unsubscribe', DebugQuery = 'DebugQuery', diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index dc5aa42bb885e..b174df0ff581b 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -3,7 +3,7 @@ import { Error404 as Error404Component } from '~/layout/Error404' import { ErrorNetwork as ErrorNetworkComponent } from '~/layout/ErrorNetwork' import { ErrorProjectUnavailable as ErrorProjectUnavailableComponent } from '~/layout/ErrorProjectUnavailable' import { urls } from 'scenes/urls' -import { InsightShortId, PipelineTabs, PropertyFilterType, ReplayTabs } from '~/types' +import { InsightShortId, PipelineAppTabs, PipelineTabs, PropertyFilterType, ReplayTabs } from '~/types' import { combineUrl } from 'kea-router' import { getDefaultEventsSceneQuery } from 'scenes/events/defaults' import { EventsQuery } from '~/queries/schema' @@ -45,7 +45,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.WebAnalytics]: { projectBased: true, - name: 'Web Analytics', + name: 'Web analytics', layout: 'app-container', }, [Scene.Cohort]: { @@ -54,43 +54,43 @@ export const sceneConfigurations: Partial> = { }, [Scene.Events]: { projectBased: true, - name: 'Event Explorer', + name: 'Event explorer', }, [Scene.BatchExports]: { projectBased: true, - name: 'Batch Exports', + name: 'Batch exports', }, [Scene.BatchExportEdit]: { projectBased: true, - name: 'Edit Batch Export', + name: 'Edit batch export', }, [Scene.BatchExport]: { projectBased: true, - name: 'Batch Export', + name: 'Batch export', }, [Scene.DataManagement]: { projectBased: true, - name: 'Data Management', + name: 'Data management', }, [Scene.EventDefinition]: { projectBased: true, - name: 'Data Management', + name: 'Data management', }, [Scene.PropertyDefinition]: { projectBased: 
true, - name: 'Data Management', + name: 'Data management', }, [Scene.Replay]: { projectBased: true, - name: 'Session Replay', + name: 'Session replay', }, [Scene.ReplaySingle]: { projectBased: true, - name: 'Replay Recording', + name: 'Replay recording', }, [Scene.ReplayPlaylist]: { projectBased: true, - name: 'Replay Playlist', + name: 'Replay playlist', }, [Scene.Person]: { projectBased: true, @@ -98,7 +98,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.PersonsManagement]: { projectBased: true, - name: 'Persons & Groups', + name: 'People & groups', }, [Scene.Action]: { projectBased: true, @@ -106,12 +106,16 @@ export const sceneConfigurations: Partial> = { }, [Scene.Group]: { projectBased: true, - name: 'Persons & Groups', + name: 'People & groups', }, [Scene.Pipeline]: { projectBased: true, name: 'Pipeline', }, + [Scene.PipelineApp]: { + projectBased: true, + name: 'Pipeline app', + }, [Scene.Experiments]: { projectBased: true, name: 'Experiments', @@ -122,7 +126,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.FeatureFlags]: { projectBased: true, - name: 'Feature Flags', + name: 'Feature flags', }, [Scene.FeatureFlag]: { projectBased: true, @@ -141,27 +145,27 @@ export const sceneConfigurations: Partial> = { }, [Scene.DataWarehouse]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehousePosthog]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehouseExternal]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehouseSavedQueries]: { projectBased: true, - name: 'Data Warehouse', + name: 'Data warehouse', }, [Scene.DataWarehouseSettings]: { projectBased: true, - name: 'Data Warehouse Settings', + name: 'Data warehouse settings', }, [Scene.DataWarehouseTable]: { projectBased: true, - name: 'Data Warehouse Table', + name: 'Data warehouse table', }, [Scene.EarlyAccessFeatures]: { projectBased: true, @@ -183,18 
+187,14 @@ export const sceneConfigurations: Partial> = { }, [Scene.SavedInsights]: { projectBased: true, - name: 'Insights', + name: 'Product analytics', }, [Scene.ProjectHomepage]: { projectBased: true, name: 'Homepage', }, [Scene.IntegrationsRedirect]: { - name: 'Integrations Redirect', - }, - [Scene.Ingestion]: { - projectBased: true, - layout: 'plain', + name: 'Integrations redirect', }, [Scene.Products]: { projectBased: true, @@ -206,7 +206,7 @@ export const sceneConfigurations: Partial> = { }, [Scene.ToolbarLaunch]: { projectBased: true, - name: 'Launch Toolbar', + name: 'Launch toolbar', }, [Scene.Site]: { projectBased: true, @@ -405,10 +405,14 @@ export const routes: Record = { [urls.persons()]: Scene.PersonsManagement, [urls.pipeline()]: Scene.Pipeline, // One entry for every available tab - ...Object.values(PipelineTabs).reduce((acc, tab) => { - acc[urls.pipeline(tab)] = Scene.Pipeline - return acc - }, {} as Record), + ...(Object.fromEntries(Object.values(PipelineTabs).map((tab) => [urls.pipeline(tab), Scene.Pipeline])) as Record< + string, + Scene + >), + // One entry for each available tab (key by app config id) + ...(Object.fromEntries( + Object.values(PipelineAppTabs).map((tab) => [urls.pipelineApp(':id', tab), Scene.PipelineApp]) + ) as Record), [urls.groups(':groupTypeIndex')]: Scene.PersonsManagement, [urls.group(':groupTypeIndex', ':groupKey', false)]: Scene.Group, [urls.group(':groupTypeIndex', ':groupKey', false, ':groupTab')]: Scene.Group, @@ -464,8 +468,6 @@ export const routes: Record = { [urls.inviteSignup(':id')]: Scene.InviteSignup, [urls.passwordReset()]: Scene.PasswordReset, [urls.passwordResetComplete(':uuid', ':token')]: Scene.PasswordResetComplete, - [urls.ingestion()]: Scene.Ingestion, - [urls.ingestion() + '/*']: Scene.Ingestion, [urls.products()]: Scene.Products, [urls.onboarding(':productKey')]: Scene.Onboarding, [urls.verifyEmail()]: Scene.VerifyEmail, diff --git 
a/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts b/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts index a302c21fbdb76..58c1e8e23a806 100644 --- a/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts +++ b/frontend/src/scenes/session-recordings/detail/sessionRecordingDetailLogic.ts @@ -2,6 +2,7 @@ import { kea, props, path, selectors } from 'kea' import { Breadcrumb, SessionRecordingType } from '~/types' import type { sessionRecordingDetailLogicType } from './sessionRecordingDetailLogicType' import { urls } from 'scenes/urls' +import { Scene } from 'scenes/sceneTypes' export interface SessionRecordingDetailLogicProps { id?: SessionRecordingType['id'] @@ -12,13 +13,15 @@ export const sessionRecordingDetailLogic = kea( props({} as SessionRecordingDetailLogicProps), selectors({ breadcrumbs: [ - () => [(_, props) => props.id], + () => [(_, props) => props.id as SessionRecordingType['id']], (sessionRecordingId): Breadcrumb[] => [ { + key: Scene.Replay, name: `Replay`, path: urls.replay(), }, { + key: sessionRecordingId, name: sessionRecordingId ?? 'Not Found', path: sessionRecordingId ? 
urls.replaySingle(sessionRecordingId) : undefined, }, diff --git a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts index 54ef82ab8da18..596692d8ca162 100644 --- a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts +++ b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts @@ -1,5 +1,5 @@ import { BuiltLogic, connect, kea, listeners, path, reducers, selectors } from 'kea' -import { Breadcrumb, PersonType, RecordingSnapshot, SessionRecordingType } from '~/types' +import { Breadcrumb, PersonType, RecordingSnapshot, ReplayTabs, SessionRecordingType } from '~/types' import { urls } from 'scenes/urls' import { loaders } from 'kea-loaders' @@ -14,6 +14,7 @@ import { eventWithTime } from '@rrweb/types' import type { sessionRecordingDataLogicType } from '../player/sessionRecordingDataLogicType' import { prepareRecordingSnapshots, sessionRecordingDataLogic } from '../player/sessionRecordingDataLogic' import { dayjs } from 'lib/dayjs' +import { Scene } from 'scenes/sceneTypes' export type ExportedSessionRecordingFileV1 = { version: '2022-12-02' @@ -196,10 +197,12 @@ export const sessionRecordingFilePlaybackLogic = kea [], (): Breadcrumb[] => [ { - name: `Recordings`, + key: Scene.Replay, + name: `Session replay`, path: urls.replay(), }, { + key: ReplayTabs.FilePlayback, name: 'Import', }, ], diff --git a/frontend/src/scenes/session-recordings/filters/DurationFilter.test.ts b/frontend/src/scenes/session-recordings/filters/DurationFilter.test.ts index ec8b402d74710..7046d99788fe3 100644 --- a/frontend/src/scenes/session-recordings/filters/DurationFilter.test.ts +++ b/frontend/src/scenes/session-recordings/filters/DurationFilter.test.ts @@ -33,7 +33,7 @@ describe('DurationFilter', () => { [PropertyOperator.GreaterThan, 3601, 'inactive_seconds', '> 3601 inactive seconds'], 
[PropertyOperator.GreaterThan, 3660, 'inactive_seconds', '> 61 inactive minutes'], [PropertyOperator.LessThan, 0, 'active_seconds', '< 0 active seconds'], - ])('converts the value correctly for total duration', async (operator, value, durationType, expectation) => { + ])('converts the value correctly for total duration', (operator, value, durationType, expectation) => { const filter: RecordingDurationFilter = { type: PropertyFilterType.Recording, key: 'duration', diff --git a/frontend/src/scenes/session-recordings/player/PlayerMeta.scss b/frontend/src/scenes/session-recordings/player/PlayerMeta.scss index 3312579e0b711..2089db1b92dd8 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMeta.scss +++ b/frontend/src/scenes/session-recordings/player/PlayerMeta.scss @@ -79,4 +79,14 @@ } } } + + .Link { + .posthog-3000 & { + color: var(--default); + + &:hover { + color: var(--primary-3000); + } + } + } } diff --git a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx index 08d675cb62db1..ecc5f95f3ead0 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx @@ -166,12 +166,12 @@ export function PlayerMeta(): JSX.Element { )}
-
+
{!sessionPerson || !startTime ? ( ) : (
- + {'·'} diff --git a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss index f57dc4c405164..dadd79e777ec8 100644 --- a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss +++ b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.scss @@ -83,6 +83,14 @@ pointer-events: none; transition: opacity 0.2s ease-in-out; } + + .LemonButton--tertiary { + .posthog-3000 & { + &:hover { + color: var(--primary-3000); + } + } + } } &--inspector-focus { diff --git a/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss b/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss index 1ee0c5586e7d0..53e2b3f3ca9a0 100644 --- a/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss +++ b/frontend/src/scenes/session-recordings/player/controller/Seekbar.scss @@ -136,7 +136,7 @@ } &--primary { - --tick-color: var(--primary); + --tick-color: var(--primary-3000); } .PlayerSeekbarTick__line { diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss index b821b4b8d4c21..b2a7191f56ce2 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorList.scss @@ -11,4 +11,8 @@ border-radius: var(--radius) 0 0 var(--radius); transition: transform 200ms linear; will-change: transform; + + .posthog-3000 & { + background-color: var(--primary-3000); + } } diff --git a/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts b/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts index 0f3d4220cf518..28dfc7a9280fa 100644 --- a/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts +++ 
b/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts @@ -27,7 +27,7 @@ describe('sessionPlayerModalLogic', () => { it('starts as null', () => { expectLogic(logic).toMatchValues({ activeSessionRecording: null }) }) - it('is set by openSessionPlayer and cleared by closeSessionPlayer', async () => { + it('is set by openSessionPlayer and cleared by closeSessionPlayer', () => { expectLogic(logic, () => logic.actions.openSessionPlayer({ id: 'abc' })) .toDispatchActions(['loadSessionRecordingsSuccess']) .toMatchValues({ diff --git a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.test.ts b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.test.ts index d11218217e817..6a372a20e6841 100644 --- a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.test.ts @@ -57,7 +57,7 @@ describe('playerSettingsLogic', () => { afterEach(() => { localStorage.clear() }) - it('should start with the first entry selected', async () => { + it('should start with the first entry selected', () => { expect(logic.values.selectedMiniFilters).toEqual([ 'all-automatic', 'console-all', @@ -66,7 +66,7 @@ describe('playerSettingsLogic', () => { ]) }) - it('should remove other selected filters if alone', async () => { + it('should remove other selected filters if alone', () => { logic.actions.setMiniFilter('all-errors', true) expect(logic.values.selectedMiniFilters.sort()).toEqual([ @@ -77,7 +77,7 @@ describe('playerSettingsLogic', () => { ]) }) - it('should allow multiple filters if not alone', async () => { + it('should allow multiple filters if not alone', () => { logic.actions.setMiniFilter('console-warn', true) logic.actions.setMiniFilter('console-info', true) @@ -90,7 +90,7 @@ describe('playerSettingsLogic', () => { ]) }) - it('should reset to first in tab if empty', async () => { + it('should reset to first in tab if empty', () => { 
expect(logic.values.selectedMiniFilters.sort()).toEqual([ 'all-automatic', 'console-all', diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts index a98649dc2e8b4..3a9c4920e7d67 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts @@ -23,7 +23,7 @@ const sortedRecordingSnapshotsJson = sortedRecordingSnapshots() describe('sessionRecordingDataLogic', () => { let logic: ReturnType - beforeEach(async () => { + beforeEach(() => { useAvailableFeatures([AvailableFeature.RECORDINGS_PERFORMANCE]) useMocks({ get: { @@ -66,7 +66,7 @@ describe('sessionRecordingDataLogic', () => { it('mounts other logics', async () => { await expectLogic(logic).toMount([eventUsageLogic, teamLogic, userLogic]) }) - it('has default values', async () => { + it('has default values', () => { expect(logic.values).toMatchObject({ bufferedToTime: null, durationMs: 0, diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index d9028d9034c1c..b9d1d578b425a 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -265,7 +265,7 @@ export const sessionRecordingDataLogic = kea([ reportViewed: async (_, breakpoint) => { const durations = generateRecordingReportDurations(cache, values) - await breakpoint() + breakpoint() // Triggered on first paint eventUsageLogic.actions.reportRecording( values.sessionPlayerData, diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index b510c46732e3f..eedbdd9f448f7 100644 --- 
a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -605,7 +605,7 @@ export const sessionRecordingPlayerLogic = kea( // If replayer isn't initialized, it will be initialized with the already loaded snapshots if (values.player?.replayer) { for (const event of eventsToAdd) { - await values.player?.replayer?.addEvent(event) + values.player?.replayer?.addEvent(event) } } @@ -615,7 +615,7 @@ export const sessionRecordingPlayerLogic = kea( actions.checkBufferingCompleted() breakpoint() }, - loadRecordingMetaSuccess: async () => { + loadRecordingMetaSuccess: () => { // As the connected data logic may be preloaded we call a shared function here and on mount actions.updateFromMetadata() if (props.autoPlay) { @@ -624,7 +624,7 @@ export const sessionRecordingPlayerLogic = kea( } }, - loadRecordingSnapshotsSuccess: async () => { + loadRecordingSnapshotsSuccess: () => { // As the connected data logic may be preloaded we call a shared function here and on mount actions.updateFromMetadata() }, @@ -690,7 +690,7 @@ export const sessionRecordingPlayerLogic = kea( actions.reportRecordingPlayerSpeedChanged(speed) actions.syncPlayerSpeed() }, - seekToTimestamp: async ({ timestamp, forcePlay }, breakpoint) => { + seekToTimestamp: ({ timestamp, forcePlay }, breakpoint) => { actions.stopAnimation() actions.setCurrentTimestamp(timestamp) @@ -959,7 +959,7 @@ export const sessionRecordingPlayerLogic = kea( console.warn('Failed to enable native full-screen mode:', e) } } else if (document.fullscreenElement === props.playerRef?.current) { - document.exitFullscreen() + await document.exitFullscreen() } }, })), diff --git a/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx b/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx index 37a53c2235b79..4e34628a2f797 100644 --- a/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx +++ 
b/frontend/src/scenes/session-recordings/player/share/PlayerShare.tsx @@ -8,6 +8,7 @@ import { Field } from 'lib/forms/Field' import { copyToClipboard } from 'lib/utils' import { playerShareLogic, PlayerShareLogicProps } from './playerShareLogic' import { SharingModalContent } from 'lib/components/Sharing/SharingModal' +import { captureException } from '@sentry/react' export function PlayerShareRecording(props: PlayerShareLogicProps): JSX.Element { const logic = playerShareLogic(props) @@ -27,7 +28,7 @@ export function PlayerShareRecording(props: PlayerShareLogicProps): JSX.Element fullWidth center sideIcon={} - onClick={async () => await copyToClipboard(url, 'recording link')} + onClick={() => void copyToClipboard(url, 'recording link').then(captureException)} title={url} > {url} diff --git a/frontend/src/scenes/session-recordings/player/utils/segmenter.test.ts b/frontend/src/scenes/session-recordings/player/utils/segmenter.test.ts index 4272a67b256fd..3f991f693a289 100644 --- a/frontend/src/scenes/session-recordings/player/utils/segmenter.test.ts +++ b/frontend/src/scenes/session-recordings/player/utils/segmenter.test.ts @@ -6,7 +6,7 @@ import { dayjs } from 'lib/dayjs' import { RecordingSnapshot } from '~/types' describe('segmenter', () => { - it('matches snapshots', async () => { + it('matches snapshots', () => { const snapshots = convertSnapshotsResponse(sortedRecordingSnapshots().snapshot_data_by_window_id) const segments = createSegments( snapshots, diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx index d3acb8f7aa308..a2c34cfb26a1b 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx @@ -162,7 +162,7 @@ function PinnedIndicator(): JSX.Element | null { function ViewedIndicator(props: { viewed: boolean }): JSX.Element | null { 
return !props.viewed ? ( -
+
) : null } diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss index 455928f1dad82..379fe80fd42cc 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss @@ -20,6 +20,12 @@ width: 25%; overflow: hidden; height: 100%; + + .text-link { + .posthog-3000 & { + color: var(--default); + } + } } .SessionRecordingsPlaylist__player { @@ -64,11 +70,11 @@ transition: background-color 200ms ease, border 200ms ease; &--active { - border-left-color: var(--primary); + border-left-color: var(--primary-3000); } &:hover { - background-color: var(--primary-highlight); + background-color: var(--primary-3000-highlight); } .SessionRecordingPreview__property-icon:hover { diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx index 7232b38d2a9e2..b830a7c843b98 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx @@ -1,6 +1,14 @@ import { Link } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { playerSettingsLogic } from '../player/playerSettingsLogic' +import { sessionRecordingsPlaylistLogic } from './sessionRecordingsPlaylistLogic' export const SessionRecordingsPlaylistTroubleshooting = (): JSX.Element => { + const { hideViewedRecordings } = useValues(playerSettingsLogic) + const { setHideViewedRecordings } = useActions(playerSettingsLogic) + const { otherRecordings } = useValues(sessionRecordingsPlaylistLogic) + const { setShowSettings } = useActions(sessionRecordingsPlaylistLogic) + return ( <>

No matching recordings

@@ -10,6 +18,19 @@ export const SessionRecordingsPlaylistTroubleshooting = (): JSX.Element => {

    + {otherRecordings.length > 0 && hideViewedRecordings && ( +
  • + Viewed recordings hidden.{' '} + { + setShowSettings(true) + setHideViewedRecordings(false) + }} + > + Toggle option + +
  • + )}
  • They are outside the retention period diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts index 959e885399cc6..0ca39dc4cc933 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts @@ -52,7 +52,7 @@ describe('sessionRecordingsListPropertiesLogic', () => { }) it('loads properties', async () => { - await expectLogic(logic, async () => { + await expectLogic(logic, () => { logic.actions.loadPropertiesForSessions(mockSessons) }).toDispatchActions(['loadPropertiesForSessionsSuccess']) @@ -69,7 +69,7 @@ describe('sessionRecordingsListPropertiesLogic', () => { }) it('does not loads cached properties', async () => { - await expectLogic(logic, async () => { + await expectLogic(logic, () => { logic.actions.loadPropertiesForSessions(mockSessons) }).toDispatchActions(['loadPropertiesForSessionsSuccess']) @@ -80,7 +80,7 @@ describe('sessionRecordingsListPropertiesLogic', () => { }, }) - await expectLogic(logic, async () => { + await expectLogic(logic, () => { logic.actions.maybeLoadPropertiesForSessions(mockSessons) }).toNotHaveDispatchedActions(['loadPropertiesForSessionsSuccess']) diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts index bbe331b1f9c08..550a35a3a43da 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts @@ -165,7 +165,7 @@ describe('sessionRecordingsPlaylistLogic', () => { it('starts as null', () => { expectLogic(logic).toMatchValues({ activeSessionRecording: undefined }) }) - it('is set by 
setSessionRecordingId', async () => { + it('is set by setSessionRecordingId', () => { expectLogic(logic, () => logic.actions.setSelectedRecordingId('abc')) .toDispatchActions(['loadSessionRecordingsSuccess']) .toMatchValues({ @@ -175,7 +175,7 @@ describe('sessionRecordingsPlaylistLogic', () => { expect(router.values.searchParams).toHaveProperty('sessionRecordingId', 'abc') }) - it('is partial if sessionRecordingId not in list', async () => { + it('is partial if sessionRecordingId not in list', () => { expectLogic(logic, () => logic.actions.setSelectedRecordingId('not-in-list')) .toDispatchActions(['loadSessionRecordingsSuccess']) .toMatchValues({ @@ -198,7 +198,7 @@ describe('sessionRecordingsPlaylistLogic', () => { }) it('mounts and loads the recording when a recording is opened', () => { - expectLogic(logic, async () => await logic.actions.setSelectedRecordingId('abcd')) + expectLogic(logic, async () => logic.asyncActions.setSelectedRecordingId('abcd')) .toMount(sessionRecordingDataLogic({ sessionRecordingId: 'abcd' })) .toDispatchActions(['loadEntireRecording']) }) diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts index b07d55e3f9d10..c12c2c9352c67 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts @@ -350,7 +350,7 @@ export const sessionRecordingsPlaylistLogic = kea ({ ...state, diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.test.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.test.ts index 4530486fb5ed0..7e9f241596e76 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.test.ts +++ 
b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.test.ts @@ -64,9 +64,9 @@ describe('sessionRecordingsPlaylistSceneLogic', () => { }, ], } - expectLogic(logic, async () => { - await logic.actions.setFilters(newFilter) - await logic.actions.updatePlaylist({}) + expectLogic(logic, () => { + logic.actions.setFilters(newFilter) + logic.actions.updatePlaylist({}) }) .toDispatchActions(['setFilters']) .toMatchValues({ filters: expect.objectContaining(newFilter), hasChanges: true }) diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts index f5e310872f570..761c1f0f5e2ef 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts @@ -17,6 +17,7 @@ import type { sessionRecordingsPlaylistSceneLogicType } from './sessionRecording import { PINNED_RECORDINGS_LIMIT } from './sessionRecordingsPlaylistLogic' import api from 'lib/api' import { addRecordingToPlaylist, removeRecordingFromPlaylist } from '../player/utils/playerUtils' +import { Scene } from 'scenes/sceneTypes' export interface SessionRecordingsPlaylistLogicProps { shortId: string @@ -135,14 +136,17 @@ export const sessionRecordingsPlaylistSceneLogic = kea [s.playlist], (playlist): Breadcrumb[] => [ { + key: Scene.Replay, name: 'Replay', path: urls.replay(), }, { + key: ReplayTabs.Playlists, name: 'Playlists', path: urls.replay(ReplayTabs.Playlists), }, { + key: playlist?.short_id || 'new', name: playlist?.name || playlist?.derived_name || '(Untitled)', path: urls.replayPlaylist(playlist?.short_id || ''), }, diff --git a/frontend/src/scenes/session-recordings/saved-playlists/SavedSessionRecordingPlaylistsEmptyState.tsx b/frontend/src/scenes/session-recordings/saved-playlists/SavedSessionRecordingPlaylistsEmptyState.tsx 
index 5883934034c1e..c1d647f4919a1 100644 --- a/frontend/src/scenes/session-recordings/saved-playlists/SavedSessionRecordingPlaylistsEmptyState.tsx +++ b/frontend/src/scenes/session-recordings/saved-playlists/SavedSessionRecordingPlaylistsEmptyState.tsx @@ -27,7 +27,7 @@ export function SavedSessionRecordingPlaylistsEmptyState(): JSX.Element { AvailableFeature.RECORDINGS_PLAYLISTS, 'recording playlists', "Playlists allow you to save certain session recordings as a group to easily find and watch them again in the future. You've unfortunately run out of playlists on your current subscription plan.", - () => createPlaylist({}, true), + () => void createPlaylist({}, true), undefined, playlists.count ) diff --git a/frontend/src/scenes/session-recordings/saved-playlists/savedSessionRecordingPlaylistsLogic.ts b/frontend/src/scenes/session-recordings/saved-playlists/savedSessionRecordingPlaylistsLogic.ts index 970d191b5227e..9a6c03dec4c6c 100644 --- a/frontend/src/scenes/session-recordings/saved-playlists/savedSessionRecordingPlaylistsLogic.ts +++ b/frontend/src/scenes/session-recordings/saved-playlists/savedSessionRecordingPlaylistsLogic.ts @@ -25,7 +25,7 @@ export interface SavedSessionRecordingPlaylistsFilters { order: string search: string createdBy: number | 'All users' - dateFrom: string | dayjs.Dayjs | undefined | 'all' | null + dateFrom: string | dayjs.Dayjs | undefined | null dateTo: string | dayjs.Dayjs | undefined | null page: number pinned: boolean @@ -227,7 +227,7 @@ export const savedSessionRecordingPlaylistsLogic = kea ({ - [urls.replay(ReplayTabs.Playlists)]: async (_, searchParams) => { + [urls.replay(ReplayTabs.Playlists)]: (_, searchParams) => { const currentFilters = values.filters const nextFilters = objectClean(searchParams) if (!objectsEqual(currentFilters, nextFilters)) { diff --git a/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts b/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts index 10d58cdcb3d07..be26144e3d296 
100644 --- a/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts +++ b/frontend/src/scenes/session-recordings/sessionRecordingsLogic.ts @@ -5,11 +5,12 @@ import { actionToUrl, router, urlToAction } from 'kea-router' import type { sessionRecordingsLogicType } from './sessionRecordingsLogicType' import { SESSION_RECORDINGS_PLAYLIST_FREE_COUNT } from 'lib/constants' import { capitalizeFirstLetter } from 'lib/utils' +import { Scene } from 'scenes/sceneTypes' export const humanFriendlyTabName = (tab: ReplayTabs): string => { switch (tab) { case ReplayTabs.Recent: - return 'Recent Recordings' + return 'Recent recordings' case ReplayTabs.Playlists: return 'Playlists' case ReplayTabs.FilePlayback: @@ -48,11 +49,13 @@ export const sessionRecordingsLogic = kea([ const breadcrumbs: Breadcrumb[] = [] if (tab !== ReplayTabs.Recent) { breadcrumbs.push({ + key: Scene.Replay, name: 'Replay', path: urls.replay(), }) } breadcrumbs.push({ + key: tab, name: humanFriendlyTabName(tab), }) diff --git a/frontend/src/scenes/settings/organization/Permissions/Roles/rolesLogic.tsx b/frontend/src/scenes/settings/organization/Permissions/Roles/rolesLogic.tsx index 32276a71df837..715858c30f2e4 100644 --- a/frontend/src/scenes/settings/organization/Permissions/Roles/rolesLogic.tsx +++ b/frontend/src/scenes/settings/organization/Permissions/Roles/rolesLogic.tsx @@ -53,7 +53,7 @@ export const rolesLogic = kea([ }, ], }), - loaders(({ values, actions }) => ({ + loaders(({ values, actions, asyncActions }) => ({ roles: { loadRoles: async () => { const response = await api.roles.list() @@ -62,7 +62,7 @@ export const rolesLogic = kea([ createRole: async (roleName: string) => { const { roles, roleMembersToAdd } = values const newRole = await api.roles.create(roleName) - await actions.addRoleMembers({ role: newRole, membersToAdd: roleMembersToAdd }) + await asyncActions.addRoleMembers({ role: newRole, membersToAdd: roleMembersToAdd }) eventUsageLogic.actions.reportRoleCreated(roleName) 
actions.setRoleMembersInFocus([]) actions.setRoleMembersToAdd([]) diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx index 2d8304b40ca7f..a950609c6fa02 100644 --- a/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx +++ b/frontend/src/scenes/settings/organization/VerifiedDomains/ConfigureSAMLModal.tsx @@ -40,7 +40,7 @@ export function ConfigureSAMLModal(): JSX.Element { {`${siteUrl}/complete/saml/`} - {configureSAMLModalId ?? undefined} + {configureSAMLModalId || 'unknown'} {siteUrl} diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx index 6fcc0606652c1..f461bb2ef737b 100644 --- a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx +++ b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx @@ -51,9 +51,11 @@ export function VerifyDomainModal(): JSX.Element {
    {domainBeingVerified?.verification_challenge}
    - + {domainBeingVerified && ( + + )}
diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/verifiedDomainsLogic.ts b/frontend/src/scenes/settings/organization/VerifiedDomains/verifiedDomainsLogic.ts index 39746e2109b3b..a6ecafd6365bd 100644 --- a/frontend/src/scenes/settings/organization/VerifiedDomains/verifiedDomainsLogic.ts +++ b/frontend/src/scenes/settings/organization/VerifiedDomains/verifiedDomainsLogic.ts @@ -109,7 +109,7 @@ export const verifiedDomainsLogic = kea([ 'We could not verify your domain yet. DNS propagation may take up to 72 hours. Please try again later.' ) } - actions.replaceDomain(response as OrganizationDomainType) + actions.replaceDomain(response) actions.setVerifyModal(null) return false }, diff --git a/frontend/src/scenes/settings/organization/invitesLogic.tsx b/frontend/src/scenes/settings/organization/invitesLogic.tsx index 1b119f9a52da9..80db898825d58 100644 --- a/frontend/src/scenes/settings/organization/invitesLogic.tsx +++ b/frontend/src/scenes/settings/organization/invitesLogic.tsx @@ -44,7 +44,7 @@ export const invitesLogic = kea([ }, })), listeners({ - createInviteSuccess: async () => { + createInviteSuccess: () => { const nameProvided = false // TODO: Change when adding support for names on invites eventUsageLogic.actions.reportInviteAttempted( nameProvided, diff --git a/frontend/src/scenes/settings/project/AutocaptureSettings.tsx b/frontend/src/scenes/settings/project/AutocaptureSettings.tsx index e65c24e6f76b5..6b5492fc05069 100644 --- a/frontend/src/scenes/settings/project/AutocaptureSettings.tsx +++ b/frontend/src/scenes/settings/project/AutocaptureSettings.tsx @@ -11,7 +11,7 @@ export function AutocaptureSettings(): JSX.Element { const { userLoading } = useValues(userLogic) const { currentTeam } = useValues(teamLogic) const { updateCurrentTeam } = useActions(teamLogic) - const { reportIngestionAutocaptureToggled } = useActions(eventUsageLogic) + const { reportAutocaptureToggled } = useActions(eventUsageLogic) return ( <> @@ -33,7 +33,7 @@ 
export function AutocaptureSettings(): JSX.Element { updateCurrentTeam({ autocapture_opt_out: !checked, }) - reportIngestionAutocaptureToggled(!checked) + reportAutocaptureToggled(!checked) }} checked={!currentTeam?.autocapture_opt_out} disabled={userLoading} @@ -49,7 +49,7 @@ export function ExceptionAutocaptureSettings(): JSX.Element { const { userLoading } = useValues(userLogic) const { currentTeam } = useValues(teamLogic) const { updateCurrentTeam } = useActions(teamLogic) - const { reportIngestionAutocaptureExceptionsToggled } = useActions(eventUsageLogic) + const { reportAutocaptureExceptionsToggled } = useActions(eventUsageLogic) const { errorsToIgnoreRules, rulesCharacters } = useValues(autocaptureExceptionsLogic) const { setErrorsToIgnoreRules } = useActions(autocaptureExceptionsLogic) @@ -62,7 +62,7 @@ export function ExceptionAutocaptureSettings(): JSX.Element { updateCurrentTeam({ autocapture_exceptions_opt_in: checked, }) - reportIngestionAutocaptureExceptionsToggled(checked) + reportAutocaptureExceptionsToggled(checked) }} checked={!!currentTeam?.autocapture_exceptions_opt_in} disabled={userLoading} @@ -81,7 +81,7 @@ export function ExceptionAutocaptureSettings(): JSX.Element {

You can enter a regular expression that matches values of{' '} here to ignore them. One per line. For example, if you - want to drop all errors that contain the word "bot", or you can enter "bot" here. Or if you want to drop + want to drop all errors that contain the word "bot", you can enter "bot" here. Or if you want to drop all errors that are exactly "bot", you can enter "^bot$".

Only up to 300 characters of config are allowed here.

diff --git a/frontend/src/scenes/settings/project/groupAnalyticsConfigLogic.ts b/frontend/src/scenes/settings/project/groupAnalyticsConfigLogic.ts index 89a3029b4d47e..dd1ec134bbd88 100644 --- a/frontend/src/scenes/settings/project/groupAnalyticsConfigLogic.ts +++ b/frontend/src/scenes/settings/project/groupAnalyticsConfigLogic.ts @@ -40,7 +40,7 @@ export const groupAnalyticsConfigLogic = kea([ ], }), listeners(({ values, actions }) => ({ - save: async () => { + save: () => { const { groupTypes, singularChanges, pluralChanges } = values const payload = Array.from(groupTypes.values()).map((groupType) => { const result = { ...groupType } diff --git a/frontend/src/scenes/settings/project/webhookIntegrationLogic.ts b/frontend/src/scenes/settings/project/webhookIntegrationLogic.ts index 77cf7e1a69180..f649190f541c6 100644 --- a/frontend/src/scenes/settings/project/webhookIntegrationLogic.ts +++ b/frontend/src/scenes/settings/project/webhookIntegrationLogic.ts @@ -55,7 +55,7 @@ export const webhookIntegrationLogic = kea([ ], }), listeners(() => ({ - testWebhookSuccess: async ({ testedWebhook }) => { + testWebhookSuccess: ({ testedWebhook }) => { if (testedWebhook) { teamLogic.actions.updateCurrentTeam({ slack_incoming_webhook: testedWebhook }) } diff --git a/frontend/src/scenes/settings/settingsLogic.ts b/frontend/src/scenes/settings/settingsLogic.ts index 665b7d27a4c7f..b754950febf11 100644 --- a/frontend/src/scenes/settings/settingsLogic.ts +++ b/frontend/src/scenes/settings/settingsLogic.ts @@ -94,10 +94,9 @@ export const settingsLogic = kea([ }), listeners(({ values }) => ({ - selectSetting({ setting }) { + async selectSetting({ setting }) { const url = urls.settings(values.selectedSectionId ?? 
values.selectedLevel, setting as SettingId) - - copyToClipboard(window.location.origin + url) + await copyToClipboard(window.location.origin + url) }, })), ]) diff --git a/frontend/src/scenes/settings/settingsSceneLogic.ts b/frontend/src/scenes/settings/settingsSceneLogic.ts index ecd2b85d06e3f..5fcc8bb4b9b18 100644 --- a/frontend/src/scenes/settings/settingsSceneLogic.ts +++ b/frontend/src/scenes/settings/settingsSceneLogic.ts @@ -10,6 +10,7 @@ import { SettingSectionId, SettingLevelId, SettingLevelIds } from './types' import type { settingsSceneLogicType } from './settingsSceneLogicType' import { settingsLogic } from './settingsLogic' +import { Scene } from 'scenes/sceneTypes' export const settingsSceneLogic = kea([ path(['scenes', 'settings', 'settingsSceneLogic']), @@ -28,10 +29,12 @@ export const settingsSceneLogic = kea([ (s) => [s.selectedLevel, s.selectedSectionId, s.sections], (selectedLevel, selectedSectionId): Breadcrumb[] => [ { + key: Scene.Settings, name: `Settings`, path: urls.settings('project'), }, { + key: selectedSectionId || selectedLevel, name: selectedSectionId ? SettingsMap.find((x) => x.id === selectedSectionId)?.title : capitalizeFirstLetter(selectedLevel), diff --git a/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx b/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx index f9f79327e1ba4..aca059f1e1fd5 100644 --- a/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx +++ b/frontend/src/scenes/settings/user/PersonalAPIKeys.tsx @@ -90,7 +90,9 @@ function PersonalAPIKeysTable(): JSX.Element { dataIndex: 'value', render: function RenderValue(value) { return value ? 
( - {`${value}`} + + {String(value)} + ) : ( secret ) diff --git a/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts b/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts index f500ebe5e0553..54314396d9ae8 100644 --- a/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts +++ b/frontend/src/scenes/settings/user/personalAPIKeysLogic.ts @@ -24,7 +24,7 @@ export const personalAPIKeysLogic = kea([ }, deleteKey: async (key: PersonalAPIKeyType) => { await api.delete(`api/personal_api_keys/${key.id}/`) - return (values.keys as PersonalAPIKeyType[]).filter((filteredKey) => filteredKey.id != key.id) + return values.keys.filter((filteredKey) => filteredKey.id != key.id) }, }, ], diff --git a/frontend/src/scenes/sites/siteLogic.ts b/frontend/src/scenes/sites/siteLogic.ts index 7476f4eb51ab9..06ad0e13fdf98 100644 --- a/frontend/src/scenes/sites/siteLogic.ts +++ b/frontend/src/scenes/sites/siteLogic.ts @@ -2,6 +2,7 @@ import { kea, props, selectors, path } from 'kea' import { Breadcrumb } from '~/types' import type { siteLogicType } from './siteLogicType' +import { Scene } from 'scenes/sceneTypes' export interface SiteLogicProps { url: string @@ -15,9 +16,11 @@ export const siteLogic = kea([ (_, p) => [p.url], (url): Breadcrumb[] => [ { + key: Scene.Site, name: `Site`, }, { + key: url, name: url, }, ], diff --git a/frontend/src/scenes/surveys/SurveyAppearance.tsx b/frontend/src/scenes/surveys/SurveyAppearance.tsx index b5563821908a3..f4227ef131cc0 100644 --- a/frontend/src/scenes/surveys/SurveyAppearance.tsx +++ b/frontend/src/scenes/surveys/SurveyAppearance.tsx @@ -104,7 +104,7 @@ export function SurveyAppearance({ surveyQuestionItem.type === SurveyQuestionType.MultipleChoice) && ( undefined} /> diff --git a/frontend/src/scenes/surveys/SurveyEdit.tsx b/frontend/src/scenes/surveys/SurveyEdit.tsx index 0213c9b4fa296..2941e0a5576fd 100644 --- a/frontend/src/scenes/surveys/SurveyEdit.tsx +++ b/frontend/src/scenes/surveys/SurveyEdit.tsx @@ -450,7 +450,13 @@ export 
default function SurveyEdit(): JSX.Element { SurveyQuestionType.MultipleChoice) && (
- {({ value, onChange }) => ( + {({ + value, + onChange, + }: { + value: string[] + onChange: (newValue: string[]) => void + }) => (
{(value || []).map( ( @@ -527,7 +533,10 @@ export default function SurveyEdit(): JSX.Element { 1 && + index !== survey.questions.length - 1 + ? 'Next' + : survey.appearance.submitButtonText : question.buttonText } /> @@ -841,7 +850,7 @@ export default function SurveyEdit(): JSX.Element { />
- + diff --git a/frontend/src/scenes/surveys/SurveyTemplates.scss b/frontend/src/scenes/surveys/SurveyTemplates.scss index e0706c96e65df..c9622e7624689 100644 --- a/frontend/src/scenes/surveys/SurveyTemplates.scss +++ b/frontend/src/scenes/surveys/SurveyTemplates.scss @@ -1,17 +1,17 @@ @import '../../styles/mixins'; .SurveyTemplateContainer { - flex: 1; - display: flex; align-items: center; + background: var(--bg-light); + border-radius: var(--radius); border: 1px solid var(--border); - border-radius: 6px; + display: flex; + flex: 1; min-height: 300px; - margin-top: 2px; &:hover { cursor: pointer; - border-color: var(--primary-light); + border-color: var(--primary-3000-hover); } .SurveyTemplate { diff --git a/frontend/src/scenes/surveys/SurveyTemplates.tsx b/frontend/src/scenes/surveys/SurveyTemplates.tsx index 5eee9ba520d9a..759245b2f174e 100644 --- a/frontend/src/scenes/surveys/SurveyTemplates.tsx +++ b/frontend/src/scenes/surveys/SurveyTemplates.tsx @@ -28,7 +28,7 @@ export function SurveyTemplates(): JSX.Element { } /> -
+
{defaultSurveyTemplates.map((template, idx) => { return (
([ actions.loadSurveys() actions.reportSurveyResumed(survey) }, - archiveSurvey: async () => { + archiveSurvey: () => { actions.updateSurvey({ archived: true }) }, loadSurveySuccess: () => { @@ -571,10 +573,11 @@ export const surveyLogic = kea([ (s) => [s.survey], (survey: Survey): Breadcrumb[] => [ { + key: Scene.Surveys, name: 'Surveys', path: urls.surveys(), }, - ...(survey?.name ? [{ name: survey.name }] : []), + { key: survey?.id || 'new', name: survey.name }, ], ], dataTableQuery: [ @@ -682,7 +685,7 @@ export const surveyLogic = kea([ // controlled using a PureField in the form urlMatchType: values.urlMatchTypeValidationError, }), - submit: async (surveyPayload) => { + submit: (surveyPayload) => { let surveyPayloadWithTargetingFlagFilters = surveyPayload const flagLogic = featureFlagLogic({ id: values.survey.targeting_flag?.id || 'new' }) if (values.hasTargetingFlag) { @@ -721,12 +724,12 @@ export const surveyLogic = kea([ return [urls.survey(values.survey.id), router.values.searchParams, hashParams] }, })), - afterMount(async ({ props, actions }) => { + afterMount(({ props, actions }) => { if (props.id !== 'new') { - await actions.loadSurvey() + actions.loadSurvey() } if (props.id === 'new') { - await actions.resetSurvey() + actions.resetSurvey() } }), ]) diff --git a/frontend/src/scenes/surveys/surveyViewViz.tsx b/frontend/src/scenes/surveys/surveyViewViz.tsx index 0b5786ab2c109..355f5914f1da8 100644 --- a/frontend/src/scenes/surveys/surveyViewViz.tsx +++ b/frontend/src/scenes/surveys/surveyViewViz.tsx @@ -484,7 +484,7 @@ export function OpenTextViz({ return (
- {event.properties[surveyResponseField]} + {JSON.stringify(event.properties[surveyResponseField])}
([ () => [], (): Breadcrumb[] => [ { + key: Scene.Surveys, name: 'Surveys', path: urls.surveys(), }, diff --git a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx index a2a9aa764c090..2a083508fcd9b 100644 --- a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx +++ b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx @@ -183,7 +183,7 @@ export function PersonsModal({ { - triggerExport({ + void triggerExport({ export_format: ExporterFormat.CSV, export_context: { path: originalUrl, diff --git a/frontend/src/scenes/trends/trendsDataLogic.ts b/frontend/src/scenes/trends/trendsDataLogic.ts index 5ee67ec453f4e..3c4571cbbdbe8 100644 --- a/frontend/src/scenes/trends/trendsDataLogic.ts +++ b/frontend/src/scenes/trends/trendsDataLogic.ts @@ -87,7 +87,6 @@ export const trendsDataLogic = kea([ } else if (lifecycleFilter) { if (lifecycleFilter.toggledLifecycles) { indexedResults = indexedResults.filter((result) => - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion lifecycleFilter.toggledLifecycles!.includes(String(result.status) as LifecycleToggle) ) } diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 7f6fa0c5b03ea..d84ac0cfa7473 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -7,6 +7,7 @@ import { InsightShortId, ReplayTabs, PipelineTabs, + PipelineAppTabs, } from '~/types' import { combineUrl } from 'kea-router' import { ExportOptions } from '~/exporter/types' @@ -95,8 +96,10 @@ export const urls = { personByUUID: (uuid: string, encode: boolean = true): string => encode ? `/persons/${encodeURIComponent(uuid)}` : `/persons/${uuid}`, persons: (): string => '/persons', - pipeline: (tab?: PipelineTabs): string => `/pipeline/${tab ? tab : 'destinations'}`, - pipelineNew: (tab?: PipelineTabs): string => `/pipeline/${tab ? tab : 'destinations'}/new`, + pipeline: (tab?: PipelineTabs): string => `/pipeline/${tab ? 
tab : PipelineTabs.Destinations}`, + pipelineApp: (id: string | number, tab?: PipelineAppTabs): string => + `/pipeline/${id}/${tab ? tab : PipelineAppTabs.Configuration}`, + pipelineNew: (tab?: PipelineTabs): string => `/pipeline/${tab ? tab : PipelineTabs.Destinations}/new`, groups: (groupTypeIndex: string | number): string => `/groups/${groupTypeIndex}`, // :TRICKY: Note that groupKey is provided by user. We need to override urlPatternOptions for kea-router. group: (groupTypeIndex: string | number, groupKey: string, encode: boolean = true, tab?: string | null): string => @@ -108,9 +111,11 @@ export const urls = { featureFlags: (tab?: string): string => `/feature_flags${tab ? `?tab=${tab}` : ''}`, featureFlag: (id: string | number): string => `/feature_flags/${id}`, earlyAccessFeatures: (): string => '/early_access_features', - earlyAccessFeature: (id: ':id' | 'new' | string): string => `/early_access_features/${id}`, + /** @param id A UUID or 'new'. ':id' for routing. */ + earlyAccessFeature: (id: string): string => `/early_access_features/${id}`, surveys: (): string => '/surveys', - survey: (id: ':id' | 'new' | string): string => `/surveys/${id}`, + /** @param id A UUID or 'new'. ':id' for routing. */ + survey: (id: string): string => `/surveys/${id}`, surveyTemplates: (): string => '/survey_templates', dataWarehouse: (): string => '/data-warehouse', dataWarehouseTable: (): string => `/data-warehouse/new`, @@ -152,7 +157,6 @@ export const urls = { verifyEmail: (userUuid: string = '', token: string = ''): string => `/verify_email${userUuid ? `/${userUuid}` : ''}${token ? 
`/${token}` : ''}`, inviteSignup: (id: string): string => `/signup/${id}`, - ingestion: (): string => '/ingestion', products: (): string => '/products', onboarding: (productKey: string): string => `/onboarding/${productKey}`, // Cloud only diff --git a/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx index 815e57d583d58..fc20cfbf9d691 100644 --- a/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx +++ b/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx @@ -1,8 +1,8 @@ import { QueryContext, QueryContextColumnComponent, QueryContextColumnTitleComponent } from '~/queries/types' import { DataTableNode, InsightVizNode, NodeKind, WebStatsBreakdown } from '~/queries/schema' import { UnexpectedNeverError } from 'lib/utils' -import { useActions } from 'kea' -import { webAnalyticsLogic } from 'scenes/web-analytics/webAnalyticsLogic' +import { useActions, useValues } from 'kea' +import { GeographyTab, webAnalyticsLogic } from 'scenes/web-analytics/webAnalyticsLogic' import { useCallback, useMemo } from 'react' import { Query } from '~/queries/Query/Query' import { countryCodeToFlag, countryCodeToName } from 'scenes/insights/views/WorldMap' @@ -173,11 +173,16 @@ export const webAnalyticsDataTableQueryContext: QueryContext = { } export const WebStatsTrendTile = ({ query }: { query: InsightVizNode }): JSX.Element => { - const { togglePropertyFilter } = useActions(webAnalyticsLogic) + const { togglePropertyFilter, setGeographyTab } = useActions(webAnalyticsLogic) + const { hasCountryFilter } = useValues(webAnalyticsLogic) const { key: worldMapPropertyName } = webStatsBreakdownToPropertyName(WebStatsBreakdown.Country) const onWorldMapClick = useCallback( (breakdownValue: string) => { togglePropertyFilter(PropertyFilterType.Event, worldMapPropertyName, breakdownValue) + if (!hasCountryFilter) { + // if we just added a country filter, switch to the region tab, as the world map will not be useful + 
setGeographyTab(GeographyTab.REGIONS) + } }, [togglePropertyFilter, worldMapPropertyName] ) @@ -187,9 +192,14 @@ export const WebStatsTrendTile = ({ query }: { query: InsightVizNode }): JSX.Ele ...webAnalyticsDataTableQueryContext, chartRenderingMetadata: { [ChartDisplayType.WorldMap]: { - countryProps: (countryCode, values) => ({ - onClick: values && values.count > 0 ? () => onWorldMapClick(countryCode) : undefined, - }), + countryProps: (countryCode, values) => { + return { + onClick: + values && (values.count > 0 || values.aggregated_value > 0) + ? () => onWorldMapClick(countryCode) + : undefined, + } + }, }, }, } diff --git a/frontend/src/scenes/web-analytics/WebDashboard.tsx b/frontend/src/scenes/web-analytics/WebDashboard.tsx index 9f4b9f42336a0..1a01cd4051ab5 100644 --- a/frontend/src/scenes/web-analytics/WebDashboard.tsx +++ b/frontend/src/scenes/web-analytics/WebDashboard.tsx @@ -97,9 +97,11 @@ const Tiles = (): JSX.Element => { return (
{title &&

{title}

} @@ -120,7 +122,11 @@ const TabsTileItem = ({ tile }: { tile: TabsTile }): JSX.Element => { return ( ({ diff --git a/frontend/src/scenes/web-analytics/WebTabs.tsx b/frontend/src/scenes/web-analytics/WebTabs.tsx index 96d1d89d2001e..d96e72e9e5746 100644 --- a/frontend/src/scenes/web-analytics/WebTabs.tsx +++ b/frontend/src/scenes/web-analytics/WebTabs.tsx @@ -24,7 +24,7 @@ export const WebTabs = ({
{

{activeTab?.title}

} -
+
{tabs.length > 1 && ( // TODO switch to a select if more than 3
    diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts index 96b8aa4b81b7e..43a3740b04bff 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts @@ -14,18 +14,23 @@ import { ChartDisplayType, EventDefinition, EventDefinitionType, + InsightType, + PropertyDefinition, PropertyFilterType, PropertyOperator, + RetentionPeriod, } from '~/types' import { isNotNil } from 'lib/utils' import { loaders } from 'kea-loaders' import api from 'lib/api' import { dayjs } from 'lib/dayjs' -import { STALE_EVENT_SECONDS } from 'lib/constants' +import { RETENTION_FIRST_TIME, STALE_EVENT_SECONDS } from 'lib/constants' +import { windowValues } from 'kea-window-values' export interface WebTileLayout { colSpan?: number rowSpan?: number + className?: string } interface BaseTile { @@ -209,7 +214,7 @@ export const webAnalyticsLogic = kea([ }, ], }), - selectors(({ actions }) => ({ + selectors(({ actions, values }) => ({ tiles: [ (s) => [ s.webAnalyticsFilters, @@ -220,6 +225,8 @@ export const webAnalyticsLogic = kea([ s.geographyTab, s.dateFrom, s.dateTo, + () => values.isGreaterThanMd, + () => values.shouldShowGeographyTile, ], ( webAnalyticsFilters, @@ -229,13 +236,15 @@ export const webAnalyticsLogic = kea([ sourceTab, geographyTab, dateFrom, - dateTo + dateTo, + isGreaterThanMd: boolean, + shouldShowGeographyTile ): WebDashboardTile[] => { const dateRange = { date_from: dateFrom, date_to: dateTo, } - return [ + const tiles: (WebDashboardTile | null)[] = [ { layout: { colSpan: 12, @@ -488,89 +497,126 @@ export const webAnalyticsLogic = kea([ ], }, { + title: 'Retention', layout: { - colSpan: 6, + colSpan: 12, }, - activeTabId: geographyTab, - setTabId: actions.setGeographyTab, - tabs: [ - { - id: GeographyTab.MAP, - title: 'World map', - linkText: 'Map', - query: { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - 
breakdown: { - breakdown: '$geoip_country_code', - breakdown_type: 'person', - }, - dateRange, - series: [ - { - event: '$pageview', - kind: NodeKind.EventsNode, - math: BaseMathType.UniqueUsers, - }, - ], - trendsFilter: { - display: ChartDisplayType.WorldMap, - }, - filterTestAccounts: true, - properties: webAnalyticsFilters, - }, - hidePersonsModal: true, - }, - }, - { - id: GeographyTab.COUNTRIES, - title: 'Top countries', - linkText: 'Countries', - query: { - full: true, - kind: NodeKind.DataTableNode, - source: { - kind: NodeKind.WebStatsTableQuery, - properties: webAnalyticsFilters, - breakdownBy: WebStatsBreakdown.Country, - dateRange, - }, + query: { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.RetentionQuery, + properties: webAnalyticsFilters, + dateRange, + filterTestAccounts: true, + retentionFilter: { + retention_type: RETENTION_FIRST_TIME, + retention_reference: 'total', + total_intervals: isGreaterThanMd ? 8 : 5, + period: RetentionPeriod.Week, }, }, - { - id: GeographyTab.REGIONS, - title: 'Top regions', - linkText: 'Regions', - query: { - full: true, - kind: NodeKind.DataTableNode, - source: { - kind: NodeKind.WebStatsTableQuery, - properties: webAnalyticsFilters, - breakdownBy: WebStatsBreakdown.Region, - dateRange, - }, + vizSpecificOptions: { + [InsightType.RETENTION]: { + hideLineGraph: true, + hideSizeColumn: !isGreaterThanMd, + useSmallLayout: !isGreaterThanMd, }, }, - { - id: GeographyTab.CITIES, - title: 'Top cities', - linkText: 'Cities', - query: { - full: true, - kind: NodeKind.DataTableNode, - source: { - kind: NodeKind.WebStatsTableQuery, - properties: webAnalyticsFilters, - breakdownBy: WebStatsBreakdown.City, - dateRange, - }, - }, - }, - ], + }, }, + shouldShowGeographyTile + ? 
{ + layout: { + colSpan: 12, + }, + activeTabId: geographyTab, + setTabId: actions.setGeographyTab, + tabs: [ + { + id: GeographyTab.MAP, + title: 'World map', + linkText: 'Map', + query: { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + breakdown: { + breakdown: '$geoip_country_code', + breakdown_type: 'person', + }, + dateRange, + series: [ + { + event: '$pageview', + kind: NodeKind.EventsNode, + math: BaseMathType.UniqueUsers, + }, + ], + trendsFilter: { + display: ChartDisplayType.WorldMap, + }, + filterTestAccounts: true, + properties: webAnalyticsFilters, + }, + hidePersonsModal: true, + }, + }, + { + id: GeographyTab.COUNTRIES, + title: 'Top countries', + linkText: 'Countries', + query: { + full: true, + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.WebStatsTableQuery, + properties: webAnalyticsFilters, + breakdownBy: WebStatsBreakdown.Country, + dateRange, + }, + }, + }, + { + id: GeographyTab.REGIONS, + title: 'Top regions', + linkText: 'Regions', + query: { + full: true, + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.WebStatsTableQuery, + properties: webAnalyticsFilters, + breakdownBy: WebStatsBreakdown.Region, + dateRange, + }, + }, + }, + { + id: GeographyTab.CITIES, + title: 'Top cities', + linkText: 'Cities', + query: { + full: true, + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.WebStatsTableQuery, + properties: webAnalyticsFilters, + breakdownBy: WebStatsBreakdown.City, + dateRange, + }, + }, + }, + ], + } + : null, ] + return tiles.filter(isNotNil) + }, + ], + hasCountryFilter: [ + (s) => [s.webAnalyticsFilters], + (webAnalyticsFilters: WebAnalyticsPropertyFilters) => { + return webAnalyticsFilters.some((filter) => filter.key === '$geoip_country_code') }, ], })), @@ -603,8 +649,8 @@ export const webAnalyticsLogic = kea([ ? 
pageleaveResult.value.results.find((r) => r.name === '$pageleave') : undefined - const shouldWarnAboutNoPageviews = !pageviewEntry || isEventDefinitionStale(pageviewEntry) - const shouldWarnAboutNoPageleaves = !pageleaveEntry || isEventDefinitionStale(pageleaveEntry) + const shouldWarnAboutNoPageviews = !pageviewEntry || isDefinitionStale(pageviewEntry) + const shouldWarnAboutNoPageleaves = !pageleaveEntry || isDefinitionStale(pageleaveEntry) return { shouldWarnAboutNoPageviews, @@ -612,15 +658,30 @@ export const webAnalyticsLogic = kea([ } }, }, + shouldShowGeographyTile: { + _default: null as boolean | null, + loadShouldShowGeographyTile: async (): Promise => { + const response = await api.propertyDefinitions.list({ + event_names: ['$pageview'], + properties: ['$geoip_country_code'], + }) + const countryCodeDefinition = response.results.find((r) => r.name === '$geoip_country_code') + return !!countryCodeDefinition && !isDefinitionStale(countryCodeDefinition) + }, + }, })), // start the loaders after mounting the logic afterMount(({ actions }) => { actions.loadStatusCheck() + actions.loadShouldShowGeographyTile() + }), + windowValues({ + isGreaterThanMd: (window: Window) => window.innerWidth > 768, }), ]) -const isEventDefinitionStale = (definition: EventDefinition): boolean => { +const isDefinitionStale = (definition: EventDefinition | PropertyDefinition): boolean => { const parsedLastSeen = definition.last_seen_at ? 
dayjs(definition.last_seen_at) : null return !!parsedLastSeen && dayjs().diff(parsedLastSeen, 'seconds') > STALE_EVENT_SECONDS } diff --git a/frontend/src/styles/global.scss b/frontend/src/styles/global.scss index 2c35db1562c61..38d993b173604 100644 --- a/frontend/src/styles/global.scss +++ b/frontend/src/styles/global.scss @@ -395,7 +395,7 @@ input::-ms-clear { cursor: pointer; div:nth-child(1) { - background: var(--primary); + background: var(--primary-3000); } div:nth-child(2) { @@ -526,10 +526,13 @@ input::-ms-clear { body { // Until we have 3000 rolled out we fallback to standard colors --text-3000: var(--default); + --text-secondary-3000: var(--text-secondary); --muted-3000: var(--muted); --trace-3000: var(--trace-3000-light); --primary-3000: var(--primary); + --primary-3000-highlight: var(--primary-highlight); --primary-3000-hover: var(--primary-light); + --primary-3000-active: var(--primary-dark); --secondary-3000: var(--secondary); --secondary-3000-hover: var(--secondary-light); --accent-3000: var(--side); @@ -539,16 +542,20 @@ body { --glass-bg-3000: var(--bg-light); --glass-border-3000: var(--border); --bg-light: #fff; - --link: var(--primary); + --bg-table: var(--bg-light); + --link: var(--primary-3000); touch-action: manipulation; // Disable double-tap-to-zoom on mobile, making taps slightly snappier &.posthog-3000[theme='light'] { --text-3000: var(--text-3000-light); + --text-secondary-3000: var(--text-secondary-3000-light); --muted-3000: var(--muted-3000-light); --trace-3000: var(--trace-3000-light); --primary-3000: var(--primary-3000-light); + --primary-3000-highlight: var(--primary-3000-highlight-light); --primary-3000-hover: var(--primary-3000-hover-light); + --primary-3000-active: var(--primary-3000-active-light); --secondary-3000: var(--secondary-3000-light); --secondary-3000-hover: var(--secondary-3000-hover-light); --accent-3000: var(--accent-3000-light); @@ -558,15 +565,31 @@ body { --glass-bg-3000: var(--glass-bg-3000-light); 
--glass-border-3000: var(--glass-border-3000-light); --bg-light: #fff; + --bg-table: #f9faf7; --link: var(--link-3000-light); + --shadow-elevation-3000: var(--shadow-elevation-3000-light); + --primary-3000-frame-bg: var(--primary-3000-frame-bg-light); + --primary-3000-frame-border: var(--primary-3000-frame-border-light); + --primary-3000-button-bg: var(--primary-3000-button-bg-light); + --primary-3000-button-border: var(--primary-3000-button-border-light); + --primary-3000-button-border-hover: var(--primary-3000-button-border-hover-light); + --primary-alt-highlight-3000: var(--primary-alt-highlight-3000-light); + --secondary-3000-frame-bg: var(--secondary-3000-frame-bg-light); + --secondary-3000-frame-border: var(--secondary-3000-frame-border-light); + --secondary-3000-button-bg: var(--secondary-3000-button-bg-light); + --secondary-3000-button-border: var(--secondary-3000-button-border-light); + --secondary-3000-button-border-hover: var(--secondary-3000-button-border-hover-light); } &.posthog-3000[theme='dark'] { --text-3000: var(--text-3000-dark); + --text-secondary-3000: var(--text-secondary-3000-dark); --muted-3000: var(--muted-3000-dark); --trace-3000: var(--trace-3000-dark); --primary-3000: var(--primary-3000-dark); + --primary-3000-highlight: var(--primary-3000-highlight-dark); --primary-3000-hover: var(--primary-3000-hover-dark); + --primary-3000-active: var(--primary-3000-active-dark); --secondary-3000: var(--secondary-3000-dark); --secondary-3000-hover: var(--secondary-3000-hover-dark); --accent-3000: var(--accent-3000-dark); @@ -576,12 +599,26 @@ body { --glass-bg-3000: var(--glass-bg-3000-dark); --glass-border-3000: var(--glass-border-3000-dark); --bg-light: var(--accent-3000); + --bg-table: #232429; --brand-key: #fff; // In dark mode the black in PostHog's logo is replaced with white for proper contrast --link: var(--link-3000-dark); + --shadow-elevation-3000: var(--shadow-elevation-3000-dark); + --primary-3000-frame-bg: 
var(--primary-3000-frame-bg-dark); + --primary-3000-frame-border: var(--primary-3000-frame-border-dark); + --primary-3000-button-bg: var(--primary-3000-button-bg-dark); + --primary-3000-button-border: var(--primary-3000-button-border-dark); + --primary-3000-button-border-hover: var(--primary-3000-button-border-hover-dark); + --primary-alt-highlight-3000: var(--primary-alt-highlight-3000-dark); + --secondary-3000-frame-bg: var(--secondary-3000-frame-bg-dark); + --secondary-3000-frame-border: var(--secondary-3000-frame-border-dark); + --secondary-3000-button-bg: var(--secondary-3000-button-bg-dark); + --secondary-3000-button-border: var(--secondary-3000-button-border-dark); + --secondary-3000-button-border-hover: var(--secondary-3000-button-border-hover-dark); } &.posthog-3000 { --default: var(--text-3000); + --text-secondary: var(--text-secondary-3000); --muted: var(--muted-3000); --muted-alt: var(--muted-3000); --primary-alt: var(--text-3000); @@ -589,10 +626,26 @@ body { --border-bold: var(--border-bold-3000); --mid: var(--bg-3000); --side: var(--bg-3000); + --primary-alt-highlight: var(--primary-alt-highlight-3000); background: var(--bg-3000); overflow: hidden; // Each area handles scrolling individually (e.g. 
navbar, scene, side panel) + .LemonButton, + .Link { + .text-link { + color: var(--text-3000); + } + + &:hover { + .text-link { + color: var(--primary-3000); + } + } + } + + --shadow-elevation: var(--shadow-elevation-3000); + * > { ::-webkit-scrollbar { width: 0.5rem; diff --git a/frontend/src/styles/vars.scss b/frontend/src/styles/vars.scss index 419862be8bbe3..1a501d5800285 100644 --- a/frontend/src/styles/vars.scss +++ b/frontend/src/styles/vars.scss @@ -103,7 +103,7 @@ $colors: ( 'border-bold': rgb(0 0 0 / 24%), 'border-active': rgb(0 0 0 / 36%), 'transparent': transparent, - 'link': var(--link), + 'link': var(--primary-3000), // Colors of the PostHog logo 'brand-blue': #1d4aff, 'brand-red': #f54e00, @@ -113,10 +113,14 @@ $colors: ( // This becomes white in dark mode // PostHog 3000 'text-3000-light': #111, + 'text-secondary-3000-light': rgba(#111, 0.7), 'muted-3000-light': rgba(#111, 0.5), 'trace-3000-light': rgba(#111, 0.25), - 'primary-3000-light': rgba(#000, 0.75), - 'primary-3000-hover-light': #000, + 'primary-3000-light': #f54e01, + 'primary-3000-highlight-light': rgba(#f54e01, 0.1), + 'primary-3000-hover-light': #f54e01, + 'primary-3000-active-light': #f54e01, + 'secondary-3000-light': rgba(#cfd1c2, 0.6), 'secondary-3000-hover-light': #cfd1c2, 'accent-3000-light': #eeefe9, @@ -125,23 +129,58 @@ $colors: ( 'border-bold-3000-light': #c1c2b9, 'glass-bg-3000-light': #e4e5deb3, 'glass-border-3000-light': #e4e5de, - 'link-3000-light': var(--primary), + + 'link-3000-light': #f54e00, + 'primary-3000-frame-bg-light': #eb9d2a, + 'primary-3000-frame-border-light': #c28926, + 'primary-3000-button-bg-light': #fff, + 'primary-3000-button-border-light': #b17816, + 'primary-3000-button-border-hover-light': #8e5b03, + + 'secondary-3000-frame-bg-light': #e1dddd, + 'secondary-3000-frame-border-light': #d7d7d7, + 'secondary-3000-button-bg-light': #f3f4ef, + 'secondary-3000-button-border-light': #ccc, + 'secondary-3000-button-border-hover-light': #aaa, + + 
'shadow-elevation-3000-light': 0 2px 0 var(--border-3000-light), + 'shadow-elevation-3000-dark': 0 2px 0 var(--border-3000-dark), 'text-3000-dark': #fff, + 'text-secondary-3000-dark': rgba(#fff, 0.7), 'muted-3000-dark': rgba(#fff, 0.5), 'trace-3000-dark': rgba(#fff, 0.25), - 'primary-3000-dark': var(--primary), - 'primary-3000-hover-dark': var(--primary-light), - 'secondary-3000-dark': #3b4159, + 'primary-3000-dark': #f7a503, + 'primary-3000-highlight-dark': rgba(#f7a503, 0.1), + 'primary-3000-hover-dark': #f7a503, + 'primary-3000-active-dark': #f7a503, + 'primary-alt-highlight-3000-light': #e5e7e0, + + 'secondary-3000-dark': #1d1f27, 'secondary-3000-hover-dark': #575d77, - 'accent-3000-dark': #1d1f27, - 'bg-3000-dark': #151619, + 'accent-3000-dark': #232429, + 'bg-3000-dark': #1d1f27, 'border-3000-dark': #2b2c32, 'border-bold-3000-dark': #3f4046, 'glass-bg-3000-dark': #1d1f27b3, 'glass-border-3000-dark': var(--border-3000-dark), - 'link-3000-dark': rgb(47 129 247), + 'link-3000-dark': #f1a82c, + + 'primary-3000-frame-bg-dark': #926826, + 'primary-3000-frame-border-dark': #a97a2f, + 'primary-3000-button-bg-dark': #e0a045, + 'primary-3000-button-border-dark': #b17816, + 'primary-3000-button-border-hover-dark': #8e5b03, + 'primary-alt-highlight-3000-dark': #232429, + + 'secondary-3000-frame-bg-dark': #323232, + 'secondary-3000-frame-border-dark': #383838, + 'secondary-3000-button-bg-dark': #1d1f27, + 'secondary-3000-button-border-dark': #4a4c52, + 'secondary-3000-button-border-hover-dark': #5e6064, + // The derived colors 'text-3000': var(--text-3000), + 'text-secondary-3000': var(--text-secondary-3000), 'muted-3000': var(--muted-3000), 'trace-3000': var(--trace-3000), 'primary-3000': var(--primary-3000), @@ -156,6 +195,16 @@ $colors: ( 'glass-border-3000': var(--glass-border-3000), 'link-3000': var(--link-3000), // 'bg-light': var(--accent-3000), + 'primary-3000-frame-bg': var(--primary-3000-frame-bg), + 'primary-3000-frame-border': var(--primary-3000-frame-border), 
+ 'primary-3000-button-bg': var(--primary-3000-button-bg), + 'primary-3000-button-border': var(--primary-3000-button-border), + 'primary-3000-button-border-hover': var(--primary-3000-button-border-hover), + 'secondary-3000-frame-bg': var(--secondary-3000-frame-bg), + 'secondary-3000-frame-border': var(--secondary-3000-frame-border), + 'secondary-3000-button-bg': var(--secondary-3000-button-bg), + 'secondary-3000-button-border': var(--secondary-3000-button-border), + 'secondary-3000-button-border-hover': var(--secondary-3000-button-border-hover), ); // These vars are modified via SCSS for legacy reasons (e.g. darken/lighten), so keeping as SCSS vars for now. @@ -230,7 +279,6 @@ $_lifecycle_dormant: $_danger; // Funnels // TODO: unify with lib/colors.ts, getGraphColors() - --funnel-default: var(--primary); --funnel-background: var(--border-light); --funnel-axis: var(--border); --funnel-grid: #ddd; @@ -243,7 +291,7 @@ $_lifecycle_dormant: $_danger; --recording-seekbar-red: var(--brand-red); --recording-hover-event: var(--primary-bg-hover); --recording-hover-event-mid: var(--primary-bg-active); - --recording-hover-event-dark: var(--primary); + --recording-hover-event-dark: var(--primary-3000); --recording-current-event: #eef2ff; --recording-current-event-dark: var(--primary-alt); --recording-failure-event: #fee9e2; @@ -281,7 +329,7 @@ $_lifecycle_dormant: $_danger; // which means they aren't available in the toolbar --toastify-color-dark: var(--accent-3000-dark); --toastify-color-light: var(--bg-light); - --toastify-color-info: var(--primary); + --toastify-color-info: var(--primary-3000); --toastify-color-success: var(--success); --toastify-color-warning: var(--warning); --toastify-color-error: var(--danger); diff --git a/frontend/src/toolbar/actions/actionsLogic.ts b/frontend/src/toolbar/actions/actionsLogic.ts index 63057d6330b29..8d02e177c0f94 100644 --- a/frontend/src/toolbar/actions/actionsLogic.ts +++ b/frontend/src/toolbar/actions/actionsLogic.ts @@ -62,9 +62,7 
@@ export const actionsLogic = kea([ .search(searchTerm) .map(({ item }) => item) : allActions - return [...filteredActions].sort((a, b) => - (a.name ?? 'Untitled').localeCompare(b.name ?? 'Untitled') - ) as ActionType[] + return [...filteredActions].sort((a, b) => (a.name ?? 'Untitled').localeCompare(b.name ?? 'Untitled')) }, ], actionCount: [(s) => [s.allActions], (allActions) => allActions.length], diff --git a/frontend/src/toolbar/elements/heatmapLogic.ts b/frontend/src/toolbar/elements/heatmapLogic.ts index 84d7348a42b01..773201702befc 100644 --- a/frontend/src/toolbar/elements/heatmapLogic.ts +++ b/frontend/src/toolbar/elements/heatmapLogic.ts @@ -184,7 +184,7 @@ export const heatmapLogic = kea([ if (domElements === undefined) { domElements = Array.from( querySelectorAllDeep(combinedSelector, document, cache.pageElements) - ) as HTMLElement[] + ) cache.selectorToElements[combinedSelector] = domElements } diff --git a/frontend/src/toolbar/flags/featureFlagsLogic.ts b/frontend/src/toolbar/flags/featureFlagsLogic.ts index c6a31e6b62bc9..67485f6d708ce 100644 --- a/frontend/src/toolbar/flags/featureFlagsLogic.ts +++ b/frontend/src/toolbar/flags/featureFlagsLogic.ts @@ -111,7 +111,7 @@ export const featureFlagsLogic = kea([ toolbarLogic.values.posthog?.featureFlags.reloadFeatureFlags() } }, - deleteOverriddenUserFlag: async ({ flagKey }) => { + deleteOverriddenUserFlag: ({ flagKey }) => { const { posthog: clientPostHog } = toolbarLogic.values if (clientPostHog) { const updatedFlags = { ...values.localOverrides } @@ -128,8 +128,8 @@ export const featureFlagsLogic = kea([ }, })), events(({ actions }) => ({ - afterMount: async () => { - await actions.getUserFlags() + afterMount: () => { + actions.getUserFlags() actions.checkLocalOverrides() }, })), diff --git a/frontend/src/toolbar/toolbarLogic.ts b/frontend/src/toolbar/toolbarLogic.ts index 1703bdd4ce3a2..c4759bb29d511 100644 --- a/frontend/src/toolbar/toolbarLogic.ts +++ b/frontend/src/toolbar/toolbarLogic.ts @@ 
-5,7 +5,6 @@ import { clearSessionToolbarToken } from '~/toolbar/utils' import { posthog } from '~/toolbar/posthog' import { actionsTabLogic } from '~/toolbar/actions/actionsTabLogic' import { toolbarButtonLogic } from '~/toolbar/button/toolbarButtonLogic' -import type { PostHog } from 'posthog-js' import { lemonToast } from 'lib/lemon-ui/lemonToast' export const toolbarLogic = kea([ @@ -30,8 +29,8 @@ export const toolbarLogic = kea([ userIntent: [props.userIntent || null, { logout: () => null, clearUserIntent: () => null }], source: [props.source || null, { logout: () => null }], buttonVisible: [true, { showButton: () => true, hideButton: () => false, logout: () => false }], - dataAttributes: [(props.dataAttributes || []) as string[]], - posthog: [(props.posthog ?? null) as PostHog | null], + dataAttributes: [props.dataAttributes || []], + posthog: [props.posthog ?? null], })), selectors({ @@ -63,7 +62,7 @@ export const toolbarLogic = kea([ } clearSessionToolbarToken() }, - processUserIntent: async () => { + processUserIntent: () => { if (props.userIntent === 'add-action' || props.userIntent === 'edit-action') { actionsTabLogic.actions.showButtonActions() toolbarButtonLogic.actions.showActionsInfo() diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 8c544b57481a4..eed32835f8ac2 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -526,6 +526,12 @@ export enum PipelineTabs { Destinations = 'destinations', } +export enum PipelineAppTabs { + Configuration = 'configuration', + Logs = 'logs', + Metrics = 'metrics', +} + export enum ProgressStatus { Draft = 'draft', Running = 'running', @@ -1272,6 +1278,7 @@ export interface BillingV2PlanType { current_plan?: any tiers?: BillingV2TierType[] included_if?: 'no_active_subscription' | 'has_subscription' | null + initial_billing_limit?: number } export interface PlanInterface { @@ -1341,7 +1348,7 @@ export interface InsightModel extends Cacheable { description?: string favorited?: boolean order: 
number | null - result: any | null + result: any deleted: boolean saved: boolean created_at: string @@ -2746,16 +2753,30 @@ export interface DateMappingOption { defaultInterval?: IntervalType } -export interface Breadcrumb { +interface BreadcrumbBase { + /** E.g. scene identifier or item ID. Particularly important if `onRename` is used. */ + key: string | number /** Name to display. */ name: string | null | undefined /** Symbol, e.g. a lettermark or a profile picture. */ symbol?: React.ReactNode - /** Path to link to. */ - path?: string /** Whether to show a custom popover */ popover?: Pick } +interface LinkBreadcrumb extends BreadcrumbBase { + /** Path to link to. */ + path?: string + onRename?: never +} +interface RenamableBreadcrumb extends BreadcrumbBase { + path?: never + /** When this is set, an "Edit" button shows up next to the title */ + onRename?: (newName: string) => Promise +} +export type Breadcrumb = LinkBreadcrumb | RenamableBreadcrumb +export type FinalizedBreadcrumb = + | (LinkBreadcrumb & { globalKey: string }) + | (RenamableBreadcrumb & { globalKey: string }) export enum GraphType { Bar = 'bar', diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 83decc1fa7bd1..7ad1758c3c617 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0015_add_verified_properties otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0363_add_replay_payload_capture_config +posthog: 0364_team_external_data_workspace_rows sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/package.json b/package.json index c99c80f987667..739b04566e4d3 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,7 @@ "build:esbuild": "node frontend/build.mjs", "schema:build": "pnpm run schema:build:json && pnpm run schema:build:python", "schema:build:json": "ts-json-schema-generator -f tsconfig.json --path 
'frontend/src/queries/schema.ts' --no-type-check > frontend/src/queries/schema.json && prettier --write frontend/src/queries/schema.json", - "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py", + "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py", "grammar:build": "npm run grammar:build:python && npm run grammar:build:cpp", "grammar:build:python": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4", "grammar:build:cpp": "cd posthog/hogql/grammar && antlr -o ../../../hogql_parser -Dlanguage=Cpp HogQLLexer.g4 && antlr -o ../../../hogql_parser -visitor -no-listener -Dlanguage=Cpp HogQLParser.g4", @@ -76,7 +76,7 @@ "@medv/finder": "^2.1.0", "@microlink/react-json-view": "^1.21.3", "@monaco-editor/react": "4.4.6", - "@posthog/icons": "0.2.0", + "@posthog/icons": "0.4.1", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", "@rrweb/types": "^2.0.0-alpha.11", diff --git a/plugin-server/src/utils/status.ts b/plugin-server/src/utils/status.ts index d620bd01b92c6..385b97739685e 100644 --- a/plugin-server/src/utils/status.ts +++ b/plugin-server/src/utils/status.ts @@ -1,5 +1,6 @@ import pino from 'pino' +import { defaultConfig } from '../config/config' import { LogLevel, PluginsServerConfig } from '../types' import { isProdEnv } from './env-utils' @@ -14,7 +15,6 @@ export 
interface StatusBlueprint { export class Status implements StatusBlueprint { mode?: string - explicitLogLevel?: LogLevel logger: pino.Logger prompt: string transport: any @@ -22,7 +22,7 @@ export class Status implements StatusBlueprint { constructor(mode?: string) { this.mode = mode - const logLevel: LogLevel = this.explicitLogLevel || LogLevel.Info + const logLevel: LogLevel = defaultConfig.LOG_LEVEL if (isProdEnv()) { this.logger = pino({ // By default pino will log the level number. So we can easily unify diff --git a/plugin-server/src/worker/plugins/run.ts b/plugin-server/src/worker/plugins/run.ts index 9775e9c1ef860..e957132313168 100644 --- a/plugin-server/src/worker/plugins/run.ts +++ b/plugin-server/src/worker/plugins/run.ts @@ -111,7 +111,7 @@ async function runSingleTeamPluginComposeWebhook( const request = await trackedFetch(webhook.url, { method: webhook.method || 'POST', body: JSON.stringify(webhook.body, undefined, 4), - headers: { 'Content-Type': 'application/json' }, + headers: webhook.headers || { 'Content-Type': 'application/json' }, timeout: hub.EXTERNAL_REQUEST_TIMEOUT_MS, }) if (request.ok) { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8021f4e2d634f..5df469d0bd22b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,4 +1,4 @@ -lockfileVersion: '6.1' +lockfileVersion: '6.0' settings: autoInstallPeers: true @@ -36,8 +36,8 @@ dependencies: specifier: 4.4.6 version: 4.4.6(monaco-editor@0.39.0)(react-dom@18.2.0)(react@18.2.0) '@posthog/icons': - specifier: 0.2.0 - version: 0.2.0(react-dom@18.2.0)(react@18.2.0) + specifier: 0.4.1 + version: 0.4.1(react-dom@18.2.0)(react@18.2.0) '@posthog/plugin-scaffold': specifier: ^1.4.4 version: 1.4.4 @@ -3417,8 +3417,8 @@ packages: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} dev: false - /@posthog/icons@0.2.0(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: 
sha512-ftFoIropSJaFbxzzt1eGOgJCsbK0+L5KDdxKcpbhl4nMbmCEI/awzj98l+0pp/JAJzDrAsqEou7MvdJrntOGbw==} + /@posthog/icons@0.4.1(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-sR7lDltjoAeExsOMZOZvCz8Z1rHbBqhZo8RABCCvx00MoBCUuydE1y2xpSoP5BVfMogY4ycDktnihw4ICUsb3Q==} peerDependencies: react: '>=16.14.0' react-dom: '>=16.14.0' @@ -5952,7 +5952,7 @@ packages: resolution: {integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==} dependencies: '@types/connect': 3.4.38 - '@types/node': 18.11.9 + '@types/node': 18.18.9 dev: true /@types/chart.js@2.9.37: @@ -5980,7 +5980,7 @@ packages: /@types/connect@3.4.38: resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==} dependencies: - '@types/node': 18.11.9 + '@types/node': 18.18.9 dev: true /@types/cookie@0.4.1: @@ -6258,7 +6258,7 @@ packages: /@types/express-serve-static-core@4.17.41: resolution: {integrity: sha512-OaJ7XLaelTgrvlZD8/aa0vvvxZdUmlCn6MtWeB7TkiKW70BQLc9XEPpDLPdbo52ZhXUCrznlWdCHWxJWtdyajA==} dependencies: - '@types/node': 18.11.9 + '@types/node': 18.18.9 '@types/qs': 6.9.10 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 @@ -6447,6 +6447,12 @@ packages: resolution: {integrity: sha512-t3rNFBgJRugIhackit2mVcLfF6IRc0JE4oeizPQL8Zrm8n2WY/0wOdpOPhdtG0V9Q2TlW/axbF1MJ6z+Yj/kKQ==} dev: true + /@types/node@18.18.9: + resolution: {integrity: sha512-0f5klcuImLnG4Qreu9hPj/rEfFq6YRc5n2mAjSsH+ec/mJL+3voBH0+8T7o8RpFjH7ovc+TRsL/c7OYIQsPTfQ==} + dependencies: + undici-types: 5.26.5 + dev: true + /@types/normalize-package-data@2.4.1: resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==} dev: true @@ -6592,7 +6598,7 @@ packages: resolution: {integrity: sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==} dependencies: '@types/mime': 1.3.5 - '@types/node': 18.11.9 + '@types/node': 18.18.9 dev: true 
/@types/serve-static@1.15.4: @@ -6608,7 +6614,7 @@ packages: dependencies: '@types/http-errors': 2.0.4 '@types/mime': 3.0.4 - '@types/node': 18.11.9 + '@types/node': 18.18.9 dev: true /@types/set-cookie-parser@2.4.2: @@ -19008,6 +19014,10 @@ packages: which-boxed-primitive: 1.0.2 dev: true + /undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + dev: true + /unicode-canonical-property-names-ecmascript@2.0.0: resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} engines: {node: '>=4'} diff --git a/posthog/api/capture.py b/posthog/api/capture.py index a7d72f9ca1f3e..ac954a3b8d6d2 100644 --- a/posthog/api/capture.py +++ b/posthog/api/capture.py @@ -262,11 +262,11 @@ def drop_events_over_quota(token: str, events: List[Any]) -> List[Any]: if not settings.EE_AVAILABLE: return events - from ee.billing.quota_limiting import QuotaResource, list_limited_team_tokens + from ee.billing.quota_limiting import QuotaResource, list_limited_team_attributes results = [] - limited_tokens_events = list_limited_team_tokens(QuotaResource.EVENTS) - limited_tokens_recordings = list_limited_team_tokens(QuotaResource.RECORDINGS) + limited_tokens_events = list_limited_team_attributes(QuotaResource.EVENTS) + limited_tokens_recordings = list_limited_team_attributes(QuotaResource.RECORDINGS) for event in events: if event.get("event") in SESSION_RECORDING_EVENT_NAMES: diff --git a/posthog/api/insight.py b/posthog/api/insight.py index a31f2dd9dbe05..20ec5e93d0619 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -21,7 +21,6 @@ from rest_framework.settings import api_settings from rest_framework_csv import renderers as csvrenderers from sentry_sdk import capture_exception -from statshog.defaults.django import statsd from posthog import schema from posthog.api.documentation import extend_schema @@ -32,6 +31,7 @@ 
TrendResultsSerializer, TrendSerializer, ) +from posthog.clickhouse.cancel import cancel_query_on_cluster from posthog.api.routing import StructuredViewSetMixin from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin @@ -43,7 +43,6 @@ synchronously_update_cache, ) from posthog.caching.insights_api import should_refresh_insight -from posthog.client import sync_execute from posthog.constants import ( BREAKDOWN_VALUES_LIMIT, INSIGHT, @@ -95,7 +94,6 @@ ClickHouseSustainedRateThrottle, ) from posthog.settings import CAPTURE_TIME_TO_SEE_DATA, SITE_URL -from posthog.settings.data_stores import CLICKHOUSE_CLUSTER from prometheus_client import Counter from posthog.user_permissions import UserPermissionsSerializerMixin from posthog.utils import ( @@ -1034,11 +1032,7 @@ def activity(self, request: request.Request, **kwargs): def cancel(self, request: request.Request, **kwargs): if "client_query_id" not in request.data: raise serializers.ValidationError({"client_query_id": "Field is required."}) - sync_execute( - f"KILL QUERY ON CLUSTER '{CLICKHOUSE_CLUSTER}' WHERE query_id LIKE %(client_query_id)s", - {"client_query_id": f"{self.team.pk}_{request.data['client_query_id']}%"}, - ) - statsd.incr("clickhouse.query.cancellation_requested", tags={"team_id": self.team.pk}) + cancel_query_on_cluster(team_id=self.team.pk, client_query_id=request.data["client_query_id"]) return Response(status=status.HTTP_201_CREATED) @action(methods=["POST"], detail=False) diff --git a/posthog/api/organization_feature_flag.py b/posthog/api/organization_feature_flag.py index 6f339f2976a5a..d149de721dccb 100644 --- a/posthog/api/organization_feature_flag.py +++ b/posthog/api/organization_feature_flag.py @@ -1,8 +1,4 @@ -from posthog.api.routing import StructuredViewSetMixin -from posthog.api.feature_flag import FeatureFlagSerializer -from posthog.api.feature_flag import CanEditFeatureFlag -from posthog.models import 
FeatureFlag, Team -from posthog.permissions import OrganizationMemberPermissions +from typing import Dict from django.core.exceptions import ObjectDoesNotExist from rest_framework.response import Response from rest_framework.permissions import IsAuthenticated @@ -12,6 +8,14 @@ viewsets, status, ) +from posthog.api.cohort import CohortSerializer +from posthog.api.routing import StructuredViewSetMixin +from posthog.api.feature_flag import FeatureFlagSerializer +from posthog.api.feature_flag import CanEditFeatureFlag +from posthog.models import FeatureFlag, Team +from posthog.models.cohort import Cohort +from posthog.models.filters.filter import Filter +from posthog.permissions import OrganizationMemberPermissions class OrganizationFeatureFlagView( @@ -86,7 +90,7 @@ def copy_flags(self, request, *args, **kwargs): for target_project_id in target_project_ids: # Target project does not exist try: - Team.objects.get(id=target_project_id) + target_project = Team.objects.get(id=target_project_id) except ObjectDoesNotExist: failed_projects.append( { @@ -96,10 +100,65 @@ def copy_flags(self, request, *args, **kwargs): ) continue - context = { - "request": request, - "team_id": target_project_id, - } + # get all linked cohorts, sorted by creation order + seen_cohorts_cache: Dict[str, Cohort] = {} + sorted_cohort_ids = flag_to_copy.get_cohort_ids( + seen_cohorts_cache=seen_cohorts_cache, sort_by_topological_order=True + ) + + # destination cohort id is different from original cohort id - create mapping + name_to_dest_cohort_id: Dict[str, int] = {} + # create cohorts in the destination project + if len(sorted_cohort_ids): + for cohort_id in sorted_cohort_ids: + original_cohort = seen_cohorts_cache[str(cohort_id)] + + # search in destination project by name + destination_cohort = Cohort.objects.filter( + name=original_cohort.name, team_id=target_project_id, deleted=False + ).first() + + # create new cohort in the destination project + if not destination_cohort: + prop_group = 
Filter( + data={"properties": original_cohort.properties.to_dict(), "is_simplified": True} + ).property_groups + + for prop in prop_group.flat: + if prop.type == "cohort": + original_child_cohort_id = prop.value + original_child_cohort = seen_cohorts_cache[str(original_child_cohort_id)] + prop.value = name_to_dest_cohort_id[original_child_cohort.name] + + destination_cohort_serializer = CohortSerializer( + data={ + "team": target_project, + "name": original_cohort.name, + "groups": [], + "filters": {"properties": prop_group.to_dict()}, + "description": original_cohort.description, + "is_static": original_cohort.is_static, + }, + context={ + "request": request, + "team_id": target_project.id, + }, + ) + destination_cohort_serializer.is_valid(raise_exception=True) + destination_cohort = destination_cohort_serializer.save() + + if destination_cohort is not None: + name_to_dest_cohort_id[original_cohort.name] = destination_cohort.id + + # reference correct destination cohort ids in the flag + for group in flag_to_copy.conditions: + props = group.get("properties", []) + for prop in props: + if isinstance(prop, dict) and prop.get("type") == "cohort": + original_cohort_id = prop["value"] + cohort_name = (seen_cohorts_cache[str(original_cohort_id)]).name + prop["value"] = name_to_dest_cohort_id[cohort_name] + flag_data = { "key": flag_to_copy.key, "name": flag_to_copy.name, @@ -109,6 +168,10 @@ def copy_flags(self, request, *args, **kwargs): "ensure_experience_continuity": flag_to_copy.ensure_experience_continuity, "deleted": False, } + context = { + "request": request, + "team_id": target_project_id, + } existing_flag = FeatureFlag.objects.filter( key=feature_flag_key, team_id=target_project_id, deleted=False diff --git a/posthog/api/query.py b/posthog/api/query.py index 224aedce40464..021139911cb96 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -1,11 +1,11 @@ import json import re -from typing import Dict, Optional, cast, Any, List +import uuid +from 
typing import Dict -from django.http import HttpResponse, JsonResponse +from django.http import JsonResponse from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, OpenApiResponse -from pydantic import BaseModel from rest_framework import viewsets from rest_framework.decorators import action from rest_framework.exceptions import ParseError, ValidationError, NotAuthenticated @@ -17,46 +17,31 @@ from posthog import schema from posthog.api.documentation import extend_schema +from posthog.api.services.query import process_query from posthog.api.routing import StructuredViewSetMixin +from posthog.clickhouse.client.execute_async import ( + cancel_query, + enqueue_process_query_task, + get_query_status, +) from posthog.clickhouse.query_tagging import tag_queries from posthog.errors import ExposedCHQueryError from posthog.hogql.ai import PromptUnclear, write_sql_from_prompt -from posthog.hogql.database.database import create_hogql_database, serialize_database from posthog.hogql.errors import HogQLException -from posthog.hogql.metadata import get_hogql_metadata -from posthog.hogql.modifiers import create_default_modifiers_for_team -from posthog.hogql_queries.query_runner import get_query_runner -from posthog.models import Team from posthog.models.user import User from posthog.permissions import ( ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission, ) -from posthog.queries.time_to_see_data.serializers import ( - SessionEventsQuerySerializer, - SessionsQuerySerializer, -) -from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions from posthog.rate_limit import ( AIBurstRateThrottle, AISustainedRateThrottle, TeamRateThrottle, ) -from posthog.schema import HogQLMetadata +from posthog.schema import QueryStatus from posthog.utils import refresh_requested_by_client -QUERY_WITH_RUNNER = [ - "LifecycleQuery", - "TrendsQuery", - "WebOverviewQuery", - "WebTopSourcesQuery", - "WebTopClicksQuery", - 
"WebTopPagesQuery", - "WebStatsTableQuery", -] -QUERY_WITH_RUNNER_NO_CACHE = ["EventsQuery", "PersonsQuery", "HogQLQuery", "SessionsTimelineQuery"] - class QueryThrottle(TeamRateThrottle): scope = "query" @@ -116,40 +101,73 @@ def get_throttles(self): OpenApiParameter( "client_query_id", OpenApiTypes.STR, - description="Client provided query ID. Can be used to cancel queries.", + description="Client provided query ID. Can be used to retrieve the status or cancel the query.", + ), + OpenApiParameter( + "async", + OpenApiTypes.BOOL, + description=( + "(Experimental) " + "Whether to run the query asynchronously. Defaults to False." + " If True, the `id` of the query can be used to check the status and to cancel it." + ), ), ], responses={ 200: OpenApiResponse(description="Query results"), }, ) - def list(self, request: Request, **kw) -> HttpResponse: - self._tag_client_query_id(request.GET.get("client_query_id")) - query_json = QuerySchemaParser.validate_query(self._query_json_from_request(request)) - # allow lists as well as dicts in response with safe=False - try: - return JsonResponse(process_query(self.team, query_json, request=request), safe=False) - except HogQLException as e: - raise ValidationError(str(e)) - except ExposedCHQueryError as e: - raise ValidationError(str(e), e.code_name) - - def post(self, request, *args, **kwargs): + def create(self, request, *args, **kwargs) -> JsonResponse: request_json = request.data query_json = request_json.get("query") - self._tag_client_query_id(request_json.get("client_query_id")) - # allow lists as well as dicts in response with safe=False + query_async = request_json.get("async", False) + refresh_requested = refresh_requested_by_client(request) + + client_query_id = request_json.get("client_query_id") or uuid.uuid4().hex + self._tag_client_query_id(client_query_id) + + if query_async: + query_id = enqueue_process_query_task( + team_id=self.team.pk, + query_json=query_json, + query_id=client_query_id, + 
refresh_requested=refresh_requested, + ) + return JsonResponse(QueryStatus(id=query_id, team_id=self.team.pk).model_dump(), safe=False) + try: - return JsonResponse(process_query(self.team, query_json, request=request), safe=False) - except HogQLException as e: - raise ValidationError(str(e)) - except ExposedCHQueryError as e: - raise ValidationError(str(e), e.code_name) + result = process_query(self.team, query_json, refresh_requested=refresh_requested) + return JsonResponse(result, safe=False) + except (HogQLException, ExposedCHQueryError) as e: + raise ValidationError(str(e), getattr(e, "code_name", None)) except Exception as e: self.handle_column_ch_error(e) capture_exception(e) raise e + @extend_schema( + description="(Experimental)", + responses={ + 200: OpenApiResponse(description="Query status"), + }, + ) + @extend_schema( + description="(Experimental)", + responses={ + 200: OpenApiResponse(description="Query status"), + }, + ) + def retrieve(self, request: Request, pk=None, *args, **kwargs) -> JsonResponse: + status = get_query_status(team_id=self.team.pk, query_id=pk) + return JsonResponse(status.__dict__, safe=False) + + @extend_schema( + description="(Experimental)", + ) + def destroy(self, request, pk=None, *args, **kwargs): + cancel_query(self.team.pk, pk) + return Response(status=204) + @action(methods=["GET"], detail=False) def draft_sql(self, request: Request, *args, **kwargs) -> Response: if not isinstance(request.user, User): @@ -177,8 +195,10 @@ def handle_column_ch_error(self, error): return def _tag_client_query_id(self, query_id: str | None): - if query_id is not None: - tag_queries(client_query_id=query_id) + if query_id is None: + return + + tag_queries(client_query_id=query_id) def _query_json_from_request(self, request): if request.method == "POST": @@ -205,73 +225,3 @@ def parsing_error(ex): except (json.JSONDecodeError, UnicodeDecodeError) as error_main: raise ValidationError("Invalid JSON: %s" % (str(error_main))) return query - - -def 
_unwrap_pydantic(response: Any) -> Dict | List: - if isinstance(response, list): - return [_unwrap_pydantic(item) for item in response] - - elif isinstance(response, BaseModel): - resp1: Dict[str, Any] = {} - for key in response.__fields__.keys(): - resp1[key] = _unwrap_pydantic(getattr(response, key)) - return resp1 - - elif isinstance(response, dict): - resp2: Dict[str, Any] = {} - for key in response.keys(): - resp2[key] = _unwrap_pydantic(response.get(key)) - return resp2 - - return response - - -def _unwrap_pydantic_dict(response: Any) -> Dict: - return cast(dict, _unwrap_pydantic(response)) - - -def process_query( - team: Team, - query_json: Dict, - in_export_context: Optional[bool] = False, - request: Optional[Request] = None, -) -> Dict: - # query_json has been parsed by QuerySchemaParser - # it _should_ be impossible to end up in here with a "bad" query - query_kind = query_json.get("kind") - tag_queries(query=query_json) - - if query_kind in QUERY_WITH_RUNNER: - refresh_requested = refresh_requested_by_client(request) if request else False - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) - return _unwrap_pydantic_dict(query_runner.run(refresh_requested=refresh_requested)) - elif query_kind in QUERY_WITH_RUNNER_NO_CACHE: - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) - return _unwrap_pydantic_dict(query_runner.calculate()) - elif query_kind == "HogQLMetadata": - metadata_query = HogQLMetadata.model_validate(query_json) - metadata_response = get_hogql_metadata(query=metadata_query, team=team) - return _unwrap_pydantic_dict(metadata_response) - elif query_kind == "DatabaseSchemaQuery": - database = create_hogql_database(team.pk, modifiers=create_default_modifiers_for_team(team)) - return serialize_database(database) - elif query_kind == "TimeToSeeDataSessionsQuery": - sessions_query_serializer = SessionsQuerySerializer(data=query_json) - 
sessions_query_serializer.is_valid(raise_exception=True) - return {"results": get_sessions(sessions_query_serializer).data} - elif query_kind == "TimeToSeeDataQuery": - serializer = SessionEventsQuerySerializer( - data={ - "team_id": team.pk, - "session_start": query_json["sessionStart"], - "session_end": query_json["sessionEnd"], - "session_id": query_json["sessionId"], - } - ) - serializer.is_valid(raise_exception=True) - return get_session_events(serializer) or {} - else: - if query_json.get("source"): - return process_query(team, query_json["source"]) - - raise ValidationError(f"Unsupported query kind: {query_kind}") diff --git a/posthog/api/services/__init__.py b/posthog/api/services/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/posthog/api/services/query.py b/posthog/api/services/query.py new file mode 100644 index 0000000000000..1ef831bde1b82 --- /dev/null +++ b/posthog/api/services/query.py @@ -0,0 +1,97 @@ +import structlog +from typing import Any, Dict, List, Optional, cast + +from pydantic import BaseModel +from rest_framework.exceptions import ValidationError + +from posthog.clickhouse.query_tagging import tag_queries +from posthog.hogql.database.database import create_hogql_database, serialize_database +from posthog.hogql.metadata import get_hogql_metadata +from posthog.hogql.modifiers import create_default_modifiers_for_team +from posthog.hogql_queries.query_runner import get_query_runner +from posthog.models import Team +from posthog.queries.time_to_see_data.serializers import SessionEventsQuerySerializer, SessionsQuerySerializer +from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions +from posthog.schema import HogQLMetadata + +logger = structlog.get_logger(__name__) + +QUERY_WITH_RUNNER = [ + "LifecycleQuery", + "TrendsQuery", + "WebOverviewQuery", + "WebTopSourcesQuery", + "WebTopClicksQuery", + "WebTopPagesQuery", + "WebStatsTableQuery", +] +QUERY_WITH_RUNNER_NO_CACHE = 
["EventsQuery", "PersonsQuery", "HogQLQuery", "SessionsTimelineQuery"] + + +def _unwrap_pydantic(response: Any) -> Dict | List: + if isinstance(response, list): + return [_unwrap_pydantic(item) for item in response] + + elif isinstance(response, BaseModel): + resp1: Dict[str, Any] = {} + for key in response.__fields__.keys(): + resp1[key] = _unwrap_pydantic(getattr(response, key)) + return resp1 + + elif isinstance(response, dict): + resp2: Dict[str, Any] = {} + for key in response.keys(): + resp2[key] = _unwrap_pydantic(response.get(key)) + return resp2 + + return response + + +def _unwrap_pydantic_dict(response: Any) -> Dict: + return cast(dict, _unwrap_pydantic(response)) + + +def process_query( + team: Team, + query_json: Dict, + in_export_context: Optional[bool] = False, + refresh_requested: Optional[bool] = False, +) -> Dict: + # query_json has been parsed by QuerySchemaParser + # it _should_ be impossible to end up in here with a "bad" query + query_kind = query_json.get("kind") + tag_queries(query=query_json) + + if query_kind in QUERY_WITH_RUNNER: + query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + return _unwrap_pydantic_dict(query_runner.run(refresh_requested=refresh_requested)) + elif query_kind in QUERY_WITH_RUNNER_NO_CACHE: + query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + return _unwrap_pydantic_dict(query_runner.calculate()) + elif query_kind == "HogQLMetadata": + metadata_query = HogQLMetadata.model_validate(query_json) + metadata_response = get_hogql_metadata(query=metadata_query, team=team) + return _unwrap_pydantic_dict(metadata_response) + elif query_kind == "DatabaseSchemaQuery": + database = create_hogql_database(team.pk, modifiers=create_default_modifiers_for_team(team)) + return serialize_database(database) + elif query_kind == "TimeToSeeDataSessionsQuery": + sessions_query_serializer = SessionsQuerySerializer(data=query_json) + 
sessions_query_serializer.is_valid(raise_exception=True) + return {"results": get_sessions(sessions_query_serializer).data} + elif query_kind == "TimeToSeeDataQuery": + serializer = SessionEventsQuerySerializer( + data={ + "team_id": team.pk, + "session_start": query_json["sessionStart"], + "session_end": query_json["sessionEnd"], + "session_id": query_json["sessionId"], + } + ) + serializer.is_valid(raise_exception=True) + return get_session_events(serializer) or {} + else: + if query_json.get("source"): + return process_query(team, query_json["source"]) + + raise ValidationError(f"Unsupported query kind: {query_kind}") diff --git a/posthog/api/survey.py b/posthog/api/survey.py index a2b3e8c3fcdd3..ef3e8c166dac8 100644 --- a/posthog/api/survey.py +++ b/posthog/api/survey.py @@ -221,19 +221,29 @@ def update(self, instance: Survey, validated_data): existing_flag_serializer.is_valid(raise_exception=True) existing_flag_serializer.save() else: - new_flag = self._create_new_targeting_flag(instance.name, new_filters) + new_flag = self._create_new_targeting_flag(instance.name, new_filters, bool(instance.start_date)) validated_data["targeting_flag_id"] = new_flag.id validated_data.pop("targeting_flag_filters") + end_date = validated_data.get("end_date") + if instance.targeting_flag: + # turn off feature flag if survey is ended + if end_date is None: + instance.targeting_flag.active = True + else: + instance.targeting_flag.active = False + instance.targeting_flag.save() + return super().update(instance, validated_data) - def _create_new_targeting_flag(self, name, filters): + def _create_new_targeting_flag(self, name, filters, active=False): feature_flag_key = slugify(f"{SURVEY_TARGETING_FLAG_PREFIX}{name}") feature_flag_serializer = FeatureFlagSerializer( data={ "key": feature_flag_key, "name": f"Targeting flag for survey {name}", "filters": filters, + "active": active, }, context=self.context, ) diff --git a/posthog/api/test/__snapshots__/test_action.ambr 
b/posthog/api/test/__snapshots__/test_action.ambr index 66a6b7ac1190e..e09dabc5bf688 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -71,7 +71,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_actions-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/actions/%3F%24'*/ @@ -226,7 +227,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_actions-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/actions/%3F%24'*/ @@ -552,7 +554,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_actions-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/actions/%3F%24'*/ diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr index 50d15b6145259..1373bf5f4060b 100644 --- a/posthog/api/test/__snapshots__/test_annotation.ambr +++ b/posthog/api/test/__snapshots__/test_annotation.ambr @@ -71,7 +71,8 @@ "posthog_team"."extra_settings", 
"posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_annotations-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/annotations/%3F%24'*/ @@ -150,7 +151,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_annotations-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/annotations/%3F%24'*/ @@ -474,7 +476,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_annotations-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/annotations/%3F%24'*/ diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index c11bcf8af7d75..655c44eff5ab9 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -81,7 +81,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" 
WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='team-detail',route='api/projects/%28%3FP%3Cid%3E%5B%5E/.%5D%2B%29/%3F%24'*/ @@ -306,7 +307,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -461,7 +463,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -604,7 +607,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."api_token" = 'token123' LIMIT 21 diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index 5328b0eaa8e62..d7908693f9cb2 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -49,7 +49,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -174,7 +175,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", 
"posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."api_token" = 'token123' LIMIT 21 /*controller='posthog.api.early_access_feature.early_access_features',route='%5Eapi/early_access_features/%3F%28%3F%3A%5B%3F%23%5D.%2A%29%3F%24'*/ diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr index 2f1e429bac578..97b03322b3e2d 100644 --- a/posthog/api/test/__snapshots__/test_element.ambr +++ b/posthog/api/test/__snapshots__/test_element.ambr @@ -78,7 +78,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='element-stats',route='api/element/stats/%3F%24'*/ diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index 0f8b2b5457332..4d3b882aa5b51 100644 --- a/posthog/api/test/__snapshots__/test_insight.ambr +++ b/posthog/api/test/__snapshots__/test_insight.ambr @@ -669,7 +669,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -719,7 +720,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", 
"posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -852,7 +854,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1075,7 +1078,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1225,6 +1229,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -1352,6 +1357,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -1460,6 +1466,7 @@ 
"posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -1595,7 +1602,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1687,7 +1695,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1771,7 +1780,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1828,7 +1838,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index b0b05656ef376..e2b852a604b20 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -125,7 +125,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -221,7 +222,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -313,7 +315,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 
/*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -515,7 +518,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -648,7 +652,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -787,7 +792,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -879,7 +885,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + 
"posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -1085,7 +1092,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -1211,7 +1219,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -1268,7 +1277,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -1414,7 +1424,8 @@ "posthog_team"."event_properties", 
"posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='organization_feature_flags-copy-flags',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/copy_flags/%3F%24'*/ @@ -1675,7 +1686,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid /*controller='organization_feature_flags-detail',route='api/organizations/%28%3FP%3Cparent_lookup_organization_id%3E%5B%5E/.%5D%2B%29/feature_flags/%28%3FP%3Cfeature_flag_key%3E%5B%5E/.%5D%2B%29/%3F%24'*/ ' diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr index 2d2cb9a03cbfe..dcd94e83ea36c 100644 --- a/posthog/api/test/__snapshots__/test_preflight.ambr +++ b/posthog/api/test/__snapshots__/test_preflight.ambr @@ -89,7 +89,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='posthog.views.preflight_check',route='%5E_preflight/%3F%28%3F%3A%5B%3F%23%5D.%2A%29%3F%24'*/ diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index 4536dfb45977e..1d5a134d8111f 100644 --- 
a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -150,7 +150,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."api_token" = 'token123' LIMIT 21 /*controller='posthog.api.survey.surveys',route='%5Eapi/surveys/%3F%28%3F%3A%5B%3F%23%5D.%2A%29%3F%24'*/ diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index c5475d4515b21..08769c536599c 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -71,7 +71,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -196,7 +197,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%28%3FP%3Cpk%3E%5B%5E/.%5D%2B%29/%3F%24'*/ @@ -324,6 +326,7 @@ "posthog_team"."event_properties_with_usage", 
"posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -535,6 +538,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -701,6 +705,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -879,6 +884,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -1043,6 +1049,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -1281,7 +1288,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1338,7 +1346,8 @@ "posthog_team"."event_properties", 
"posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1486,7 +1495,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1603,7 +1613,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1660,7 +1671,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1806,7 +1818,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", 
"posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -1939,7 +1952,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -2192,7 +2206,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -2432,7 +2447,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -2568,6 +2584,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", 
"posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -2705,7 +2722,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -2817,7 +2835,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -2923,7 +2942,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -3073,7 +3093,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE 
"posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -3170,6 +3191,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -3292,7 +3314,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -3408,7 +3431,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -3549,7 +3573,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -3862,7 +3887,8 @@ "posthog_team"."extra_settings", 
"posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -4020,7 +4046,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -4154,7 +4181,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -4246,7 +4274,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -4407,7 +4436,8 @@ "posthog_team"."event_properties", 
"posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -4464,7 +4494,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -4580,7 +4611,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -4737,7 +4769,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -5155,7 +5188,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", 
"posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -5296,7 +5330,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -5388,7 +5423,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -5511,7 +5547,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -5595,7 +5632,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", 
"posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -5652,7 +5690,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -5768,7 +5807,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -5915,7 +5955,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -6078,6 +6119,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", 
"posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -6477,7 +6519,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%28%3FP%3Cpk%3E%5B%5E/.%5D%2B%29/%3F%24'*/ @@ -6634,6 +6677,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -6815,6 +6859,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -6981,6 +7026,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -7112,7 +7158,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 
/*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -7209,6 +7256,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -7373,6 +7421,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -8005,7 +8054,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -8273,7 +8323,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -8434,7 +8485,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -8491,7 +8543,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -8607,7 +8660,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -8764,7 +8818,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -8887,7 +8942,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -9015,7 +9071,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -9162,7 +9219,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -9471,7 +9529,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -9618,7 +9677,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -9722,7 +9782,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ @@ -9859,6 +9920,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -9977,7 +10039,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%28%3FP%3Cpk%3E%5B%5E/.%5D%2B%29/%3F%24'*/ @@ -10116,6 +10179,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -10297,6 +10361,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", 
"posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -10446,7 +10511,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -10557,6 +10623,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -10709,7 +10776,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -10894,7 +10962,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -11005,6 +11074,7 @@ 
"posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -11157,7 +11227,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ @@ -11304,6 +11375,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", @@ -11531,7 +11603,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index fc373eefb7a43..32ff35e826dd3 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -71,7 +71,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", 
"posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_notebooks-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/notebooks/%3F%24'*/ @@ -168,7 +169,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_notebooks-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/notebooks/%28%3FP%3Cshort_id%3E%5B%5E/.%5D%2B%29/%3F%24'*/ @@ -225,7 +227,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_notebooks-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/notebooks/%28%3FP%3Cshort_id%3E%5B%5E/.%5D%2B%29/%3F%24'*/ @@ -334,7 +337,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_notebooks-all-activity',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/notebooks/activity/%3F%24'*/ @@ -546,7 +550,8 @@ 
"posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_notebooks-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/notebooks/%28%3FP%3Cshort_id%3E%5B%5E/.%5D%2B%29/%3F%24'*/ @@ -657,6 +662,7 @@ "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at", "posthog_user"."id", "posthog_user"."password", "posthog_user"."last_login", @@ -763,7 +769,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_notebooks-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/notebooks/%28%3FP%3Cshort_id%3E%5B%5E/.%5D%2B%29/%3F%24'*/ diff --git a/posthog/api/test/test_feature_flag_utils.py b/posthog/api/test/test_feature_flag_utils.py new file mode 100644 index 0000000000000..dd6108d7ff54c --- /dev/null +++ b/posthog/api/test/test_feature_flag_utils.py @@ -0,0 +1,73 @@ +from typing import Dict, Set +from posthog.test.base import ( + APIBaseTest, +) +from posthog.models.cohort import Cohort +from posthog.models.cohort.util import sort_cohorts_topologically + + +class TestFeatureFlagUtils(APIBaseTest): + def setUp(self): + super().setUp() + + def test_cohorts_sorted_topologically(self): + cohorts = {} + + def create_cohort(name): + cohorts[name] = Cohort.objects.create( + team=self.team, + 
name=name, + filters={ + "properties": { + "type": "AND", + "values": [ + {"key": "name", "value": "test", "type": "person"}, + ], + } + }, + ) + + create_cohort("a") + create_cohort("b") + create_cohort("c") + + # (c)-->(b) + cohorts["c"].filters["properties"]["values"][0] = { + "key": "id", + "value": cohorts["b"].pk, + "type": "cohort", + "negation": True, + } + cohorts["c"].save() + + # (a)-->(c) + cohorts["a"].filters["properties"]["values"][0] = { + "key": "id", + "value": cohorts["c"].pk, + "type": "cohort", + "negation": True, + } + cohorts["a"].save() + + cohort_ids = {cohorts["a"].pk, cohorts["b"].pk, cohorts["c"].pk} + seen_cohorts_cache = { + str(cohorts["a"].pk): cohorts["a"], + str(cohorts["b"].pk): cohorts["b"], + str(cohorts["c"].pk): cohorts["c"], + } + + # (a)-->(c)-->(b) + # create b first, since it doesn't depend on any other cohorts + # then c, because it depends on b + # then a, because it depends on c + + # thus destination creation order: b, c, a + destination_creation_order = [cohorts["b"].pk, cohorts["c"].pk, cohorts["a"].pk] + topologically_sorted_cohort_ids = sort_cohorts_topologically(cohort_ids, seen_cohorts_cache) + self.assertEqual(topologically_sorted_cohort_ids, destination_creation_order) + + def test_empty_cohorts_set(self): + cohort_ids: Set[int] = set() + seen_cohorts_cache: Dict[str, Cohort] = {} + topologically_sorted_cohort_ids = sort_cohorts_topologically(cohort_ids, seen_cohorts_cache) + self.assertEqual(topologically_sorted_cohort_ids, []) diff --git a/posthog/api/test/test_organization_feature_flag.py b/posthog/api/test/test_organization_feature_flag.py index cd78e5c238f20..103756d0c4911 100644 --- a/posthog/api/test/test_organization_feature_flag.py +++ b/posthog/api/test/test_organization_feature_flag.py @@ -1,6 +1,8 @@ from rest_framework import status +from posthog.models.cohort.util import sort_cohorts_topologically from posthog.models.user import User from posthog.models.team.team import Team +from 
posthog.models.cohort import Cohort from ee.models.organization_resource_access import OrganizationResourceAccess from posthog.constants import AvailableFeature from posthog.models import FeatureFlag @@ -428,3 +430,230 @@ def test_copy_feature_flag_cannot_edit(self): } response = self.client.post(url, data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) + + def test_copy_feature_flag_cohort_nonexistent_in_destination(self): + cohorts = {} + creation_order = [] + + def create_cohort(name, children): + creation_order.append(name) + properties = [{"key": "$some_prop", "value": "nomatchihope", "type": "person"}] + if children: + properties = [{"key": "id", "type": "cohort", "value": child.pk} for child in children] + + cohorts[name] = Cohort.objects.create( + team=self.team, + name=str(name), + filters={ + "properties": { + "type": "AND", + "values": properties, + } + }, + ) + + # link cohorts + create_cohort(1, None) + create_cohort(3, None) + create_cohort(2, [cohorts[1]]) + create_cohort(4, [cohorts[2], cohorts[3]]) + create_cohort(5, [cohorts[4]]) + create_cohort(6, None) + create_cohort(7, [cohorts[5], cohorts[6]]) # "head" cohort + + flag_to_copy = FeatureFlag.objects.create( + team=self.team_1, + created_by=self.user, + key="flag-with-cohort", + filters={ + "groups": [ + { + "rollout_percentage": 20, + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohorts[7].pk, # link "head" cohort + } + ], + } + ] + }, + ) + + url = f"/api/organizations/{self.organization.id}/feature_flags/copy_flags" + target_project = self.team_2 + + data = { + "feature_flag_key": flag_to_copy.key, + "from_project": flag_to_copy.team_id, + "target_project_ids": [target_project.id], + } + response = self.client.post(url, data) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # check all cohorts were created in the destination project + for name in creation_order: + found_cohort = Cohort.objects.filter(name=str(name), 
team_id=target_project.id).exists() + self.assertTrue(found_cohort) + + def test_copy_feature_flag_cohort_nonexistent_in_destination_2(self): + feature_flag_key = "flag-with-cohort" + cohorts = {} + + def create_cohort(name): + cohorts[name] = Cohort.objects.create( + team=self.team, + name=name, + filters={ + "properties": { + "type": "AND", + "values": [ + {"key": "name", "value": "test", "type": "person"}, + ], + } + }, + ) + + create_cohort("a") + create_cohort("b") + create_cohort("c") + create_cohort("d") + + def connect(parent, child): + cohorts[parent].filters["properties"]["values"][0] = { + "key": "id", + "value": cohorts[child].pk, + "type": "cohort", + } + cohorts[parent].save() + + connect("d", "b") + connect("a", "d") + connect("c", "a") + + head_cohort = cohorts["c"] + flag_to_copy = FeatureFlag.objects.create( + team=self.team_1, + created_by=self.user, + key=feature_flag_key, + filters={ + "groups": [ + { + "rollout_percentage": 20, + "properties": [ + { + "key": "id", + "type": "cohort", + "value": head_cohort.pk, # link "head" cohort + } + ], + } + ] + }, + ) + + url = f"/api/organizations/{self.organization.id}/feature_flags/copy_flags" + target_project = self.team_2 + + data = { + "feature_flag_key": flag_to_copy.key, + "from_project": flag_to_copy.team_id, + "target_project_ids": [target_project.id], + } + response = self.client.post(url, data) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + # check all cohorts were created in the destination project + for name in cohorts.keys(): + found_cohort = Cohort.objects.filter(name=name, team_id=target_project.id)[0] + self.assertTrue(found_cohort) + + # destination flag contains the head cohort + destination_flag = FeatureFlag.objects.get(key=feature_flag_key, team_id=target_project.id) + destination_flag_head_cohort_id = destination_flag.filters["groups"][0]["properties"][0]["value"] + destination_head_cohort = Cohort.objects.get(pk=destination_flag_head_cohort_id, 
team_id=target_project.id) + self.assertEqual(destination_head_cohort.name, head_cohort.name) + self.assertNotEqual(destination_head_cohort.id, head_cohort.id) + + # get topological order of the original cohorts + original_cohorts_cache = {} + for _, cohort in cohorts.items(): + original_cohorts_cache[str(cohort.id)] = cohort + original_cohort_ids = {int(str_id) for str_id in original_cohorts_cache.keys()} + topologically_sorted_original_cohort_ids = sort_cohorts_topologically( + original_cohort_ids, original_cohorts_cache + ) + + # drill down the destination cohorts in the reverse topological order + # the order of names should match the reverse topological order of the original cohort names + topologically_sorted_original_cohort_ids_reversed = topologically_sorted_original_cohort_ids[::-1] + + def traverse(cohort, index): + expected_cohort_id = topologically_sorted_original_cohort_ids_reversed[index] + expected_name = original_cohorts_cache[str(expected_cohort_id)].name + self.assertEqual(expected_name, cohort.name) + + prop = cohort.filters["properties"]["values"][0] + if prop["type"] == "cohort": + next_cohort_id = prop["value"] + next_cohort = Cohort.objects.get(pk=next_cohort_id, team_id=target_project.id) + traverse(next_cohort, index + 1) + + traverse(destination_head_cohort, 0) + + def test_copy_feature_flag_destination_cohort_not_overridden(self): + cohort_name = "cohort-1" + target_project = self.team_2 + original_cohort = Cohort.objects.create( + team=self.team, + name=cohort_name, + groups=[{"properties": [{"key": "$some_prop", "value": "original_value", "type": "person"}]}], + ) + + destination_cohort_prop_value = "destination_value" + Cohort.objects.create( + team=target_project, + name=cohort_name, + groups=[{"properties": [{"key": "$some_prop", "value": destination_cohort_prop_value, "type": "person"}]}], + ) + + flag_to_copy = FeatureFlag.objects.create( + team=self.team_1, + created_by=self.user, + key="flag-with-cohort", + filters={ + "groups": 
[ + { + "rollout_percentage": 20, + "properties": [ + { + "key": "id", + "type": "cohort", + "value": original_cohort.pk, + } + ], + } + ] + }, + ) + + url = f"/api/organizations/{self.organization.id}/feature_flags/copy_flags" + + data = { + "feature_flag_key": flag_to_copy.key, + "from_project": flag_to_copy.team_id, + "target_project_ids": [target_project.id], + } + response = self.client.post(url, data) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + destination_cohort = Cohort.objects.filter(name=cohort_name, team=target_project).first() + self.assertTrue(destination_cohort is not None) + # check destination value not overwritten + + if destination_cohort is not None: + self.assertTrue(destination_cohort.groups[0]["properties"][0]["value"] == destination_cohort_prop_value) diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index b49cd25b83287..ff03704605014 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -1,11 +1,11 @@ import json +from unittest import mock from unittest.mock import patch -from urllib.parse import quote from freezegun import freeze_time from rest_framework import status -from posthog.api.query import process_query +from posthog.api.services.query import process_query from posthog.models.property_definition import PropertyDefinition, PropertyType from posthog.models.utils import UUIDT from posthog.schema import ( @@ -336,51 +336,9 @@ def test_person_property_filter(self): response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json() self.assertEqual(len(response["results"]), 2) - def test_json_undefined_constant_error(self): - response = self.client.get( - f"/api/projects/{self.team.id}/query/?query=%7B%22kind%22%3A%22EventsQuery%22%2C%22select%22%3A%5B%22*%22%5D%2C%22limit%22%3AInfinity%7D" - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": 
"validation_error", - "code": "invalid_input", - "detail": "Unsupported constant found in JSON: Infinity", - "attr": None, - }, - ) - - response = self.client.get( - f"/api/projects/{self.team.id}/query/?query=%7B%22kind%22%3A%22EventsQuery%22%2C%22select%22%3A%5B%22*%22%5D%2C%22limit%22%3ANaN%7D" - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_input", - "detail": "Unsupported constant found in JSON: NaN", - "attr": None, - }, - ) - def test_safe_clickhouse_error_passed_through(self): query = {"kind": "EventsQuery", "select": ["timestamp + 'string'"]} - # Safe errors are passed through in GET requests - response_get = self.client.get(f"/api/projects/{self.team.id}/query/?query={quote(json.dumps(query))}") - self.assertEqual(response_get.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response_get.json(), - self.validation_error_response( - "Illegal types DateTime64(6, 'UTC') and String of arguments of function plus: " - "While processing toTimeZone(timestamp, 'UTC') + 'string'.", - "illegal_type_of_argument", - ), - ) - - # Safe errors are passed through in POST requests too response_post = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query}) self.assertEqual(response_post.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -396,11 +354,6 @@ def test_safe_clickhouse_error_passed_through(self): def test_unsafe_clickhouse_error_is_swallowed(self, sqlparse_format_mock): query = {"kind": "EventsQuery", "select": ["timestamp"]} - # Unsafe errors are swallowed in GET requests (in this case we should not expose malformed SQL) - response_get = self.client.get(f"/api/projects/{self.team.id}/query/?query={quote(json.dumps(query))}") - self.assertEqual(response_get.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - - # Unsafe errors are swallowed in POST requests too response_post = 
self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query}) self.assertEqual(response_post.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) @@ -832,3 +785,87 @@ def test_full_hogql_query_values(self): ) self.assertEqual(response.get("results", [])[0][0], 20) + + +class TestQueryRetrieve(APIBaseTest): + def setUp(self): + super().setUp() + self.team_id = self.team.pk + self.valid_query_id = "12345" + self.invalid_query_id = "invalid-query-id" + self.redis_client_mock = mock.Mock() + self.redis_get_patch = mock.patch("posthog.redis.get_client", return_value=self.redis_client_mock) + self.redis_get_patch.start() + + def tearDown(self): + self.redis_get_patch.stop() + + def test_with_valid_query_id(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "error": False, + "complete": True, + "results": ["result1", "result2"], + } + ).encode() + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json()["complete"], True, response.content) + + def test_with_invalid_query_id(self): + self.redis_client_mock.get.return_value = None + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.invalid_query_id}/") + self.assertEqual(response.status_code, 404) + + def test_completed_query(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "complete": True, + "results": ["result1", "result2"], + } + ).encode() + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + self.assertTrue(response.json()["complete"]) + + def test_running_query(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "complete": False, + } + ).encode() + response = 
self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + self.assertFalse(response.json()["complete"]) + + def test_failed_query(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "error": True, + "error_message": "Query failed", + } + ).encode() + response = self.client.get(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 200) + self.assertTrue(response.json()["error"]) + + def test_destroy(self): + self.redis_client_mock.get.return_value = json.dumps( + { + "id": self.valid_query_id, + "team_id": self.team_id, + "error": True, + "error_message": "Query failed", + } + ).encode() + response = self.client.delete(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") + self.assertEqual(response.status_code, 204) + self.redis_client_mock.delete.assert_called_once() diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index 92008ce32657d..75cd3d1c91e5b 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -365,7 +365,7 @@ def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self): "groups": [{"variant": None, "properties": [], "rollout_percentage": 20}] } - def test_updating_survey_to_remove_targeting_doesnt_delete_targeting_flag(self): + def test_updating_survey_to_send_none_targeting_doesnt_delete_targeting_flag(self): survey_with_targeting = self.client.post( f"/api/projects/{self.team.id}/surveys/", data={ @@ -409,7 +409,7 @@ def test_updating_survey_to_remove_targeting_doesnt_delete_targeting_flag(self): assert FeatureFlag.objects.filter(id=flagId).exists() - def test_updating_survey_to_send_none_targeting_deletes_targeting_flag(self): + def test_updating_survey_to_remove_targeting_deletes_targeting_flag(self): survey_with_targeting = self.client.post( 
f"/api/projects/{self.team.id}/surveys/", data={ @@ -697,6 +697,58 @@ def test_deleting_survey_deletes_targeting_flag(self): assert deleted_survey.status_code == status.HTTP_204_NO_CONTENT assert not FeatureFlag.objects.filter(id=response.json()["targeting_flag"]["id"]).exists() + def test_inactive_surveys_disables_targeting_flag(self): + survey_with_targeting = self.client.post( + f"/api/projects/{self.team.id}/surveys/", + data={ + "name": "survey with targeting", + "type": "popover", + "targeting_flag_filters": { + "groups": [ + { + "variant": None, + "rollout_percentage": None, + "properties": [ + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } + ], + } + ] + }, + "conditions": {"url": "https://app.posthog.com/notebooks"}, + }, + format="json", + ).json() + assert FeatureFlag.objects.filter(id=survey_with_targeting["targeting_flag"]["id"]).get().active is False + # launch survey + self.client.patch( + f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/", + data={ + "start_date": datetime.now() - timedelta(days=1), + }, + ) + assert FeatureFlag.objects.filter(id=survey_with_targeting["targeting_flag"]["id"]).get().active is True + # stop the survey + self.client.patch( + f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/", + data={ + "end_date": datetime.now() + timedelta(days=1), + }, + ) + assert FeatureFlag.objects.filter(id=survey_with_targeting["targeting_flag"]["id"]).get().active is False + # resume survey again + self.client.patch( + f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/", + data={ + "end_date": None, + }, + ) + assert FeatureFlag.objects.filter(id=survey_with_targeting["targeting_flag"]["id"]).get().active is True + def test_can_list_surveys(self): self.client.post( f"/api/projects/{self.team.id}/surveys/", diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py index be11c4ffe48b5..f7ee632e2ad48 100644 
--- a/posthog/caching/calculate_results.py +++ b/posthog/caching/calculate_results.py @@ -141,7 +141,7 @@ def calculate_for_query_based_insight( ) # local import to avoid circular reference - from posthog.api.query import process_query + from posthog.api.services.query import process_query # TODO need to properly check that hogql is enabled? return cache_key, cache_type, process_query(team, insight.query, True) diff --git a/posthog/celery.py b/posthog/celery.py index a7b62848bfab3..53c67214783ee 100644 --- a/posthog/celery.py +++ b/posthog/celery.py @@ -27,7 +27,8 @@ from posthog.cloud_utils import is_cloud from posthog.metrics import pushed_metrics_registry from posthog.redis import get_client -from posthog.utils import get_crontab, get_instance_region +from posthog.utils import get_crontab +from posthog.ph_client import get_ph_client # set the default Django settings module for the 'celery' program. os.environ.setdefault("DJANGO_SETTINGS_MODULE", "posthog.settings") @@ -333,6 +334,13 @@ def setup_periodic_tasks(sender: Celery, **kwargs): name="sync datawarehouse sources that have settled in s3 bucket", ) + # Every 30 minutes try to retrieve and calculate total rows synced in period + sender.add_periodic_task( + crontab(minute="*/30"), + calculate_external_data_rows_synced.s(), + name="calculate external data rows synced", + ) + # Set up clickhouse query instrumentation @task_prerun.connect @@ -387,24 +395,19 @@ def redis_heartbeat(): @app.task(ignore_result=True, bind=True) -def enqueue_clickhouse_execute_with_progress( - self, team_id, query_id, query, args=None, settings=None, with_column_types=False -): +def process_query_task(self, team_id, query_id, query_json, in_export_context=False, refresh_requested=False): """ - Kick off query with progress reporting - Iterate over the progress status - Save status to redis + Kick off query Once complete save results to redis """ - from posthog.client import execute_with_progress - - execute_with_progress( - team_id, - 
query_id, - query, - args, - settings, - with_column_types, + from posthog.client import execute_process_query + + execute_process_query( + team_id=team_id, + query_id=query_id, + query_json=query_json, + in_export_context=in_export_context, + refresh_requested=refresh_requested, task_id=self.request.id, ) @@ -507,10 +510,10 @@ def pg_row_count(): CLICKHOUSE_TABLES = [ - "events", + "sharded_events", "person", "person_distinct_id2", - "session_replay_events", + "sharded_session_replay_events", "log_entries", ] if not is_cloud(): @@ -532,9 +535,8 @@ def clickhouse_lag(): ) for table in CLICKHOUSE_TABLES: try: - QUERY = ( - """select max(_timestamp) observed_ts, now() now_ts, now() - max(_timestamp) as lag from {table};""" - ) + QUERY = """SELECT max(_timestamp) observed_ts, now() now_ts, now() - max(_timestamp) as lag + FROM {table}""" query = QUERY.format(table=table) lag = sync_execute(query)[0][2] statsd.gauge( @@ -680,9 +682,8 @@ def clickhouse_row_count(): ) for table in CLICKHOUSE_TABLES: try: - QUERY = ( - """select count(1) freq from {table} where _timestamp >= toStartOfDay(date_sub(DAY, 2, now()));""" - ) + QUERY = """SELECT sum(rows) rows from system.parts + WHERE table = '{table}' and active;""" query = QUERY.format(table=table) rows = sync_execute(query)[0][0] row_count_gauge.labels(table_name=table).set(rows) @@ -737,10 +738,11 @@ def clickhouse_part_count(): from posthog.client import sync_execute QUERY = """ - select table, count(1) freq - from system.parts - group by table - order by freq desc; + SELECT table, count(1) freq + FROM system.parts + WHERE active + GROUP BY table + ORDER BY freq DESC; """ rows = sync_execute(QUERY) @@ -903,29 +905,10 @@ def debug_task(self): @app.task(ignore_result=True) def calculate_decide_usage() -> None: from django.db.models import Q - from posthoganalytics import Posthog - from posthog.models import Team from posthog.models.feature_flag.flag_analytics import capture_team_decide_usage - if not is_cloud(): - return - 
- # send EU data to EU, US data to US - api_key = None - host = None - region = get_instance_region() - if region == "EU": - api_key = "phc_dZ4GK1LRjhB97XozMSkEwPXx7OVANaJEwLErkY1phUF" - host = "https://eu.posthog.com" - elif region == "US": - api_key = "sTMFPsFhdP1Ssg" - host = "https://app.posthog.com" - - if not api_key: - return - - ph_client = Posthog(api_key, host=host) + ph_client = get_ph_client() for team in Team.objects.select_related("organization").exclude( Q(organization__for_internal_metrics=True) | Q(is_demo=True) @@ -935,6 +918,22 @@ def calculate_decide_usage() -> None: ph_client.shutdown() +@app.task(ignore_result=True) +def calculate_external_data_rows_synced() -> None: + from django.db.models import Q + from posthog.models import Team + from posthog.tasks.warehouse import ( + capture_workspace_rows_synced_by_team, + check_external_data_source_billing_limit_by_team, + ) + + for team in Team.objects.select_related("organization").exclude( + Q(organization__for_internal_metrics=True) | Q(is_demo=True) | Q(external_data_workspace_id__isnull=True) + ): + capture_workspace_rows_synced_by_team.delay(team.pk) + check_external_data_source_billing_limit_by_team.delay(team.pk) + + @app.task(ignore_result=True) def find_flags_with_enriched_analytics(): from datetime import datetime, timedelta @@ -1092,7 +1091,7 @@ def ee_persist_finished_recordings(): @app.task(ignore_result=True) def sync_datawarehouse_sources(): try: - from posthog.warehouse.sync_resource import sync_resources + from posthog.tasks.warehouse import sync_resources except ImportError: pass else: diff --git a/posthog/clickhouse/cancel.py b/posthog/clickhouse/cancel.py new file mode 100644 index 0000000000000..e05eea7ad3d64 --- /dev/null +++ b/posthog/clickhouse/cancel.py @@ -0,0 +1,14 @@ +from statshog.defaults.django import statsd + +from posthog.api.services.query import logger +from posthog.clickhouse.client import sync_execute +from posthog.settings import CLICKHOUSE_CLUSTER + + +def 
cancel_query_on_cluster(team_id: int, client_query_id: str) -> None: + result = sync_execute( + f"KILL QUERY ON CLUSTER '{CLICKHOUSE_CLUSTER}' WHERE query_id LIKE %(client_query_id)s", + {"client_query_id": f"{team_id}_{client_query_id}%"}, + ) + logger.info("Cancelled query %s for team %s, result: %s", client_query_id, team_id, result) + statsd.incr("clickhouse.query.cancellation_requested", tags={"team_id": team_id}) diff --git a/posthog/clickhouse/client/__init__.py b/posthog/clickhouse/client/__init__.py index f2ad255c395e1..a249ebbabb4ad 100644 --- a/posthog/clickhouse/client/__init__.py +++ b/posthog/clickhouse/client/__init__.py @@ -1,8 +1,8 @@ from posthog.clickhouse.client.execute import query_with_columns, sync_execute -from posthog.clickhouse.client.execute_async import execute_with_progress +from posthog.clickhouse.client.execute_async import execute_process_query __all__ = [ "sync_execute", "query_with_columns", - "execute_with_progress", + "execute_process_query", ] diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 3bb28c3f20075..fc9e292b08ee4 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -1,172 +1,94 @@ -import hashlib +import datetime import json -import time -from dataclasses import asdict as dataclass_asdict -from dataclasses import dataclass -from time import perf_counter -from typing import Any, Optional - -from posthog import celery -from clickhouse_driver import Client as SyncClient -from django.conf import settings as app_settings -from statshog.defaults.django import statsd - -from posthog import redis -from posthog.celery import enqueue_clickhouse_execute_with_progress -from posthog.clickhouse.client.execute import _prepare_query -from posthog.errors import wrap_query_error -from posthog.settings import ( - CLICKHOUSE_CA, - CLICKHOUSE_DATABASE, - CLICKHOUSE_HOST, - CLICKHOUSE_PASSWORD, - CLICKHOUSE_SECURE, - 
CLICKHOUSE_USER, - CLICKHOUSE_VERIFY, -) - -REDIS_STATUS_TTL = 600 # 10 minutes - - -@dataclass -class QueryStatus: - team_id: int - num_rows: float = 0 - total_rows: float = 0 - error: bool = False - complete: bool = False - error_message: str = "" - results: Any = None - start_time: Optional[float] = None - end_time: Optional[float] = None - task_id: Optional[str] = None - - -def generate_redis_results_key(query_id): - REDIS_KEY_PREFIX_ASYNC_RESULTS = "query_with_progress" - key = f"{REDIS_KEY_PREFIX_ASYNC_RESULTS}:{query_id}" - return key - - -def execute_with_progress( +import uuid + +import structlog +from rest_framework.exceptions import NotFound + +from posthog import celery, redis +from posthog.celery import process_query_task +from posthog.clickhouse.query_tagging import tag_queries +from posthog.schema import QueryStatus + +logger = structlog.get_logger(__name__) + +REDIS_STATUS_TTL_SECONDS = 600 # 10 minutes +REDIS_KEY_PREFIX_ASYNC_RESULTS = "query_async" + + +class QueryNotFoundError(NotFound): + pass + + +class QueryRetrievalError(Exception): + pass + + +def generate_redis_results_key(query_id: str, team_id: int) -> str: + return f"{REDIS_KEY_PREFIX_ASYNC_RESULTS}:{team_id}:{query_id}" + + +def execute_process_query( team_id, query_id, - query, - args=None, - settings=None, - with_column_types=False, - update_freq=0.2, + query_json, + in_export_context, + refresh_requested, task_id=None, ): - """ - Kick off query with progress reporting - Iterate over the progress status - Save status to redis - Once complete save results to redis - """ - - key = generate_redis_results_key(query_id) - ch_client = SyncClient( - host=CLICKHOUSE_HOST, - database=CLICKHOUSE_DATABASE, - secure=CLICKHOUSE_SECURE, - user=CLICKHOUSE_USER, - password=CLICKHOUSE_PASSWORD, - ca_certs=CLICKHOUSE_CA, - verify=CLICKHOUSE_VERIFY, - settings={"max_result_rows": "10000"}, - ) + key = generate_redis_results_key(query_id, team_id) redis_client = redis.get_client() - start_time = 
perf_counter() - - prepared_sql, prepared_args, tags = _prepare_query(client=ch_client, query=query, args=args) + from posthog.models import Team + from posthog.api.services.query import process_query - query_status = QueryStatus(team_id, task_id=task_id) + team = Team.objects.get(pk=team_id) - start_time = time.time() + query_status = QueryStatus( + id=query_id, + team_id=team_id, + task_id=task_id, + complete=False, + error=True, # Assume error in case nothing below ends up working + start_time=datetime.datetime.utcnow(), + ) + value = query_status.model_dump_json() try: - progress = ch_client.execute_with_progress( - prepared_sql, - params=prepared_args, - settings=settings, - with_column_types=with_column_types, + tag_queries(client_query_id=query_id, team_id=team_id) + results = process_query( + team=team, query_json=query_json, in_export_context=in_export_context, refresh_requested=refresh_requested ) - for num_rows, total_rows in progress: - query_status = QueryStatus( - team_id=team_id, - num_rows=num_rows, - total_rows=total_rows, - complete=False, - error=False, - error_message="", - results=None, - start_time=start_time, - task_id=task_id, - ) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) - time.sleep(update_freq) - else: - rv = progress.get_result() - query_status = QueryStatus( - team_id=team_id, - num_rows=query_status.num_rows, - total_rows=query_status.total_rows, - complete=True, - error=False, - start_time=query_status.start_time, - end_time=time.time(), - error_message="", - results=rv, - task_id=task_id, - ) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) - + logger.info("Got results for team %s query %s", team_id, query_id) + query_status.complete = True + query_status.error = False + query_status.results = results + query_status.expiration_time = datetime.datetime.utcnow() + datetime.timedelta(seconds=REDIS_STATUS_TTL_SECONDS) + query_status.end_time = 
datetime.datetime.utcnow() + value = query_status.model_dump_json() except Exception as err: - err = wrap_query_error(err) - tags["failed"] = True - tags["reason"] = type(err).__name__ - statsd.incr("clickhouse_sync_execution_failure") - query_status = QueryStatus( - team_id=team_id, - num_rows=query_status.num_rows, - total_rows=query_status.total_rows, - complete=False, - error=True, - start_time=query_status.start_time, - end_time=time.time(), - error_message=str(err), - results=None, - task_id=task_id, - ) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) - + query_status.results = None # Clear results in case they are faulty + query_status.error_message = str(err) + logger.error("Error processing query for team %s query %s: %s", team_id, query_id, err) + value = query_status.model_dump_json() raise err finally: - ch_client.disconnect() + redis_client.set(key, value, ex=REDIS_STATUS_TTL_SECONDS) - execution_time = perf_counter() - start_time - statsd.timing("clickhouse_sync_execution_time", execution_time * 1000.0) - - if app_settings.SHELL_PLUS_PRINT_SQL: - print("Execution time: %.6fs" % (execution_time,)) # noqa T201 - - -def enqueue_execute_with_progress( +def enqueue_process_query_task( team_id, - query, - args=None, - settings=None, - with_column_types=False, - bypass_celery=False, + query_json, query_id=None, + refresh_requested=False, + in_export_context=False, + bypass_celery=False, force=False, ): if not query_id: - query_id = _query_hash(query, team_id, args) - key = generate_redis_results_key(query_id) + query_id = uuid.uuid4().hex + + key = generate_redis_results_key(query_id, team_id) redis_client = redis.get_client() if force: @@ -187,49 +109,55 @@ def enqueue_execute_with_progress( # If we've seen this query before return the query_id and don't resubmit it. 
return query_id - # Immediately set status so we don't have race with celery - query_status = QueryStatus(team_id=team_id, start_time=time.time()) - redis_client.set(key, json.dumps(dataclass_asdict(query_status)), ex=REDIS_STATUS_TTL) + # Immediately set status, so we don't have race with celery + query_status = QueryStatus(id=query_id, team_id=team_id) + redis_client.set(key, query_status.model_dump_json(), ex=REDIS_STATUS_TTL_SECONDS) if bypass_celery: # Call directly ( for testing ) - enqueue_clickhouse_execute_with_progress(team_id, query_id, query, args, settings, with_column_types) + process_query_task( + team_id, query_id, query_json, in_export_context=in_export_context, refresh_requested=refresh_requested + ) else: - enqueue_clickhouse_execute_with_progress.delay(team_id, query_id, query, args, settings, with_column_types) + task = process_query_task.delay( + team_id, query_id, query_json, in_export_context=in_export_context, refresh_requested=refresh_requested + ) + query_status.task_id = task.id + redis_client.set(key, query_status.model_dump_json(), ex=REDIS_STATUS_TTL_SECONDS) return query_id -def get_status_or_results(team_id, query_id): - """ - Returns QueryStatus data class - QueryStatus data class contains either: - Current status of running query - Results of completed query - Error payload of failed query - """ +def get_query_status(team_id, query_id): redis_client = redis.get_client() - key = generate_redis_results_key(query_id) + key = generate_redis_results_key(query_id, team_id) + try: byte_results = redis_client.get(key) - if byte_results: - str_results = byte_results.decode("utf-8") - else: - return QueryStatus(team_id, error=True, error_message="Query is unknown to backend") - query_status = QueryStatus(**json.loads(str_results)) - if query_status.team_id != team_id: - raise Exception("Requesting team is not executing team") except Exception as e: - query_status = QueryStatus(team_id, error=True, error_message=str(e)) - return query_status 
+ raise QueryRetrievalError(f"Error retrieving query {query_id} for team {team_id}") from e + if not byte_results: + raise QueryNotFoundError(f"Query {query_id} not found for team {team_id}") -def _query_hash(query: str, team_id: int, args: Any) -> str: - """ - Takes a query and returns a hex encoded hash of the query and args - """ - if args: - key = hashlib.md5((str(team_id) + query + json.dumps(args)).encode("utf-8")).hexdigest() - else: - key = hashlib.md5((str(team_id) + query).encode("utf-8")).hexdigest() - return key + return QueryStatus(**json.loads(byte_results)) + + +def cancel_query(team_id, query_id): + query_status = get_query_status(team_id, query_id) + + if query_status.task_id: + logger.info("Got task id %s, attempting to revoke", query_status.task_id) + celery.app.control.revoke(query_status.task_id, terminate=True) + + from posthog.clickhouse.cancel import cancel_query_on_cluster + + logger.info("Revoked task id %s, attempting to cancel on cluster", query_status.task_id) + cancel_query_on_cluster(team_id, query_id) + + redis_client = redis.get_client() + key = generate_redis_results_key(query_id, team_id) + logger.info("Deleting redis query key %s", key) + redis_client.delete(key) + + return True diff --git a/posthog/clickhouse/client/test/test_execute_async.py b/posthog/clickhouse/client/test/test_execute_async.py new file mode 100644 index 0000000000000..1ab4bf49e03d3 --- /dev/null +++ b/posthog/clickhouse/client/test/test_execute_async.py @@ -0,0 +1,152 @@ +import uuid +from unittest.mock import patch + +from django.test import TestCase + +from posthog.clickhouse.client import execute_async as client +from posthog.client import sync_execute +from posthog.hogql.errors import HogQLException +from posthog.models import Organization, Team +from posthog.test.base import ClickhouseTestMixin + + +def build_query(sql): + return { + "kind": "HogQLQuery", + "query": sql, + } + + +class ClickhouseClientTestCase(TestCase, ClickhouseTestMixin): + def 
setUp(self): + self.organization = Organization.objects.create(name="test") + self.team = Team.objects.create(organization=self.organization) + self.team_id = self.team.pk + + def test_async_query_client(self): + query = build_query("SELECT 1+1") + team_id = self.team_id + query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True) + result = client.get_query_status(team_id, query_id) + self.assertFalse(result.error, result.error_message) + self.assertTrue(result.complete) + self.assertEqual(result.results["results"], [[2]]) + + def test_async_query_client_errors(self): + query = build_query("SELECT WOW SUCH DATA FROM NOWHERE THIS WILL CERTAINLY WORK") + self.assertRaises( + HogQLException, + client.enqueue_process_query_task, + **{"team_id": (self.team_id), "query_json": query, "bypass_celery": True}, + ) + query_id = uuid.uuid4().hex + try: + client.enqueue_process_query_task(self.team_id, query, query_id=query_id, bypass_celery=True) + except Exception: + pass + + result = client.get_query_status(self.team_id, query_id) + self.assertTrue(result.error) + self.assertRegex(result.error_message, "Unknown table") + + def test_async_query_client_uuid(self): + query = build_query("SELECT toUUID('00000000-0000-0000-0000-000000000000')") + team_id = self.team_id + query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True) + result = client.get_query_status(team_id, query_id) + self.assertFalse(result.error, result.error_message) + self.assertTrue(result.complete) + self.assertEqual(result.results["results"], [["00000000-0000-0000-0000-000000000000"]]) + + def test_async_query_client_does_not_leak(self): + query = build_query("SELECT 1+1") + team_id = self.team_id + wrong_team = 5 + query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True) + + try: + client.get_query_status(wrong_team, query_id) + except Exception as e: + self.assertEqual(str(e), f"Query {query_id} not found for team {wrong_team}") + + 
@patch("posthog.clickhouse.client.execute_async.process_query_task") + def test_async_query_client_is_lazy(self, execute_sync_mock): + query = build_query("SELECT 4 + 4") + query_id = uuid.uuid4().hex + team_id = self.team_id + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again (for good measure!) + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Assert that we only called clickhouse once + execute_sync_mock.assert_called_once() + + @patch("posthog.clickhouse.client.execute_async.process_query_task") + def test_async_query_client_is_lazy_but_not_too_lazy(self, execute_sync_mock): + query = build_query("SELECT 8 + 8") + query_id = uuid.uuid4().hex + team_id = self.team_id + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again, but with force + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True, force=True) + + # Try the same query again (for good measure!) + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Assert that we called clickhouse twice + self.assertEqual(execute_sync_mock.call_count, 2) + + @patch("posthog.clickhouse.client.execute_async.process_query_task") + def test_async_query_client_manual_query_uuid(self, execute_sync_mock): + # This is a unique test because technically in the test pattern `SELECT 8 + 8` is already + # in redis. 
This tests to make sure it is treated as a unique run of that query + query = build_query("SELECT 8 + 8") + team_id = self.team_id + query_id = "I'm so unique" + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Try the same query again, but with force + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True, force=True) + + # Try the same query again (for good measure!) + client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + + # Assert that we called clickhouse twice + self.assertEqual(execute_sync_mock.call_count, 2) + + def test_client_strips_comments_from_request(self): + """ + To ensure we can easily copy queries from `system.query_log` in e.g. + Metabase, we strip comments from the query we send. Metabase doesn't + display multilined output. + + See https://github.com/metabase/metabase/issues/14253 + + Note I'm not really testing much complexity, I trust that those will + come out as failures in other tests. + """ + from posthog.clickhouse.query_tagging import tag_queries + + # First add in the request information that should be added to the sql. 
+ # We check this to make sure it is not removed by the comment stripping + with self.capture_select_queries() as sqls: + tag_queries(kind="request", id="1") + sync_execute( + query=""" + -- this request returns 1 + SELECT 1 + """ + ) + self.assertEqual(len(sqls), 1) + first_query = sqls[0] + self.assertIn(f"SELECT 1", first_query) + self.assertNotIn("this request returns", first_query) + + # Make sure it still includes the "annotation" comment that includes + # request routing information for debugging purposes + self.assertIn("/* request:1 */", first_query) diff --git a/posthog/clickhouse/plugin_log_entries.py b/posthog/clickhouse/plugin_log_entries.py index 1f4f7c70d7146..1ac1cb0759ce7 100644 --- a/posthog/clickhouse/plugin_log_entries.py +++ b/posthog/clickhouse/plugin_log_entries.py @@ -25,7 +25,7 @@ PLUGIN_LOG_ENTRIES_TABLE_ENGINE = lambda: ReplacingMergeTree(PLUGIN_LOG_ENTRIES_TABLE, ver="_timestamp") PLUGIN_LOG_ENTRIES_TABLE_SQL = lambda: ( PLUGIN_LOG_ENTRIES_TABLE_BASE_SQL - + """PARTITION BY plugin_id ORDER BY (team_id, id) + + """PARTITION BY toYYYYMMDD(timestamp) ORDER BY (team_id, plugin_id, plugin_config_id, timestamp) {ttl_period} SETTINGS index_granularity=512 """ diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr index b260abb7ce1d0..cd975ff0f823c 100644 --- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr +++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr @@ -1367,7 +1367,7 @@ , _offset UInt64 ) ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_noshard/posthog.plugin_log_entries', '{replica}-{shard}', _timestamp) - PARTITION BY plugin_id ORDER BY (team_id, id) + PARTITION BY toYYYYMMDD(timestamp) ORDER BY (team_id, plugin_id, plugin_config_id, timestamp) SETTINGS index_granularity=512 @@ -2166,7 +2166,7 @@ , _offset UInt64 ) ENGINE = 
ReplicatedReplacingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_noshard/posthog.plugin_log_entries', '{replica}-{shard}', _timestamp) - PARTITION BY plugin_id ORDER BY (team_id, id) + PARTITION BY toYYYYMMDD(timestamp) ORDER BY (team_id, plugin_id, plugin_config_id, timestamp) SETTINGS index_granularity=512 diff --git a/posthog/hogql/filters.py b/posthog/hogql/filters.py index c900ac1bc5ea6..32ce707d0c647 100644 --- a/posthog/hogql/filters.py +++ b/posthog/hogql/filters.py @@ -63,6 +63,7 @@ def visit_placeholder(self, node): parse_expr( "timestamp < {timestamp}", {"timestamp": ast.Constant(value=parsed_date)}, + start=None, # do not add location information for "timestamp" to the metadata ) ) @@ -77,6 +78,7 @@ def visit_placeholder(self, node): parse_expr( "timestamp >= {timestamp}", {"timestamp": ast.Constant(value=parsed_date)}, + start=None, # do not add location information for "timestamp" to the metadata ) ) diff --git a/posthog/hogql/modifiers.py b/posthog/hogql/modifiers.py index 0643deefcc6fa..8884f197afcf6 100644 --- a/posthog/hogql/modifiers.py +++ b/posthog/hogql/modifiers.py @@ -1,7 +1,7 @@ from typing import Optional from posthog.models import Team -from posthog.schema import HogQLQueryModifiers +from posthog.schema import HogQLQueryModifiers, MaterializationMode from posthog.utils import PersonOnEventsMode @@ -22,4 +22,7 @@ def create_default_modifiers_for_team( if modifiers.inCohortVia is None: modifiers.inCohortVia = "subquery" + if modifiers.materializationMode is None or modifiers.materializationMode == MaterializationMode.auto: + modifiers.materializationMode = MaterializationMode.legacy_null_as_null + return modifiers diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index 5e5a076b9e55f..f89614d0dc95a 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -39,6 +39,7 @@ from posthog.models.property import PropertyName, TableColumn from posthog.models.team.team import WeekStartDay from 
posthog.models.utils import UUIDT +from posthog.schema import MaterializationMode from posthog.utils import PersonOnEventsMode @@ -907,47 +908,51 @@ def visit_property_type(self, type: ast.PropertyType): while isinstance(table, ast.TableAliasType): table = table.table_type - # find a materialized property for the first part of the chain - materialized_property_sql: Optional[str] = None - if isinstance(table, ast.TableType): - if self.dialect == "clickhouse": - table_name = table.table.to_printed_clickhouse(self.context) - else: - table_name = table.table.to_printed_hogql() - if field is None: - raise HogQLException(f"Can't resolve field {field_type.name} on table {table_name}") - field_name = cast(Union[Literal["properties"], Literal["person_properties"]], field.name) - - materialized_column = self._get_materialized_column(table_name, type.chain[0], field_name) - if materialized_column: - property_sql = self._print_identifier(materialized_column) - property_sql = f"{self.visit(field_type.table_type)}.{property_sql}" - materialized_property_sql = property_sql - elif ( - self.context.within_non_hogql_query - and (isinstance(table, ast.SelectQueryAliasType) and table.alias == "events__pdi__person") - or (isinstance(table, ast.VirtualTableType) and table.field == "poe") - ): - # :KLUDGE: Legacy person properties handling. Only used within non-HogQL queries, such as insights. - if self.context.modifiers.personsOnEventsMode != PersonOnEventsMode.DISABLED: - materialized_column = self._get_materialized_column("events", type.chain[0], "person_properties") - else: - materialized_column = self._get_materialized_column("person", type.chain[0], "properties") - if materialized_column: - materialized_property_sql = self._print_identifier(materialized_column) - args: List[str] = [] - if materialized_property_sql is not None: - # When reading materialized columns, treat the values "" and "null" as NULL-s. 
- # TODO: rematerialize all columns to support empty strings and "null" string values. - materialized_property_sql = f"nullIf(nullIf({materialized_property_sql}, ''), 'null')" - if len(type.chain) == 1: - return materialized_property_sql - else: - for name in type.chain[1:]: - args.append(self.context.add_value(name)) - return self._unsafe_json_extract_trim_quotes(materialized_property_sql, args) + if self.context.modifiers.materializationMode != "disabled": + # find a materialized property for the first part of the chain + materialized_property_sql: Optional[str] = None + if isinstance(table, ast.TableType): + if self.dialect == "clickhouse": + table_name = table.table.to_printed_clickhouse(self.context) + else: + table_name = table.table.to_printed_hogql() + if field is None: + raise HogQLException(f"Can't resolve field {field_type.name} on table {table_name}") + field_name = cast(Union[Literal["properties"], Literal["person_properties"]], field.name) + + materialized_column = self._get_materialized_column(table_name, type.chain[0], field_name) + if materialized_column: + property_sql = self._print_identifier(materialized_column) + property_sql = f"{self.visit(field_type.table_type)}.{property_sql}" + materialized_property_sql = property_sql + elif ( + self.context.within_non_hogql_query + and (isinstance(table, ast.SelectQueryAliasType) and table.alias == "events__pdi__person") + or (isinstance(table, ast.VirtualTableType) and table.field == "poe") + ): + # :KLUDGE: Legacy person properties handling. Only used within non-HogQL queries, such as insights. 
+ if self.context.modifiers.personsOnEventsMode != PersonOnEventsMode.DISABLED: + materialized_column = self._get_materialized_column("events", type.chain[0], "person_properties") + else: + materialized_column = self._get_materialized_column("person", type.chain[0], "properties") + if materialized_column: + materialized_property_sql = self._print_identifier(materialized_column) + + if materialized_property_sql is not None: + # TODO: rematerialize all columns to properly support empty strings and "null" string values. + if self.context.modifiers.materializationMode == MaterializationMode.legacy_null_as_string: + materialized_property_sql = f"nullIf({materialized_property_sql}, '')" + else: # MaterializationMode.auto.legacy_null_as_null + materialized_property_sql = f"nullIf(nullIf({materialized_property_sql}, ''), 'null')" + + if len(type.chain) == 1: + return materialized_property_sql + else: + for name in type.chain[1:]: + args.append(self.context.add_value(name)) + return self._unsafe_json_extract_trim_quotes(materialized_property_sql, args) for name in type.chain: args.append(self.context.add_value(name)) diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index 9d619c23175b6..5695a0d0be2e5 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -126,26 +126,35 @@ def property_to_expr( elif len(value) == 1: value = value[0] else: - exprs = [ - property_to_expr( - Property( - type=property.type, - key=property.key, - operator=property.operator, - value=v, - ), - team, - scope, + if operator in [PropertyOperator.exact, PropertyOperator.is_not]: + op = ( + ast.CompareOperationOp.In + if operator == PropertyOperator.exact + else ast.CompareOperationOp.NotIn ) - for v in value - ] - if ( - operator == PropertyOperator.is_not - or operator == PropertyOperator.not_icontains - or operator == PropertyOperator.not_regex - ): - return ast.And(exprs=exprs) - return ast.Or(exprs=exprs) + + return ast.CompareOperation( + op=op, + 
left=ast.Field(chain=["properties", property.key]), + right=ast.Tuple(exprs=[ast.Constant(value=v) for v in value]), + ) + else: + exprs = [ + property_to_expr( + Property( + type=property.type, + key=property.key, + operator=property.operator, + value=v, + ), + team, + scope, + ) + for v in value + ] + if operator == PropertyOperator.not_icontains or operator == PropertyOperator.not_regex: + return ast.And(exprs=exprs) + return ast.Or(exprs=exprs) chain = ["person", "properties"] if property.type == "person" and scope != "person" else ["properties"] field = ast.Field(chain=chain + [property.key]) diff --git a/posthog/hogql/test/test_modifiers.py b/posthog/hogql/test/test_modifiers.py index ba5ed58e84882..4296213727f37 100644 --- a/posthog/hogql/test/test_modifiers.py +++ b/posthog/hogql/test/test_modifiers.py @@ -1,7 +1,7 @@ from posthog.hogql.modifiers import create_default_modifiers_for_team from posthog.hogql.query import execute_hogql_query from posthog.models import Cohort -from posthog.schema import HogQLQueryModifiers, PersonsOnEventsMode +from posthog.schema import HogQLQueryModifiers, PersonsOnEventsMode, MaterializationMode from posthog.test.base import BaseTest from django.test import override_settings @@ -144,3 +144,43 @@ def test_modifiers_in_cohort_join(self): modifiers=HogQLQueryModifiers(inCohortVia="leftjoin"), ) assert "LEFT JOIN" in response.clickhouse + + def test_modifiers_materialization_mode(self): + try: + from ee.clickhouse.materialized_columns.analyze import materialize + except ModuleNotFoundError: + # EE not available? 
Assume we're good + self.assertEqual(1 + 2, 3) + return + materialize("events", "$browser") + + response = execute_hogql_query( + "SELECT properties.$browser FROM events", + team=self.team, + modifiers=HogQLQueryModifiers(materializationMode=MaterializationMode.auto), + ) + assert "SELECT nullIf(nullIf(events.`mat_$browser`, ''), 'null') FROM events" in response.clickhouse + + response = execute_hogql_query( + "SELECT properties.$browser FROM events", + team=self.team, + modifiers=HogQLQueryModifiers(materializationMode=MaterializationMode.legacy_null_as_null), + ) + assert "SELECT nullIf(nullIf(events.`mat_$browser`, ''), 'null') FROM events" in response.clickhouse + + response = execute_hogql_query( + "SELECT properties.$browser FROM events", + team=self.team, + modifiers=HogQLQueryModifiers(materializationMode=MaterializationMode.legacy_null_as_string), + ) + assert "SELECT nullIf(events.`mat_$browser`, '') FROM events" in response.clickhouse + + response = execute_hogql_query( + "SELECT properties.$browser FROM events", + team=self.team, + modifiers=HogQLQueryModifiers(materializationMode=MaterializationMode.disabled), + ) + assert ( + "SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, %(hogql_val_0)s), ''), 'null'), '^\"|\"$', '') FROM events" + in response.clickhouse + ) diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py index c0ed528ea4da9..ecdfecee28671 100644 --- a/posthog/hogql/test/test_property.py +++ b/posthog/hogql/test/test_property.py @@ -163,7 +163,7 @@ def test_property_to_expr_event_list(self): # positive self.assertEqual( self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "exact"}), - self._parse_expr("properties.a = 'b' or properties.a = 'c'"), + self._parse_expr("properties.a IN ('b', 'c')"), ) self.assertEqual( self._property_to_expr( @@ -183,7 +183,7 @@ def test_property_to_expr_event_list(self): # negative self.assertEqual( 
self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "is_not"}), - self._parse_expr("properties.a != 'b' and properties.a != 'c'"), + self._parse_expr("properties.a NOT IN ('b', 'c')"), ) self.assertEqual( self._property_to_expr( diff --git a/posthog/hogql/transforms/lazy_tables.py b/posthog/hogql/transforms/lazy_tables.py index 48018cd789264..b2a9a7d12bf4d 100644 --- a/posthog/hogql/transforms/lazy_tables.py +++ b/posthog/hogql/transforms/lazy_tables.py @@ -7,7 +7,7 @@ from posthog.hogql.errors import HogQLException from posthog.hogql.resolver import resolve_types from posthog.hogql.resolver_utils import get_long_table_name -from posthog.hogql.visitor import TraversingVisitor +from posthog.hogql.visitor import TraversingVisitor, clone_expr def resolve_lazy_tables( @@ -180,6 +180,7 @@ def visit_select_query(self, node: ast.SelectQuery): # For all the collected tables, create the subqueries, and add them to the table. for table_name, table_to_add in tables_to_add.items(): subquery = table_to_add.lazy_table.lazy_select(table_to_add.fields_accessed, self.context.modifiers) + subquery = cast(ast.SelectQuery, clone_expr(subquery, clear_locations=True)) subquery = cast(ast.SelectQuery, resolve_types(subquery, self.context, [node.type])) old_table_type = select_type.tables[table_name] select_type.tables[table_name] = ast.SelectQueryAliasType(alias=table_name, select_query_type=subquery.type) @@ -202,6 +203,7 @@ def visit_select_query(self, node: ast.SelectQuery): self.context, node, ) + join_to_add = cast(ast.JoinExpr, clone_expr(join_to_add, clear_locations=True)) join_to_add = cast(ast.JoinExpr, resolve_types(join_to_add, self.context, [node.type])) select_type.tables[to_table] = join_to_add.type diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py index a1b6973995668..e7ec26a441ded 100644 --- a/posthog/hogql_queries/events_query_runner.py +++ 
b/posthog/hogql_queries/events_query_runner.py @@ -1,6 +1,6 @@ import json from datetime import timedelta -from typing import Dict, List, Optional, Any +from typing import Dict, List, Optional from dateutil.parser import isoparse from django.db.models import Prefetch @@ -15,7 +15,7 @@ from posthog.hogql.query import execute_hogql_query from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.query_runner import QueryRunner -from posthog.models import Action, Person, Team +from posthog.models import Action, Person from posthog.models.element import chain_to_elements from posthog.models.person.person import get_distinct_ids_for_subquery from posthog.models.person.util import get_persons_by_distinct_ids @@ -39,19 +39,6 @@ class EventsQueryRunner(QueryRunner): query: EventsQuery query_type = EventsQuery - def __init__( - self, - query: EventsQuery | Dict[str, Any], - team: Team, - timings: Optional[HogQLTimings] = None, - in_export_context: Optional[bool] = False, - ): - super().__init__(query, team, timings, in_export_context) - if isinstance(query, EventsQuery): - self.query = query - else: - self.query = EventsQuery.model_validate(query) - def to_query(self) -> ast.SelectQuery: # Note: This code is inefficient and problematic, see https://github.com/PostHog/posthog/issues/13485 for details. 
if self.timings is None: @@ -199,6 +186,7 @@ def calculate(self) -> EventsQueryResponse: workload=Workload.ONLINE, query_type="EventsQuery", timings=self.timings, + modifiers=self.modifiers, in_export_context=self.in_export_context, ) diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py index 4326a2ba7dbee..a79e875d14a73 100644 --- a/posthog/hogql_queries/hogql_query_runner.py +++ b/posthog/hogql_queries/hogql_query_runner.py @@ -1,5 +1,4 @@ from datetime import timedelta -from typing import Dict, Optional, Any from posthog.clickhouse.client.connection import Workload from posthog.hogql import ast @@ -9,7 +8,6 @@ from posthog.hogql.query import execute_hogql_query from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.query_runner import QueryRunner -from posthog.models import Team from posthog.schema import ( HogQLQuery, HogQLQueryResponse, @@ -23,19 +21,6 @@ class HogQLQueryRunner(QueryRunner): query: HogQLQuery query_type = HogQLQuery - def __init__( - self, - query: HogQLQuery | Dict[str, Any], - team: Team, - timings: Optional[HogQLTimings] = None, - in_export_context: Optional[bool] = False, - ): - super().__init__(query, team, timings, in_export_context) - if isinstance(query, HogQLQuery): - self.query = query - else: - self.query = HogQLQuery.model_validate(query) - def to_query(self) -> ast.SelectQuery: if self.timings is None: self.timings = HogQLTimings() @@ -60,7 +45,7 @@ def calculate(self) -> HogQLQueryResponse: query_type="HogQLQuery", query=self.to_query(), filters=self.query.filters, - modifiers=self.query.modifiers, + modifiers=self.query.modifiers or self.modifiers, team=self.team, workload=Workload.ONLINE, timings=self.timings, diff --git a/posthog/hogql_queries/insights/insight_persons_query_runner.py b/posthog/hogql_queries/insights/insight_persons_query_runner.py index a6bc08c0d0849..51cf792346992 100644 --- a/posthog/hogql_queries/insights/insight_persons_query_runner.py 
+++ b/posthog/hogql_queries/insights/insight_persons_query_runner.py @@ -1,13 +1,11 @@ from datetime import timedelta -from typing import Dict, Optional, Any, cast +from typing import cast from posthog.hogql import ast from posthog.hogql.query import execute_hogql_query -from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.lifecycle_query_runner import LifecycleQueryRunner from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner from posthog.hogql_queries.query_runner import QueryRunner, get_query_runner -from posthog.models import Team from posthog.models.filters.mixins.utils import cached_property from posthog.schema import InsightPersonsQuery, HogQLQueryResponse @@ -16,19 +14,6 @@ class InsightPersonsQueryRunner(QueryRunner): query: InsightPersonsQuery query_type = InsightPersonsQuery - def __init__( - self, - query: InsightPersonsQuery | Dict[str, Any], - team: Team, - timings: Optional[HogQLTimings] = None, - in_export_context: Optional[bool] = False, - ): - super().__init__(query, team, timings, in_export_context) - if isinstance(query, InsightPersonsQuery): - self.query = query - else: - self.query = InsightPersonsQuery.model_validate(query) - @cached_property def source_runner(self) -> QueryRunner: return get_query_runner(self.query.source, self.team, self.timings, self.in_export_context) @@ -54,6 +39,7 @@ def calculate(self) -> HogQLQueryResponse: query=self.to_query(), team=self.team, timings=self.timings, + modifiers=self.modifiers, ) def _is_stale(self, cached_result_package): diff --git a/posthog/hogql_queries/insights/lifecycle_query_runner.py b/posthog/hogql_queries/insights/lifecycle_query_runner.py index da088f11daac7..c8731994d9a8c 100644 --- a/posthog/hogql_queries/insights/lifecycle_query_runner.py +++ b/posthog/hogql_queries/insights/lifecycle_query_runner.py @@ -1,6 +1,6 @@ from datetime import timedelta from math import ceil -from typing import Optional, Any, Dict, List +from typing 
import Optional, List from django.utils.timezone import datetime from posthog.caching.insights_api import ( @@ -14,9 +14,8 @@ from posthog.hogql.printer import to_printed_hogql from posthog.hogql.property import property_to_expr, action_to_expr from posthog.hogql.query import execute_hogql_query -from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.query_runner import QueryRunner -from posthog.models import Team, Action +from posthog.models import Action from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.filters.mixins.utils import cached_property from posthog.schema import ( @@ -31,15 +30,6 @@ class LifecycleQueryRunner(QueryRunner): query: LifecycleQuery query_type = LifecycleQuery - def __init__( - self, - query: LifecycleQuery | Dict[str, Any], - team: Team, - timings: Optional[HogQLTimings] = None, - in_export_context: Optional[bool] = False, - ): - super().__init__(query, team, timings, in_export_context) - def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: if self.query.samplingFactor == 0: counts_with_sampling = ast.Constant(value=0) @@ -139,6 +129,7 @@ def calculate(self) -> LifecycleQueryResponse: query=query, team=self.team, timings=self.timings, + modifiers=self.modifiers, ) # TODO: can we move the data conversion part into the query as well? 
It would make it easier to swap diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py index f6afdfd591e85..f7499741cd51e 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py @@ -420,10 +420,10 @@ def test_trends_breakdowns(self): assert len(response.results) == 4 assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari"] - assert response.results[0]["label"] == f"$pageview - Chrome" - assert response.results[1]["label"] == f"$pageview - Edge" - assert response.results[2]["label"] == f"$pageview - Firefox" - assert response.results[3]["label"] == f"$pageview - Safari" + assert response.results[0]["label"] == f"Chrome" + assert response.results[1]["label"] == f"Edge" + assert response.results[2]["label"] == f"Firefox" + assert response.results[3]["label"] == f"Safari" assert response.results[0]["count"] == 6 assert response.results[1]["count"] == 1 assert response.results[2]["count"] == 2 @@ -479,11 +479,11 @@ def test_trends_breakdowns_histogram(self): "[32.5,40.01]", ] - assert response.results[0]["label"] == '$pageview - ["",""]' - assert response.results[1]["label"] == "$pageview - [10.0,17.5]" - assert response.results[2]["label"] == "$pageview - [17.5,25.0]" - assert response.results[3]["label"] == "$pageview - [25.0,32.5]" - assert response.results[4]["label"] == "$pageview - [32.5,40.01]" + assert response.results[0]["label"] == '["",""]' + assert response.results[1]["label"] == "[10.0,17.5]" + assert response.results[2]["label"] == "[17.5,25.0]" + assert response.results[3]["label"] == "[25.0,32.5]" + assert response.results[4]["label"] == "[32.5,40.01]" assert response.results[0]["data"] == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] assert response.results[1]["data"] == [0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0] @@ -554,14 +554,47 @@ def 
test_trends_breakdowns_hogql(self): assert len(response.results) == 4 assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari"] + assert response.results[0]["label"] == f"Chrome" + assert response.results[1]["label"] == f"Edge" + assert response.results[2]["label"] == f"Firefox" + assert response.results[3]["label"] == f"Safari" + assert response.results[0]["count"] == 6 + assert response.results[1]["count"] == 1 + assert response.results[2]["count"] == 2 + assert response.results[3]["count"] == 1 + + def test_trends_breakdowns_multiple_hogql(self): + self._create_test_events() + + response = self._run_trends_query( + "2020-01-09", + "2020-01-20", + IntervalType.day, + [EventsNode(event="$pageview"), EventsNode(event="$pageleave")], + None, + BreakdownFilter(breakdown_type=BreakdownType.hogql, breakdown="properties.$browser"), + ) + + breakdown_labels = [result["breakdown_value"] for result in response.results] + + assert len(response.results) == 8 + assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari", "Chrome", "Edge", "Firefox", "Safari"] assert response.results[0]["label"] == f"$pageview - Chrome" assert response.results[1]["label"] == f"$pageview - Edge" assert response.results[2]["label"] == f"$pageview - Firefox" assert response.results[3]["label"] == f"$pageview - Safari" + assert response.results[4]["label"] == f"$pageleave - Chrome" + assert response.results[5]["label"] == f"$pageleave - Edge" + assert response.results[6]["label"] == f"$pageleave - Firefox" + assert response.results[7]["label"] == f"$pageleave - Safari" assert response.results[0]["count"] == 6 assert response.results[1]["count"] == 1 assert response.results[2]["count"] == 2 assert response.results[3]["count"] == 1 + assert response.results[4]["count"] == 3 + assert response.results[5]["count"] == 1 + assert response.results[6]["count"] == 1 + assert response.results[7]["count"] == 1 def test_trends_breakdowns_and_compare(self): self._create_test_events() @@ -626,10 
+659,10 @@ def test_trends_breakdown_and_aggregation_query_orchestration(self): assert len(response.results) == 4 assert breakdown_labels == ["Chrome", "Edge", "Firefox", "Safari"] - assert response.results[0]["label"] == f"$pageview - Chrome" - assert response.results[1]["label"] == f"$pageview - Edge" - assert response.results[2]["label"] == f"$pageview - Firefox" - assert response.results[3]["label"] == f"$pageview - Safari" + assert response.results[0]["label"] == f"Chrome" + assert response.results[1]["label"] == f"Edge" + assert response.results[2]["label"] == f"Firefox" + assert response.results[3]["label"] == f"Safari" assert response.results[0]["data"] == [ 0, diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index ff013658d021e..3aac186437f1c 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -35,6 +35,7 @@ HogQLQueryResponse, TrendsQuery, TrendsQueryResponse, + HogQLQueryModifiers, ) @@ -48,9 +49,10 @@ def __init__( query: TrendsQuery | Dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, + modifiers: Optional[HogQLQueryModifiers] = None, in_export_context: Optional[bool] = None, ): - super().__init__(query, team, timings, in_export_context) + super().__init__(query, team=team, timings=timings, modifiers=modifiers, in_export_context=in_export_context) self.series = self.setup_series() def _is_stale(self, cached_result_package): @@ -129,11 +131,12 @@ def calculate(self): query=query, team=self.team, timings=self.timings, + modifiers=self.modifiers, ) timings.extend(response.timings) - res.extend(self.build_series_response(response, series_with_extra)) + res.extend(self.build_series_response(response, series_with_extra, len(queries))) if ( self.query.trendsFilter is not None @@ -144,7 +147,7 @@ def calculate(self): return TrendsQueryResponse(results=res, 
timings=timings) - def build_series_response(self, response: HogQLQueryResponse, series: SeriesWithExtras): + def build_series_response(self, response: HogQLQueryResponse, series: SeriesWithExtras, series_count: int): if response.results is None: return [] @@ -243,7 +246,13 @@ def get_value(name: str, val: Any): series_object["label"] = "{} - {}".format(series_object["label"], cohort_name) series_object["breakdown_value"] = get_value("breakdown_value", val) else: - series_object["label"] = "{} - {}".format(series_object["label"], get_value("breakdown_value", val)) + # If there's multiple series, include the object label in the series label + if series_count > 1: + series_object["label"] = "{} - {}".format( + series_object["label"], get_value("breakdown_value", val) + ) + else: + series_object["label"] = get_value("breakdown_value", val) series_object["breakdown_value"] = get_value("breakdown_value", val) res.append(series_object) diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index ce490cadfc834..1fb210226f619 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -298,7 +298,7 @@ def _breakdown_filter(_filter: Dict): def _group_aggregation_filter(filter: Dict): - if _insight_type(filter) == "STICKINESS": + if _insight_type(filter) == "STICKINESS" or _insight_type(filter) == "LIFECYCLE": return {} return {"aggregation_group_type_index": filter.get("aggregation_group_type_index")} diff --git a/posthog/hogql_queries/persons_query_runner.py b/posthog/hogql_queries/persons_query_runner.py index 34c86ee13300b..9d45b0a8ccb69 100644 --- a/posthog/hogql_queries/persons_query_runner.py +++ b/posthog/hogql_queries/persons_query_runner.py @@ -1,15 +1,13 @@ import json from datetime import timedelta -from typing import Optional, Any, Dict, List, cast, Literal +from typing import List, cast, 
Literal from posthog.hogql import ast from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, MAX_SELECT_RETURNED_ROWS from posthog.hogql.parser import parse_expr, parse_order_expr from posthog.hogql.property import property_to_expr, has_aggregation from posthog.hogql.query import execute_hogql_query -from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.query_runner import QueryRunner, get_query_runner -from posthog.models import Team from posthog.schema import PersonsQuery, PersonsQueryResponse PERSON_FULL_TUPLE = ["id", "properties", "created_at", "is_identified"] @@ -19,25 +17,13 @@ class PersonsQueryRunner(QueryRunner): query: PersonsQuery query_type = PersonsQuery - def __init__( - self, - query: PersonsQuery | Dict[str, Any], - team: Team, - timings: Optional[HogQLTimings] = None, - in_export_context: Optional[bool] = False, - ): - super().__init__(query=query, team=team, timings=timings, in_export_context=in_export_context) - if isinstance(query, PersonsQuery): - self.query = query - else: - self.query = PersonsQuery.model_validate(query) - def calculate(self) -> PersonsQueryResponse: response = execute_hogql_query( query_type="PersonsQuery", query=self.to_query(), team=self.team, timings=self.timings, + modifiers=self.modifiers, ) input_columns = self.input_columns() if "person" in input_columns: diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 13c59c6d51c88..d3127fde4e4de 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -28,6 +28,7 @@ HogQLQuery, InsightPersonsQuery, DashboardFilter, + HogQLQueryModifiers, ) from posthog.utils import generate_cache_key, get_safe_cache @@ -86,6 +87,7 @@ def get_query_runner( team: Team, timings: Optional[HogQLTimings] = None, in_export_context: Optional[bool] = False, + modifiers: Optional[HogQLQueryModifiers] = None, ) -> "QueryRunner": kind = None if isinstance(query, dict): @@ -103,6 +105,7 @@ def 
get_query_runner( team=team, timings=timings, in_export_context=in_export_context, + modifiers=modifiers, ) if kind == "TrendsQuery": from .insights.trends.trends_query_runner import TrendsQueryRunner @@ -112,6 +115,7 @@ def get_query_runner( team=team, timings=timings, in_export_context=in_export_context, + modifiers=modifiers, ) if kind == "EventsQuery": from .events_query_runner import EventsQueryRunner @@ -121,6 +125,7 @@ def get_query_runner( team=team, timings=timings, in_export_context=in_export_context, + modifiers=modifiers, ) if kind == "PersonsQuery": from .persons_query_runner import PersonsQueryRunner @@ -130,6 +135,7 @@ def get_query_runner( team=team, timings=timings, in_export_context=in_export_context, + modifiers=modifiers, ) if kind == "InsightPersonsQuery": from .insights.insight_persons_query_runner import InsightPersonsQueryRunner @@ -139,6 +145,7 @@ def get_query_runner( team=team, timings=timings, in_export_context=in_export_context, + modifiers=modifiers, ) if kind == "HogQLQuery": from .hogql_query_runner import HogQLQueryRunner @@ -148,6 +155,7 @@ def get_query_runner( team=team, timings=timings, in_export_context=in_export_context, + modifiers=modifiers, ) if kind == "SessionsTimelineQuery": from .sessions_timeline_query_runner import SessionsTimelineQueryRunner @@ -156,19 +164,20 @@ def get_query_runner( query=cast(SessionsTimelineQuery | Dict[str, Any], query), team=team, timings=timings, + modifiers=modifiers, ) if kind == "WebOverviewQuery": from .web_analytics.web_overview import WebOverviewQueryRunner - return WebOverviewQueryRunner(query=query, team=team, timings=timings) + return WebOverviewQueryRunner(query=query, team=team, timings=timings, modifiers=modifiers) if kind == "WebTopClicksQuery": from .web_analytics.top_clicks import WebTopClicksQueryRunner - return WebTopClicksQueryRunner(query=query, team=team, timings=timings) + return WebTopClicksQueryRunner(query=query, team=team, timings=timings, modifiers=modifiers) if kind 
== "WebStatsTableQuery": from .web_analytics.stats_table import WebStatsTableQueryRunner - return WebStatsTableQueryRunner(query=query, team=team, timings=timings) + return WebStatsTableQueryRunner(query=query, team=team, timings=timings, modifiers=modifiers) raise ValueError(f"Can't get a runner for an unknown query kind: {kind}") @@ -178,6 +187,7 @@ class QueryRunner(ABC): query_type: Type[RunnableQueryNode] team: Team timings: HogQLTimings + modifiers: HogQLQueryModifiers in_export_context: bool def __init__( @@ -185,11 +195,13 @@ def __init__( query: RunnableQueryNode | BaseModel | Dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None, + modifiers: Optional[HogQLQueryModifiers] = None, in_export_context: Optional[bool] = False, ): self.team = team self.timings = timings or HogQLTimings() self.in_export_context = in_export_context or False + self.modifiers = create_default_modifiers_for_team(team, modifiers) if isinstance(query, self.query_type): self.query = query # type: ignore else: @@ -244,7 +256,7 @@ def to_hogql(self) -> str: team_id=self.team.pk, enable_select_queries=True, timings=self.timings, - modifiers=create_default_modifiers_for_team(self.team), + modifiers=self.modifiers, ), "hogql", ) @@ -253,8 +265,9 @@ def toJSON(self) -> str: return self.query.model_dump_json(exclude_defaults=True, exclude_none=True) def _cache_key(self) -> str: + modifiers = self.modifiers.model_dump_json(exclude_defaults=True, exclude_none=True) return generate_cache_key( - f"query_{self.toJSON()}_{self.__class__.__name__}_{self.team.pk}_{self.team.timezone}" + f"query_{self.toJSON()}_{self.__class__.__name__}_{self.team.pk}_{self.team.timezone}_{modifiers}" ) @abstractmethod diff --git a/posthog/hogql_queries/sessions_timeline_query_runner.py b/posthog/hogql_queries/sessions_timeline_query_runner.py index abea2867e2b90..54f024900ff06 100644 --- a/posthog/hogql_queries/sessions_timeline_query_runner.py +++ 
b/posthog/hogql_queries/sessions_timeline_query_runner.py @@ -1,6 +1,6 @@ from datetime import timedelta import json -from typing import Dict, Optional, Any, cast +from typing import Dict, cast from posthog.api.element import ElementSerializer @@ -10,7 +10,6 @@ from posthog.hogql.query import execute_hogql_query from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.query_runner import QueryRunner -from posthog.models import Team from posthog.models.element.element import chain_to_elements from posthog.schema import EventType, SessionsTimelineQuery, SessionsTimelineQueryResponse, TimelineEntry from posthog.utils import relative_date_parse @@ -37,18 +36,6 @@ class SessionsTimelineQueryRunner(QueryRunner): query: SessionsTimelineQuery query_type = SessionsTimelineQuery - def __init__( - self, - query: SessionsTimelineQuery | Dict[str, Any], - team: Team, - timings: Optional[HogQLTimings] = None, - ): - super().__init__(query, team, timings) - if isinstance(query, SessionsTimelineQuery): - self.query = query - else: - self.query = SessionsTimelineQuery.model_validate(query) - def _get_events_subquery(self) -> ast.SelectQuery: after = relative_date_parse(self.query.after or "-24h", self.team.timezone_info) before = relative_date_parse(self.query.before or "-0h", self.team.timezone_info) @@ -147,6 +134,7 @@ def calculate(self) -> SessionsTimelineQueryResponse: workload=Workload.ONLINE, query_type="SessionsTimelineQuery", timings=self.timings, + modifiers=self.modifiers, ) assert query_result.results is not None timeline_entries_map: Dict[str, TimelineEntry] = {} diff --git a/posthog/hogql_queries/test/test_query_runner.py b/posthog/hogql_queries/test/test_query_runner.py index 5b82b0fae5af9..28a0d47036778 100644 --- a/posthog/hogql_queries/test/test_query_runner.py +++ b/posthog/hogql_queries/test/test_query_runner.py @@ -12,6 +12,7 @@ RunnableQueryNode, ) from posthog.models.team.team import Team +from posthog.schema import HogQLQueryModifiers, 
MaterializationMode, HogQLQuery from posthog.test.base import BaseTest @@ -92,7 +93,7 @@ def test_cache_key(self): runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) # type: ignore cache_key = runner._cache_key() - self.assertEqual(cache_key, "cache_33c9ea3098895d5a363a75feefafef06") + self.assertEqual(cache_key, "cache_b8a6b70478ec6139c8f7f379c808d5b9") def test_cache_key_runner_subclass(self): TestQueryRunner = self.setup_test_query_runner_class() @@ -106,7 +107,7 @@ class TestSubclassQueryRunner(TestQueryRunner): # type: ignore runner = TestSubclassQueryRunner(query={"some_attr": "bla"}, team=team) # type: ignore cache_key = runner._cache_key() - self.assertEqual(cache_key, "cache_d626615de8ad0df73c1d8610ca586597") + self.assertEqual(cache_key, "cache_cfab9e42d088def74792922de5b513ac") def test_cache_key_different_timezone(self): TestQueryRunner = self.setup_test_query_runner_class() @@ -117,7 +118,7 @@ def test_cache_key_different_timezone(self): runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) # type: ignore cache_key = runner._cache_key() - self.assertEqual(cache_key, "cache_aeb23ec9e8de56dd8499f99f2e976d5a") + self.assertEqual(cache_key, "cache_9f12fefe07c0ab79e93935aed6b0bfa6") def test_cache_response(self): TestQueryRunner = self.setup_test_query_runner_class() @@ -143,3 +144,28 @@ def test_cache_response(self): # returns fresh response if stale response = runner.run(refresh_requested=False) self.assertEqual(response.is_cached, False) + + def test_modifier_passthrough(self): + try: + from ee.clickhouse.materialized_columns.analyze import materialize + from posthog.hogql_queries.hogql_query_runner import HogQLQueryRunner + + materialize("events", "$browser") + except ModuleNotFoundError: + # EE not available? 
Assume we're good + self.assertEqual(1 + 2, 3) + return + + runner = HogQLQueryRunner( + query=HogQLQuery(query="select properties.$browser from events"), + team=self.team, + modifiers=HogQLQueryModifiers(materializationMode=MaterializationMode.legacy_null_as_string), + ) + assert "events.`mat_$browser" in runner.calculate().clickhouse + + runner = HogQLQueryRunner( + query=HogQLQuery(query="select properties.$browser from events"), + team=self.team, + modifiers=HogQLQueryModifiers(materializationMode=MaterializationMode.disabled), + ) + assert "events.`mat_$browser" not in runner.calculate().clickhouse diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py index 0e3f9b67a1943..815ce775c91d8 100644 --- a/posthog/hogql_queries/web_analytics/stats_table.py +++ b/posthog/hogql_queries/web_analytics/stats_table.py @@ -77,6 +77,7 @@ def calculate(self): query=self.to_query(), team=self.team, timings=self.timings, + modifiers=self.modifiers, ) return WebStatsTableQueryResponse( diff --git a/posthog/hogql_queries/web_analytics/top_clicks.py b/posthog/hogql_queries/web_analytics/top_clicks.py index 1693f2c1d86ce..3218e68975f7a 100644 --- a/posthog/hogql_queries/web_analytics/top_clicks.py +++ b/posthog/hogql_queries/web_analytics/top_clicks.py @@ -50,6 +50,7 @@ def calculate(self): query=self.to_query(), team=self.team, timings=self.timings, + modifiers=self.modifiers, ) return WebTopClicksQueryResponse( diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index 4c8b2b857eec3..201fad05baf8c 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -48,17 +48,21 @@ def property_filters_without_pathname(self) -> List[Union[EventPropertyFilter, P return [p for p in self.query.properties if p.key != "$pathname"] def 
session_where(self, include_previous_period: Optional[bool] = None): - properties = [ - parse_expr( - "events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))", - placeholders={ - "date_from": self.query_date_range.previous_period_date_from_as_hogql() - if include_previous_period - else self.query_date_range.date_from_as_hogql(), - "date_to": self.query_date_range.date_to_as_hogql(), - }, - ) - ] + self.property_filters_without_pathname + properties = ( + [ + parse_expr( + "events.timestamp < {date_to} AND events.timestamp >= minus({date_from}, toIntervalHour(1))", + placeholders={ + "date_from": self.query_date_range.previous_period_date_from_as_hogql() + if include_previous_period + else self.query_date_range.date_from_as_hogql(), + "date_to": self.query_date_range.date_to_as_hogql(), + }, + ) + ] + + self.property_filters_without_pathname + + self._test_account_filters + ) return property_to_expr( properties, self.team, @@ -91,17 +95,29 @@ def session_having(self, include_previous_period: Optional[bool] = None): ) def events_where(self): - properties = [ - parse_expr( - "events.timestamp >= {date_from}", - placeholders={"date_from": self.query_date_range.date_from_as_hogql()}, - ) - ] + self.query.properties + properties = ( + [ + parse_expr( + "events.timestamp >= {date_from}", + placeholders={"date_from": self.query_date_range.date_from_as_hogql()}, + ) + ] + + self.query.properties + + self._test_account_filters + ) + return property_to_expr( properties, self.team, ) + @cached_property + def _test_account_filters(self): + if isinstance(self.team.test_account_filters, list) and len(self.team.test_account_filters) > 0: + return self.team.test_account_filters + else: + return [] + def _is_stale(self, cached_result_package): date_to = self.query_date_range.date_to() interval = self.query_date_range.interval_name diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py 
index 19a587245443d..bd7aba12364dd 100644 --- a/posthog/hogql_queries/web_analytics/web_overview.py +++ b/posthog/hogql_queries/web_analytics/web_overview.py @@ -104,6 +104,7 @@ def calculate(self): query=self.to_query(), team=self.team, timings=self.timings, + modifiers=self.modifiers, ) row = response.results[0] diff --git a/posthog/migrations/0364_team_external_data_workspace_rows.py b/posthog/migrations/0364_team_external_data_workspace_rows.py new file mode 100644 index 0000000000000..ec9478becd1c9 --- /dev/null +++ b/posthog/migrations/0364_team_external_data_workspace_rows.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.19 on 2023-11-07 20:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0363_add_replay_payload_capture_config"), + ] + + operations = [ + migrations.AddField( + model_name="team", + name="external_data_workspace_last_synced_at", + field=models.DateTimeField(blank=True, null=True), + ) + ] diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py index 800b937d51f15..abd4e6c89920c 100644 --- a/posthog/models/cohort/util.py +++ b/posthog/models/cohort/util.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Set, Tuple, Union import structlog from dateutil import parser @@ -468,3 +468,53 @@ def get_dependent_cohorts( continue return cohorts + + +def sort_cohorts_topologically(cohort_ids: Set[int], seen_cohorts_cache: Dict[str, Cohort]) -> List[int]: + """ + Sorts the given cohorts in an order where cohorts with no dependencies are placed first, + followed by cohorts that depend on the preceding ones. It ensures that each cohort in the sorted list + only depends on cohorts that appear earlier in the list. 
+ """ + + if not cohort_ids: + return [] + + dependency_graph: Dict[int, List[int]] = {} + seen = set() + + # build graph (adjacency list) + def traverse(cohort): + # add parent + dependency_graph[cohort.id] = [] + for prop in cohort.properties.flat: + if prop.type == "cohort" and not isinstance(prop.value, list): + # add child + dependency_graph[cohort.id].append(int(prop.value)) + + neighbor_cohort = seen_cohorts_cache[str(prop.value)] + if cohort.id not in seen: + seen.add(cohort.id) + traverse(neighbor_cohort) + + for cohort_id in cohort_ids: + cohort = seen_cohorts_cache[str(cohort_id)] + traverse(cohort) + + # post-order DFS (children first, then the parent) + def dfs(node, seen, sorted_arr): + neighbors = dependency_graph.get(node, []) + for neighbor in neighbors: + if neighbor not in seen: + dfs(neighbor, seen, sorted_arr) + sorted_arr.append(int(node)) + seen.add(node) + + sorted_cohort_ids: List[int] = [] + seen = set() + for cohort_id in cohort_ids: + if cohort_id not in seen: + seen.add(cohort_id) + dfs(cohort_id, seen, sorted_cohort_ids) + + return sorted_cohort_ids diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py index 97d5d3e1aace8..36379563aa7f7 100644 --- a/posthog/models/feature_flag/feature_flag.py +++ b/posthog/models/feature_flag/feature_flag.py @@ -260,8 +260,9 @@ def get_cohort_ids( self, using_database: str = "default", seen_cohorts_cache: Optional[Dict[str, Cohort]] = None, + sort_by_topological_order=False, ) -> List[int]: - from posthog.models.cohort.util import get_dependent_cohorts + from posthog.models.cohort.util import get_dependent_cohorts, sort_cohorts_topologically if seen_cohorts_cache is None: seen_cohorts_cache = {} @@ -293,6 +294,8 @@ def get_cohort_ids( ) except Cohort.DoesNotExist: continue + if sort_by_topological_order: + return sort_cohorts_topologically(cohort_ids, seen_cohorts_cache) return list(cohort_ids) diff --git 
a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr index 648679a965912..c236a5d28a6fa 100644 --- a/posthog/models/filters/test/__snapshots__/test_filter.ambr +++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr @@ -49,7 +49,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -106,7 +107,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -163,7 +165,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -220,7 +223,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -277,7 +281,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 diff --git a/posthog/models/organization.py b/posthog/models/organization.py index 4e1c7af79838c..869ba9f0f6e75 100644 --- a/posthog/models/organization.py +++ b/posthog/models/organization.py @@ -53,6 +53,7 @@ class OrganizationUsageResource(TypedDict): class OrganizationUsageInfo(TypedDict): events: Optional[OrganizationUsageResource] recordings: Optional[OrganizationUsageResource] + rows_synced: Optional[OrganizationUsageResource] period: Optional[List[str]] diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 2f5654e0f039a..d03799ad2343b 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -247,6 +247,7 @@ def aggregate_users_by_distinct_id(self) -> bool: event_properties_with_usage: models.JSONField = models.JSONField(default=list, blank=True) event_properties_numerical: models.JSONField = models.JSONField(default=list, blank=True) external_data_workspace_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) + external_data_workspace_last_synced_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) objects: TeamManager = TeamManager() diff --git a/posthog/ph_client.py b/posthog/ph_client.py new file mode 100644 index 0000000000000..e81161a59d470 --- /dev/null +++ b/posthog/ph_client.py @@ -0,0 +1,27 @@ +from posthog.utils import get_instance_region +from posthog.cloud_utils import is_cloud + + +def get_ph_client(): + from posthoganalytics import Posthog + + if not is_cloud(): + return + + # send EU data to EU, US data to US + api_key = None + host = None + region = get_instance_region() + if region == "EU": + api_key = "phc_dZ4GK1LRjhB97XozMSkEwPXx7OVANaJEwLErkY1phUF" + host = "https://eu.posthog.com" + elif region == "US": + api_key = "sTMFPsFhdP1Ssg" + host = "https://app.posthog.com" + + if not api_key: + return + + ph_client = Posthog(api_key, host=host) 
+ + return ph_client diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py index 63b7024d3d6bf..2dfe50e24b7d9 100644 --- a/posthog/queries/test/test_trends.py +++ b/posthog/queries/test/test_trends.py @@ -474,14 +474,14 @@ def test_trends_breakdown_cumulative(self): self.team, ) - self.assertEqual(response[0]["label"], "sign up - none") + self.assertEqual(response[0]["label"], "none") self.assertEqual(response[0]["labels"][4], "1-Jan-2020") self.assertEqual(response[0]["data"], [0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0]) - self.assertEqual(response[1]["label"], "sign up - other_value") + self.assertEqual(response[1]["label"], "other_value") self.assertEqual(response[1]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0]) - self.assertEqual(response[2]["label"], "sign up - value") + self.assertEqual(response[2]["label"], "value") self.assertEqual(response[2]["data"], [0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0]) def test_trends_single_aggregate_dau(self): @@ -919,13 +919,14 @@ def test_trends_breakdown_single_aggregate_cohorts(self): ) for result in event_response: - if result["label"] == "sign up - cohort1": + if result["label"] == "cohort1": self.assertEqual(result["aggregated_value"], 2) - elif result["label"] == "sign up - cohort2": + elif result["label"] == "cohort2": self.assertEqual(result["aggregated_value"], 2) - elif result["label"] == "sign up - cohort3": + elif result["label"] == "cohort3": self.assertEqual(result["aggregated_value"], 3) else: + self.assertEqual(result["label"], "all users") self.assertEqual(result["aggregated_value"], 7) def test_trends_breakdown_single_aggregate(self): @@ -3869,7 +3870,7 @@ def test_breakdown_by_empty_cohort(self): self.team, ) - self.assertEqual(event_response[0]["label"], "$pageview - all users") + self.assertEqual(event_response[0]["label"], "all users") self.assertEqual(sum(event_response[0]["data"]), 1) @also_test_with_person_on_events_v2 @@ -3935,15 +3936,15 @@ def 
test_breakdown_by_cohort(self): counts[res["label"]] = sum(res["data"]) break_val[res["label"]] = res["breakdown_value"] - self.assertEqual(counts["watched movie - cohort1"], 1) - self.assertEqual(counts["watched movie - cohort2"], 3) - self.assertEqual(counts["watched movie - cohort3"], 4) - self.assertEqual(counts["watched movie - all users"], 7) + self.assertEqual(counts["cohort1"], 1) + self.assertEqual(counts["cohort2"], 3) + self.assertEqual(counts["cohort3"], 4) + self.assertEqual(counts["all users"], 7) - self.assertEqual(break_val["watched movie - cohort1"], cohort.pk) - self.assertEqual(break_val["watched movie - cohort2"], cohort2.pk) - self.assertEqual(break_val["watched movie - cohort3"], cohort3.pk) - self.assertEqual(break_val["watched movie - all users"], "all") + self.assertEqual(break_val["cohort1"], cohort.pk) + self.assertEqual(break_val["cohort2"], cohort2.pk) + self.assertEqual(break_val["cohort3"], cohort3.pk) + self.assertEqual(break_val["all users"], "all") self.assertEntityResponseEqual(event_response, action_response) @@ -4085,7 +4086,7 @@ def test_breakdown_by_person_property(self): for response in event_response: if response["breakdown_value"] == "person1": self.assertEqual(response["count"], 1) - self.assertEqual(response["label"], "watched movie - person1") + self.assertEqual(response["label"], "person1") if response["breakdown_value"] == "person2": self.assertEqual(response["count"], 3) if response["breakdown_value"] == "person3": @@ -4126,7 +4127,7 @@ def test_breakdown_by_person_property_for_person_on_events(self): for response in event_response: if response["breakdown_value"] == "person1": self.assertEqual(response["count"], 1) - self.assertEqual(response["label"], "watched movie - person1") + self.assertEqual(response["label"], "person1") if response["breakdown_value"] == "person2": self.assertEqual(response["count"], 3) if response["breakdown_value"] == "person3": @@ -4666,9 +4667,9 @@ def 
test_trends_aggregate_by_distinct_id(self): self.team, ) self.assertEqual(daily_response[0]["data"][0], 2) - self.assertEqual(daily_response[0]["label"], "sign up - some_val") + self.assertEqual(daily_response[0]["label"], "some_val") self.assertEqual(daily_response[1]["data"][0], 1) - self.assertEqual(daily_response[1]["label"], "sign up - none") + self.assertEqual(daily_response[1]["label"], "none") # MAU with freeze_time("2019-12-31T13:00:01Z"): @@ -4809,8 +4810,8 @@ def test_breakdown_filtering(self): ) self.assertEqual(response[0]["label"], "sign up - none") - self.assertEqual(response[2]["label"], "sign up - other_value") self.assertEqual(response[1]["label"], "sign up - value") + self.assertEqual(response[2]["label"], "sign up - other_value") self.assertEqual(response[3]["label"], "no events - none") self.assertEqual(sum(response[0]["data"]), 2) @@ -4869,9 +4870,9 @@ def test_breakdown_filtering_persons(self): ), self.team, ) - self.assertEqual(response[0]["label"], "sign up - none") - self.assertEqual(response[1]["label"], "sign up - test@gmail.com") - self.assertEqual(response[2]["label"], "sign up - test@posthog.com") + self.assertEqual(response[0]["label"], "none") + self.assertEqual(response[1]["label"], "test@gmail.com") + self.assertEqual(response[2]["label"], "test@posthog.com") self.assertEqual(response[0]["count"], 1) self.assertEqual(response[1]["count"], 1) @@ -4927,9 +4928,9 @@ def test_breakdown_filtering_persons_with_action_props(self): ), self.team, ) - self.assertEqual(response[0]["label"], "sign up - none") - self.assertEqual(response[1]["label"], "sign up - test@gmail.com") - self.assertEqual(response[2]["label"], "sign up - test@posthog.com") + self.assertEqual(response[0]["label"], "none") + self.assertEqual(response[1]["label"], "test@gmail.com") + self.assertEqual(response[2]["label"], "test@posthog.com") self.assertEqual(response[0]["count"], 1) self.assertEqual(response[1]["count"], 1) @@ -5003,8 +5004,8 @@ def 
test_breakdown_filtering_with_properties(self): ) response = sorted(response, key=lambda x: x["label"]) - self.assertEqual(response[0]["label"], "sign up - first url") - self.assertEqual(response[1]["label"], "sign up - second url") + self.assertEqual(response[0]["label"], "first url") + self.assertEqual(response[1]["label"], "second url") self.assertEqual(sum(response[0]["data"]), 1) self.assertEqual(response[0]["breakdown_value"], "first url") @@ -5086,7 +5087,7 @@ def test_breakdown_filtering_with_properties_in_new_format(self): ) response = sorted(response, key=lambda x: x["label"]) - self.assertEqual(response[0]["label"], "sign up - second url") + self.assertEqual(response[0]["label"], "second url") self.assertEqual(sum(response[0]["data"]), 1) self.assertEqual(response[0]["breakdown_value"], "second url") @@ -5170,8 +5171,8 @@ def test_mau_with_breakdown_filtering_and_prop_filter(self): self.team, ) - self.assertEqual(event_response[0]["label"], "sign up - some_val") - self.assertEqual(event_response[1]["label"], "sign up - some_val2") + self.assertEqual(event_response[0]["label"], "some_val") + self.assertEqual(event_response[1]["label"], "some_val2") self.assertEqual(sum(event_response[0]["data"]), 2) self.assertEqual(event_response[0]["data"][5], 1) @@ -5211,8 +5212,8 @@ def test_dau_with_breakdown_filtering(self): self.team, ) - self.assertEqual(event_response[1]["label"], "sign up - other_value") - self.assertEqual(event_response[2]["label"], "sign up - value") + self.assertEqual(event_response[1]["label"], "other_value") + self.assertEqual(event_response[2]["label"], "value") self.assertEqual(sum(event_response[1]["data"]), 1) self.assertEqual(event_response[1]["data"][5], 1) @@ -5256,8 +5257,8 @@ def test_dau_with_breakdown_filtering_with_sampling(self): self.team, ) - self.assertEqual(event_response[1]["label"], "sign up - other_value") - self.assertEqual(event_response[2]["label"], "sign up - value") + self.assertEqual(event_response[1]["label"], 
"other_value") + self.assertEqual(event_response[2]["label"], "value") self.assertEqual(sum(event_response[1]["data"]), 1) self.assertEqual(event_response[1]["data"][5], 1) @@ -5301,7 +5302,7 @@ def test_dau_with_breakdown_filtering_with_prop_filter(self): self.team, ) - self.assertEqual(event_response[0]["label"], "sign up - other_value") + self.assertEqual(event_response[0]["label"], "other_value") self.assertEqual(sum(event_response[0]["data"]), 1) self.assertEqual(event_response[0]["data"][5], 1) # property not defined diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py index e891190f6e310..458aabdc14198 100644 --- a/posthog/queries/trends/breakdown.py +++ b/posthog/queries/trends/breakdown.py @@ -676,7 +676,11 @@ def _breakdown_result_descriptors(self, breakdown_value, filter: Filter, entity: extra_label = self._determine_breakdown_label( breakdown_value, filter.breakdown_type, filter.breakdown, breakdown_value ) - label = "{} - {}".format(entity.name, extra_label) + if len(filter.entities) > 1: + # if there are multiple entities in the query, include the entity name in the labels + label = "{} - {}".format(entity.name, extra_label) + else: + label = extra_label additional_values = {"label": label} if filter.breakdown_type == "cohort": additional_values["breakdown_value"] = "all" if breakdown_value == ALL_USERS_COHORT_ID else breakdown_value diff --git a/posthog/schema.py b/posthog/schema.py index 3b86559f8fc78..a2057f903768f 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -3,6 +3,7 @@ from __future__ import annotations +from datetime import datetime from enum import Enum from typing import Any, Dict, List, Optional, Union @@ -255,6 +256,13 @@ class InCohortVia(str, Enum): subquery = "subquery" +class MaterializationMode(str, Enum): + auto = "auto" + legacy_null_as_string = "legacy_null_as_string" + legacy_null_as_null = "legacy_null_as_null" + disabled = "disabled" + + class PersonsArgMaxVersion(str, Enum): auto = 
"auto" v1 = "v1" @@ -273,6 +281,7 @@ class HogQLQueryModifiers(BaseModel): extra="forbid", ) inCohortVia: Optional[InCohortVia] = None + materializationMode: Optional[MaterializationMode] = None personsArgMaxVersion: Optional[PersonsArgMaxVersion] = None personsOnEventsMode: Optional[PersonsOnEventsMode] = None @@ -431,6 +440,23 @@ class PropertyOperator(str, Enum): max = "max" +class QueryStatus(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + complete: Optional[bool] = False + end_time: Optional[datetime] = None + error: Optional[bool] = False + error_message: Optional[str] = "" + expiration_time: Optional[datetime] = None + id: str + query_async: Optional[bool] = True + results: Optional[Any] = None + start_time: Optional[datetime] = None + task_id: Optional[str] = None + team_id: int + + class QueryTiming(BaseModel): model_config = ConfigDict( extra="forbid", @@ -467,57 +493,6 @@ class RetentionType(str, Enum): retention_first_time = "retention_first_time" -class SavedInsightNode(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - allowSorting: Optional[bool] = Field( - default=None, description="Can the user click on column headers to sort the table? (default: true)" - ) - embedded: Optional[bool] = Field(default=None, description="Query is embedded inside another bordered component") - expandable: Optional[bool] = Field( - default=None, description="Can expand row to show raw event data (default: true)" - ) - full: Optional[bool] = Field( - default=None, description="Show with most visual options enabled. Used in insight scene." 
- ) - hidePersonsModal: Optional[bool] = None - kind: Literal["SavedInsightNode"] = "SavedInsightNode" - propertiesViaUrl: Optional[bool] = Field(default=None, description="Link properties via the URL (default: false)") - shortId: str - showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row") - showColumnConfigurator: Optional[bool] = Field( - default=None, description="Show a button to configure the table's columns if possible" - ) - showCorrelationTable: Optional[bool] = None - showDateRange: Optional[bool] = Field(default=None, description="Show date range selector") - showElapsedTime: Optional[bool] = Field(default=None, description="Show the time it takes to run a query") - showEventFilter: Optional[bool] = Field( - default=None, description="Include an event filter above the table (EventsNode only)" - ) - showExport: Optional[bool] = Field(default=None, description="Show the export button") - showFilters: Optional[bool] = None - showHeader: Optional[bool] = None - showHogQLEditor: Optional[bool] = Field(default=None, description="Include a HogQL query editor above HogQL tables") - showLastComputation: Optional[bool] = None - showLastComputationRefresh: Optional[bool] = None - showOpenEditorButton: Optional[bool] = Field( - default=None, description="Show a button to open the current query as a new insight. 
(default: true)" - ) - showPersistentColumnConfigurator: Optional[bool] = Field( - default=None, description="Show a button to configure and persist the table's default columns if possible" - ) - showPropertyFilter: Optional[bool] = Field(default=None, description="Include a property filter above the table") - showReload: Optional[bool] = Field(default=None, description="Show a reload button") - showResults: Optional[bool] = None - showResultsTable: Optional[bool] = Field(default=None, description="Show a results table") - showSavedQueries: Optional[bool] = Field(default=None, description="Shows a list of saved queries") - showSearch: Optional[bool] = Field(default=None, description="Include a free text search field (PersonsNode only)") - showTable: Optional[bool] = None - showTimings: Optional[bool] = Field(default=None, description="Show a detailed query timing breakdown") - suppressSessionAnalysisWarning: Optional[bool] = None - - class SessionPropertyFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -614,6 +589,22 @@ class TrendsQueryResponse(BaseModel): timings: Optional[List[QueryTiming]] = None +class RETENTION(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + hideLineGraph: Optional[bool] = None + hideSizeColumn: Optional[bool] = None + useSmallLayout: Optional[bool] = None + + +class VizSpecificOptions(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + RETENTION: Optional[RETENTION] = None + + class Kind(str, Enum): unit = "unit" duration_s = "duration_s" @@ -922,6 +913,58 @@ class RetentionFilter(BaseModel): total_intervals: Optional[float] = None +class SavedInsightNode(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + allowSorting: Optional[bool] = Field( + default=None, description="Can the user click on column headers to sort the table? 
(default: true)" + ) + embedded: Optional[bool] = Field(default=None, description="Query is embedded inside another bordered component") + expandable: Optional[bool] = Field( + default=None, description="Can expand row to show raw event data (default: true)" + ) + full: Optional[bool] = Field( + default=None, description="Show with most visual options enabled. Used in insight scene." + ) + hidePersonsModal: Optional[bool] = None + kind: Literal["SavedInsightNode"] = "SavedInsightNode" + propertiesViaUrl: Optional[bool] = Field(default=None, description="Link properties via the URL (default: false)") + shortId: str + showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row") + showColumnConfigurator: Optional[bool] = Field( + default=None, description="Show a button to configure the table's columns if possible" + ) + showCorrelationTable: Optional[bool] = None + showDateRange: Optional[bool] = Field(default=None, description="Show date range selector") + showElapsedTime: Optional[bool] = Field(default=None, description="Show the time it takes to run a query") + showEventFilter: Optional[bool] = Field( + default=None, description="Include an event filter above the table (EventsNode only)" + ) + showExport: Optional[bool] = Field(default=None, description="Show the export button") + showFilters: Optional[bool] = None + showHeader: Optional[bool] = None + showHogQLEditor: Optional[bool] = Field(default=None, description="Include a HogQL query editor above HogQL tables") + showLastComputation: Optional[bool] = None + showLastComputationRefresh: Optional[bool] = None + showOpenEditorButton: Optional[bool] = Field( + default=None, description="Show a button to open the current query as a new insight. 
(default: true)" + ) + showPersistentColumnConfigurator: Optional[bool] = Field( + default=None, description="Show a button to configure and persist the table's default columns if possible" + ) + showPropertyFilter: Optional[bool] = Field(default=None, description="Include a property filter above the table") + showReload: Optional[bool] = Field(default=None, description="Show a reload button") + showResults: Optional[bool] = None + showResultsTable: Optional[bool] = Field(default=None, description="Show a results table") + showSavedQueries: Optional[bool] = Field(default=None, description="Shows a list of saved queries") + showSearch: Optional[bool] = Field(default=None, description="Include a free text search field (PersonsNode only)") + showTable: Optional[bool] = None + showTimings: Optional[bool] = Field(default=None, description="Show a detailed query timing breakdown") + suppressSessionAnalysisWarning: Optional[bool] = None + vizSpecificOptions: Optional[VizSpecificOptions] = None + + class SessionsTimelineQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -1439,7 +1482,6 @@ class StickinessQuery(BaseModel): model_config = ConfigDict( extra="forbid", ) - aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation") dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query") filterTestAccounts: Optional[bool] = Field( default=None, description="Exclude internal and test users by applying the respective filters" @@ -1637,7 +1679,6 @@ class LifecycleQuery(BaseModel): model_config = ConfigDict( extra="forbid", ) - aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation") dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query") filterTestAccounts: Optional[bool] = Field( default=None, description="Exclude internal and test users by applying the respective filters" @@ -1732,6 +1773,7 @@ class 
InsightVizNode(BaseModel): showTable: Optional[bool] = None source: Union[TrendsQuery, FunnelsQuery, RetentionQuery, PathsQuery, StickinessQuery, LifecycleQuery] suppressSessionAnalysisWarning: Optional[bool] = None + vizSpecificOptions: Optional[VizSpecificOptions] = None class InsightPersonsQuery(BaseModel): diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr index 883b68b6b75f1..0afe5e0ac247d 100644 --- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr @@ -49,7 +49,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -106,7 +107,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -163,7 +165,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -220,7 +223,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -277,7 +281,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -356,7 +361,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -487,7 +493,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -681,7 +688,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -753,7 +761,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM 
"posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -810,7 +819,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -867,7 +877,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -924,7 +935,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -981,7 +993,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -1038,7 +1051,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -1117,7 +1131,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - 
"posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -1185,7 +1200,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -1264,7 +1280,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -1540,7 +1557,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -1619,7 +1637,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -1908,7 +1927,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -1987,7 +2007,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -2234,7 +2255,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -2321,7 +2343,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -2400,7 +2423,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -2700,7 +2724,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -2779,7 +2804,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -2829,7 +2855,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -3545,7 +3572,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ 
-3624,7 +3652,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -3892,7 +3921,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -3982,7 +4012,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -4252,7 +4283,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -4331,7 +4363,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ @@ -4614,7 +4647,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -4693,7 +4727,8 @@ "posthog_team"."extra_settings", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py index 622798774ec1d..8f6fffd0c9f90 100644 --- a/posthog/tasks/exports/csv_exporter.py +++ b/posthog/tasks/exports/csv_exporter.py @@ -7,7 +7,7 @@ from django.http import QueryDict from sentry_sdk import capture_exception, push_scope -from posthog.api.query import process_query +from posthog.api.services.query import process_query from posthog.jwt import PosthogJwtAudience, encode_jwt from posthog.models.exported_asset import ExportedAsset, save_content from posthog.utils import absolute_uri diff --git a/posthog/tasks/test/__snapshots__/test_usage_report.ambr b/posthog/tasks/test/__snapshots__/test_usage_report.ambr index 74f71be82a5cc..9cabd193acff2 100644 --- a/posthog/tasks/test/__snapshots__/test_usage_report.ambr +++ 
b/posthog/tasks/test/__snapshots__/test_usage_report.ambr @@ -255,6 +255,24 @@ GROUP BY team_id ' --- +# name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.24 + ' + + SELECT team, + sum(rows_synced) + FROM + (SELECT JSONExtractString(properties, 'job_id') AS job_id, + distinct_id AS team, + any(JSONExtractInt(properties, 'count')) AS rows_synced + FROM events + WHERE team_id = 2 + AND event = 'external data sync job' + AND parseDateTimeBestEffort(JSONExtractString(properties, 'startTime')) BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' + GROUP BY job_id, + team) + GROUP BY team + ' +--- # name: TestFeatureFlagsUsageReport.test_usage_report_decide_requests.3 ' diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index 715c3829855d2..a10a16e17893a 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -399,6 +399,7 @@ def _test_usage_report(self) -> List[dict]: "event_explorer_api_bytes_read": 0, "event_explorer_api_rows_read": 0, "event_explorer_api_duration_ms": 0, + "rows_synced_in_period": 0, "date": "2022-01-09", "organization_id": str(self.organization.id), "organization_name": "Test", @@ -440,6 +441,7 @@ def _test_usage_report(self) -> List[dict]: "event_explorer_api_bytes_read": 0, "event_explorer_api_rows_read": 0, "event_explorer_api_duration_ms": 0, + "rows_synced_in_period": 0, }, str(self.org_1_team_2.id): { "event_count_lifetime": 11, @@ -475,6 +477,7 @@ def _test_usage_report(self) -> List[dict]: "event_explorer_api_bytes_read": 0, "event_explorer_api_rows_read": 0, "event_explorer_api_duration_ms": 0, + "rows_synced_in_period": 0, }, }, }, @@ -533,6 +536,7 @@ def _test_usage_report(self) -> List[dict]: "event_explorer_api_bytes_read": 0, "event_explorer_api_rows_read": 0, "event_explorer_api_duration_ms": 0, + "rows_synced_in_period": 0, "date": "2022-01-09", "organization_id": str(self.org_2.id), "organization_name": "Org 2", @@ -574,6 
+578,7 @@ def _test_usage_report(self) -> List[dict]: "event_explorer_api_bytes_read": 0, "event_explorer_api_rows_read": 0, "event_explorer_api_duration_ms": 0, + "rows_synced_in_period": 0, } }, }, @@ -980,6 +985,95 @@ def test_usage_report_survey_responses(self, billing_task_mock: MagicMock, posth assert org_2_report["teams"]["5"]["survey_responses_count_in_month"] == 7 +@freeze_time("2022-01-10T00:01:00Z") +class TestExternalDataSyncUsageReport(ClickhouseDestroyTablesMixin, TestCase, ClickhouseTestMixin): + def setUp(self) -> None: + Team.objects.all().delete() + return super().setUp() + + def _setup_teams(self) -> None: + self.analytics_org = Organization.objects.create(name="PostHog") + self.org_1 = Organization.objects.create(name="Org 1") + self.org_2 = Organization.objects.create(name="Org 2") + + self.analytics_team = Team.objects.create(pk=2, organization=self.analytics_org, name="Analytics") + + self.org_1_team_1 = Team.objects.create(pk=3, organization=self.org_1, name="Team 1 org 1") + self.org_1_team_2 = Team.objects.create(pk=4, organization=self.org_1, name="Team 2 org 1") + self.org_2_team_3 = Team.objects.create(pk=5, organization=self.org_2, name="Team 3 org 2") + + @patch("posthog.tasks.usage_report.Client") + @patch("posthog.tasks.usage_report.send_report_to_billing_service") + def test_external_data_rows_synced_response( + self, billing_task_mock: MagicMock, posthog_capture_mock: MagicMock + ) -> None: + self._setup_teams() + + for i in range(5): + start_time = (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ") + _create_event( + distinct_id="3", + event="external data sync job", + properties={ + "count": 10, + "job_id": 10924, + "startTime": start_time, + }, + timestamp=now() - relativedelta(hours=i), + team=self.analytics_team, + ) + # identical job id should be deduped and not counted + _create_event( + distinct_id="3", + event="external data sync job", + properties={ + "count": 10, + "job_id": 10924, + "startTime": 
start_time, + }, + timestamp=now() - relativedelta(hours=i, minutes=i), + team=self.analytics_team, + ) + + for i in range(5): + _create_event( + distinct_id="4", + event="external data sync job", + properties={ + "count": 10, + "job_id": 10924, + "startTime": (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ"), + }, + timestamp=now() - relativedelta(hours=i), + team=self.analytics_team, + ) + + flush_persons_and_events() + + period = get_previous_day(at=now() + relativedelta(days=1)) + period_start, period_end = period + all_reports = _get_all_org_reports(period_start, period_end) + + assert len(all_reports) == 3 + + org_1_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period)) + ) + + org_2_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period)) + ) + + assert org_1_report["organization_name"] == "Org 1" + assert org_1_report["rows_synced_in_period"] == 20 + + assert org_1_report["teams"]["3"]["rows_synced_in_period"] == 10 + assert org_1_report["teams"]["4"]["rows_synced_in_period"] == 10 + + assert org_2_report["organization_name"] == "Org 2" + assert org_2_report["rows_synced_in_period"] == 0 + + class SendUsageTest(LicensedTestMixin, ClickhouseDestroyTablesMixin, APIBaseTest): def setUp(self) -> None: super().setUp() @@ -1039,6 +1133,10 @@ def _usage_report_response(self) -> Any: "usage": 1000, "limit": None, }, + "rows_synced": { + "usage": 1000, + "limit": None, + }, }, } } @@ -1185,6 +1283,7 @@ def test_org_usage_updated_correctly(self, mock_post: MagicMock, mock_client: Ma assert self.team.organization.usage == { "events": {"limit": None, "usage": 10000, "todays_usage": 0}, "recordings": {"limit": None, "usage": 1000, "todays_usage": 0}, + "rows_synced": {"limit": None, "usage": 1000, "todays_usage": 0}, "period": ["2021-10-01T00:00:00Z", "2021-10-31T00:00:00Z"], } diff --git 
a/posthog/tasks/test/test_warehouse.py b/posthog/tasks/test/test_warehouse.py new file mode 100644 index 0000000000000..20b669b754995 --- /dev/null +++ b/posthog/tasks/test/test_warehouse.py @@ -0,0 +1,167 @@ +from posthog.test.base import APIBaseTest +import datetime +from unittest.mock import patch, MagicMock +from posthog.tasks.warehouse import ( + _traverse_jobs_by_field, + capture_workspace_rows_synced_by_team, + check_external_data_source_billing_limit_by_team, +) +from posthog.warehouse.models import ExternalDataSource +from freezegun import freeze_time + + +class TestWarehouse(APIBaseTest): + @patch("posthog.tasks.warehouse.send_request") + @freeze_time("2023-11-07") + def test_traverse_jobs_by_field(self, send_request_mock: MagicMock) -> None: + send_request_mock.return_value = { + "data": [ + { + "jobId": 5827835, + "status": "succeeded", + "jobType": "sync", + "startTime": "2023-11-07T16:50:49Z", + "connectionId": "fake", + "lastUpdatedAt": "2023-11-07T16:52:54Z", + "duration": "PT2M5S", + "rowsSynced": 93353, + }, + { + "jobId": 5783573, + "status": "succeeded", + "jobType": "sync", + "startTime": "2023-11-05T18:32:41Z", + "connectionId": "fake-2", + "lastUpdatedAt": "2023-11-05T18:35:11Z", + "duration": "PT2M30S", + "rowsSynced": 97747, + }, + ] + } + mock_capture = MagicMock() + response = _traverse_jobs_by_field(mock_capture, self.team, "fake-url", "rowsSynced") + + self.assertEqual( + response, + [ + {"count": 93353, "startTime": "2023-11-07T16:50:49Z"}, + {"count": 97747, "startTime": "2023-11-05T18:32:41Z"}, + ], + ) + + self.assertEqual(mock_capture.capture.call_count, 2) + mock_capture.capture.assert_called_with( + self.team.pk, + "external data sync job", + { + "count": 97747, + "workspace_id": self.team.external_data_workspace_id, + "team_id": self.team.pk, + "team_uuid": self.team.uuid, + "startTime": "2023-11-05T18:32:41Z", + "job_id": "5783573", + }, + ) + + @patch("posthog.tasks.warehouse._traverse_jobs_by_field") + 
@patch("posthog.tasks.warehouse.get_ph_client") + @freeze_time("2023-11-07") + def test_capture_workspace_rows_synced_by_team( + self, mock_capture: MagicMock, traverse_jobs_mock: MagicMock + ) -> None: + traverse_jobs_mock.return_value = [ + {"count": 97747, "startTime": "2023-11-05T18:32:41Z"}, + {"count": 93353, "startTime": "2023-11-07T16:50:49Z"}, + ] + + capture_workspace_rows_synced_by_team(self.team.pk) + + self.team.refresh_from_db() + self.assertEqual( + self.team.external_data_workspace_last_synced_at, + datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc), + ) + + @patch("posthog.tasks.warehouse._traverse_jobs_by_field") + @patch("posthog.tasks.warehouse.get_ph_client") + @freeze_time("2023-11-07") + def test_capture_workspace_rows_synced_by_team_month_cutoff( + self, mock_capture: MagicMock, traverse_jobs_mock: MagicMock + ) -> None: + # external_data_workspace_last_synced_at unset + traverse_jobs_mock.return_value = [ + {"count": 93353, "startTime": "2023-11-07T16:50:49Z"}, + ] + + capture_workspace_rows_synced_by_team(self.team.pk) + + self.team.refresh_from_db() + self.assertEqual( + self.team.external_data_workspace_last_synced_at, + datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc), + ) + + @patch("posthog.tasks.warehouse._traverse_jobs_by_field") + @patch("posthog.tasks.warehouse.get_ph_client") + @freeze_time("2023-11-07") + def test_capture_workspace_rows_synced_by_team_month_cutoff_field_set( + self, mock_capture: MagicMock, traverse_jobs_mock: MagicMock + ) -> None: + self.team.external_data_workspace_last_synced_at = datetime.datetime( + 2023, 10, 29, 18, 32, 41, tzinfo=datetime.timezone.utc + ) + self.team.save() + traverse_jobs_mock.return_value = [ + {"count": 97747, "startTime": "2023-10-30T18:32:41Z"}, + {"count": 93353, "startTime": "2023-11-07T16:50:49Z"}, + ] + + capture_workspace_rows_synced_by_team(self.team.pk) + + self.team.refresh_from_db() + self.assertEqual( + 
self.team.external_data_workspace_last_synced_at, + datetime.datetime(2023, 11, 7, 16, 50, 49, tzinfo=datetime.timezone.utc), + ) + + @patch("posthog.warehouse.external_data_source.connection.send_request") + @patch("ee.billing.quota_limiting.list_limited_team_attributes") + def test_external_data_source_billing_limit_deactivate( + self, usage_limit_mock: MagicMock, send_request_mock: MagicMock + ) -> None: + usage_limit_mock.return_value = [self.team.pk] + + external_source = ExternalDataSource.objects.create( + source_id="test_id", + connection_id="fake connectino_id", + destination_id="fake destination_id", + team=self.team, + status="running", + source_type="Stripe", + ) + + check_external_data_source_billing_limit_by_team(self.team.pk) + + external_source.refresh_from_db() + self.assertEqual(external_source.status, "inactive") + + @patch("posthog.warehouse.external_data_source.connection.send_request") + @patch("ee.billing.quota_limiting.list_limited_team_attributes") + def test_external_data_source_billing_limit_activate( + self, usage_limit_mock: MagicMock, send_request_mock: MagicMock + ) -> None: + usage_limit_mock.return_value = [] + + external_source = ExternalDataSource.objects.create( + source_id="test_id", + connection_id="fake connectino_id", + destination_id="fake destination_id", + team=self.team, + status="inactive", + source_type="Stripe", + ) + + check_external_data_source_billing_limit_by_team(self.team.pk) + + external_source.refresh_from_db() + self.assertEqual(external_source.status, "running") diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index a9a06ecbff7c5..3e8d4907d4f3f 100644 --- a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -16,7 +16,6 @@ ) import requests -from retry import retry import structlog from dateutil import parser from django.conf import settings @@ -24,6 +23,7 @@ from django.db.models import Count, Q from posthoganalytics.client import Client from psycopg2 import sql 
+from retry import retry from sentry_sdk import capture_exception from posthog import version_requirement @@ -110,6 +110,8 @@ class UsageReportCounters: # Surveys survey_responses_count_in_period: int survey_responses_count_in_month: int + # Data Warehouse + rows_synced_in_period: int # Instance metadata to be included in oveall report @@ -591,6 +593,34 @@ def get_teams_with_survey_responses_count_in_period( return results +@timed_log() +@retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF) +def get_teams_with_rows_synced_in_period(begin: datetime, end: datetime) -> List[Tuple[int, int]]: + team_to_query = 1 if get_instance_region() == "EU" else 2 + + # dedup by job id incase there were duplicates sent + results = sync_execute( + """ + SELECT team, sum(rows_synced) FROM ( + SELECT JSONExtractString(properties, 'job_id') AS job_id, distinct_id AS team, any(JSONExtractInt(properties, 'count')) AS rows_synced + FROM events + WHERE team_id = %(team_to_query)s AND event = 'external data sync job' AND parseDateTimeBestEffort(JSONExtractString(properties, 'startTime')) BETWEEN %(begin)s AND %(end)s + GROUP BY job_id, team + ) + GROUP BY team + """, + { + "begin": begin, + "end": end, + "team_to_query": team_to_query, + }, + workload=Workload.OFFLINE, + settings=CH_BILLING_SETTINGS, + ) + + return results + + @app.task(ignore_result=True, max_retries=0) def capture_report( capture_event_name: str, @@ -784,6 +814,7 @@ def _get_all_usage_data(period_start: datetime, period_end: datetime) -> Dict[st teams_with_survey_responses_count_in_month=get_teams_with_survey_responses_count_in_period( period_start.replace(day=1), period_end ), + teams_with_rows_synced_in_period=get_teams_with_rows_synced_in_period(period_start, period_end), ) @@ -854,6 +885,7 @@ def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounter event_explorer_api_duration_ms=all_data["teams_with_event_explorer_api_duration_ms"].get(team.id, 0), 
survey_responses_count_in_period=all_data["teams_with_survey_responses_count_in_period"].get(team.id, 0), survey_responses_count_in_month=all_data["teams_with_survey_responses_count_in_month"].get(team.id, 0), + rows_synced_in_period=all_data["teams_with_rows_synced_in_period"].get(team.id, 0), ) diff --git a/posthog/tasks/warehouse.py b/posthog/tasks/warehouse.py new file mode 100644 index 0000000000000..2450251830c59 --- /dev/null +++ b/posthog/tasks/warehouse.py @@ -0,0 +1,167 @@ +from django.conf import settings +import datetime +from posthog.models import Team +from posthog.warehouse.external_data_source.client import send_request +from posthog.warehouse.models.external_data_source import ExternalDataSource +from posthog.warehouse.models import DataWarehouseCredential, DataWarehouseTable +from posthog.warehouse.external_data_source.connection import retrieve_sync +from urllib.parse import urlencode +from posthog.ph_client import get_ph_client +from typing import Any, Dict, List, TYPE_CHECKING +from posthog.celery import app +import structlog + +logger = structlog.get_logger(__name__) + +AIRBYTE_JOBS_URL = "https://api.airbyte.com/v1/jobs" +DEFAULT_DATE_TIME = datetime.datetime(2023, 11, 7, tzinfo=datetime.timezone.utc) + +if TYPE_CHECKING: + from posthoganalytics import Posthog + + +def sync_resources() -> None: + resources = ExternalDataSource.objects.filter(are_tables_created=False, status__in=["running", "error"]) + + for resource in resources: + sync_resource.delay(resource.pk) + + +@app.task(ignore_result=True) +def sync_resource(resource_id: str) -> None: + resource = ExternalDataSource.objects.get(pk=resource_id) + + try: + job = retrieve_sync(resource.connection_id) + except Exception as e: + logger.exception("Data Warehouse: Sync Resource failed with an unexpected exception.", exc_info=e) + resource.status = "error" + resource.save() + return + + if job is None: + logger.error(f"Data Warehouse: No jobs found for connection: {resource.connection_id}") 
+ resource.status = "error" + resource.save() + return + + if job["status"] == "succeeded": + resource = ExternalDataSource.objects.get(pk=resource_id) + credential, _ = DataWarehouseCredential.objects.get_or_create( + team_id=resource.team.pk, + access_key=settings.AIRBYTE_BUCKET_KEY, + access_secret=settings.AIRBYTE_BUCKET_SECRET, + ) + + data = { + "credential": credential, + "name": "stripe_customers", + "format": "Parquet", + "url_pattern": f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/airbyte/{resource.team.pk}/customers/*.parquet", + "team_id": resource.team.pk, + } + + table = DataWarehouseTable(**data) + try: + table.columns = table.get_columns() + except Exception as e: + logger.exception( + f"Data Warehouse: Sync Resource failed with an unexpected exception for connection: {resource.connection_id}", + exc_info=e, + ) + else: + table.save() + + resource.are_tables_created = True + resource.status = job["status"] + resource.save() + + else: + resource.status = job["status"] + resource.save() + + +DEFAULT_USAGE_LIMIT = 1000000 +ROWS_PER_DOLLAR = 66666 # 1 million rows per $15 + + +@app.task(ignore_result=True, max_retries=2) +def check_external_data_source_billing_limit_by_team(team_id: int) -> None: + from posthog.warehouse.external_data_source.connection import deactivate_connection_by_id, activate_connection_by_id + from ee.billing.quota_limiting import list_limited_team_attributes, QuotaResource + + limited_teams_rows_synced = list_limited_team_attributes(QuotaResource.ROWS_SYNCED) + + team = Team.objects.get(pk=team_id) + all_active_connections = ExternalDataSource.objects.filter(team=team, status__in=["running", "succeeded"]) + all_inactive_connections = ExternalDataSource.objects.filter(team=team, status="inactive") + + # TODO: consider more boundaries + if team_id in limited_teams_rows_synced: + for connection in all_active_connections: + deactivate_connection_by_id(connection.connection_id) + connection.status = "inactive" + connection.save() + else: 
+ for connection in all_inactive_connections: + activate_connection_by_id(connection.connection_id) + connection.status = "running" + connection.save() + + +@app.task(ignore_result=True, max_retries=2) +def capture_workspace_rows_synced_by_team(team_id: int) -> None: + ph_client = get_ph_client() + team = Team.objects.get(pk=team_id) + now = datetime.datetime.now(datetime.timezone.utc) + begin = team.external_data_workspace_last_synced_at or DEFAULT_DATE_TIME + + params = { + "workspaceIds": team.external_data_workspace_id, + "limit": 100, + "offset": 0, + "status": "succeeded", + "orderBy": "createdAt|ASC", + "updatedAtStart": begin.strftime("%Y-%m-%dT%H:%M:%SZ"), + "updatedAtEnd": now.strftime("%Y-%m-%dT%H:%M:%SZ"), + } + result_totals = _traverse_jobs_by_field(ph_client, team, AIRBYTE_JOBS_URL + "?" + urlencode(params), "rowsSynced") + + # TODO: check assumption that ordering is possible with API + team.external_data_workspace_last_synced_at = result_totals[-1]["startTime"] if result_totals else now + team.save() + + ph_client.shutdown() + + +def _traverse_jobs_by_field( + ph_client: "Posthog", team: Team, url: str, field: str, acc: List[Dict[str, Any]] = [] +) -> List[Dict[str, Any]]: + response = send_request(url, method="GET") + response_data = response.get("data", []) + response_next = response.get("next", None) + + for job in response_data: + acc.append( + { + "count": job[field], + "startTime": job["startTime"], + } + ) + ph_client.capture( + team.pk, + "external data sync job", + { + "count": job[field], + "workspace_id": team.external_data_workspace_id, + "team_id": team.pk, + "team_uuid": team.uuid, + "startTime": job["startTime"], + "job_id": str(job["jobId"]), + }, + ) + + if response_next: + return _traverse_jobs_by_field(ph_client, team, response_next, field, acc) + + return acc diff --git a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py index 
176b487ff94a0..cea71a458013f 100644 --- a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py @@ -26,6 +26,7 @@ RedshiftBatchExportWorkflow, RedshiftInsertInputs, insert_into_redshift_activity, + remove_escaped_whitespace_recursive, ) REQUIRED_ENV_VARS = ( @@ -63,14 +64,14 @@ async def assert_events_in_redshift(connection, schema, table_name, events, excl if exclude_events is not None and event_name in exclude_events: continue - properties = event.get("properties", None) - elements_chain = event.get("elements_chain", None) + raw_properties = event.get("properties", None) + properties = remove_escaped_whitespace_recursive(raw_properties) if raw_properties else None expected_event = { "distinct_id": event.get("distinct_id"), - "elements": json.dumps(elements_chain) if elements_chain else None, + "elements": "", "event": event_name, "ip": properties.get("$ip", None) if properties else None, - "properties": json.dumps(properties) if properties else None, + "properties": json.dumps(properties, ensure_ascii=False) if properties else None, "set": properties.get("$set", None) if properties else None, "set_once": properties.get("$set_once", None) if properties else None, # Kept for backwards compatibility, but not exported anymore. 
@@ -114,7 +115,7 @@ def redshift_config(): return { "user": user, "password": password, - "database": "dev", + "database": "posthog_batch_exports_test_2", "schema": "exports_test_schema", "host": host, "port": int(port), @@ -124,7 +125,10 @@ def redshift_config(): @pytest.fixture def postgres_config(redshift_config): """We shadow this name so that setup_postgres_test_db works with Redshift.""" - return redshift_config + psycopg._encodings._py_codecs["UNICODE"] = "utf-8" + psycopg._encodings.py_codecs.update((k.encode(), v) for k, v in psycopg._encodings._py_codecs.items()) + + yield redshift_config @pytest_asyncio.fixture @@ -137,6 +141,7 @@ async def psycopg_connection(redshift_config, setup_postgres_test_db): host=redshift_config["host"], port=redshift_config["port"], ) + connection.prepare_threshold = None yield connection @@ -176,7 +181,14 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( count_outside_range=10, count_other_team=10, duplicate=True, - properties={"$browser": "Chrome", "$os": "Mac OS X"}, + properties={ + "$browser": "Chrome", + "$os": "Mac OS X", + "whitespace": "hi\t\n\r\f\bhi", + "nested_whitespace": {"whitespace": "hi\t\n\r\f\bhi"}, + "sequence": {"mucho_whitespace": ["hi", "hi\t\n\r\f\bhi", "hi\t\n\r\f\bhi", "hi"]}, + "multi-byte": "é", + }, person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, ) @@ -344,3 +356,20 @@ async def test_redshift_export_workflow( events=events, exclude_events=exclude_events, ) + + +@pytest.mark.parametrize( + "value,expected", + [ + ([1, 2, 3], [1, 2, 3]), + ("hi\t\n\r\f\bhi", "hi hi"), + ([["\t\n\r\f\b"]], [[""]]), + (("\t\n\r\f\b",), ("",)), + ({"\t\n\r\f\b"}, {""}), + ({"key": "\t\n\r\f\b"}, {"key": ""}), + ({"key": ["\t\n\r\f\b"]}, {"key": [""]}), + ], +) +def test_remove_escaped_whitespace_recursive(value, expected): + """Test we remove some whitespace values.""" + assert remove_escaped_whitespace_recursive(value) == expected diff --git 
a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py index 53f8d5f855e5d..9561a8bf2ea35 100644 --- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py @@ -2,13 +2,17 @@ import datetime as dt import gzip import json +import os +import random import re +import unittest.mock from collections import deque from uuid import uuid4 import pytest import pytest_asyncio import responses +import snowflake.connector from django.conf import settings from django.test import override_settings from requests.models import PreparedRequest @@ -19,7 +23,6 @@ from temporalio.testing import WorkflowEnvironment from temporalio.worker import UnsandboxedWorkflowRunner, Worker -from posthog.temporal.tests.utils.datetimes import to_isoformat from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import acreate_batch_export, adelete_batch_export, afetch_batch_export_runs from posthog.temporal.workflows.batch_exports import ( @@ -36,6 +39,92 @@ pytestmark = [pytest.mark.asyncio, pytest.mark.django_db] +class FakeSnowflakeCursor: + """A fake Snowflake cursor that can fail on PUT and COPY queries.""" + + def __init__(self, *args, failure_mode: str | None = None, **kwargs): + self._execute_calls = [] + self._execute_async_calls = [] + self._sfqid = 1 + self._fail = failure_mode + + @property + def sfqid(self): + current = self._sfqid + self._sfqid += 1 + return current + + def execute(self, query, params=None, file_stream=None): + self._execute_calls.append({"query": query, "params": params, "file_stream": file_stream}) + + def execute_async(self, query, params=None, file_stream=None): + self._execute_async_calls.append({"query": query, "params": params, "file_stream": file_stream}) + + def get_results_from_sfqid(self, 
query_id): + pass + + def fetchone(self): + if self._fail == "put": + return ( + "test", + "test.gz", + 456, + 0, + "NONE", + "GZIP", + "FAILED", + "Some error on put", + ) + else: + return ( + "test", + "test.gz", + 456, + 0, + "NONE", + "GZIP", + "UPLOADED", + None, + ) + + def fetchall(self): + if self._fail == "copy": + return [("test", "LOAD FAILED", 100, 99, 1, 1, "Some error on copy", 3)] + else: + return [("test", "LOADED", 100, 99, 1, 1, "Some error on copy", 3)] + + +class FakeSnowflakeConnection: + def __init__( + self, + *args, + failure_mode: str | None = None, + **kwargs, + ): + self._cursors = [] + self._is_running = True + self.failure_mode = failure_mode + + def cursor(self) -> FakeSnowflakeCursor: + cursor = FakeSnowflakeCursor(failure_mode=self.failure_mode) + self._cursors.append(cursor) + return cursor + + def get_query_status_throw_if_error(self, query_id): + return snowflake.connector.constants.QueryStatus.SUCCESS + + def is_still_running(self, status): + current_status = self._is_running + self._is_running = not current_status + return current_status + + def __enter__(self): + return self + + def __exit__(self, *args, **kwargs): + pass + + def contains_queries_in_order(queries: list[str], *queries_to_find: str): """Check if a list of queries contains a list of queries in order.""" # We use a deque to pop the queries we find off the list of queries to @@ -204,21 +293,52 @@ def query_request_handler(request: PreparedRequest): return queries, staged_files +@pytest.fixture +def database(): + """Generate a unique database name for tests.""" + return f"test_batch_exports_{uuid4()}" + + +@pytest.fixture +def schema(): + """Generate a unique schema name for tests.""" + return f"test_batch_exports_{uuid4()}" + + +@pytest.fixture +def table_name(ateam, interval): + return f"test_workflow_table_{ateam.pk}_{interval}" + + +@pytest.fixture +def snowflake_config(database, schema) -> dict[str, str]: + """Return a Snowflake configuration dictionary to use 
in tests. + + We set default configuration values to support tests against the Snowflake API + and tests that mock it. + """ + password = os.getenv("SNOWFLAKE_PASSWORD", "password") + warehouse = os.getenv("SNOWFLAKE_WAREHOUSE", "COMPUTE_WH") + account = os.getenv("SNOWFLAKE_ACCOUNT", "account") + username = os.getenv("SNOWFLAKE_USERNAME", "hazzadous") + + return { + "password": password, + "user": username, + "warehouse": warehouse, + "account": account, + "database": database, + "schema": schema, + } + + @pytest_asyncio.fixture -async def snowflake_batch_export(ateam, interval, temporal_client): +async def snowflake_batch_export(ateam, table_name, snowflake_config, interval, exclude_events, temporal_client): + """Manage BatchExport model (and associated Temporal Schedule) for tests""" destination_data = { "type": "Snowflake", - "config": { - "user": "hazzadous", - "password": "password", - "account": "account", - "database": "PostHog", - "schema": "test", - "warehouse": "COMPUTE_WH", - "table_name": "events", - }, + "config": {**snowflake_config, "table_name": table_name, "exclude_events": exclude_events}, } - batch_export_data = { "name": "my-production-snowflake-export", "destination": destination_data, @@ -238,7 +358,9 @@ async def snowflake_batch_export(ateam, interval, temporal_client): @pytest.mark.parametrize("interval", ["hour", "day"], indirect=True) -async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client, snowflake_batch_export, interval): +async def test_snowflake_export_workflow_exports_events( + ateam, clickhouse_client, database, schema, snowflake_batch_export, interval, table_name +): """Test that the whole workflow not just the activity works. 
It should update the batch export run status to completed, as well as updating the record @@ -247,7 +369,7 @@ async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta - (events, _, _) = await generate_test_events_in_clickhouse( + await generate_test_events_in_clickhouse( client=clickhouse_client, team_id=ateam.pk, start_time=data_interval_start, @@ -281,10 +403,12 @@ async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with responses.RequestsMock( - target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send" - ) as rsps, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2): - queries, staged_files = add_mock_snowflake_api(rsps) + with unittest.mock.patch( + "posthog.temporal.workflows.snowflake_batch_export.snowflake.connector.connect", + ) as mock, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + fake_conn = FakeSnowflakeConnection() + mock.return_value = fake_conn + await activity_environment.client.execute_workflow( SnowflakeBatchExportWorkflow.run, inputs, @@ -294,49 +418,27 @@ async def test_snowflake_export_workflow_exports_events(ateam, clickhouse_client retry_policy=RetryPolicy(maximum_attempts=1), ) - assert contains_queries_in_order( - queries, - 'USE DATABASE "PostHog"', - 'USE SCHEMA "test"', - 'CREATE TABLE IF NOT EXISTS "PostHog"."test"."events"', - # NOTE: we check that we at least have two PUT queries to - # ensure we hit the multi file upload code path - 'PUT file://.* @%"events"', - 'PUT file://.* @%"events"', - 'COPY INTO "events"', - ) + execute_calls = [] + for cursor in fake_conn._cursors: + for call in cursor._execute_calls: + execute_calls.append(call["query"]) - staged_data = "\n".join(staged_files) + execute_async_calls 
= [] + for cursor in fake_conn._cursors: + for call in cursor._execute_async_calls: + execute_async_calls.append(call["query"]) - # Check that the data is correct. - json_data = [json.loads(line) for line in staged_data.split("\n") if line] - # Pull out the fields we inserted only - json_data = [ - { - "uuid": event["uuid"], - "event": event["event"], - "timestamp": event["timestamp"], - "properties": event["properties"], - "person_id": event["person_id"], - } - for event in json_data + assert execute_calls[0:3] == [ + f'USE DATABASE "{database}"', + f'USE SCHEMA "{schema}"', + "SET ABORT_DETACHED_QUERY = FALSE", ] - json_data.sort(key=lambda x: x["timestamp"]) - # Drop _timestamp and team_id from events - expected_events = [] - for event in events: - expected_event = { - key: value - for key, value in event.items() - if key in ("uuid", "event", "timestamp", "properties", "person_id") - } - expected_event["timestamp"] = to_isoformat(event["timestamp"]) - expected_events.append(expected_event) - expected_events.sort(key=lambda x: x["timestamp"]) + assert all(query.startswith("PUT") for query in execute_calls[3:12]) + assert all(f"_{n}.jsonl" in query for n, query in enumerate(execute_calls[3:12])) - assert json_data[0] == expected_events[0] - assert json_data == expected_events + assert execute_async_calls[0].strip().startswith(f'CREATE TABLE IF NOT EXISTS "{table_name}"') + assert execute_async_calls[1].strip().startswith(f'COPY INTO "{table_name}"') runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) assert len(runs) == 1 @@ -451,11 +553,15 @@ async def test_snowflake_export_workflow_raises_error_on_put_fail( ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with responses.RequestsMock( - target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send" - ) as rsps, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2): - add_mock_snowflake_api(rsps, fail="put") + class 
FakeSnowflakeConnectionFailOnPut(FakeSnowflakeConnection): + def __init__(self, *args, **kwargs): + super().__init__(*args, failure_mode="put", **kwargs) + + with unittest.mock.patch( + "posthog.temporal.workflows.snowflake_batch_export.snowflake.connector.connect", + side_effect=FakeSnowflakeConnectionFailOnPut, + ): with pytest.raises(WorkflowFailureError) as exc_info: await activity_environment.client.execute_workflow( SnowflakeBatchExportWorkflow.run, @@ -513,11 +619,15 @@ async def test_snowflake_export_workflow_raises_error_on_copy_fail( ], workflow_runner=UnsandboxedWorkflowRunner(), ): - with responses.RequestsMock( - target="snowflake.connector.vendored.requests.adapters.HTTPAdapter.send" - ) as rsps, override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1**2): - add_mock_snowflake_api(rsps, fail="copy") + class FakeSnowflakeConnectionFailOnCopy(FakeSnowflakeConnection): + def __init__(self, *args, **kwargs): + super().__init__(*args, failure_mode="copy", **kwargs) + + with unittest.mock.patch( + "posthog.temporal.workflows.snowflake_batch_export.snowflake.connector.connect", + side_effect=FakeSnowflakeConnectionFailOnCopy, + ): with pytest.raises(WorkflowFailureError) as exc_info: await activity_environment.client.execute_workflow( SnowflakeBatchExportWorkflow.run, @@ -577,8 +687,11 @@ async def insert_into_snowflake_activity_mocked(_: SnowflakeInsertInputs) -> str assert run.latest_error == "ValueError: A useful error message" -async def test_snowflake_export_workflow_handles_cancellation(ateam, snowflake_batch_export): - """Test that Snowflake Export Workflow can gracefully handle cancellations when inserting Snowflake data.""" +async def test_snowflake_export_workflow_handles_cancellation_mocked(ateam, snowflake_batch_export): + """Test that Snowflake Export Workflow can gracefully handle cancellations when inserting Snowflake data. + + We mock the insert_into_snowflake_activity for this test. 
+ """ workflow_id = str(uuid4()) inputs = SnowflakeBatchExportInputs( team_id=ateam.pk, @@ -624,3 +737,462 @@ async def never_finish_activity(_: SnowflakeInsertInputs) -> str: run = runs[0] assert run.status == "Cancelled" assert run.latest_error == "Cancelled" + + +def assert_events_in_snowflake( + cursor: snowflake.connector.cursor.SnowflakeCursor, table_name: str, events: list, exclude_events: list[str] +): + """Assert provided events are present in Snowflake table.""" + cursor.execute(f'SELECT * FROM "{table_name}"') + + rows = cursor.fetchall() + + columns = {index: metadata.name for index, metadata in enumerate(cursor.description)} + json_columns = ("properties", "elements", "people_set", "people_set_once") + + # Rows are tuples, so we construct a dictionary using the metadata from cursor.description. + # We rely on the order of the columns in each row matching the order set in cursor.description. + # This seems to be the case, at least for now. + inserted_events = [ + { + columns[index]: json.loads(row[index]) + if columns[index] in json_columns and row[index] is not None + else row[index] + for index in columns.keys() + } + for row in rows + ] + inserted_events.sort(key=lambda x: (x["event"], x["timestamp"])) + + expected_events = [] + for event in events: + event_name = event.get("event") + + if exclude_events is not None and event_name in exclude_events: + continue + + properties = event.get("properties", None) + elements_chain = event.get("elements_chain", None) + expected_event = { + "distinct_id": event.get("distinct_id"), + "elements": json.dumps(elements_chain), + "event": event_name, + "ip": properties.get("$ip", None) if properties else None, + "properties": event.get("properties"), + "people_set": properties.get("$set", None) if properties else None, + "people_set_once": properties.get("$set_once", None) if properties else None, + "site_url": "", + "timestamp": dt.datetime.fromisoformat(event.get("timestamp")), + "team_id": event.get("team_id"), + 
"uuid": event.get("uuid"), + } + expected_events.append(expected_event) + + expected_events.sort(key=lambda x: (x["event"], x["timestamp"])) + + assert inserted_events[0] == expected_events[0] + assert inserted_events == expected_events + + +REQUIRED_ENV_VARS = ( + "SNOWFLAKE_WAREHOUSE", + "SNOWFLAKE_PASSWORD", + "SNOWFLAKE_ACCOUNT", + "SNOWFLAKE_USERNAME", +) + +SKIP_IF_MISSING_REQUIRED_ENV_VARS = pytest.mark.skipif( + any(env_var not in os.environ for env_var in REQUIRED_ENV_VARS), + reason="Snowflake required env vars are not set", +) + + +@pytest.fixture +def snowflake_cursor(snowflake_config): + """Manage a snowflake cursor that cleans up after we are done.""" + with snowflake.connector.connect( + user=snowflake_config["user"], + password=snowflake_config["password"], + account=snowflake_config["account"], + warehouse=snowflake_config["warehouse"], + ) as connection: + cursor = connection.cursor() + cursor.execute(f"CREATE DATABASE \"{snowflake_config['database']}\"") + cursor.execute(f"CREATE SCHEMA \"{snowflake_config['database']}\".\"{snowflake_config['schema']}\"") + cursor.execute(f"USE SCHEMA \"{snowflake_config['database']}\".\"{snowflake_config['schema']}\"") + + yield cursor + + cursor.execute(f"DROP DATABASE IF EXISTS \"{snowflake_config['database']}\" CASCADE") + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +@pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) +async def test_insert_into_snowflake_activity_inserts_data_into_snowflake_table( + clickhouse_client, activity_environment, snowflake_cursor, snowflake_config, exclude_events +): + """Test that the insert_into_snowflake_activity function inserts data into a PostgreSQL table. + + We use the generate_test_events_in_clickhouse function to generate several sets + of events. Some of these sets are expected to be exported, and others not. Expected + events are those that: + * Are created for the team_id of the batch export. 
+ * Are created in the date range of the batch export. + * Are not duplicates of other events that are in the same batch. + * Do not have an event name contained in the batch export's exclude_events. + + Once we have these events, we pass them to the assert_events_in_snowflake function to check + that they appear in the expected Snowflake table. This function runs against a real Snowflake + instance, so the environment should be populated with the necessary credentials. + """ + data_interval_start = dt.datetime(2023, 4, 20, 14, 0, 0, tzinfo=dt.timezone.utc) + data_interval_end = dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.timezone.utc) + + team_id = random.randint(1, 1000000) + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=1000, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + if exclude_events: + for event_name in exclude_events: + await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=5, + count_outside_range=0, + count_other_team=0, + event_name=event_name, + ) + + table_name = f"test_insert_activity_table_{team_id}" + insert_inputs = SnowflakeInsertInputs( + team_id=team_id, + table_name=table_name, + data_interval_start=data_interval_start.isoformat(), + data_interval_end=data_interval_end.isoformat(), + exclude_events=exclude_events, + **snowflake_config, + ) + + await activity_environment.run(insert_into_snowflake_activity, insert_inputs) + + assert_events_in_snowflake( + cursor=snowflake_cursor, + table_name=table_name, + events=events, + exclude_events=exclude_events, + ) + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +@pytest.mark.parametrize("interval", ["hour", "day"], 
indirect=True) +@pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) +async def test_snowflake_export_workflow( + clickhouse_client, + snowflake_cursor, + interval, + snowflake_batch_export, + ateam, + exclude_events, +): + """Test Redshift Export Workflow end-to-end. + + The workflow should update the batch export run status to completed and produce the expected + records to the provided Redshift instance. + """ + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=100, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + if exclude_events: + for event_name in exclude_events: + await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=5, + count_outside_range=0, + count_other_team=0, + event_name=event_name, + ) + + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=ateam.pk, + batch_export_id=str(snowflake_batch_export.id), + data_interval_end=data_interval_end.isoformat(), + interval=interval, + **snowflake_batch_export.destination.config, + ) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[ + create_export_run, + insert_into_snowflake_activity, + update_export_run_status, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + await 
activity_environment.client.execute_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=10), + ) + + runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Completed" + + assert_events_in_snowflake( + cursor=snowflake_cursor, + table_name=snowflake_batch_export.destination.config["table_name"], + events=events, + exclude_events=exclude_events, + ) + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +@pytest.mark.parametrize("interval", ["hour", "day"], indirect=True) +@pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) +async def test_snowflake_export_workflow_with_many_files( + clickhouse_client, + snowflake_cursor, + interval, + snowflake_batch_export, + ateam, + exclude_events, +): + """Test Snowflake Export Workflow end-to-end with multiple file uploads. + + This test overrides the chunk size and sets it to 1 byte to trigger multiple file uploads. + We want to assert that all files are properly copied into the table. Of course, 1 byte limit + means we are uploading one file at a time, which is very innefficient. For this reason, this test + can take longer, so we keep the event count low and bump the Workflow timeout. 
+ """ + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=10, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=ateam.pk, + batch_export_id=str(snowflake_batch_export.id), + data_interval_end=data_interval_end.isoformat(), + interval=interval, + **snowflake_batch_export.destination.config, + ) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[ + create_export_run, + insert_into_snowflake_activity, + update_export_run_status, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + with override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + await activity_environment.client.execute_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=20), + ) + + runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Completed" + + assert_events_in_snowflake( + cursor=snowflake_cursor, + table_name=snowflake_batch_export.destination.config["table_name"], + events=events, + exclude_events=exclude_events, + ) + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +async def test_snowflake_export_workflow_handles_cancellation( + 
clickhouse_client, ateam, snowflake_batch_export, interval, snowflake_cursor +): + """Test that Snowflake Export Workflow can gracefully handle cancellations when inserting Snowflake data.""" + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=100, + count_outside_range=10, + count_other_team=10, + duplicate=True, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + person_properties={"utm_medium": "referral", "$initial_os": "Linux"}, + ) + + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=ateam.pk, + batch_export_id=str(snowflake_batch_export.id), + data_interval_end=data_interval_end.isoformat(), + interval=interval, + **snowflake_batch_export.destination.config, + ) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[ + create_export_run, + insert_into_snowflake_activity, + update_export_run_status, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + # We set the chunk size low on purpose to slow things down and give us time to cancel. + with override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + handle = await activity_environment.client.start_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + # We need to wait a bit for the activity to start running. 
+ await asyncio.sleep(5) + await handle.cancel() + + with pytest.raises(WorkflowFailureError): + await handle.result() + + runs = await afetch_batch_export_runs(batch_export_id=snowflake_batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Cancelled" + assert run.latest_error == "Cancelled" + + +@SKIP_IF_MISSING_REQUIRED_ENV_VARS +async def test_insert_into_snowflake_activity_heartbeats( + clickhouse_client, + ateam, + snowflake_batch_export, + snowflake_cursor, + snowflake_config, + activity_environment, +): + """Test that the insert_into_snowflake_activity activity sends heartbeats. + + We use a function that runs on_heartbeat to check and track the heartbeat contents. + """ + data_interval_end = dt.datetime.fromisoformat("2023-04-20T14:30:00.000000+00:00") + data_interval_start = data_interval_end - snowflake_batch_export.interval_time_delta + + events_in_files = [] + n_expected_files = 3 + + for i in range(1, n_expected_files + 1): + part_inserted_at = data_interval_end - snowflake_batch_export.interval_time_delta / i + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=ateam.pk, + start_time=data_interval_start, + end_time=data_interval_end, + count=1, + count_outside_range=0, + count_other_team=0, + duplicate=False, + inserted_at=part_inserted_at, + ) + events_in_files += events + + captured_details = [] + + def capture_heartbeat_details(*details): + """A function to track what we heartbeat.""" + nonlocal captured_details + + captured_details.append(details) + + activity_environment.on_heartbeat = capture_heartbeat_details + + table_name = f"test_insert_activity_table_{ateam.pk}" + insert_inputs = SnowflakeInsertInputs( + team_id=ateam.pk, + table_name=table_name, + data_interval_start=data_interval_start.isoformat(), + data_interval_end=data_interval_end.isoformat(), + **snowflake_config, + ) + + with override_settings(BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES=1): + await 
activity_environment.run(insert_into_snowflake_activity, insert_inputs) + + assert n_expected_files == len(captured_details) + + for index, details_captured in enumerate(captured_details): + assert dt.datetime.fromisoformat( + details_captured[0] + ) == data_interval_end - snowflake_batch_export.interval_time_delta / (index + 1) + assert details_captured[1] == index + 1 + + assert_events_in_snowflake(snowflake_cursor, table_name, events_in_files, exclude_events=[]) diff --git a/posthog/temporal/utils.py b/posthog/temporal/utils.py index d8757e2949a29..efc19c9e8ef4a 100644 --- a/posthog/temporal/utils.py +++ b/posthog/temporal/utils.py @@ -1,35 +1,115 @@ -import asyncio -from functools import wraps -from typing import Any, Awaitable, Callable, TypeVar, cast +import collections.abc +import dataclasses +import datetime as dt +import typing -from temporalio import activity -F = TypeVar("F", bound=Callable[..., Awaitable[Any]]) +class EmptyHeartbeatError(Exception): + """Raised when an activity heartbeat is empty. + This is also the error we expect when no heartbeatting is happening, as the sequence will be empty. + """ + + def __init__(self): + super().__init__(f"Heartbeat details sequence is empty") + + +class NotEnoughHeartbeatValuesError(Exception): + """Raised when an activity heartbeat doesn't contain the right amount of values we expect.""" + + def __init__(self, details_len: int, expected: int): + super().__init__(f"Not enough values in heartbeat details (expected {expected}, got {details_len})") + + +class HeartbeatParseError(Exception): + """Raised when an activity heartbeat cannot be parsed into it's expected types.""" + + def __init__(self, field: str): + super().__init__(f"Parsing {field} from heartbeat details encountered an error") + + +@dataclasses.dataclass +class HeartbeatDetails: + """The batch export details included in every heartbeat. 
+ + Each batch export destination should subclass this and implement whatever details are specific to that + batch export and required to resume it. + + Attributes: + last_inserted_at: The last inserted_at we managed to upload or insert, depending on the destination. + _remaining: Any remaining values in the heartbeat_details tuple that we do not parse. + """ + + last_inserted_at: dt.datetime + _remaining: collections.abc.Sequence[typing.Any] + + @property + def total_details(self) -> int: + """The total number of details that we have parsed + those remaining to parse.""" + return (len(dataclasses.fields(self.__class__)) - 1) + len(self._remaining) + + @classmethod + def from_activity(cls, activity): + """Attempt to initialize HeartbeatDetails from an activity's info.""" + details = activity.info().heartbeat_details + + if len(details) == 0: + raise EmptyHeartbeatError() -def auto_heartbeater(fn: F) -> F: - # We want to ensure that the type hints from the original callable are - # available via our wrapper, so we use the functools wraps decorator - @wraps(fn) - async def wrapper(*args, **kwargs): - heartbeat_timeout = activity.info().heartbeat_timeout - heartbeat_task = None - if heartbeat_timeout: - # Heartbeat twice as often as the timeout - heartbeat_task = asyncio.create_task(heartbeat_every(heartbeat_timeout.total_seconds() / 2)) try: - return await fn(*args, **kwargs) - finally: - if heartbeat_task: - heartbeat_task.cancel() - # Wait for heartbeat cancellation to complete - await asyncio.wait([heartbeat_task]) + last_inserted_at = dt.datetime.fromisoformat(details[0]) + except (TypeError, ValueError) as e: + raise HeartbeatParseError("last_inserted_at") from e + + return cls(last_inserted_at, _remaining=details[1:]) + + +HeartbeatType = typing.TypeVar("HeartbeatType", bound=HeartbeatDetails) + + +async def should_resume_from_activity_heartbeat( + activity, heartbeat_type: typing.Type[HeartbeatType], logger +) -> tuple[bool, HeartbeatType | None]: + """Check 
if a batch export should resume from an activity's heartbeat details. + + We understand that a batch export should resume any time that we receive heartbeat details and + those details can be correctly parsed. However, the decision is ultimately up to the batch export + activity to decide if it must resume and how to do so. + + Returns: + A tuple with the first element indicating if the batch export should resume. If the first element + is True, the second tuple element will be the heartbeat details themselves, otherwise None. + """ + try: + heartbeat_details = heartbeat_type.from_activity(activity) + + except EmptyHeartbeatError: + # We don't log this as a warning/error because it's the expected exception when heartbeat is empty. + heartbeat_details = None + received = False + logger.debug("Did not receive details from previous activity execution") + + except NotEnoughHeartbeatValuesError: + heartbeat_details = None + received = False + logger.warning("Details from previous activity execution did not contain the expected amount of values") + + except HeartbeatParseError: + heartbeat_details = None + received = False + logger.warning("Details from previous activity execution could not be parsed.") - return cast(F, wrapper) + except Exception: + # We should start from the beginning, but we make a point to log unexpected errors. + # Ideally, any new exceptions should be added to the previous blocks after the first time and we will never land here. 
+ heartbeat_details = None + received = False + logger.exception("Did not receive details from previous activity Excecution due to an unexpected error") + else: + received = True + logger.debug( + f"Received details from previous activity: {heartbeat_details}", + ) -async def heartbeat_every(delay: float, *details: Any) -> None: - # Heartbeat every so often while not cancelled - while True: - await asyncio.sleep(delay) - activity.heartbeat(*details) + return received, heartbeat_details diff --git a/posthog/temporal/workflows/redshift_batch_export.py b/posthog/temporal/workflows/redshift_batch_export.py index a853c91e79e1f..fd2ba4c9e9193 100644 --- a/posthog/temporal/workflows/redshift_batch_export.py +++ b/posthog/temporal/workflows/redshift_batch_export.py @@ -32,6 +32,61 @@ ) +def remove_escaped_whitespace_recursive(value): + """Remove all escaped whitespace characters from given value. + + PostgreSQL supports constant escaped strings by appending an E' to each string that + contains whitespace in them (amongst other characters). See: + https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS-ESCAPE + + However, Redshift does not support this syntax. So, to avoid any escaping by + underlying PostgreSQL library, we remove the whitespace ourselves as defined in the + translation table WHITESPACE_TRANSLATE. + + This function is recursive just to be extremely careful and catch any whitespace that + may be sneaked in a dictionary key or sequence. + """ + match value: + case str(s): + return " ".join(s.replace("\b", " ").split()) + + case bytes(b): + return remove_escaped_whitespace_recursive(b.decode("utf-8")) + + case [*sequence]: + # mypy could be bugged as it's raising a Statement unreachable error. + # But we are definitely reaching this statement in tests; hence the ignore comment. + # Maybe: https://github.com/python/mypy/issues/16272. 
+ return type(value)(remove_escaped_whitespace_recursive(sequence_value) for sequence_value in sequence) # type: ignore + + case set(elements): + return set(remove_escaped_whitespace_recursive(element) for element in elements) + + case {**mapping}: + return {k: remove_escaped_whitespace_recursive(v) for k, v in mapping.items()} + + case value: + return value + + +@contextlib.asynccontextmanager +async def redshift_connection(inputs) -> typing.AsyncIterator[psycopg.AsyncConnection]: + """Manage a Redshift connection. + + This just yields a Postgres connection but we adjust a couple of things required for + psycopg to work with Redshift: + 1. Set UNICODE encoding to utf-8 as Redshift reports back UNICODE. + 2. Set prepare_threshold to None on the connection as psycopg attempts to run DEALLOCATE ALL otherwise + which is not supported on Redshift. + """ + psycopg._encodings._py_codecs["UNICODE"] = "utf-8" + psycopg._encodings.py_codecs.update((k.encode(), v) for k, v in psycopg._encodings._py_codecs.items()) + + async with postgres_connection(inputs) as connection: + connection.prepare_threshold = None + yield connection + + async def insert_records_to_redshift( records: collections.abc.Iterator[dict[str, typing.Any]], redshift_connection: psycopg.AsyncConnection, @@ -74,27 +129,28 @@ async def insert_records_to_redshift( rows_exported = get_rows_exported_metric() async with async_client_cursor_from_connection(redshift_connection) as cursor: - batch = [pre_query.as_string(cursor).encode("utf-8")] + batch = [] + pre_query_str = pre_query.as_string(cursor).encode("utf-8") async def flush_to_redshift(batch): - await cursor.execute(b"".join(batch)) - rows_exported.add(len(batch) - 1) + values = b",".join(batch).replace(b" E'", b" '") + + await cursor.execute(pre_query_str + values) + rows_exported.add(len(batch)) # It would be nice to record BYTES_EXPORTED for Redshift, but it's not worth estimating # the byte size of each batch the way things are currently written. 
We can revisit this # in the future if we decide it's useful enough. for record in itertools.chain([first_record], records): batch.append(cursor.mogrify(template, record).encode("utf-8")) - if len(batch) < batch_size: - batch.append(b",") continue await flush_to_redshift(batch) - batch = [pre_query.as_string(cursor).encode("utf-8")] + batch = [] if len(batch) > 0: - await flush_to_redshift(batch[:-1]) + await flush_to_redshift(batch) @contextlib.asynccontextmanager @@ -186,7 +242,7 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs): ) properties_type = "VARCHAR(65535)" if inputs.properties_data_type == "varchar" else "SUPER" - async with postgres_connection(inputs) as connection: + async with redshift_connection(inputs) as connection: await create_table_in_postgres( connection, schema=inputs.schema, @@ -223,10 +279,14 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs): def map_to_record(row: dict) -> dict: """Map row to a record to insert to Redshift.""" - return { - key: json.dumps(row[key]) if key in json_columns and row[key] is not None else row[key] + record = { + key: json.dumps(remove_escaped_whitespace_recursive(row[key]), ensure_ascii=False) + if key in json_columns and row[key] is not None + else row[key] for key in schema_columns } + record["elements"] = "" + return record async with postgres_connection(inputs) as connection: await insert_records_to_redshift( diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py index fc29b414d2274..42e66f10d2ae3 100644 --- a/posthog/temporal/workflows/s3_batch_export.py +++ b/posthog/temporal/workflows/s3_batch_export.py @@ -276,7 +276,7 @@ class HeartbeatDetails(typing.NamedTuple): def from_activity_details(cls, details): last_uploaded_part_timestamp = details[0] upload_state = S3MultiPartUploadState(*details[1]) - return HeartbeatDetails(last_uploaded_part_timestamp, upload_state) + return cls(last_uploaded_part_timestamp, 
upload_state) @dataclass diff --git a/posthog/temporal/workflows/snowflake_batch_export.py b/posthog/temporal/workflows/snowflake_batch_export.py index 1831f87fa2f87..b216f20af0412 100644 --- a/posthog/temporal/workflows/snowflake_batch_export.py +++ b/posthog/temporal/workflows/snowflake_batch_export.py @@ -1,17 +1,28 @@ +import asyncio +import contextlib +import dataclasses import datetime as dt +import functools +import io import json -import tempfile -from dataclasses import dataclass +import typing import snowflake.connector from django.conf import settings -from snowflake.connector.cursor import SnowflakeCursor +from snowflake.connector.connection import SnowflakeConnection from temporalio import activity, workflow from temporalio.common import RetryPolicy from posthog.batch_exports.service import SnowflakeBatchExportInputs +from posthog.temporal.utils import ( + HeartbeatDetails, + HeartbeatParseError, + NotEnoughHeartbeatValuesError, + should_resume_from_activity_heartbeat, +) from posthog.temporal.workflows.base import PostHogWorkflow from posthog.temporal.workflows.batch_exports import ( + BatchExportTemporaryFile, CreateBatchExportRunInputs, UpdateBatchExportRunStatusInputs, create_export_run, @@ -43,7 +54,32 @@ def __init__(self, table_name: str, status: str, errors_seen: int, first_error: ) -@dataclass +@dataclasses.dataclass +class SnowflakeHeartbeatDetails(HeartbeatDetails): + """The Snowflake batch export details included in every heartbeat. + + Attributes: + file_no: The file number of the last file we managed to upload. 
+ """ + + file_no: int + + @classmethod + def from_activity(cls, activity): + details = super().from_activity(activity) + + if details.total_details < 2: + raise NotEnoughHeartbeatValuesError(details.total_details, 2) + + try: + file_no = int(details._remaining[1]) + except (TypeError, ValueError) as e: + raise HeartbeatParseError("file_no") from e + + return cls(last_inserted_at=details.last_inserted_at, file_no=file_no, _remaining=details._remaining[2:]) + + +@dataclasses.dataclass class SnowflakeInsertInputs: """Inputs for Snowflake.""" @@ -66,23 +102,137 @@ class SnowflakeInsertInputs: include_events: list[str] | None = None -def put_file_to_snowflake_table(cursor: SnowflakeCursor, file_name: str, table_name: str): +def use_namespace(connection: SnowflakeConnection, database: str, schema: str) -> None: + """Switch to a namespace given by database and schema. + + This allows all queries that follow to ignore database and schema. + """ + cursor = connection.cursor() + cursor.execute(f'USE DATABASE "{database}"') + cursor.execute(f'USE SCHEMA "{schema}"') + + +@contextlib.contextmanager +def snowflake_connection(inputs) -> typing.Generator[SnowflakeConnection, None, None]: + """Context manager that yields a Snowflake connection. + + Before yielding we ensure we are in the right namespace, and we set ABORT_DETACHED_QUERY + to FALSE to avoid Snowflake cancelling any async queries. 
+ """ + with snowflake.connector.connect( + user=inputs.user, + password=inputs.password, + account=inputs.account, + warehouse=inputs.warehouse, + database=inputs.database, + schema=inputs.schema, + role=inputs.role, + ) as connection: + use_namespace(connection, inputs.database, inputs.schema) + connection.cursor().execute("SET ABORT_DETACHED_QUERY = FALSE") + + yield connection + + +async def execute_async_query( + connection: SnowflakeConnection, + query: str, + parameters: dict | None = None, + file_stream=None, + poll_interval: float = 1.0, +) -> str: + """Wrap Snowflake connector's polling API in a coroutine. + + This enables asynchronous execution of queries to release the event loop to execute other tasks + while we poll for a query to be done. For example, the event loop may use this time for heartbeating. + + Args: + connection: A SnowflakeConnection object as produced by snowflake.connector.connect. + query: A query string to run asynchronously. + parameters: An optional dictionary of parameters to bind to the query. + poll_interval: Specify how long to wait in between polls. + """ + cursor = connection.cursor() + + # Snowflake docs incorrectly state that the 'params' argument is named 'parameters'. + result = cursor.execute_async(query, params=parameters, file_stream=file_stream) + query_id = cursor.sfqid or result["queryId"] + + # Snowflake does a blocking HTTP request, so we send it to a thread. + query_status = await asyncio.to_thread(connection.get_query_status_throw_if_error, query_id) + + while connection.is_still_running(query_status): + query_status = await asyncio.to_thread(connection.get_query_status_throw_if_error, query_id) + await asyncio.sleep(poll_interval) + + return query_id + + +async def create_table_in_snowflake(connection: SnowflakeConnection, table_name: str) -> None: + """Asynchronously create the table if it doesn't exist. 
+ + Note that we use the same schema as the snowflake-plugin for backwards compatibility.""" + await execute_async_query( + connection, + f""" + CREATE TABLE IF NOT EXISTS "{table_name}" ( + "uuid" STRING, + "event" STRING, + "properties" VARIANT, + "elements" VARIANT, + "people_set" VARIANT, + "people_set_once" VARIANT, + "distinct_id" STRING, + "team_id" INTEGER, + "ip" STRING, + "site_url" STRING, + "timestamp" TIMESTAMP + ) + COMMENT = 'PostHog generated events table' + """, + ) + + +async def put_file_to_snowflake_table( + connection: SnowflakeConnection, + file: BatchExportTemporaryFile, + table_name: str, + file_no: int, +): """Executes a PUT query using the provided cursor to the provided table_name. + Sadly, Snowflake's execute_async does not work with PUT statements. So, we pass the execute + call to run_in_executor: Since execute ends up boiling down to blocking IO (HTTP request), + the event loop should not be locked up. + + We add a file_no to the file_name when executing PUT as Snowflake will reject any files with the same + name. Since batch exports re-use the same file, our name does not change, but we don't want Snowflake + to reject or overwrite our new data. + Args: - cursor: A Snowflake cursor to execute the PUT query. - file_name: The name of the file to PUT. - table_name: The name of the table where to PUT the file. + connection: A SnowflakeConnection object as produced by snowflake.connector.connect. + file: The name of the local file to PUT. + table_name: The name of the Snowflake table where to PUT the file. + file_no: An int to identify which file number this is. Raises: TypeError: If we don't get a tuple back from Snowflake (should never happen). SnowflakeFileNotUploadedError: If the upload status is not 'UPLOADED'. """ - cursor.execute( - f""" - PUT file://{file_name} @%"{table_name}" - """ - ) + file.rewind() + + # We comply with the file-like interface of io.IOBase. + # So we ask mypy to be nice with us. 
+ reader = io.BufferedReader(file) # type: ignore + query = f'PUT file://{file.name}_{file_no}.jsonl @%"{table_name}"' + cursor = connection.cursor() + + execute_put = functools.partial(cursor.execute, query, file_stream=reader) + + loop = asyncio.get_running_loop() + await loop.run_in_executor(None, func=execute_put) + reader.detach() # BufferedReader closes the file otherwise. + result = cursor.fetchone() if not isinstance(result, tuple): # Mostly to appease mypy, as this query should always return a tuple. @@ -93,6 +243,55 @@ def put_file_to_snowflake_table(cursor: SnowflakeCursor, file_name: str, table_n raise SnowflakeFileNotUploadedError(table_name, status, message) +async def copy_loaded_files_to_snowflake_table( + connection: SnowflakeConnection, + table_name: str, +): + """Execute a COPY query in Snowflake to load any files PUT into the table. + + The query is executed asynchronously using Snowflake's polling API. + + Args: + connection: A SnowflakeConnection as returned by snowflake.connector.connect. + table_name: The table we are COPY-ing files into. + """ + query = f""" + COPY INTO "{table_name}" + FILE_FORMAT = (TYPE = 'JSON') + MATCH_BY_COLUMN_NAME = CASE_SENSITIVE + PURGE = TRUE + """ + query_id = await execute_async_query(connection, query) + + cursor = connection.cursor() + cursor.get_results_from_sfqid(query_id) + results = cursor.fetchall() + + for query_result in results: + if not isinstance(query_result, tuple): + # Mostly to appease mypy, as this query should always return a tuple. 
+ raise TypeError(f"Expected tuple from Snowflake COPY INTO query but got: '{type(query_result)}'") + + if len(query_result) < 2: + raise SnowflakeFileNotLoadedError( + table_name, + "NO STATUS", + 0, + query_result[0] if len(query_result) == 1 else "NO ERROR MESSAGE", + ) + + _, status = query_result[0:2] + + if status != "LOADED": + errors_seen, first_error = query_result[5:7] + raise SnowflakeFileNotLoadedError( + table_name, + status or "NO STATUS", + errors_seen or 0, + first_error or "NO ERROR MESSAGE", + ) + + @activity.defn async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): """Activity streams data from ClickHouse to Snowflake. @@ -106,6 +305,17 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): inputs.data_interval_end, ) + should_resume, details = await should_resume_from_activity_heartbeat(activity, SnowflakeHeartbeatDetails, logger) + + if should_resume is True and details is not None: + data_interval_start = details.last_inserted_at.isoformat() + last_inserted_at = details.last_inserted_at + file_no = details.file_no + else: + data_interval_start = inputs.data_interval_start + last_inserted_at = None + file_no = 0 + async with get_client() as client: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") @@ -113,7 +323,7 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): count = await get_rows_count( client=client, team_id=inputs.team_id, - interval_start=inputs.data_interval_start, + interval_start=data_interval_start, interval_end=inputs.data_interval_end, exclude_events=inputs.exclude_events, include_events=inputs.include_events, @@ -129,42 +339,31 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): logger.info("BatchExporting %s rows", count) - conn = snowflake.connector.connect( - user=inputs.user, - password=inputs.password, - account=inputs.account, - warehouse=inputs.warehouse, - database=inputs.database, - 
schema=inputs.schema, - role=inputs.role, - ) + rows_exported = get_rows_exported_metric() + bytes_exported = get_bytes_exported_metric() - try: - cursor = conn.cursor() - cursor.execute(f'USE DATABASE "{inputs.database}"') - cursor.execute(f'USE SCHEMA "{inputs.schema}"') - - # Create the table if it doesn't exist. Note that we use the same schema - # as the snowflake-plugin for backwards compatibility. - cursor.execute( - f""" - CREATE TABLE IF NOT EXISTS "{inputs.database}"."{inputs.schema}"."{inputs.table_name}" ( - "uuid" STRING, - "event" STRING, - "properties" VARIANT, - "elements" VARIANT, - "people_set" VARIANT, - "people_set_once" VARIANT, - "distinct_id" STRING, - "team_id" INTEGER, - "ip" STRING, - "site_url" STRING, - "timestamp" TIMESTAMP - ) - COMMENT = 'PostHog generated events table' - """ + async def flush_to_snowflake( + connection: SnowflakeConnection, + file: BatchExportTemporaryFile, + table_name: str, + file_no: int, + last: bool = False, + ): + logger.info( + "Putting %sfile %s containing %s records with size %s bytes", + "last " if last else "", + file_no, + file.records_since_last_reset, + file.bytes_since_last_reset, ) + await put_file_to_snowflake_table(connection, file, table_name, file_no) + rows_exported.add(file.records_since_last_reset) + bytes_exported.add(file.bytes_since_last_reset) + + with snowflake_connection(inputs) as connection: + await create_table_in_snowflake(connection, inputs.table_name) + results_iterator = get_results_iterator( client=client, team_id=inputs.team_id, @@ -173,118 +372,59 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): exclude_events=inputs.exclude_events, include_events=inputs.include_events, ) + result = None - local_results_file = tempfile.NamedTemporaryFile(suffix=".jsonl") - rows_in_file = 0 - - rows_exported = get_rows_exported_metric() - bytes_exported = get_bytes_exported_metric() - - def flush_to_snowflake(lrf: tempfile._TemporaryFileWrapper, rows_in_file: int): - 
lrf.flush() - put_file_to_snowflake_table(cursor, lrf.name, inputs.table_name) - rows_exported.add(rows_in_file) - bytes_exported.add(lrf.tell()) - - try: - while True: - try: - result = results_iterator.__next__() - - except StopIteration: - break - - except json.JSONDecodeError: - logger.info( - "Failed to decode a JSON value while iterating, potentially due to a ClickHouse error" - ) - # This is raised by aiochclient as we try to decode an error message from ClickHouse. - # So far, this error message only indicated that we were too slow consuming rows. - # So, we can resume from the last result. - if result is None: - # We failed right at the beginning - new_interval_start = None - else: - new_interval_start = result.get("inserted_at", None) - - if not isinstance(new_interval_start, str): - new_interval_start = inputs.data_interval_start - - results_iterator = get_results_iterator( - client=client, - team_id=inputs.team_id, - interval_start=new_interval_start, # This means we'll generate at least one duplicate. - interval_end=inputs.data_interval_end, - ) - continue - - if not result: - break - - # Write the results to a local file - local_results_file.write(json.dumps(result).encode("utf-8")) - local_results_file.write("\n".encode("utf-8")) - rows_in_file += 1 - - # Write results to Snowflake when the file reaches 50MB and - # reset the file, or if there is nothing else to write. 
- if ( - local_results_file.tell() - and local_results_file.tell() > settings.BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES - ): - logger.info("Uploading to Snowflake") - - # Flush the file to make sure everything is written - flush_to_snowflake(local_results_file, rows_in_file) - - # Delete the temporary file and create a new one - local_results_file.close() - local_results_file = tempfile.NamedTemporaryFile(suffix=".jsonl") - rows_in_file = 0 - - # Flush the file to make sure everything is written - flush_to_snowflake(local_results_file, rows_in_file) - - # We don't need the file anymore, close (and delete) it. - local_results_file.close() - cursor.execute( - f""" - COPY INTO "{inputs.table_name}" - FILE_FORMAT = (TYPE = 'JSON') - MATCH_BY_COLUMN_NAME = CASE_SENSITIVE - PURGE = TRUE - """ - ) - results = cursor.fetchall() - - for query_result in results: - if not isinstance(query_result, tuple): - # Mostly to appease mypy, as this query should always return a tuple. - raise TypeError(f"Expected tuple from Snowflake COPY INTO query but got: '{type(result)}'") - - if len(query_result) < 2: - raise SnowflakeFileNotLoadedError( - inputs.table_name, - "NO STATUS", - 0, - query_result[1] if len(query_result) == 1 else "NO ERROR MESSAGE", - ) - - _, status = query_result[0:2] - - if status != "LOADED": - errors_seen, first_error = query_result[5:7] - raise SnowflakeFileNotLoadedError( - inputs.table_name, - status or "NO STATUS", - errors_seen or 0, - first_error or "NO ERROR MESSAGE", - ) - - finally: - local_results_file.close() - finally: - conn.close() + + async def worker_shutdown_handler(): + """Handle the Worker shutting down by heart-beating our latest status.""" + await activity.wait_for_worker_shutdown() + logger.bind(last_inserted_at=last_inserted_at, file_no=file_no).debug("Worker shutting down!") + + if last_inserted_at is None: + # Don't heartbeat if worker shuts down before we could even send anything + # Just start from the beginning again. 
+ return + + activity.heartbeat(last_inserted_at, file_no) + + asyncio.create_task(worker_shutdown_handler()) + + with BatchExportTemporaryFile() as local_results_file: + for result in results_iterator: + record = { + "uuid": result["uuid"], + "event": result["event"], + "properties": result["properties"], + "elements": result["elements"], + "people_set": result["set"], + "people_set_once": result["set_once"], + "distinct_id": result["distinct_id"], + "team_id": result["team_id"], + "ip": result["ip"], + "site_url": result["site_url"], + "timestamp": result["timestamp"], + } + local_results_file.write_records_to_jsonl([record]) + + if local_results_file.tell() > settings.BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES: + await flush_to_snowflake(connection, local_results_file, inputs.table_name, file_no) + + last_inserted_at = result["inserted_at"] + file_no += 1 + + activity.heartbeat(last_inserted_at, file_no) + + local_results_file.reset() + + if local_results_file.tell() > 0 and result is not None: + await flush_to_snowflake(connection, local_results_file, inputs.table_name, file_no, last=True) + + last_inserted_at = result["inserted_at"] + file_no += 1 + + activity.heartbeat(last_inserted_at, file_no) + + await copy_loaded_files_to_snowflake_table(connection, inputs.table_name) @workflow.defn(name="snowflake-export") @@ -361,6 +501,4 @@ async def run(self, inputs: SnowflakeBatchExportInputs): "ForbiddenError", ], update_inputs=update_inputs, - # Disable heartbeat timeout until we add heartbeat support. 
- heartbeat_timeout_seconds=None, ) diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index d714f9a077c0d..a34d2ba545ae0 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -140,7 +140,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 @@ -525,7 +526,8 @@ "posthog_team"."event_properties", "posthog_team"."event_properties_with_usage", "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id" + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" WHERE "posthog_team"."id" = 2 LIMIT 21 diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 3bfcc64e497d1..7510ec26cd02b 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -10,7 +10,7 @@ from posthog.warehouse.external_data_source.source import StripeSourcePayload, create_stripe_source, delete_source from posthog.warehouse.external_data_source.connection import create_connection, start_sync from posthog.warehouse.external_data_source.destination import create_destination, delete_destination -from posthog.warehouse.sync_resource import sync_resource +from posthog.tasks.warehouse import sync_resource from posthog.api.routing import StructuredViewSetMixin from rest_framework.decorators import action diff --git a/posthog/warehouse/api/test/test_view_link.py b/posthog/warehouse/api/test/test_view_link.py index 3a2dcae6bf160..0bcb57e187b86 100644 --- 
a/posthog/warehouse/api/test/test_view_link.py +++ b/posthog/warehouse/api/test/test_view_link.py @@ -2,7 +2,7 @@ APIBaseTest, ) from posthog.warehouse.models import DataWarehouseViewLink, DataWarehouseSavedQuery -from posthog.api.query import process_query +from posthog.api.services.query import process_query class TestViewLinkQuery(APIBaseTest): diff --git a/posthog/warehouse/external_data_source/connection.py b/posthog/warehouse/external_data_source/connection.py index fc89f22abb65b..9a37222f9d8d4 100644 --- a/posthog/warehouse/external_data_source/connection.py +++ b/posthog/warehouse/external_data_source/connection.py @@ -37,6 +37,22 @@ def create_connection(source_id: str, destination_id: str) -> ExternalDataConnec ) +def activate_connection_by_id(connection_id: str): + update_connection_status_by_id(connection_id, "active") + + +def deactivate_connection_by_id(connection_id: str): + update_connection_status_by_id(connection_id, "inactive") + + +def update_connection_status_by_id(connection_id: str, status: str): + connection_id_url = f"{AIRBYTE_CONNECTION_URL}/{connection_id}" + + payload = {"status": status} + + send_request(connection_id_url, method="PATCH", payload=payload) + + def update_connection_stream(connection_id: str): connection_id_url = f"{AIRBYTE_CONNECTION_URL}/{connection_id}" diff --git a/posthog/warehouse/external_data_source/workspace.py b/posthog/warehouse/external_data_source/workspace.py index e92c07fc888cd..ceb8ed50ac33f 100644 --- a/posthog/warehouse/external_data_source/workspace.py +++ b/posthog/warehouse/external_data_source/workspace.py @@ -1,6 +1,7 @@ from posthog.models import Team from posthog.warehouse.external_data_source.client import send_request from django.conf import settings +import datetime AIRBYTE_WORKSPACE_URL = "https://api.airbyte.com/v1/workspaces" @@ -23,6 +24,8 @@ def get_or_create_workspace(team_id: int): if not team.external_data_workspace_id: workspace_id = create_workspace(team_id) 
team.external_data_workspace_id = workspace_id + # start tracking from now + team.external_data_workspace_last_synced_at = datetime.datetime.now(datetime.timezone.utc) team.save() return team.external_data_workspace_id diff --git a/posthog/warehouse/models/datawarehouse_saved_query.py b/posthog/warehouse/models/datawarehouse_saved_query.py index bca809bb30912..9117fa7c4eaf0 100644 --- a/posthog/warehouse/models/datawarehouse_saved_query.py +++ b/posthog/warehouse/models/datawarehouse_saved_query.py @@ -47,7 +47,7 @@ class Meta: ] def get_columns(self) -> Dict[str, str]: - from posthog.api.query import process_query + from posthog.api.services.query import process_query # TODO: catch and raise error response = process_query(self.team, self.query) diff --git a/posthog/warehouse/sync_resource.py b/posthog/warehouse/sync_resource.py deleted file mode 100644 index 3072bf43986d9..0000000000000 --- a/posthog/warehouse/sync_resource.py +++ /dev/null @@ -1,69 +0,0 @@ -from posthog.warehouse.models.external_data_source import ExternalDataSource -from posthog.warehouse.models import DataWarehouseCredential, DataWarehouseTable -from posthog.warehouse.external_data_source.connection import retrieve_sync -from posthog.celery import app - -from django.conf import settings -import structlog - -logger = structlog.get_logger(__name__) - - -def sync_resources(): - resources = ExternalDataSource.objects.filter(are_tables_created=False, status__in=["running", "error"]) - - for resource in resources: - sync_resource.delay(resource.pk) - - -@app.task(ignore_result=True) -def sync_resource(resource_id): - resource = ExternalDataSource.objects.get(pk=resource_id) - - try: - job = retrieve_sync(resource.connection_id) - except Exception as e: - logger.exception("Data Warehouse: Sync Resource failed with an unexpected exception.", exc_info=e) - resource.status = "error" - resource.save() - return - - if job is None: - logger.error(f"Data Warehouse: No jobs found for connection: 
{resource.connection_id}") - resource.status = "error" - resource.save() - return - - if job["status"] == "succeeded": - resource = ExternalDataSource.objects.get(pk=resource_id) - credential, _ = DataWarehouseCredential.objects.get_or_create( - team_id=resource.team.pk, - access_key=settings.AIRBYTE_BUCKET_KEY, - access_secret=settings.AIRBYTE_BUCKET_SECRET, - ) - - data = { - "credential": credential, - "name": "stripe_customers", - "format": "Parquet", - "url_pattern": f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/airbyte/{resource.team.pk}/customers/*.parquet", - "team_id": resource.team.pk, - } - - table = DataWarehouseTable(**data) - try: - table.columns = table.get_columns() - except Exception as e: - logger.exception( - f"Data Warehouse: Sync Resource failed with an unexpected exception for connection: {resource.connection_id}", - exc_info=e, - ) - else: - table.save() - - resource.are_tables_created = True - resource.status = job["status"] - resource.save() - else: - resource.status = job["status"] - resource.save() diff --git a/tsconfig.json b/tsconfig.json index 658bedd03e802..1c789a099ec9e 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -33,8 +33,8 @@ "suppressImplicitAnyIndexErrors": true, // Index objects by number "lib": ["dom", "es2019"] }, - "include": ["frontend/**/*", ".storybook/**/*"], - "exclude": ["node_modules/**/*", "staticfiles/**/*", "frontend/dist/**/*", "plugin-server/**/*"], + "include": ["frontend/**/*", "cypress/**/*", ".storybook/**/*"], + "exclude": ["frontend/dist/**/*"], "ts-node": { "compilerOptions": { "module": "commonjs"