diff --git a/.github/workflows/ci-backend-depot.yml b/.github/workflows/ci-backend-depot.yml index e555082455c9f..29cf80ddab67c 100644 --- a/.github/workflows/ci-backend-depot.yml +++ b/.github/workflows/ci-backend-depot.yml @@ -5,15 +5,7 @@ name: Backend CI (depot) on: - push: - branches: - - master pull_request: - workflow_dispatch: - inputs: - clickhouseServerVersion: - description: ClickHouse server version. Leave blank for default - type: string concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -39,7 +31,7 @@ jobs: # Job to decide if we should run backend ci # See https://github.com/dorny/paths-filter#conditional-execution for more details changes: - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 timeout-minutes: 5 if: github.repository == 'PostHog/posthog' name: Determine need to run backend checks @@ -90,7 +82,7 @@ jobs: timeout-minutes: 30 name: Python code quality checks - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 steps: # If this run wasn't initiated by the bot (meaning: snapshot update) and we've determined @@ -174,7 +166,7 @@ jobs: timeout-minutes: 10 name: Validate Django migrations - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 steps: - uses: actions/checkout@v3 @@ -237,7 +229,7 @@ jobs: timeout-minutes: 30 name: Django tests – ${{ matrix.segment }} (persons-on-events ${{ matrix.person-on-events && 'on' || 'off' }}), Py ${{ matrix.python-version }}, ${{ matrix.clickhouse-server-image }} (${{matrix.group}}/${{ matrix.concurrency }}) (depot) - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 strategy: fail-fast: false @@ -318,7 +310,7 @@ jobs: matrix: clickhouse-server-image: ['clickhouse/clickhouse-server:23.11.2.11-alpine'] if: needs.changes.outputs.backend == 'true' - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 steps: - name: 'Checkout repo' uses: actions/checkout@v3 diff --git a/.github/workflows/ci-e2e-depot.yml b/.github/workflows/ci-e2e-depot.yml index 2134d4d70f18f..4985dac9d746a 100644 --- a/.github/workflows/ci-e2e-depot.yml +++ b/.github/workflows/ci-e2e-depot.yml @@ -14,7 +14,7 @@ concurrency: jobs: changes: - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 timeout-minutes: 5 if: github.repository == 'PostHog/posthog' name: Determine need to run E2E checks @@ -55,7 +55,7 @@ jobs: chunks: needs: changes name: Cypress preparation - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 timeout-minutes: 5 outputs: chunks: ${{ steps.chunk.outputs.chunks }} @@ -70,7 +70,7 @@ jobs: container: name: Build and cache container image - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 timeout-minutes: 60 needs: [changes] permissions: @@ -94,7 +94,7 @@ jobs: cypress: name: Cypress E2E tests (${{ strategy.job-index }}) (depot) - runs-on: depot-ubuntu-latest + runs-on: depot-ubuntu-latest-4 timeout-minutes: 60 needs: [chunks, changes, container] permissions: diff --git a/.github/workflows/ci-hobby.yml b/.github/workflows/ci-hobby.yml index c5b878c8d2184..0025e656c8204 100644 --- a/.github/workflows/ci-hobby.yml +++ b/.github/workflows/ci-hobby.yml @@ -35,7 +35,16 @@ jobs: token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} - name: Get python deps run: pip install python-digitalocean==1.17.0 requests==2.28.1 + - name: Setup DO Hobby Instance + run: python3 bin/hobby-ci.py create + env: + DIGITALOCEAN_TOKEN: ${{ secrets.DIGITALOCEAN_TOKEN }} - name: Run smoke tests on DO - run: python3 bin/hobby-ci.py $GITHUB_HEAD_REF + run: python3 
bin/hobby-ci.py test $GITHUB_HEAD_REF + env: + DIGITALOCEAN_TOKEN: ${{ secrets.DIGITALOCEAN_TOKEN }} + - name: Post-cleanup step + if: always() + run: python3 bin/hobby-ci.py destroy env: DIGITALOCEAN_TOKEN: ${{ secrets.DIGITALOCEAN_TOKEN }} diff --git a/bin/docker-worker-celery b/bin/docker-worker-celery index 5d1e7567fcabe..bbd9949d88352 100755 --- a/bin/docker-worker-celery +++ b/bin/docker-worker-celery @@ -71,6 +71,10 @@ FLAGS+=("-n node@%h") # On Heroku $WEB_CONCURRENCY contains suggested number of forks per dyno type # https://github.com/heroku/heroku-buildpack-python/blob/main/vendor/WEB_CONCURRENCY.sh [[ -n "${WEB_CONCURRENCY}" ]] && FLAGS+=" --concurrency $WEB_CONCURRENCY" +# Restart worker process after it processes this many tasks (to mitigate memory leaks) +[[ -n "${CELERY_MAX_TASKS_PER_CHILD}" ]] && FLAGS+=" --max-tasks-per-child $CELERY_MAX_TASKS_PER_CHILD" +# Restart worker process after it exceeds this much memory usage (to mitigate memory leaks) +[[ -n "${CELERY_MAX_MEMORY_PER_CHILD}" ]] && FLAGS+=" --max-memory-per-child $CELERY_MAX_MEMORY_PER_CHILD" if [[ -z "${CELERY_WORKER_QUEUES}" ]]; then source ./bin/celery-queues.env diff --git a/bin/hobby-ci.py b/bin/hobby-ci.py index 7eed9237e6c83..c19022692ece7 100644 --- a/bin/hobby-ci.py +++ b/bin/hobby-ci.py @@ -3,8 +3,6 @@ import datetime import os import random -import re -import signal import string import sys import time @@ -12,43 +10,73 @@ import digitalocean import requests -letters = string.ascii_lowercase -random_bit = "".join(random.choice(letters) for i in range(4)) -name = f"do-ci-hobby-deploy-{random_bit}" -region = "sfo3" -image = "ubuntu-22-04-x64" -size = "s-4vcpu-8gb" -release_tag = "latest-release" -branch_regex = re.compile("release-*.*") -branch = sys.argv[1] -if branch_regex.match(branch): - release_tag = f"{branch}-unstable" -hostname = f"{name}.posthog.cc" -user_data = ( - f"#!/bin/bash \n" - "mkdir hobby \n" - "cd hobby \n" - "sed -i \"s/#\\$nrconf{restart} = 'i';/\\$nrconf{restart} = 'a';/g\" /etc/needrestart/needrestart.conf \n" - "git clone https://github.com/PostHog/posthog.git \n" - "cd posthog \n" - f"git checkout {branch} \n" - "cd .. 
\n" - f"chmod +x posthog/bin/deploy-hobby \n" - f"./posthog/bin/deploy-hobby {release_tag} {hostname} 1 \n" -) -token = os.getenv("DIGITALOCEAN_TOKEN") + +DOMAIN = "posthog.cc" class HobbyTester: - def __init__(self, domain, droplet, record): - # Placeholders for DO resources + def __init__( + self, + token=None, + name=None, + region="sfo3", + image="ubuntu-22-04-x64", + size="s-4vcpu-8gb", + release_tag="latest-release", + branch=None, + hostname=None, + domain=DOMAIN, + droplet_id=None, + droplet=None, + record_id=None, + record=None, + ): + if not token: + token = os.getenv("DIGITALOCEAN_TOKEN") + self.token = token + self.branch = branch + self.release_tag = release_tag + + random_bit = "".join(random.choice(string.ascii_lowercase) for i in range(4)) + + if not name: + name = f"do-ci-hobby-deploy-{self.release_tag}-{random_bit}" + self.name = name + + if not hostname: + hostname = f"{name}.{DOMAIN}" + self.hostname = hostname + + self.region = region + self.image = image + self.size = size + self.domain = domain self.droplet = droplet + if droplet_id: + self.droplet = digitalocean.Droplet(token=self.token, id=droplet_id) + self.record = record + if record_id: + self.record = digitalocean.Record(token=self.token, id=record_id) - @staticmethod - def block_until_droplet_is_started(droplet): - actions = droplet.get_actions() + self.user_data = ( + f"#!/bin/bash \n" + "mkdir hobby \n" + "cd hobby \n" + "sed -i \"s/#\\$nrconf{restart} = 'i';/\\$nrconf{restart} = 'a';/g\" /etc/needrestart/needrestart.conf \n" + "git clone https://github.com/PostHog/posthog.git \n" + "cd posthog \n" + f"git checkout {self.branch} \n" + "cd .. \n" + f"chmod +x posthog/bin/deploy-hobby \n" + f"./posthog/bin/deploy-hobby {self.release_tag} {self.hostname} 1 \n" + ) + + def block_until_droplet_is_started(self): + if not self.droplet: + return + actions = self.droplet.get_actions() up = False while not up: for action in actions: @@ -60,42 +88,43 @@ def block_until_droplet_is_started(droplet): print("Droplet not booted yet - waiting a bit") time.sleep(5) - @staticmethod - def get_public_ip(droplet): + def get_public_ip(self): + if not self.droplet: + return ip = None while not ip: time.sleep(1) - droplet.load() - ip = droplet.ip_address + self.droplet.load() + ip = self.droplet.ip_address print(f"Public IP found: {ip}") # type: ignore return ip - @staticmethod - def create_droplet(ssh_enabled=False): + def create_droplet(self, ssh_enabled=False): keys = None if ssh_enabled: - manager = digitalocean.Manager(token=token) + manager = digitalocean.Manager(token=self.token) keys = manager.get_all_sshkeys() - droplet = digitalocean.Droplet( - token=token, - name=name, - region=region, - image=image, - size_slug=size, - user_data=user_data, + self.droplet = digitalocean.Droplet( + token=self.token, + name=self.name, + region=self.region, + image=self.image, + size_slug=self.size, + user_data=self.user_data, ssh_keys=keys, tags=["ci"], ) - droplet.create() - return droplet + self.droplet.create() + return self.droplet - @staticmethod - def wait_for_instance(hostname, timeout=20, retry_interval=15): + def test_deployment(self, timeout=20, retry_interval=15): + if not self.hostname: + return # timeout in minutes # return true if success or false if failure print("Attempting to reach the instance") print(f"We will time out after {timeout} minutes") - url = f"https://{hostname}/_health" + url = f"https://{self.hostname}/_health" start_time = datetime.datetime.now() while datetime.datetime.now() < start_time + 
datetime.timedelta(minutes=timeout):
             try:
@@ -115,9 +144,29 @@ def wait_for_instance(hostname, timeout=20, retry_interval=15):
             print("Failure - we timed out before receiving a heartbeat")
         return False
 
+    def create_dns_entry(self, type, name, data, ttl=30):
+        self.domain = digitalocean.Domain(token=self.token, name=DOMAIN)
+        self.record = self.domain.create_new_domain_record(type=type, name=name, data=data, ttl=ttl)
+        return self.record
+
+    def create_dns_entry_for_instance(self):
+        if not self.droplet:
+            return
+        self.record = self.create_dns_entry(type="A", name=self.name, data=self.get_public_ip())
+        return self.record
+
+    def destroy_self(self, retries=3):
+        if not self.droplet or not self.domain or not self.record:
+            return
+        droplet_id = self.droplet.id
+        self.destroy_environment(droplet_id, self.record["domain_record"]["id"], retries=retries)
+
     @staticmethod
-    def destroy_environment(droplet, domain, record, retries=3):
+    def destroy_environment(droplet_id, record_id, retries=3):
         print("Destroying the droplet")
+        token = os.getenv("DIGITALOCEAN_TOKEN")
+        droplet = digitalocean.Droplet(token=token, id=droplet_id)
+        domain = digitalocean.Domain(token=token, name=DOMAIN)
         attempts = 0
         while attempts <= retries:
             attempts += 1
@@ -131,36 +180,83 @@ def destroy_environment(droplet, domain, record, retries=3):
         while attempts <= retries:
             attempts += 1
             try:
-                domain.delete_domain_record(id=record["domain_record"]["id"])
+                domain.delete_domain_record(id=record_id)
                 break
             except Exception as e:
                 print(f"Could not destroy the dns entry because\n{e}")
 
     def handle_sigint(self):
-        self.destroy_environment(self.droplet, self.domain, self.record)
+        self.destroy_self()
+
+    def export_droplet(self):
+        if not self.droplet:
+            print("Droplet not found. Exiting")
+            exit(1)
+        if not self.record:
+            print("DNS record not found. Exiting")
+            exit(1)
+        record_id = self.record["domain_record"]["id"]
+        record_name = self.record["domain_record"]["name"]
+        droplet_id = self.droplet.id
+
+        print(f"Exporting the droplet ID: {self.droplet.id} and DNS record ID: {record_id} for name {self.name}")
+        env_file_name = os.getenv("GITHUB_ENV")
+        with open(env_file_name, "a") as env_file:
+            env_file.write(f"HOBBY_DROPLET_ID={droplet_id}\n")
+        with open(env_file_name, "a") as env_file:
+            env_file.write(f"HOBBY_DNS_RECORD_ID={record_id}\n")
+            env_file.write(f"HOBBY_DNS_RECORD_NAME={record_name}\n")
+            env_file.write(f"HOBBY_NAME={self.name}\n")
+
+    def ensure_droplet(self, ssh_enabled=True):
+        self.create_droplet(ssh_enabled=ssh_enabled)
+        self.block_until_droplet_is_started()
+        self.create_dns_entry_for_instance()
+        self.export_droplet()
 
 
 def main():
-    print("Creating droplet on Digitalocean for testing Hobby Deployment")
-    droplet = HobbyTester.create_droplet(ssh_enabled=True)
-    HobbyTester.block_until_droplet_is_started(droplet)
-    public_ip = HobbyTester.get_public_ip(droplet)
-    domain = digitalocean.Domain(token=token, name="posthog.cc")
-    record = domain.create_new_domain_record(type="A", name=name, data=public_ip)
-
-    hobby_tester = HobbyTester(domain, droplet, record)
-    signal.signal(signal.SIGINT, hobby_tester.handle_sigint)  # type: ignore
-    signal.signal(signal.SIGHUP, hobby_tester.handle_sigint)  # type: ignore
-    print("Instance has started. 
You will be able to access it here after PostHog boots (~15 minutes):") - print(f"https://{hostname}") - health_success = HobbyTester.wait_for_instance(hostname) - HobbyTester.destroy_environment(droplet, domain, record) - if health_success: - print("We succeeded") - exit() - else: - print("We failed") - exit(1) + command = sys.argv[1] + if command == "create": + print("Creating droplet on Digitalocean for testing Hobby Deployment") + ht = HobbyTester() + ht.ensure_droplet(ssh_enabled=True) + print("Instance has started. You will be able to access it here after PostHog boots (~15 minutes):") + print(f"https://{ht.hostname}") + + if command == "destroy": + print("Destroying droplet on Digitalocean for testing Hobby Deployment") + droplet_id = os.environ.get("HOBBY_DROPLET_ID") + domain_record_id = os.environ.get("HOBBY_DNS_RECORD_ID") + print(f"Droplet ID: {droplet_id}") + print(f"Record ID: {domain_record_id}") + HobbyTester.destroy_environment(droplet_id=droplet_id, record_id=domain_record_id) + + if command == "test": + if len(sys.argv) < 3: + print("Please provide the branch name to test") + exit(1) + branch = sys.argv[2] + name = os.environ.get("HOBBY_NAME") + record_id = os.environ.get("HOBBY_DNS_RECORD_ID") + droplet_id = os.environ.get("HOBBY_DROPLET_ID") + print(f"Testing the deployment for {name} on branch {branch}") + print(f"Record ID: {record_id}") + print(f"Droplet ID: {droplet_id}") + + ht = HobbyTester( + branch=branch, + name=name, + record_id=record_id, + droplet_id=droplet_id, + ) + health_success = ht.test_deployment() + if health_success: + print("We succeeded") + exit() + else: + print("We failed") + exit(1) if __name__ == "__main__": diff --git a/cypress/e2e/before-onboarding.cy.ts b/cypress/e2e/before-onboarding.cy.ts new file mode 100644 index 0000000000000..2110950014488 --- /dev/null +++ b/cypress/e2e/before-onboarding.cy.ts @@ -0,0 +1,35 @@ +describe('Before Onboarding', () => { + before(() => { + cy.request({ + method: 'PATCH', + url: '/api/projects/1/', + body: { completed_snippet_onboarding: false }, + headers: { Authorization: 'Bearer e2e_demo_api_key' }, + }) + }) + + after(() => { + cy.request({ + method: 'PATCH', + url: '/api/projects/1/', + body: { completed_snippet_onboarding: true }, + headers: { Authorization: 'Bearer e2e_demo_api_key' }, + }) + }) + + it('Navigate to /products when a product has not been set up', () => { + cy.visit('/project/1/data-management/events') + + cy.get('[data-attr=top-bar-name] > span').contains('Products') + }) + + it('Navigate to a settings page even when a product has not been set up', () => { + cy.visit('/settings/user') + + cy.get('[data-attr=top-bar-name] > span').contains('User') + + cy.visit('/settings/organization') + + cy.get('[data-attr=top-bar-name] > span').contains('Organization') + }) +}) diff --git a/cypress/e2e/featureFlags.cy.ts b/cypress/e2e/featureFlags.cy.ts index a60f391f93325..e4f7e35edb718 100644 --- a/cypress/e2e/featureFlags.cy.ts +++ b/cypress/e2e/featureFlags.cy.ts @@ -4,13 +4,7 @@ describe('Feature Flags', () => { let name beforeEach(() => { - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - 'new-feature-flag-operators': true, - }) - ) - ) + cy.intercept('**/decide/*', (req) => req.reply(decideResponse({}))) cy.intercept('/api/projects/*/property_definitions?type=person*', { fixture: 'api/feature-flags/property_definition', @@ -116,7 +110,7 @@ describe('Feature Flags', () => { cy.get('.Toastify').contains('Undo').should('be.visible') }) - it.only('Move between property types 
smoothly, and support relative dates', () => { + it('Move between property types smoothly, and support relative dates', () => { // ensure unique names to avoid clashes cy.get('[data-attr=top-bar-name]').should('contain', 'Feature flags') cy.get('[data-attr=new-feature-flag]').click() @@ -125,8 +119,9 @@ describe('Feature Flags', () => { // select "add filter" and "property" cy.get('[data-attr=property-select-toggle-0').click() - // select the third property + // select the first property cy.get('[data-attr=taxonomic-filter-searchfield]').click() + cy.get('[data-attr=taxonomic-filter-searchfield]').type('is_demo') cy.get('[data-attr=taxonomic-tab-person_properties]').click() // select numeric $browser_version cy.get('[data-attr=prop-filter-person_properties-2]').click({ force: true }) diff --git a/cypress/e2e/onboarding.cy.ts b/cypress/e2e/onboarding.cy.ts index 56cb23bc45b70..19b7749265c4f 100644 --- a/cypress/e2e/onboarding.cy.ts +++ b/cypress/e2e/onboarding.cy.ts @@ -1,4 +1,3 @@ -import { urls } from 'scenes/urls' import { decideResponse } from '../fixtures/api/decide' describe('Onboarding', () => { diff --git a/cypress/e2e/signup.cy.ts b/cypress/e2e/signup.cy.ts index 4ef4036000378..8c660a69a93ff 100644 --- a/cypress/e2e/signup.cy.ts +++ b/cypress/e2e/signup.cy.ts @@ -12,7 +12,7 @@ describe('Signup', () => { cy.get('[data-attr=signup-email]').type('test@posthog.com').should('have.value', 'test@posthog.com') cy.get('[data-attr=password]').type('12345678').should('have.value', '12345678') cy.get('[data-attr=signup-start]').click() - cy.get('[data-attr=signup-first-name]').type('Jane').should('have.value', 'Jane') + cy.get('[data-attr=signup-name]').type('Jane Doe').should('have.value', 'Jane Doe') cy.get('[data-attr=signup-organization-name]').type('Hogflix Movies').should('have.value', 'Hogflix Movies') cy.get('[data-attr=signup-role-at-organization]').click() cy.get('.Popover li:first-child').click() @@ -39,18 +39,53 @@ describe('Signup', () => { cy.get('.text-danger').should('not.contain', 'Password must be at least 8 characters') // Validation error removed on keystroke }) - it('Can create user account', () => { + it('Can create user account with first name, last name and organization name', () => { + cy.intercept('POST', '/api/signup/').as('signupRequest') + const email = `new_user+${Math.floor(Math.random() * 10000)}@posthog.com` cy.get('[data-attr=signup-email]').type(email).should('have.value', email) cy.get('[data-attr=password]').type('12345678').should('have.value', '12345678') cy.get('[data-attr=signup-start]').click() - cy.get('[data-attr=signup-first-name]').type('Alice').should('have.value', 'Alice') + cy.get('[data-attr=signup-name]').type('Alice Bob').should('have.value', 'Alice Bob') cy.get('[data-attr=signup-organization-name]').type('Hogflix SpinOff').should('have.value', 'Hogflix SpinOff') cy.get('[data-attr=signup-role-at-organization]').click() cy.get('.Popover li:first-child').click() cy.get('[data-attr=signup-role-at-organization]').contains('Engineering') cy.get('[data-attr=signup-submit]').click() + cy.wait('@signupRequest').then((interception) => { + expect(interception.request.body).to.have.property('first_name') + expect(interception.request.body.first_name).to.equal('Alice') + expect(interception.request.body).to.have.property('last_name') + expect(interception.request.body.last_name).to.equal('Bob') + expect(interception.request.body).to.have.property('organization_name') + expect(interception.request.body.organization_name).to.equal('Hogflix SpinOff') + }) 
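+            // The single "signup-name" field above is asserted to arrive at the API
+            // pre-split into first_name/last_name. A hypothetical sketch of the split
+            // these assertions imply -- splitFullName is illustrative, not the app's
+            // actual helper -- assuming a split on the first whitespace run:
+            function splitFullName(name: string): { first_name: string; last_name?: string } {
+                const [first, ...rest] = name.trim().split(/\s+/)
+                return rest.length ? { first_name: first, last_name: rest.join(' ') } : { first_name: first }
+            }
+            expect(splitFullName('Alice Bob')).to.deep.equal({ first_name: 'Alice', last_name: 'Bob' })
+            // a single-word name yields no last_name key at all, as the next test checks
+            expect(splitFullName('Alice')).to.deep.equal({ first_name: 'Alice' })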
+ + // lazy regex for a guid + cy.location('pathname').should('match', /\/verify_email\/[a-zA-Z0-9_.-]*/) + }) + + it('Can create user account with just a first name', () => { + cy.intercept('POST', '/api/signup/').as('signupRequest') + + const email = `new_user+${Math.floor(Math.random() * 10000)}@posthog.com` + cy.get('[data-attr=signup-email]').type(email).should('have.value', email) + cy.get('[data-attr=password]').type('12345678').should('have.value', '12345678') + cy.get('[data-attr=signup-start]').click() + cy.get('[data-attr=signup-name]').type('Alice').should('have.value', 'Alice') + cy.get('[data-attr=signup-role-at-organization]').click() + cy.get('.Popover li:first-child').click() + cy.get('[data-attr=signup-role-at-organization]').contains('Engineering') + cy.get('[data-attr=signup-submit]').click() + + cy.wait('@signupRequest').then((interception) => { + expect(interception.request.body).to.have.property('first_name') + expect(interception.request.body.first_name).to.equal('Alice') + expect(interception.request.body).to.not.have.property('last_name') + expect(interception.request.body).to.not.have.property('organization_name') + }) + // lazy regex for a guid cy.location('pathname').should('match', /\/verify_email\/[a-zA-Z0-9_.-]*/) }) @@ -74,7 +109,7 @@ describe('Signup', () => { cy.get('.Toastify [data-attr="error-toast"]').contains('Inactive social login session.') }) - it.only('Shows redirect notice if redirecting for maintenance', () => { + it('Shows redirect notice if redirecting for maintenance', () => { cy.intercept('**/decide/*', (req) => req.reply( decideResponse({ diff --git a/cypress/e2e/trends.cy.ts b/cypress/e2e/trends.cy.ts index a1aa9d31a5594..36809958d7c25 100644 --- a/cypress/e2e/trends.cy.ts +++ b/cypress/e2e/trends.cy.ts @@ -24,7 +24,7 @@ describe('Trends', () => { cy.get('[data-attr=trend-element-subject-1]').click() cy.get('[data-attr=taxonomic-tab-actions]').click() cy.get('[data-attr=taxonomic-filter-searchfield]').click().type('home') - cy.contains('Hogflix homepage view').click({ force: true }) + cy.contains('Hogflix homepage view').click() // then cy.get('[data-attr=trend-line-graph]').should('exist') @@ -66,15 +66,15 @@ describe('Trends', () => { it('Apply specific filter on default pageview event', () => { cy.get('[data-attr=trend-element-subject-0]').click() cy.get('[data-attr=taxonomic-filter-searchfield]').click().type('Pageview') - cy.get('.taxonomic-infinite-list').find('.taxonomic-list-row').contains('Pageview').click({ force: true }) + cy.get('.taxonomic-infinite-list').find('.taxonomic-list-row').contains('Pageview').click() cy.get('[data-attr=trend-element-subject-0]').should('have.text', 'Pageview') // Apply a property filter cy.get('[data-attr=show-prop-filter-0]').click() cy.get('[data-attr=property-select-toggle-0]').click() - cy.get('[data-attr=prop-filter-event_properties-1]').click({ force: true }) + cy.get('[data-attr=prop-filter-event_properties-1]').click() - cy.get('[data-attr=prop-val]').click({ force: true }) + cy.get('[data-attr=prop-val]').click() // cypress is odd and even though when a human clicks this the right dropdown opens // in the test that doesn't happen cy.get('body').then(($body) => { @@ -88,14 +88,14 @@ describe('Trends', () => { it('Apply 1 overall filter', () => { cy.get('[data-attr=trend-element-subject-0]').click() cy.get('[data-attr=taxonomic-filter-searchfield]').click().type('Pageview') - cy.get('.taxonomic-infinite-list').find('.taxonomic-list-row').contains('Pageview').click({ force: true }) + 
cy.get('.taxonomic-infinite-list').find('.taxonomic-list-row').contains('Pageview').click() cy.get('[data-attr=trend-element-subject-0]').should('have.text', 'Pageview') cy.get('[data-attr$=add-filter-group]').click() cy.get('[data-attr=property-select-toggle-0]').click() cy.get('[data-attr=taxonomic-filter-searchfield]').click() - cy.get('[data-attr=prop-filter-event_properties-1]').click({ force: true }) - cy.get('[data-attr=prop-val]').click({ force: true }) + cy.get('[data-attr=prop-filter-event_properties-1]').click() + cy.get('[data-attr=prop-val]').click() // cypress is odd and even though when a human clicks this the right dropdown opens // in the test that doesn't happen cy.get('body').then(($body) => { @@ -103,7 +103,7 @@ describe('Trends', () => { cy.get('[data-attr=taxonomic-value-select]').click() } }) - cy.get('[data-attr=prop-val-0]').click({ force: true }) + cy.get('[data-attr=prop-val-0]').click() cy.get('[data-attr=trend-line-graph]', { timeout: 8000 }).should('exist') }) @@ -117,14 +117,14 @@ describe('Trends', () => { it('Apply pie filter', () => { cy.get('[data-attr=chart-filter]').click() - cy.get('.Popover').find('.LemonButton').contains('Pie').click({ force: true }) + cy.get('.Popover').find('.LemonButton').contains('Pie').click() cy.get('[data-attr=trend-pie-graph]').should('exist') }) it('Apply table filter', () => { cy.get('[data-attr=chart-filter]').click() - cy.get('.Popover').find('.LemonButton').contains('Table').click({ force: true }) + cy.get('.Popover').find('.LemonButton').contains('Table').click() cy.get('[data-attr=insights-table-graph]').should('exist') @@ -144,7 +144,7 @@ describe('Trends', () => { it('Apply property breakdown', () => { cy.get('[data-attr=add-breakdown-button]').click() - cy.get('[data-attr=prop-filter-event_properties-1]').click({ force: true }) + cy.get('[data-attr=prop-filter-event_properties-1]').click() cy.get('[data-attr=trend-line-graph]').should('exist') }) @@ -154,4 +154,16 @@ describe('Trends', () => { cy.contains('All Users*').click() cy.get('[data-attr=trend-line-graph]').should('exist') }) + + it('Show warning on MAU math in total value insight', () => { + cy.get('[data-attr=chart-filter]').click() + cy.get('.Popover').find('.LemonButton').contains('Pie').click() + cy.get('[data-attr=trend-pie-graph]').should('exist') // Make sure the pie chart is loaded before proceeding + + cy.get('[data-attr=math-selector-0]').click() + cy.get('[data-attr=math-monthly_active-0] .LemonIcon').should('exist') // This should be the warning icon + + cy.get('[data-attr=math-monthly_active-0]').trigger('mouseenter') // Activate warning tooltip + cy.get('.Tooltip').contains('we recommend using "Unique users" here instead').should('exist') + }) }) diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts index 6785e5bd69f0b..bfb16863482ec 100644 --- a/cypress/support/e2e.ts +++ b/cypress/support/e2e.ts @@ -32,7 +32,6 @@ beforeEach(() => { // set feature flags here e.g. 
// 'toolbar-launch-side-action': true, 'surveys-new-creation-flow': true, - 'surveys-results-visualizations': true, 'auto-redirect': true, hogql: true, 'data-exploration-insights': true, @@ -62,10 +61,15 @@ beforeEach(() => { email: 'test@posthog.com', password: '12345678', }) - cy.visit('/insights') - cy.wait('@getInsights').then(() => { - cy.get('.saved-insights tr').should('exist') - }) + + if (Cypress.spec.name.includes('before-onboarding')) { + cy.visit('/?no-preloaded-app-context=true') + } else { + cy.visit('/insights') + cy.wait('@getInsights').then(() => { + cy.get('.saved-insights tr').should('exist') + }) + } } }) diff --git a/ee/api/debug_ch_queries.py b/ee/api/debug_ch_queries.py index f4e7ec8760c26..6c4b1746b425b 100644 --- a/ee/api/debug_ch_queries.py +++ b/ee/api/debug_ch_queries.py @@ -15,7 +15,7 @@ class DebugCHQueries(viewsets.ViewSet): """ - Show recent queries for this user + List recent CH queries initiated by this user. """ def _get_path(self, query: str) -> Optional[str]: @@ -30,16 +30,21 @@ def list(self, request): response = sync_execute( """ - select - query, query_start_time, exception, toInt8(type), query_duration_ms - from clusterAllReplicas(%(cluster)s, system, query_log) - where - query LIKE %(query)s and - query_start_time > %(start_time)s and - type != 1 and - query not like %(not_query)s - order by query_start_time desc - limit 100""", + SELECT + query_id, argMax(query, type), argMax(query_start_time, type), argMax(exception, type), + argMax(query_duration_ms, type), max(type) AS status + FROM ( + SELECT + query_id, query, query_start_time, exception, query_duration_ms, toInt8(type) AS type + FROM clusterAllReplicas(%(cluster)s, system, query_log) + WHERE + query LIKE %(query)s AND + query NOT LIKE %(not_query)s AND + query_start_time > %(start_time)s + ORDER BY query_start_time desc + LIMIT 100 + ) + GROUP BY query_id""", { "query": f"/* user_id:{request.user.pk} %", "start_time": (now() - relativedelta(minutes=10)).timestamp(), @@ -50,12 +55,13 @@ def list(self, request): return Response( [ { - "query": resp[0], - "timestamp": resp[1], - "exception": resp[2], - "type": resp[3], + "query_id": resp[0], + "query": resp[1], + "timestamp": resp[2], + "exception": resp[3], "execution_time": resp[4], - "path": self._get_path(resp[0]), + "status": resp[5], + "path": self._get_path(resp[1]), } for resp in response ] diff --git a/ee/api/test/test_dashboard.py b/ee/api/test/test_dashboard.py index 39098247d411f..8c39a17135db0 100644 --- a/ee/api/test/test_dashboard.py +++ b/ee/api/test/test_dashboard.py @@ -4,10 +4,8 @@ from rest_framework import status from ee.api.test.base import APILicensedTest -from ee.api.test.fixtures.available_product_features import AVAILABLE_PRODUCT_FEATURES from ee.models.explicit_team_membership import ExplicitTeamMembership from ee.models.license import License -from posthog.constants import AvailableFeature from posthog.models import OrganizationMembership from posthog.models.dashboard import Dashboard from posthog.models.sharing_configuration import SharingConfiguration @@ -269,7 +267,12 @@ def test_sharing_edits_limited_to_collaborators(self): self.permission_denied_response("You don't have edit permissions for this dashboard."), ) - def test_cannot_edit_dashboard_description_when_collaboration_not_available(self): + def test_can_edit_dashboard_description_when_collaboration_not_available(self): + """ + Team collaboration feature is only available on some plans, but if the feature is + not available, the user should still be able to 
read/write for migration purposes. + The access to the feature is blocked in the UI, so this is unlikely to be truly abused. + """ self.client.logout() self.organization.available_features = [] @@ -288,44 +291,11 @@ def test_cannot_edit_dashboard_description_when_collaboration_not_available(self name="example dashboard", ) - response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", - { - "description": "i should not be allowed to edit this", - "name": "even though I am allowed to edit this", - }, - ) - - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - dashboard.refresh_from_db() - self.assertEqual(dashboard.description, "") - self.assertEqual(dashboard.name, "example dashboard") - - def test_can_edit_dashboard_description_when_collaboration_is_available(self): - self.client.logout() - - self.organization.available_features = [AvailableFeature.TEAM_COLLABORATION] - self.organization.available_product_features = AVAILABLE_PRODUCT_FEATURES - self.organization.save() - self.team.access_control = True - self.team.save() - - user_with_collaboration = User.objects.create_and_join( - self.organization, "no-collaboration-feature@posthog.com", None - ) - self.client.force_login(user_with_collaboration) - - dashboard: Dashboard = Dashboard.objects.create( - team=self.team, - name="example dashboard", - ) - response = self.client.patch( f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", { "description": "i should be allowed to edit this", - "name": "and so also to edit this", + "name": "as well as this", }, ) @@ -333,4 +303,4 @@ def test_can_edit_dashboard_description_when_collaboration_is_available(self): dashboard.refresh_from_db() self.assertEqual(dashboard.description, "i should be allowed to edit this") - self.assertEqual(dashboard.name, "and so also to edit this") + self.assertEqual(dashboard.name, "as well as this") diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr index 54b67fa7d359b..3c6b8ef78c385 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results ''' - /* user_id:108 celery:posthog.tasks.tasks.sync_insight_caching_state */ + /* user_id:107 celery:posthog.tasks.tasks.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events diff --git a/ee/frontend/mobile-replay/__mocks__/increment-with-child-duplication.json b/ee/frontend/mobile-replay/__mocks__/increment-with-child-duplication.json index c17efc6d9e246..7ffc2e5f38e5c 100644 --- a/ee/frontend/mobile-replay/__mocks__/increment-with-child-duplication.json +++ b/ee/frontend/mobile-replay/__mocks__/increment-with-child-duplication.json @@ -191,6 +191,13 @@ "x": 66, "y": 556 } + }, + { + "parentId": 209272202, + "wireframe": { + "id": 52129787123, + "type": "text" + } } ], "removes": [ diff --git a/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap b/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap index a421f7ff220bf..9397106ef6b80 100644 --- a/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap +++ b/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap @@ -160,6 +160,147 @@ 
exports[`replay/transform transform can convert images 1`] = ` ] `; +exports[`replay/transform transform can convert invalid text wireframe 1`] = ` +[ + { + "data": { + "height": 600, + "href": "", + "width": 300, + }, + "timestamp": 1, + "type": 4, + }, + { + "data": { + "initialOffset": { + "left": 0, + "top": 0, + }, + "node": { + "childNodes": [ + { + "id": 2, + "name": "html", + "publicId": "", + "systemId": "", + "type": 1, + }, + { + "attributes": { + "data-rrweb-id": 3, + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": { + "data-rrweb-id": 4, + }, + "childNodes": [ + { + "attributes": { + "type": "text/css", + }, + "childNodes": [ + { + "id": 102, + "textContent": " + body { + margin: unset; + } + input, button, select, textarea { + font: inherit; + margin: 0; + padding: 0; + border: 0; + outline: 0; + background: transparent; + padding-block: 0 !important; + } + .input:focus { + outline: none; + } + img { + border-style: none; + } + ", + "type": 3, + }, + ], + "id": 101, + "tagName": "style", + "type": 2, + }, + ], + "id": 4, + "tagName": "head", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 5, + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": { + "data-rrweb-id": 12345, + "style": "border-width: 4px;border-radius: 10px;border-color: #ee3ee4;border-style: solid;color: #ee3ee4;width: 100px;height: 30px;position: fixed;left: 11px;top: 12px;overflow:hidden;white-space:normal;", + }, + "childNodes": [], + "id": 12345, + "tagName": "div", + "type": 2, + }, + { + "attributes": { + "data-render-reason": "a fixed placeholder to contain the keyboard in the correct stacking position", + "data-rrweb-id": 9, + }, + "childNodes": [], + "id": 9, + "tagName": "div", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 7, + }, + "childNodes": [], + "id": 7, + "tagName": "div", + "type": 2, + }, + { + "attributes": { + "data-rrweb-id": 11, + }, + "childNodes": [], + "id": 11, + "tagName": "div", + "type": 2, + }, + ], + "id": 5, + "tagName": "body", + "type": 2, + }, + ], + "id": 3, + "tagName": "html", + "type": 2, + }, + ], + "id": 1, + "type": 0, + }, + }, + "timestamp": 1, + "type": 2, + }, +] +`; + exports[`replay/transform transform can convert navigation bar 1`] = ` [ { @@ -895,7 +1036,7 @@ exports[`replay/transform transform can process unknown types without error 1`] { "attributes": { "data-rrweb-id": 12345, - "style": "background-color: #f3f4ef;color: #35373e;width: 100px;height: 30px;position: fixed;left: 25px;top: 42px;align-items: center;justify-content: center;display: flex;", + "style": "background-color: #f3f4ef;background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg==");background-size: auto;background-repeat: unset;color: #35373e;width: 100px;height: 30px;position: fixed;left: 25px;top: 42px;align-items: center;justify-content: center;display: flex;", }, "childNodes": [ { @@ -1453,6 +1594,20 @@ exports[`replay/transform transform incremental mutations de-duplicate the tree }, "parentId": 52129787, }, + { + "nextId": null, + "node": { + "attributes": { + "data-rrweb-id": 52129787123, + "style": "position: fixed;left: 0px;top: 0px;overflow:hidden;white-space:normal;", + }, + 
"childNodes": [], + "id": 52129787123, + "tagName": "div", + "type": 2, + }, + "parentId": 209272202, + }, ], "attributes": [], "removes": [ @@ -1689,6 +1844,13 @@ AAAAAAAAAAAAAAAAAAAAAAAAgCN/AW0xMqHnNQceAAAAAElFTkSuQmCC "y": 556, }, }, + { + "parentId": 209272202, + "wireframe": { + "id": 52129787123, + "type": "text", + }, + }, ], "removes": [ { @@ -6873,7 +7035,7 @@ exports[`replay/transform transform inputs open keyboard custom event 1`] = ` "node": { "attributes": { "data-rrweb-id": 10, - "style": "background-color: #f3f4ef;color: #35373e;width: 100vw;height: 150px;bottom: 0;position: fixed;align-items: center;justify-content: center;display: flex;", + "style": "background-color: #f3f4ef;background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg==");background-size: auto;background-repeat: unset;color: #35373e;width: 100vw;height: 150px;bottom: 0;position: fixed;align-items: center;justify-content: center;display: flex;", }, "childNodes": [ { @@ -6983,7 +7145,7 @@ exports[`replay/transform transform inputs placeholder - $inputType - $value 1`] { "attributes": { "data-rrweb-id": 12365, - "style": "background-color: #f3f4ef;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", + "style": "background-color: #f3f4ef;background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg==");background-size: auto;background-repeat: unset;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", }, "childNodes": [ { @@ -8133,7 +8295,7 @@ exports[`replay/transform transform inputs web_view - $inputType - $value 1`] = { "attributes": { "data-rrweb-id": 12365, - "style": "background-color: #f3f4ef;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", + "style": "background-color: #f3f4ef;background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg==");background-size: auto;background-repeat: unset;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", }, "childNodes": [ { @@ -8269,7 +8431,7 @@ exports[`replay/transform transform inputs web_view with URL 1`] = ` { "attributes": { "data-rrweb-id": 12365, - "style": "background-color: #f3f4ef;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", + "style": "background-color: #f3f4ef;background-image: 
url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg==");background-size: auto;background-repeat: unset;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", }, "childNodes": [ { @@ -8553,7 +8715,7 @@ exports[`replay/transform transform omitting x and y is equivalent to setting th { "attributes": { "data-rrweb-id": 12345, - "style": "background-color: #f3f4ef;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", + "style": "background-color: #f3f4ef;background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg==");background-size: auto;background-repeat: unset;color: #35373e;width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;align-items: center;justify-content: center;display: flex;", }, "childNodes": [ { diff --git a/ee/frontend/mobile-replay/transform.test.ts b/ee/frontend/mobile-replay/transform.test.ts index 788bb65655d3d..92384e48b2986 100644 --- a/ee/frontend/mobile-replay/transform.test.ts +++ b/ee/frontend/mobile-replay/transform.test.ts @@ -480,6 +480,43 @@ describe('replay/transform', () => { expect(converted).toMatchSnapshot() }) + test('can convert invalid text wireframe', () => { + const converted = posthogEEModule.mobileReplay?.transformToWeb([ + { + data: { + width: 300, + height: 600, + }, + timestamp: 1, + type: 4, + }, + { + type: 2, + data: { + wireframes: [ + { + id: 12345, + type: 'text', + x: 11, + y: 12, + width: 100, + height: 30, + style: { + color: '#ee3ee4', + borderColor: '#ee3ee4', + borderWidth: '4', + borderRadius: '10px', + }, + // text property is missing + }, + ], + }, + timestamp: 1, + }, + ]) + expect(converted).toMatchSnapshot() + }) + test('can set background image to base64 png', () => { const converted = posthogEEModule.mobileReplay?.transformToWeb([ { diff --git a/ee/frontend/mobile-replay/transformer/transformers.ts b/ee/frontend/mobile-replay/transformer/transformers.ts index 1527a24d7dbeb..2e2d0cbeb5169 100644 --- a/ee/frontend/mobile-replay/transformer/transformers.ts +++ b/ee/frontend/mobile-replay/transformer/transformers.ts @@ -11,6 +11,7 @@ import { } from '@rrweb/types' import { captureMessage } from '@sentry/react' import { isObject } from 'lib/utils' +import { PLACEHOLDER_SVG_DATA_IMAGE_URL } from 'scenes/session-recordings/player/rrweb' import { attributes, @@ -105,6 +106,10 @@ export function _isPositiveInteger(id: unknown): id is number { return typeof id === 'number' && id > 0 && id % 1 === 0 } +function _isNullish(x: unknown): x is null | undefined { + return x === null || x === undefined +} + function isRemovedNodeMutation(x: addedNodeMutation | removedNodeMutation): x is removedNodeMutation { return isObject(x) && 'id' in x } @@ -218,6 +223,17 @@ function makeTextElement( // because we might have to style the text, we always wrap it in a div // and apply styles to 
that const id = context.idSequence.next().value + + const childNodes = [...children] + if (!_isNullish(wireframe.text)) { + childNodes.unshift({ + type: NodeType.Text, + textContent: wireframe.text, + // since the text node is wrapped, we assign it a synthetic id + id, + }) + } + return { result: { type: NodeType.Element, @@ -227,15 +243,7 @@ function makeTextElement( 'data-rrweb-id': wireframe.id, }, id: wireframe.id, - childNodes: [ - { - type: NodeType.Text, - textContent: wireframe.text, - // since the text node is wrapped, we assign it a synthetic id - id: id, - }, - ...children, - ], + childNodes, }, context, } @@ -270,6 +278,9 @@ export function makePlaceholderElement( horizontalAlign: 'center', backgroundColor: wireframe.style?.backgroundColor || BACKGROUND, color: wireframe.style?.color || FOREGROUND, + backgroundImage: PLACEHOLDER_SVG_DATA_IMAGE_URL, + backgroundSize: 'auto', + backgroundRepeat: 'unset', ...context.styleOverride, }), 'data-rrweb-id': wireframe.id, @@ -983,6 +994,7 @@ function isMobileIncrementalSnapshotEvent(x: unknown): x is MobileIncrementalSna function makeIncrementalAdd(add: MobileNodeMutation, context: ConversionContext): addedNodeMutation[] | null { const converted = convertWireframe(add.wireframe, context) + if (!converted) { return null } diff --git a/ee/frontend/mobile-replay/transformer/types.ts b/ee/frontend/mobile-replay/transformer/types.ts index 103b5ac2e865f..3ba93d6fc2ebd 100644 --- a/ee/frontend/mobile-replay/transformer/types.ts +++ b/ee/frontend/mobile-replay/transformer/types.ts @@ -14,4 +14,4 @@ export interface ConversionContext { // StyleOverride is defined here and not in the schema // because these are overrides that the transformer is allowed to make // not that clients are allowed to request -export type StyleOverride = MobileStyles & { bottom?: true } +export type StyleOverride = MobileStyles & { bottom?: true; backgroundRepeat?: 'no-repeat' | 'unset' } diff --git a/ee/frontend/mobile-replay/transformer/wireframeStyle.ts b/ee/frontend/mobile-replay/transformer/wireframeStyle.ts index ccd06bfcc662c..1719060589dd1 100644 --- a/ee/frontend/mobile-replay/transformer/wireframeStyle.ts +++ b/ee/frontend/mobile-replay/transformer/wireframeStyle.ts @@ -229,10 +229,13 @@ export function makeBackgroundStyles(wireframe: wireframe, styleOverride?: Style } if (combinedStyles.backgroundImage) { + const backgroundImageURL = combinedStyles.backgroundImage.startsWith('url(') + ? 
combinedStyles.backgroundImage
+            : `url('${dataURIOrPNG(combinedStyles.backgroundImage)}')`
         styleParts = styleParts.concat([
-            `background-image: url('${dataURIOrPNG(combinedStyles.backgroundImage)}')`,
+            `background-image: ${backgroundImageURL}`,
             `background-size: ${combinedStyles.backgroundSize || 'contain'}`,
-            'background-repeat: no-repeat',
+            `background-repeat: ${combinedStyles.backgroundRepeat || 'no-repeat'}`,
         ])
     }
diff --git a/ee/session_recordings/ai/error_clustering.py b/ee/session_recordings/ai/error_clustering.py
index 03ea4f62d2789..7a3c12c44dec0 100644
--- a/ee/session_recordings/ai/error_clustering.py
+++ b/ee/session_recordings/ai/error_clustering.py
@@ -6,6 +6,7 @@ import pandas as pd
 import numpy as np
 from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
+from datetime import date
 
 CLUSTER_REPLAY_ERRORS_TIMING = Histogram(
     "posthog_session_recordings_cluster_replay_errors",
@@ -30,7 +31,7 @@ def error_clustering(team: Team, user: User):
     if not results:
         return []
 
-    df = pd.DataFrame(results, columns=["session_id", "input", "embeddings"])
+    df = pd.DataFrame(results, columns=["session_id", "error", "embeddings", "timestamp"])
 
     df["cluster"] = cluster_embeddings(df["embeddings"].tolist())
 
@@ -42,7 +43,7 @@ def error_clustering(team: Team, user: User):
 def fetch_error_embeddings(team_id: int):
     query = """
             SELECT
-                session_id, input, embeddings
+                session_id, input, embeddings, generation_timestamp
             FROM
                 session_replay_embeddings
             WHERE
@@ -76,13 +77,21 @@ def construct_response(df: pd.DataFrame, team: Team, user: User):
     clusters = []
     for cluster, rows in df.groupby("cluster"):
         session_ids = rows["session_id"].unique()
-        sample = rows.sample(n=1)[["session_id", "input"]].rename(columns={"input": "error"}).to_dict("records")
+        sample = rows.sample(n=1)[["session_id", "error"]].to_dict("records")[0]
+
+        date_series = (
+            rows.groupby([rows["timestamp"].dt.date])
+            .size()
+            .reindex(pd.date_range(end=date.today(), periods=7), fill_value=0)
+        )
+        sparkline = dict(zip(date_series.index.astype(str), date_series))
         clusters.append(
             {
                 "cluster": cluster,
-                "sample": sample,
-                "session_ids": session_ids,
+                "sample": sample.get("error"),
+                "session_ids": np.random.choice(session_ids, size=DBSCAN_MIN_SAMPLES - 1),
                 "occurrences": rows.size,
+                "sparkline": sparkline,
                 "unique_sessions": len(session_ids),
                 "viewed": len(np.intersect1d(session_ids, viewed_session_ids, assume_unique=True)),
             }
diff --git a/frontend/__snapshots__/exporter-exporter--funnel-top-to-bottom-breakdown-insight--dark.png b/frontend/__snapshots__/exporter-exporter--funnel-top-to-bottom-breakdown-insight--dark.png
index 223b0b5a5e0fa..edfba77bef6d2 100644
Binary files a/frontend/__snapshots__/exporter-exporter--funnel-top-to-bottom-breakdown-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--funnel-top-to-bottom-breakdown-insight--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-batchexports--exports--dark.png b/frontend/__snapshots__/scenes-app-batchexports--exports--dark.png
index c26bfcde41da7..8b08d084fcd3b 100644
Binary files a/frontend/__snapshots__/scenes-app-batchexports--exports--dark.png and b/frontend/__snapshots__/scenes-app-batchexports--exports--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-batchexports--exports--light.png b/frontend/__snapshots__/scenes-app-batchexports--exports--light.png
index c9f83584af268..2cb1ba1077f09 100644
Binary files a/frontend/__snapshots__/scenes-app-batchexports--exports--light.png and 
b/frontend/__snapshots__/scenes-app-batchexports--exports--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png index a3170412155a9..c94024858f014 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png index e8a50e37eebb7..27fe79337869e 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png index bc6d2e800b233..d6b7eb401f2d2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png index 575234ed7ef07..344f72c3deef4 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png index 455a435c50b31..324355d0243e7 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png index daa7e7d3e7dc0..7b67b19f852de 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark--webkit.png index 076bbf95dd5f2..342604e30b9da 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark.png index ac37058ebc358..6fc5f450fcf0c 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--dark.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light--webkit.png
index 9f03b27692951..6d92b76f76832 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light.png
index 6b2954224253c..b9399d595b1e6 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark--webkit.png
index 4f5951d4022dc..1c8d0e37c4c3b 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark.png
index a213a4a4dbe41..beb778892dc98 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light--webkit.png
index e5b86ed13abf0..55ca04bf91a1e 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light.png
index ef84a29eea45f..8ebee9031d744 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark--webkit.png
index c895df395644b..8f746a8691900 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark.png
index f286b0fbe282d..073fd1cfd9b4d 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light--webkit.png
index 5f06ddcb4e2a9..d533e0d8f9f33 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light.png
index 4bbcbba2ecc43..291f8b46aeaa7 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark--webkit.png
index 5b8cec66360e4..a3ee2db3746af 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png
index 0ed6f0abedd6e..94a5ee02ec4d8 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light--webkit.png
index f207448633c95..b34882ace11cc 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light.png
index 7cf5ae0c0fa08..4edbfeac60e82 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark--webkit.png
index 2cc3b8ce22b5b..de7854b8a268c 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark.png
index e56c597585fe1..450f4df2e5086 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light--webkit.png
index 4038bd965b877..4f3aaff555e73 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light.png
index 854baf356aebf..2caada3e5f0aa 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-time-to-convert-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark--webkit.png
index 1bd2d3cb65439..59c4fad04bb91 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark.png
index f7b3ed2806926..07b63f63955a0 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light--webkit.png
index 841d652c1b668..221765aaf0f35 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light.png
index 15864cbd8c7c6..2eef62fcbf80c 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark--webkit.png
index 734e97aa02c7f..cfacc6abdac7d 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png
index ac6911fadb1af..32d5c490f4f9d 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light--webkit.png
index eeb591feab5e9..d14b3f0256a8d 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light.png
index b5849b6f6b456..2ed59b8746a73 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark--webkit.png
index 5f62b18cf66e2..5a8dd279c4098 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png
index f3b61f2d8518e..2e7597327dd95 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light--webkit.png
index f08d761957ebd..f07390be2bde5 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png
index de2118e4c8386..43b1e9d375df0 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark--webkit.png
index 2bbeb51f8515f..e4f2f16fb20cb 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png
index 43a8dd18ccad5..5ef12b9284334 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light--webkit.png
index 3cae496371812..63239ee7e9c1b 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png
index ba97a5b9298bc..bc480012967fe 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark--webkit.png
index 21553a50b6531..c287ce9981c4b 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark.png
index d057c280e852a..ab96b0d690b72 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light--webkit.png
index ba877420fbab8..095df26be5723 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light.png b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light.png
index e340ca1e2fec8..9fac34d754133 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--lifecycle-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark--webkit.png
index 879dbd09a75ce..c950df2fccc84 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark.png
index ac0f6986bff7d..85bd4ba1299cd 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light--webkit.png
index 0212d628ef242..89166c108bf90 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light.png
index 6af782a80368f..c8ae397ec27b7 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--retention-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-edit--dark--webkit.png
index 33375878685ff..c68cbd4afaa7b 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--retention-edit--dark.png
index c141849f38c7b..150fd74f16754 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--retention-edit--light--webkit.png
index c560bb36af1df..ad6cef32c6170 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--retention-edit--light.png b/frontend/__snapshots__/scenes-app-insights--retention-edit--light.png
index 2af238075ff92..8085a27d8023e 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--retention-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--retention-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark--webkit.png
index 7f4281a6737e4..b3c2aac2663cb 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark.png
index 1b7682973150f..500aabe4c16dd 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png
index 6b5fa648a1d4b..2b971daa6d8aa 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png
index 5fedaa853ecac..f9e2e405fe7ab 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png
index 9d1ff6c58a2e4..f15a52e0adeb7 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png
index 42935a08f7b53..c63bec3bce157 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png
index 4d3ced623edbe..efc991b09979b 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png
index 942b7487b4190..bd7561034d2b6 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png
index 24e3a2afed4b3..7eda5ccb5ec8f 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark.png
index 2cb7a8fb914d0..031416c77e653 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png
index fa9f84a4ba714..cb3392ec04d8c 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png
index dabf1687084f3..96ee57257d5db 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png
index c6f88a5b288f2..f6c83a9e6c855 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png
index 78323ed226515..1d7e811296376 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png
index 63b6264d0ddcb..bd0e45b8b0482 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png
index 497451f111003..103c6119e6fb2 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png
index 72afaae55a6de..1fb56f38f495c 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png
index 7696eacfcae73..e4e23a2d8c764 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png
index 057845cb3d383..03ef62763c14f 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png
index 72e4f2a949c45..7316fb7aa2f1e 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png
index fbf8ab4b5a97f..1523b84cc86e1 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark.png
index 8fb3b78df8373..256d49c9cec1a 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png
index e3e4c32f81aa6..780c2c6bb3cbc 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png
index f97959a7caec2..ef943248640ba 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark--webkit.png
index 1ecfd71f121ad..a24f6138139ca 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark.png
index 602535c876235..ba7351d0470e8 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light--webkit.png
index 829a8dbcb2414..0663ea14afe27 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png
index ec763aa79982a..74ee2a99f91ef 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png
index 58c16c493ecd2..c488cb571ccc9 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png
index 183894fab5689..93dbea178b4cf 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png
index 5335ee79ea2c5..b8a21d2a9f0ab 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png
index 5490be89c2d59..964c47e56fe9c 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark--webkit.png
index 339c1436bea19..976663635232c 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark.png
index ba4b8e99f82fe..67b0af12b8809 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png
index 3990924ee4605..42b5f935da567 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png
index 87039d7c7e288..c7176cd1facc8 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark--webkit.png
index 51b2f45767cb7..bd1f122c8871b 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark.png
index fb80bc1e47f74..b1a9e60f7d5bd 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png
index 52668967146b3..2282f2e9ba1ed 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png
index bd333c9141210..c1b8261c6a5ee 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark--webkit.png
index 58c196a6b6a65..2cc6079eb8e50 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark.png
index 6bc096cdc5906..348137ecb0667 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light--webkit.png
index 51f2149df17f4..6455ad387b355 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light.png
index a54c0c7d17c72..5633ed07bf6ec 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-world-map-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark--webkit.png
index 85ce2e0d9b33c..2f8a18cd1672a 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark.png
index 771cc514bfd3a..104a4d9515fe1 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light--webkit.png
index a1b1f2e19445e..3bfa65b4ca644 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png
index 8945a55c6ae91..db3a5409f56f8 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png
index 6933b1ab628b3..b24a61098927e 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png
index f5102f58516f8..4fab34860029c 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--cloud--dark.png b/frontend/__snapshots__/scenes-other-invitesignup--cloud--dark.png
index 10c71817bc470..4946aedfeb38a 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--cloud--dark.png and b/frontend/__snapshots__/scenes-other-invitesignup--cloud--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--cloud--light.png b/frontend/__snapshots__/scenes-other-invitesignup--cloud--light.png
index 0952b5a96532a..b7eef2e8b826c 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--cloud--light.png and b/frontend/__snapshots__/scenes-other-invitesignup--cloud--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--dark.png b/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--dark.png
index dd4c5ed9fd9a8..6767462f173e0 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--dark.png and b/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--light.png b/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--light.png
index 0f3cdaf9bdd5e..18f965fbc6bf6 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--light.png and b/frontend/__snapshots__/scenes-other-invitesignup--cloud-eu--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--dark.png b/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--dark.png
index 4a6a270e80ade..605ac3e495e9c 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--dark.png and b/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--light.png b/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--light.png
index 62d41c8a92f04..5108cb2db25da 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--light.png and b/frontend/__snapshots__/scenes-other-invitesignup--invalid-link--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--logged-in--dark.png b/frontend/__snapshots__/scenes-other-invitesignup--logged-in--dark.png
index 1dc24c67f58cf..fc126241a2102 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--logged-in--dark.png and b/frontend/__snapshots__/scenes-other-invitesignup--logged-in--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--logged-in--light.png b/frontend/__snapshots__/scenes-other-invitesignup--logged-in--light.png
index 74bd780e5f791..8783f29981d49 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--logged-in--light.png and b/frontend/__snapshots__/scenes-other-invitesignup--logged-in--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--dark.png b/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--dark.png
index cea25c91e4463..1aa1af55b6ba0 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--dark.png and b/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--light.png b/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--light.png
index f2f87bcabf0e4..e11aec2c721fe 100644
Binary files a/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--light.png and b/frontend/__snapshots__/scenes-other-invitesignup--self-hosted--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--cloud--dark.png b/frontend/__snapshots__/scenes-other-login--cloud--dark.png
index 79be2f6da084b..7a14791959cef 100644
Binary files a/frontend/__snapshots__/scenes-other-login--cloud--dark.png and b/frontend/__snapshots__/scenes-other-login--cloud--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--cloud--light.png b/frontend/__snapshots__/scenes-other-login--cloud--light.png
index 089113c318132..187cb08b520a1 100644
Binary files a/frontend/__snapshots__/scenes-other-login--cloud--light.png and b/frontend/__snapshots__/scenes-other-login--cloud--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--cloud-eu--dark.png b/frontend/__snapshots__/scenes-other-login--cloud-eu--dark.png
index 3759b88e7ce6c..1708f185a9db4 100644
Binary files a/frontend/__snapshots__/scenes-other-login--cloud-eu--dark.png and b/frontend/__snapshots__/scenes-other-login--cloud-eu--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--cloud-eu--light.png b/frontend/__snapshots__/scenes-other-login--cloud-eu--light.png
index 34ff210eb5352..2f675dc967660 100644
Binary files a/frontend/__snapshots__/scenes-other-login--cloud-eu--light.png and b/frontend/__snapshots__/scenes-other-login--cloud-eu--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--dark.png b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--dark.png
index 5e3dc60309789..d0e0904bb1796 100644
Binary files a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--dark.png and b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--light.png b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--light.png
index e67a6a0e2ff33..0f51547c0ad9b 100644
Binary files a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--light.png and b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--second-factor--dark.png b/frontend/__snapshots__/scenes-other-login--second-factor--dark.png
index 3be47d83ee98e..442c6d925ac8f 100644
Binary files a/frontend/__snapshots__/scenes-other-login--second-factor--dark.png and b/frontend/__snapshots__/scenes-other-login--second-factor--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--second-factor--light.png b/frontend/__snapshots__/scenes-other-login--second-factor--light.png
index 661e5be1063bb..7f2ee5e3636e9 100644
Binary files a/frontend/__snapshots__/scenes-other-login--second-factor--light.png and b/frontend/__snapshots__/scenes-other-login--second-factor--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--self-hosted--dark.png b/frontend/__snapshots__/scenes-other-login--self-hosted--dark.png
index 4d54c69d31e1b..00219a94898ad 100644
Binary files a/frontend/__snapshots__/scenes-other-login--self-hosted--dark.png and b/frontend/__snapshots__/scenes-other-login--self-hosted--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--self-hosted--light.png b/frontend/__snapshots__/scenes-other-login--self-hosted--light.png
index f859af95307bb..12d1a15ba4fdb 100644
Binary files a/frontend/__snapshots__/scenes-other-login--self-hosted--light.png and b/frontend/__snapshots__/scenes-other-login--self-hosted--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--dark.png b/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--dark.png
index f83fd6633b093..752d4e4ee3c7d 100644
Binary files a/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--dark.png and b/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--light.png b/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--light.png
index bc653908edc34..02c129a707f6d 100644
Binary files a/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--light.png and b/frontend/__snapshots__/scenes-other-login--self-hosted-with-saml--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--sso-error--dark.png b/frontend/__snapshots__/scenes-other-login--sso-error--dark.png
index 2d4ed894b8f09..d3e245bf489c0 100644
Binary files a/frontend/__snapshots__/scenes-other-login--sso-error--dark.png and b/frontend/__snapshots__/scenes-other-login--sso-error--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-login--sso-error--light.png b/frontend/__snapshots__/scenes-other-login--sso-error--light.png
index 9702eef8a92dd..a6286b20801f7 100644
Binary files a/frontend/__snapshots__/scenes-other-login--sso-error--light.png and b/frontend/__snapshots__/scenes-other-login--sso-error--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-password-reset-complete--default--dark.png b/frontend/__snapshots__/scenes-other-password-reset-complete--default--dark.png
index b3969f7948c77..81780bef94ddb 100644
Binary files a/frontend/__snapshots__/scenes-other-password-reset-complete--default--dark.png and b/frontend/__snapshots__/scenes-other-password-reset-complete--default--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-password-reset-complete--default--light.png b/frontend/__snapshots__/scenes-other-password-reset-complete--default--light.png
index cf50642150875..c721ccfce7107 100644
Binary files a/frontend/__snapshots__/scenes-other-password-reset-complete--default--light.png and b/frontend/__snapshots__/scenes-other-password-reset-complete--default--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link--light.png b/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link--light.png
index d94a85300a4bd..4e8728bcfada1 100644
Binary files a/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link--light.png and b/frontend/__snapshots__/scenes-other-password-reset-complete--invalid-link--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-preflight--preflight--dark.png b/frontend/__snapshots__/scenes-other-preflight--preflight--dark.png
index 0f038280670e4..da76826c30718 100644
Binary files a/frontend/__snapshots__/scenes-other-preflight--preflight--dark.png and b/frontend/__snapshots__/scenes-other-preflight--preflight--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-preflight--preflight--light.png b/frontend/__snapshots__/scenes-other-preflight--preflight--light.png
index 1fb61449ce120..e312576737901 100644
Binary files a/frontend/__snapshots__/scenes-other-preflight--preflight--light.png and b/frontend/__snapshots__/scenes-other-preflight--preflight--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png
index ab33e8563f5f7..8e7bc11ba0074 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png
index 08a1cffcd03ae..89b7d283640fb 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png
index fe51119bcbbf4..651d180143e39 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png
index 25bda7486cff0..807819111d701 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-signup--cloud--dark.png b/frontend/__snapshots__/scenes-other-signup--cloud--dark.png
index 22483eefa81c9..3812da12f2360 100644
Binary files a/frontend/__snapshots__/scenes-other-signup--cloud--dark.png and b/frontend/__snapshots__/scenes-other-signup--cloud--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-signup--cloud--light.png b/frontend/__snapshots__/scenes-other-signup--cloud--light.png
index c527248055ffa..a16b63058f6fd 100644
Binary files a/frontend/__snapshots__/scenes-other-signup--cloud--light.png and b/frontend/__snapshots__/scenes-other-signup--cloud--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png b/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png
index d5e0428cc0cef..4070b44e89d74 100644
Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png b/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png
index a46e014a8fb4e..603977e87add8 100644
Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png
index 7744a05479fe6..cdb034b88aba3 100644
Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png
index 3ce828b31ab1a..5ecd2c576c64d 100644
Binary files a/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png and b/frontend/__snapshots__/scenes-other-signup--self-hosted-sso--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png
index cc32a507690ea..0724cce928ed6 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png
index 9d5b574363ee9..f19ddfa738b17 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png
index d9a8ef34ae661..53dd61dfec0b7 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png
index 9cf7721957e98..1480c76289b9d 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png
index f5283e47bdf43..dc149beabe203 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png
index 15b96030b8a25..ace3a2b7f9373 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png and b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene--light.png b/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene--light.png
index 88a84bc3e21f3..2f60dada396b9 100644
Binary files a/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene--light.png and b/frontend/__snapshots__/scenes-other-unsubscribe--unsubscribe-scene--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--dark.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--dark.png
index a996167a4d6f2..dd975c4acc401 100644
Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--dark.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--light.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--light.png
index fee2278660097..c45bfc650efc8 100644
Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--light.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-invalid--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--dark.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--dark.png
index fa760226c666b..0d23b6e09b819 100644
Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--dark.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--light.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--light.png
index fe0bff82ad081..a867c9f022e0a 100644
Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--light.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-pending--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--dark.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--dark.png
index 2df3dabc00d20..2bf2567a3e6db 100644
Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--dark.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--light.png b/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--light.png
index 3cfa63c456086..f76014d8ffa51 100644
Binary files a/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--light.png and b/frontend/__snapshots__/scenes-other-verify-email--verify-email-success--light.png differ
diff --git a/frontend/public/zendesk-logo.png b/frontend/public/zendesk-logo.png
new file mode 100644
index 0000000000000..080bd000a41e0
Binary files /dev/null and b/frontend/public/zendesk-logo.png differ
diff --git a/frontend/src/layout/FeaturePreviews/featurePreviewsLogic.test.ts b/frontend/src/layout/FeaturePreviews/featurePreviewsLogic.test.ts
index f0ff2f6404de7..3bd8f0590f20d 100644
--- a/frontend/src/layout/FeaturePreviews/featurePreviewsLogic.test.ts
+++ b/frontend/src/layout/FeaturePreviews/featurePreviewsLogic.test.ts
@@ -2,6 +2,7 @@ import { expectLogic } from 'kea-test-utils'
 import { MOCK_DEFAULT_USER } from 'lib/api.mock'
 import { userLogic } from 'scenes/userLogic'
 
+import { useMocks } from '~/mocks/jest'
 import { initKeaTests } from '~/test/init'
 
 import { featurePreviewsLogic } from './featurePreviewsLogic'
@@ -10,6 +11,11 @@ describe('featurePreviewsLogic', () => {
     let logic: ReturnType
 
     beforeEach(() => {
+        useMocks({
+            post: {
+                'https://posthoghelp.zendesk.com/api/v2/requests.json': [200, {}],
+            },
+        })
         initKeaTests()
         logic = featurePreviewsLogic()
         logic.mount()
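Context for reviewers: featurePreviewsLogic submits early-access feature feedback, which (assumption) ends up as a POST to the Zendesk requests endpoint, so mounting the logic in tests now needs that URL stubbed. A minimal sketch of the two handler shapes that useMocks accepts, both of which appear in the test diffs in this PR (URLs taken from those diffs, everything else illustrative):

    import { useMocks } from '~/mocks/jest'

    useMocks({
        get: {
            // static form: a [status, body] tuple returned for every match
            '/api/users/@me/': [200, {}],
        },
        post: {
            // function form: compute the [status, body] tuple per request
            'https://posthoghelp.zendesk.com/api/v2/requests.json': () => [200, {}],
        },
    })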
diff --git a/frontend/src/layout/GlobalModals.tsx b/frontend/src/layout/GlobalModals.tsx
index 7ef5f0d546afb..1ef66f6c78899 100644
--- a/frontend/src/layout/GlobalModals.tsx
+++ b/frontend/src/layout/GlobalModals.tsx
@@ -1,9 +1,7 @@
 import { LemonModal } from '@posthog/lemon-ui'
 import { actions, kea, path, reducers, useActions, useValues } from 'kea'
-import { FlaggedFeature } from 'lib/components/FlaggedFeature'
 import { HedgehogBuddyWithLogic } from 'lib/components/HedgehogBuddy/HedgehogBuddyWithLogic'
 import { UpgradeModal } from 'lib/components/UpgradeModal/UpgradeModal'
-import { Prompt } from 'lib/logic/newPrompt/Prompt'
 import { Setup2FA } from 'scenes/authentication/Setup2FA'
 import { CreateOrganizationModal } from 'scenes/organization/CreateOrganizationModal'
 import { membersLogic } from 'scenes/organization/membersLogic'
@@ -72,9 +70,6 @@ export function GlobalModals(): JSX.Element {
                 />
             )}
-
-
-
     )
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activation/SidePanelActivation.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/activation/SidePanelActivation.tsx
index c78aa76f0faec..5aef55b51a523 100644
--- a/frontend/src/layout/navigation-3000/sidepanel/panels/activation/SidePanelActivation.tsx
+++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activation/SidePanelActivation.tsx
@@ -97,6 +97,9 @@ const ActivationTask = ({
         if (url) {
             params.to = url
             params.targetBlank = true
+            params.onClick = () => {
+                reportActivationSideBarTaskClicked(id)
+            }
         } else {
             params.onClick = () => {
                 runTask(id)
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activation/activationLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/activation/activationLogic.ts
index 7a182c47ba2b1..b0be99d68f438 100644
--- a/frontend/src/layout/navigation-3000/sidepanel/panels/activation/activationLogic.ts
+++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activation/activationLogic.ts
@@ -2,8 +2,9 @@ import { actions, connect, events, kea, listeners, path, reducers, selectors } from 'kea'
 import { loaders } from 'kea-loaders'
 import { router } from 'kea-router'
 import api from 'lib/api'
-import { eventUsageLogic } from 'lib/utils/eventUsageLogic'
+import { reverseProxyCheckerLogic } from 'lib/components/ReverseProxyChecker/reverseProxyCheckerLogic'
 import { permanentlyMount } from 'lib/utils/kea-logic-builders'
+import posthog from 'posthog-js'
 import { membersLogic } from 'scenes/organization/membersLogic'
 import { pluginsLogic } from 'scenes/plugins/pluginsLogic'
 import { savedInsightsLogic } from 'scenes/saved-insights/savedInsightsLogic'
@@ -25,6 +26,7 @@ export enum ActivationTasks {
     SetupSessionRecordings = 'setup_session_recordings',
     TrackCustomEvents = 'track_custom_events',
     InstallFirstApp = 'install_first_app',
+    SetUpReverseProxy = 'set_up_reverse_proxy',
 }
 
 export type ActivationTaskType = {
@@ -57,6 +59,8 @@ export const activationLogic = kea([
             ['insights'],
             dashboardsModel,
             ['rawDashboards'],
+            reverseProxyCheckerLogic,
+            ['hasReverseProxy'],
         ],
         actions: [
             inviteLogic,
             ['showInviteModal'],
             pluginsLogic,
             ['loadPluginsSuccess', 'loadPluginsFailure'],
             sidePanelStateLogic,
             ['openSidePanel'],
-            eventUsageLogic,
-            ['reportActivationSideBarShown'],
             savedInsightsLogic,
             ['loadInsights', 'loadInsightsSuccess', 'loadInsightsFailure'],
             dashboardsModel,
@@ -194,6 +196,7 @@ export const activationLogic = kea([
             s.customEventsCount,
             s.installedPlugins,
             s.currentTeamSkippedTasks,
+            s.hasReverseProxy,
         ],
         (
             currentTeam,
@@ -203,7 +206,8 @@ export const activationLogic = kea([
             dashboards,
             customEventsCount,
             installedPlugins,
-            skippedTasks
+            skippedTasks,
+            hasReverseProxy
         ) => {
             const tasks: ActivationTaskType[] = []
             for (const task of Object.values(ActivationTasks)) {
@@ -282,6 +286,17 @@ export const activationLogic = kea([
                         skipped: skippedTasks.includes(ActivationTasks.InstallFirstApp),
                     })
                     break
+                case ActivationTasks.SetUpReverseProxy:
+                    tasks.push({
+                        id: ActivationTasks.SetUpReverseProxy,
+                        name: 'Set up a reverse proxy',
+                        description: 'Send your events from your own domain to avoid tracking blockers',
+                        completed: hasReverseProxy || false,
+                        canSkip: true,
+                        skipped: skippedTasks.includes(ActivationTasks.SetUpReverseProxy),
+                        url: 'https://posthog.com/docs/advanced/proxy',
+                    })
+                    break
                 default:
                     break
             }
@@ -342,6 +357,9 @@ export const activationLogic = kea([
             }
         },
         skipTask: ({ id }) => {
+            posthog.capture('activation sidebar task skipped', {
+                task: id,
+            })
             if (values.currentTeam?.id) {
                 actions.addSkippedTask(values.currentTeam.id, id)
             }
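For reviewers unfamiliar with the selector above: each case of the switch reduces a handful of connected values into one ActivationTaskType entry. A dependency-free sketch of the derivation the new SetUpReverseProxy case performs (types simplified; hasReverseProxy is null while the check is still loading, which counts as not completed):

    type ActivationTaskSketch = {
        id: string
        completed: boolean
        canSkip: boolean
        skipped: boolean
        url?: string
    }

    function buildReverseProxyTask(hasReverseProxy: boolean | null, skippedTasks: string[]): ActivationTaskSketch {
        return {
            id: 'set_up_reverse_proxy',
            completed: hasReverseProxy || false, // null or false → not completed yet
            canSkip: true,
            skipped: skippedTasks.includes('set_up_reverse_proxy'),
            url: 'https://posthog.com/docs/advanced/proxy',
        }
    }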
+194,6 @@ describe('annotationsOverlayLogic', () => { }) describe('relevantAnnotations', () => { - initKeaTests() - it('returns annotations scoped to the insight for a saved insight', async () => { useInsightMocks() @@ -224,8 +223,6 @@ describe('annotationsOverlayLogic', () => { }) it('returns annotations scoped to the project for a new insight', async () => { - initKeaTests() - useInsightMocks() logic = annotationsOverlayLogic({ @@ -250,8 +247,6 @@ describe('annotationsOverlayLogic', () => { }) it('excludes annotations that are outside of insight date range', async () => { - initKeaTests() - useInsightMocks() logic = annotationsOverlayLogic({ @@ -506,8 +501,6 @@ describe('annotationsOverlayLogic', () => { } it(`merges groups when one tick covers more than one date (UTC)`, async () => { - initKeaTests(true, MOCK_DEFAULT_TEAM) - useInsightMocks() logic = annotationsOverlayLogic({ @@ -572,8 +565,6 @@ describe('annotationsOverlayLogic', () => { }) it(`merges groups when one tick covers more than one hour (UTC)`, async () => { - initKeaTests(true, MOCK_DEFAULT_TEAM) - useInsightMocks('hour') logic = annotationsOverlayLogic({ diff --git a/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts b/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts index b21f9012925bb..772646e28882a 100644 --- a/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts +++ b/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts @@ -27,6 +27,9 @@ describe('the authorized urls list logic', () => { return [200, { result: ['result from api'] }] }, }, + patch: { + '/api/projects/:team': [200, {}], + }, }) initKeaTests() logic = authorizedUrlListLogic({ diff --git a/frontend/src/lib/components/BridgePage/BridgePage.scss b/frontend/src/lib/components/BridgePage/BridgePage.scss index a95676cd869fd..cbaa3daa9631c 100644 --- a/frontend/src/lib/components/BridgePage/BridgePage.scss +++ b/frontend/src/lib/components/BridgePage/BridgePage.scss @@ -23,7 +23,13 @@ } .BridgePage__content-wrapper { - max-width: 100%; + width: 100%; + max-width: 380px; + + @include screen($md) { + width: auto; + max-width: 100%; + } } .BridgePage__left-wrapper { diff --git a/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx b/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx index 295298eb0fc28..9c73b502718dd 100644 --- a/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx +++ b/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx @@ -25,7 +25,11 @@ export interface Query { timestamp: string query: string exception: string - type: number + /** + * 1 means running, 2 means finished, 3 means errored before execution, 4 means errored during execution. 
+ *
+ * @see `type` column in https://clickhouse.com/docs/en/operations/system-tables/query_log */
+    status: 1 | 2 | 3 | 4
     execution_time: number
     path: string
 }
@@ -146,9 +150,13 @@ function DebugCHQueries(): JSX.Element {
                 )
             },
         },
+        {
+            title: 'Duration',
+            render: function exec(_, item) {
+                if (item.status === 1) {
+                    return 'In progress…'
+                }
                 return <>{Math.round((item.execution_time + Number.EPSILON) * 100) / 100} ms</>
             },
             align: 'right',
diff --git a/frontend/src/lib/components/CustomerLogo.tsx b/frontend/src/lib/components/CustomerLogo.tsx
new file mode 100644
index 0000000000000..659f739d1d7dc
--- /dev/null
+++ b/frontend/src/lib/components/CustomerLogo.tsx
@@ -0,0 +1,23 @@
+interface CustomerProps {
+    image: string
+    alt: string
+    className?: string
+}
+
+interface LogoProps {
+    src: string
+    alt: string
+    className?: string
+}
+
+const Logo = ({ src, alt, className = '' }: LogoProps): JSX.Element => (
+    <img className={className} src={src} alt={alt} />
+)
+
+export const CustomerLogo = ({ image, alt, className = '' }: CustomerProps): JSX.Element => {
+    return (
+        <div>
+            <Logo className={className} src={image} alt={alt} />
+        </div>
+    )
+}
diff --git a/frontend/src/lib/components/DatabaseTableTree/DatabaseTableTree.tsx b/frontend/src/lib/components/DatabaseTableTree/DatabaseTableTree.tsx
index 2d7bfb893150e..8f42f841a8457 100644
--- a/frontend/src/lib/components/DatabaseTableTree/DatabaseTableTree.tsx
+++ b/frontend/src/lib/components/DatabaseTableTree/DatabaseTableTree.tsx
@@ -15,8 +15,9 @@ export type TreeItem = TreeItemFolder | TreeItemLeaf
 export interface TreeItemFolder {
     name: string
-    items: TreeItemLeaf[]
+    items: TreeItem[]
     emptyLabel?: JSX.Element
+    isLoading?: boolean
 }
 
 export interface TreeItemLeaf {
@@ -33,7 +34,10 @@ export function DatabaseTableTree({ ...props }: TreeProps): JSX.Element {
     return (
-
      +
        {items.map((item, index) => { if ('items' in item) { return ( diff --git a/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx b/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx index b49167a4f6c67..d8f46d04adac5 100644 --- a/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx +++ b/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx @@ -1,6 +1,7 @@ import './TreeRow.scss' import { IconChevronDown } from '@posthog/icons' +import { Spinner } from '@posthog/lemon-ui' import clsx from 'clsx' import { IconChevronRight } from 'lib/lemon-ui/icons' import { useCallback, useState } from 'react' @@ -58,7 +59,7 @@ export function TreeFolderRow({ item, depth, onClick, selectedRow }: TreeFolderR depth={depth + 1} onSelectRow={onClick} selectedRow={selectedRow} - style={{ marginLeft: `${2 * depth}rem`, padding: 0 }} + style={{ marginLeft: `2rem`, padding: 0 }} /> ) : (
-                {emptyLabel ? emptyLabel : <span>No tables found</span>}
+                {item.isLoading ? (
+                    <Spinner />
+                ) : emptyLabel ? (
+                    emptyLabel
+                ) : (
+                    <span>No tables found</span>
+                )}
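The two DatabaseTableTree hunks above make the folder type recursive (items: TreeItem[] rather than TreeItemLeaf[]) and thread an isLoading flag down to TreeFolderRow, which shows a Spinner before falling back to emptyLabel or the default text. A minimal standalone sketch of that shape and branching, with hypothetical names (labelForFolder and the string-typed emptyLabel are simplifications, not part of the PR):

type TreeItemLeaf = { name: string }
type TreeItemFolder = {
    name: string
    items: TreeItem[] // recursive: folders may now contain folders
    emptyLabel?: string // simplified from JSX.Element for this sketch
    isLoading?: boolean
}
type TreeItem = TreeItemFolder | TreeItemLeaf

// Hypothetical helper mirroring the empty-folder branching in TreeFolderRow
function labelForFolder(folder: TreeItemFolder): string {
    if (folder.items.length > 0) {
        return folder.items.map((item) => ('items' in item ? labelForFolder(item) : item.name)).join(', ')
    }
    // Loading wins over the empty label, which wins over the default text
    return folder.isLoading ? 'Loading…' : folder.emptyLabel ?? 'No tables found'
}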
        ))} diff --git a/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx b/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx index 1c9e4928ecb22..8be1a72543f44 100644 --- a/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx +++ b/frontend/src/lib/components/DefinitionPopover/DefinitionPopoverContents.tsx @@ -111,8 +111,16 @@ function DefinitionView({ group }: { group: TaxonomicFilterGroup }): JSX.Element } = useValues(definitionPopoverLogic) const { setLocalDefinition } = useActions(definitionPopoverLogic) + const { selectedItemMeta } = useValues(taxonomicFilterLogic) const { selectItem } = useActions(taxonomicFilterLogic) + // Use effect here to make definition view stateful. TaxonomicFilterLogic won't mount within definitionPopoverLogic + useEffect(() => { + if (selectedItemMeta && definition.name == selectedItemMeta.id) { + setLocalDefinition(selectedItemMeta) + } + }, [definition]) + if (!definition) { return <> } @@ -280,6 +288,7 @@ function DefinitionView({ group }: { group: TaxonomicFilterGroup }): JSX.Element value: column.key, })) const itemValue = localDefinition ? group?.getValue?.(localDefinition) : null + return (
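The useEffect added to DefinitionView above adopts the taxonomic filter's selectedItemMeta only when it refers to the definition currently shown, since taxonomicFilterLogic won't mount within definitionPopoverLogic. A minimal sketch of the same guard in isolation (the function name and simplified types are hypothetical):

interface SelectedItemMeta {
    id: string
    [key: string]: unknown
}

// Hypothetical distillation of the effect body: sync local state only on an id match
function syncLocalDefinition(
    definitionName: string,
    selectedItemMeta: SelectedItemMeta | null,
    setLocalDefinition: (meta: SelectedItemMeta) => void
): void {
    if (selectedItemMeta && definitionName === selectedItemMeta.id) {
        setLocalDefinition(selectedItemMeta)
    }
}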
        diff --git a/frontend/src/lib/components/EditableField/EditableField.tsx b/frontend/src/lib/components/EditableField/EditableField.tsx index 28cb2e4a247b7..0a83602cf5c5f 100644 --- a/frontend/src/lib/components/EditableField/EditableField.tsx +++ b/frontend/src/lib/components/EditableField/EditableField.tsx @@ -3,6 +3,7 @@ import './EditableField.scss' import { useMergeRefs } from '@floating-ui/react' import { IconPencil } from '@posthog/icons' import clsx from 'clsx' +import { useValues } from 'kea' import { useResizeObserver } from 'lib/hooks/useResizeObserver' import { IconMarkdown } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' @@ -12,6 +13,10 @@ import { pluralize } from 'lib/utils' import React, { useEffect, useLayoutEffect, useRef, useState } from 'react' import TextareaAutosize from 'react-textarea-autosize' +import { AvailableFeature } from '~/types' + +import { upgradeModalLogic } from '../UpgradeModal/upgradeModalLogic' + export interface EditableFieldProps { /** What this field stands for. */ name: string @@ -28,7 +33,7 @@ export interface EditableFieldProps { markdown?: boolean compactButtons?: boolean | 'xsmall' // The 'xsmall' is somewhat hacky, but necessary for 3000 breadcrumbs /** Whether this field should be gated behind a "paywall". */ - paywall?: boolean + paywallFeature?: AvailableFeature /** Controlled mode. */ mode?: 'view' | 'edit' onModeToggle?: (newMode: 'view' | 'edit') => void @@ -58,7 +63,7 @@ export function EditableField({ multiline = false, markdown = false, compactButtons = false, - paywall = false, + paywallFeature, mode, onModeToggle, editingIndication = 'outlined', @@ -68,6 +73,7 @@ export function EditableField({ saveButtonText = 'Save', notice, }: EditableFieldProps): JSX.Element { + const { guardAvailableFeature } = useValues(upgradeModalLogic) const [localIsEditing, setLocalIsEditing] = useState(mode === 'edit') const [localTentativeValue, setLocalTentativeValue] = useState(value) const [isDisplayTooltipNeeded, setIsDisplayTooltipNeeded] = useState(false) @@ -125,7 +131,7 @@ export function EditableField({ onModeToggle?.('view') } - const isEditing = !paywall && (mode === 'edit' || localIsEditing) + const isEditing = mode === 'edit' || localIsEditing const handleKeyDown = (e: React.KeyboardEvent): void => { if (isEditing) { @@ -156,123 +162,117 @@ export function EditableField({ style={style} ref={containerRef} > - -
        - {isEditing ? ( - <> - {multiline ? ( - { - onChange?.(e.target.value) - setLocalTentativeValue(e.target.value) - }} - onBlur={saveOnBlur ? (localTentativeValue !== value ? save : cancel) : undefined} - onKeyDown={handleKeyDown} - placeholder={placeholder} - minLength={minLength} - maxLength={maxLength} - autoFocus={autoFocus} - ref={inputRef as React.RefObject} - /> - ) : ( - { +
        + {isEditing ? ( + <> + {multiline ? ( + { + onChange?.(e.target.value) + setLocalTentativeValue(e.target.value) + }} + onBlur={saveOnBlur ? (localTentativeValue !== value ? save : cancel) : undefined} + onKeyDown={handleKeyDown} + placeholder={placeholder} + minLength={minLength} + maxLength={maxLength} + autoFocus={autoFocus} + ref={inputRef as React.RefObject} + /> + ) : ( + { + guardAvailableFeature(paywallFeature, () => { onChange?.(e.target.value) setLocalTentativeValue(e.target.value) - }} - onBlur={saveOnBlur ? (localTentativeValue !== value ? save : cancel) : undefined} - onKeyDown={handleKeyDown} - placeholder={placeholder} - minLength={minLength} - maxLength={maxLength} - autoFocus={autoFocus} - ref={inputRef as React.RefObject} - /> - )} - {(!mode || !!onModeToggle) && ( -
        - {markdown && ( - - - - - - )} - - Cancel - - - {saveButtonText} - -
        - )} - - ) : ( - <> - {localTentativeValue && markdown ? ( - {localTentativeValue} - ) : ( - } + /> + )} + {(!mode || !!onModeToggle) && ( +
        + {markdown && ( + + + + + + )} + + Cancel + + - - {localTentativeValue || {placeholder}} - - - )} - {(!mode || !!onModeToggle) && ( -
        - } - size={compactButtons ? 'small' : undefined} - onClick={() => { + {saveButtonText} + +
        + )} + + ) : ( + <> + {localTentativeValue && markdown ? ( + {localTentativeValue} + ) : ( + + + {localTentativeValue || {placeholder}} + + + )} + {(!mode || !!onModeToggle) && ( +
        + } + size={compactButtons ? 'small' : undefined} + onClick={() => { + guardAvailableFeature(paywallFeature, () => { setLocalIsEditing(true) onModeToggle?.('edit') - }} - data-attr={`edit-prop-${name}`} - disabled={paywall} - noPadding - /> -
        - )} - - )} -
        -
        + }) + }} + data-attr={`edit-prop-${name}`} + noPadding + /> +
        + )} + + )} +
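EditableField's paywall boolean becomes a paywallFeature key above, and both the edit button and input changes now run through guardAvailableFeature; as the upgradeModalLogic hunk further down shows, passing no feature key simply invokes the callback. A minimal sketch of that contract (hasAvailableFeature here is a hypothetical stand-in for the real selector, and the real guard also weighs cloud/self-hosted options):

type AvailableFeature = string // stand-in for the real enum

function guardAvailableFeature(
    hasAvailableFeature: (key: AvailableFeature) => boolean,
    featureKey: AvailableFeature | undefined,
    featureAvailableCallback?: () => void
): boolean {
    if (!featureKey) {
        // No gate requested: behave exactly like an ungated field
        featureAvailableCallback?.()
        return true
    }
    if (hasAvailableFeature(featureKey)) {
        featureAvailableCallback?.()
        return true
    }
    // The real logic opens the upgrade modal here instead of silently failing
    return false
}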
        {!isEditing && notice && ( diff --git a/frontend/src/lib/components/MemberSelect.tsx b/frontend/src/lib/components/MemberSelect.tsx index 09e13a2626eae..c1a919dbf6b68 100644 --- a/frontend/src/lib/components/MemberSelect.tsx +++ b/frontend/src/lib/components/MemberSelect.tsx @@ -44,7 +44,7 @@ export function MemberSelect({ defaultLabel = 'Any user', value, onChange }: Mem setShowPopover(visible)} overlay={ diff --git a/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx b/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx index 644b01f74b063..d416c1e0502c3 100644 --- a/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx +++ b/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx @@ -9,7 +9,6 @@ import { propertyFilterLogic } from 'lib/components/PropertyFilters/propertyFilt import { PropertyFilterInternalProps } from 'lib/components/PropertyFilters/types' import { isGroupPropertyFilter, - isPersonPropertyFilter, isPropertyFilterWithOperator, PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE, propertyFilterTypeToTaxonomicFilterType, @@ -64,7 +63,7 @@ export function TaxonomicPropertyFilter({ value, item ) => { - selectItem(taxonomicGroup, value, item) + selectItem(taxonomicGroup, value, item?.propertyFilterType) if ( taxonomicGroup.type === TaxonomicFilterGroupType.Cohorts || taxonomicGroup.type === TaxonomicFilterGroupType.HogQLExpression @@ -215,7 +214,6 @@ export function TaxonomicPropertyFilter({ value: newValue || null, operator: newOperator, type: filter?.type, - ...(isPersonPropertyFilter(filter) ? { table: filter?.table } : {}), ...(isGroupPropertyFilter(filter) ? { group_type_index: filter.group_type_index } : {}), diff --git a/frontend/src/lib/components/PropertyFilters/components/taxonomicPropertyFilterLogic.ts b/frontend/src/lib/components/PropertyFilters/components/taxonomicPropertyFilterLogic.ts index 48760e5ab6747..aa1a1ca685cc7 100644 --- a/frontend/src/lib/components/PropertyFilters/components/taxonomicPropertyFilterLogic.ts +++ b/frontend/src/lib/components/PropertyFilters/components/taxonomicPropertyFilterLogic.ts @@ -51,10 +51,14 @@ export const taxonomicPropertyFilterLogic = kea ({ + selectItem: ( + taxonomicGroup: TaxonomicFilterGroup, + propertyKey?: TaxonomicFilterValue, + itemPropertyFilterType?: PropertyFilterType + ) => ({ taxonomicGroup, propertyKey, - item, + itemPropertyFilterType, }), openDropdown: true, closeDropdown: true, @@ -89,8 +93,7 @@ export const taxonomicPropertyFilterLogic = kea ({ - selectItem: ({ taxonomicGroup, propertyKey, item }) => { - const itemPropertyFilterType = item?.propertyFilterType as PropertyFilterType + selectItem: ({ taxonomicGroup, propertyKey, itemPropertyFilterType }) => { const propertyType = itemPropertyFilterType ?? 
taxonomicFilterTypeToPropertyFilterType(taxonomicGroup.type) if (propertyKey && propertyType) { if (propertyType === PropertyFilterType.Cohort) { @@ -126,8 +129,8 @@ export const taxonomicPropertyFilterLogic = kea { + useMocks({ + post: { + '/api/projects/:team/query': () => [ + 200, + { + results, + }, + ], + }, + }) +} + +describe('reverseProxyCheckerLogic', () => { + let logic: ReturnType + + beforeEach(() => { + initKeaTests() + localStorage.clear() + logic = reverseProxyCheckerLogic() + }) + + afterEach(() => { + logic.unmount() + }) + + it('should not have a reverse proxy set - when no data', async () => { + useMockedValues([]) + + logic.mount() + await expectLogic(logic).toFinishAllListeners().toMatchValues({ + hasReverseProxy: false, + }) + }) + + it('should not have a reverse proxy set - when data with no lib_custom_api_host values', async () => { + useMockedValues(doesNotHaveReverseProxyValues) + + logic.mount() + await expectLogic(logic).toFinishAllListeners().toMatchValues({ + hasReverseProxy: false, + }) + }) + + it('should have a reverse proxy set', async () => { + useMockedValues(hasReverseProxyValues) + + logic.mount() + await expectLogic(logic).toFinishAllListeners().toMatchValues({ + hasReverseProxy: true, + }) + }) +}) diff --git a/frontend/src/lib/components/ReverseProxyChecker/reverseProxyCheckerLogic.ts b/frontend/src/lib/components/ReverseProxyChecker/reverseProxyCheckerLogic.ts new file mode 100644 index 0000000000000..6b945e5c94c48 --- /dev/null +++ b/frontend/src/lib/components/ReverseProxyChecker/reverseProxyCheckerLogic.ts @@ -0,0 +1,49 @@ +import { afterMount, kea, path, reducers } from 'kea' +import { loaders } from 'kea-loaders' +import api from 'lib/api' + +import { HogQLQuery, NodeKind } from '~/queries/schema' +import { hogql } from '~/queries/utils' + +import type { reverseProxyCheckerLogicType } from './reverseProxyCheckerLogicType' + +const CHECK_INTERVAL_MS = 1000 * 60 * 60 // 1 hour + +export const reverseProxyCheckerLogic = kea([ + path(['components', 'ReverseProxyChecker', 'reverseProxyCheckerLogic']), + loaders({ + hasReverseProxy: [ + false as boolean | null, + { + loadHasReverseProxy: async () => { + const query: HogQLQuery = { + kind: NodeKind.HogQLQuery, + query: hogql`SELECT properties.$lib_custom_api_host AS lib_custom_api_host + FROM events + WHERE timestamp >= now() - INTERVAL 1 DAY + AND timestamp <= now() + ORDER BY timestamp DESC + limit 10`, + } + + const res = await api.query(query) + return !!res.results?.find((x) => !!x[0]) + }, + }, + ], + }), + reducers({ + lastCheckedTimestamp: [ + 0, + { persist: true }, + { + loadHasReverseProxySuccess: () => Date.now(), + }, + ], + }), + afterMount(({ actions, values }) => { + if (values.lastCheckedTimestamp < Date.now() - CHECK_INTERVAL_MS) { + actions.loadHasReverseProxy() + } + }), +]) diff --git a/frontend/src/lib/components/SeriesGlyph.tsx b/frontend/src/lib/components/SeriesGlyph.tsx index ad4c25429f0da..d34a88de6de34 100644 --- a/frontend/src/lib/components/SeriesGlyph.tsx +++ b/frontend/src/lib/components/SeriesGlyph.tsx @@ -58,7 +58,7 @@ interface ExperimentVariantNumberProps { index: number } export function ExperimentVariantNumber({ className, index }: ExperimentVariantNumberProps): JSX.Element { - const color = getSeriesColor(index) + const color = getSeriesColor(index + 1) const { isDarkModeOn } = useValues(themeLogic) return ( diff --git a/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts b/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts index 
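The new reverseProxyCheckerLogic above pairs a HogQL events query (looking for $lib_custom_api_host values) with a persisted lastCheckedTimestamp so the query runs at most once per hour per browser. A minimal sketch of that throttling pattern outside kea, assuming localStorage persistence (runThrottled and the storage key are hypothetical):

const CHECK_INTERVAL_MS = 1000 * 60 * 60 // 1 hour, as in the logic above

// Hypothetical wrapper: run an async check at most once per interval
async function runThrottled(storageKey: string, check: () => Promise<void>): Promise<void> {
    const lastChecked = Number(localStorage.getItem(storageKey) ?? 0)
    if (lastChecked >= Date.now() - CHECK_INTERVAL_MS) {
        return // checked recently enough, skip
    }
    await check()
    localStorage.setItem(storageKey, String(Date.now()))
}

// Usage: the real check derives hasReverseProxy from the first non-null column value
void runThrottled('reverse-proxy-last-checked', async () => {
    // ... run the HogQL query and inspect res.results
})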
820e8eb7d9786..1adc197e2c03c 100644 --- a/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts +++ b/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts @@ -32,6 +32,7 @@ describe('subscriptionLogic', () => { useMocks({ get: { '/api/projects/:team/subscriptions/1': fixtureSubscriptionResponse(1), + '/api/projects/:team/integrations': { count: 0, results: [] }, }, }) initKeaTests() diff --git a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx index eca954d86f94f..8e0237d36f252 100644 --- a/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/InfiniteList.tsx @@ -173,7 +173,6 @@ export function InfiniteList({ popupAnchorElement }: InfiniteListProps): JSX.Ele const { mouseInteractionsEnabled, activeTab, searchQuery, value, groupType, eventNames } = useValues(taxonomicFilterLogic) const { selectItem } = useActions(taxonomicFilterLogic) - const { isLoading, results, diff --git a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx index bd2f56b8dcfc9..52e99e1e432e6 100644 --- a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx @@ -21,6 +21,7 @@ export function TaxonomicFilter({ taxonomicFilterLogicKey: taxonomicFilterLogicKeyInput, groupType, value, + filter, onChange, onClose, taxonomicGroupTypes, @@ -48,6 +49,7 @@ export function TaxonomicFilter({ taxonomicFilterLogicKey, groupType, value, + filter, onChange, taxonomicGroupTypes, optionsFromProp, diff --git a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx index 9809e801308f6..cc3e727f7b10d 100644 --- a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx @@ -28,8 +28,7 @@ import { cohortsModel } from '~/models/cohortsModel' import { dashboardsModel } from '~/models/dashboardsModel' import { groupPropertiesModel } from '~/models/groupPropertiesModel' import { groupsModel } from '~/models/groupsModel' -import { personPropertiesModel } from '~/models/personPropertiesModel' -import { updateListOfPropertyDefinitions } from '~/models/propertyDefinitionsModel' +import { updatePropertyDefinitions } from '~/models/propertyDefinitionsModel' import { AnyDataNode, DatabaseSchemaQueryResponseField, NodeKind } from '~/queries/schema' import { ActionType, @@ -77,7 +76,7 @@ export const taxonomicFilterLogic = kea([ props({} as TaxonomicFilterLogicProps), key((props) => `${props.taxonomicFilterLogicKey}`), path(['lib', 'components', 'TaxonomicFilter', 'taxonomicFilterLogic']), - connect((props: TaxonomicFilterLogicProps) => ({ + connect({ values: [ teamLogic, ['currentTeamId'], @@ -87,13 +86,8 @@ export const taxonomicFilterLogic = kea([ ['allGroupProperties'], dataWarehouseSceneLogic, ['externalTables'], - personPropertiesModel({ - propertyAllowList: props.propertyAllowList, - taxonomicFilterLogicKey: props.taxonomicFilterLogicKey, - }), - ['combinedPersonProperties'], ], - })), + }), actions(() => ({ moveUp: true, moveDown: true, @@ -141,6 +135,7 @@ export const taxonomicFilterLogic = kea([ ], })), selectors({ + selectedItemMeta: [() => [(_, props) => props.filter], (filter) => filter], taxonomicFilterLogicKey: [ (_, p) => [p.taxonomicFilterLogicKey], 
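The filter prop added to TaxonomicFilter above is surfaced through the new selectedItemMeta selector, which reads straight from the logic's props. A minimal sketch of a props-backed selector in kea 3 (FilterProps and the logic name are hypothetical):

import { kea, props, selectors } from 'kea'

interface FilterProps {
    filter?: { id: string | number }
}

const filterAwareLogic = kea([
    props({} as FilterProps),
    selectors({
        // The dependency (_, props) => props.filter makes the selector track the logic's props
        selectedItemMeta: [
            () => [(_: any, p: FilterProps) => p.filter],
            (filter: FilterProps['filter']) => filter ?? null,
        ],
    }),
])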
(taxonomicFilterLogicKey) => taxonomicFilterLogicKey, @@ -170,7 +165,6 @@ export const taxonomicFilterLogic = kea([ s.metadataSource, s.excludedProperties, s.propertyAllowList, - s.taxonomicFilterLogicKey, ], ( teamId, @@ -180,8 +174,7 @@ export const taxonomicFilterLogic = kea([ schemaColumns, metadataSource, excludedProperties, - propertyAllowList, - taxonomicFilterLogicKey + propertyAllowList ): TaxonomicFilterGroup[] => { const groups: TaxonomicFilterGroup[] = [ { @@ -218,7 +211,7 @@ export const taxonomicFilterLogic = kea([ logic: dataWarehouseSceneLogic, value: 'externalTables', getName: (table: DataWarehouseTableType) => table.name, - getValue: (table: DataWarehouseTableType) => table.id, + getValue: (table: DataWarehouseTableType) => table.name, getPopoverHeader: () => 'Data Warehouse Table', getIcon: () => , }, @@ -332,15 +325,14 @@ export const taxonomicFilterLogic = kea([ name: 'Person properties', searchPlaceholder: 'person properties', type: TaxonomicFilterGroupType.PersonProperties, - logic: personPropertiesModel({ propertyAllowList, taxonomicFilterLogicKey }), - value: 'combinedPersonProperties', + endpoint: combineUrl(`api/projects/${teamId}/property_definitions`, { + type: 'person', + properties: propertyAllowList?.[TaxonomicFilterGroupType.PersonProperties] + ? propertyAllowList[TaxonomicFilterGroupType.PersonProperties].join(',') + : undefined, + }).url, getName: (personProperty: PersonProperty) => personProperty.name, - getValue: (personProperty: PersonProperty) => { - if (personProperty.table) { - return personProperty.id - } - return personProperty.name - }, + getValue: (personProperty: PersonProperty) => personProperty.name, propertyAllowList: propertyAllowList?.[TaxonomicFilterGroupType.PersonProperties], ...propertyTaxonomicGroupProps(true), }, @@ -706,7 +698,14 @@ export const taxonomicFilterLogic = kea([ groupType === TaxonomicFilterGroupType.NumericalEventProperties) ) { const propertyDefinitions: PropertyDefinition[] = results.results as PropertyDefinition[] - updateListOfPropertyDefinitions(propertyDefinitions, groupType) + const apiType = groupType === TaxonomicFilterGroupType.PersonProperties ? 
'person' : 'event' + const newPropertyDefinitions = Object.fromEntries( + propertyDefinitions.map((propertyDefinition) => [ + `${apiType}/${propertyDefinition.name}`, + propertyDefinition, + ]) + ) + updatePropertyDefinitions(newPropertyDefinitions) } }, })), diff --git a/frontend/src/lib/components/TaxonomicFilter/types.ts b/frontend/src/lib/components/TaxonomicFilter/types.ts index cde2e9d678ded..964847c6eacaf 100644 --- a/frontend/src/lib/components/TaxonomicFilter/types.ts +++ b/frontend/src/lib/components/TaxonomicFilter/types.ts @@ -1,6 +1,7 @@ import Fuse from 'fuse.js' -import { BuiltLogic, LogicWrapper } from 'kea' +import { LogicWrapper } from 'kea' import { DataWarehouseTableType } from 'scenes/data-warehouse/types' +import { LocalFilter } from 'scenes/insights/filters/ActionFilter/entityFilterLogic' import { AnyDataNode, DatabaseSchemaQueryResponseField } from '~/queries/schema' import { @@ -22,6 +23,7 @@ export interface TaxonomicFilterProps { value?: TaxonomicFilterValue onChange?: (group: TaxonomicFilterGroup, value: TaxonomicFilterValue, item: any) => void onClose?: () => void + filter?: LocalFilter taxonomicGroupTypes: TaxonomicFilterGroupType[] taxonomicFilterLogicKey?: string optionsFromProp?: Partial> @@ -59,7 +61,7 @@ export interface TaxonomicFilterGroup { scopedEndpoint?: string expandLabel?: (props: { count: number; expandedCount: number }) => React.ReactNode options?: Record[] - logic?: LogicWrapper | BuiltLogic + logic?: LogicWrapper value?: string searchAlias?: string valuesEndpoint?: (key: string) => string diff --git a/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx b/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx index 4fe515646323c..3a7e7e33d0218 100644 --- a/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx +++ b/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx @@ -4,6 +4,7 @@ import { TaxonomicFilterGroupType, TaxonomicFilterValue } from 'lib/components/T import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton' import { LemonDropdown } from 'lib/lemon-ui/LemonDropdown' import { useEffect, useState } from 'react' +import { LocalFilter } from 'scenes/insights/filters/ActionFilter/entityFilterLogic' import { AnyDataNode, DatabaseSchemaQueryResponseField } from '~/queries/schema' @@ -13,6 +14,7 @@ export interface TaxonomicPopoverProps void + filter?: LocalFilter groupTypes?: TaxonomicFilterGroupType[] renderValue?: (value: ValueType) => JSX.Element | null eventNames?: string[] @@ -41,6 +43,7 @@ export function TaxonomicStringPopover(props: TaxonomicPopoverProps): JS export function TaxonomicPopover({ groupType, value, + filter, onChange, renderValue, groupTypes, @@ -81,6 +84,7 @@ export function TaxonomicPopover { onChange?.(payload as ValueType, type, item) setVisible(false) @@ -92,7 +96,7 @@ export function TaxonomicPopover } - sameWidth={false} + matchWidth={false} actionable visible={visible} onClickOutside={() => { diff --git a/frontend/src/lib/components/UpgradeModal/upgradeModalLogic.ts b/frontend/src/lib/components/UpgradeModal/upgradeModalLogic.ts index 9542ac6a208dc..51c6cf6fb670c 100644 --- a/frontend/src/lib/components/UpgradeModal/upgradeModalLogic.ts +++ b/frontend/src/lib/components/UpgradeModal/upgradeModalLogic.ts @@ -9,7 +9,7 @@ import { AvailableFeature } from '~/types' import type { upgradeModalLogicType } from './upgradeModalLogicType' export type GuardAvailableFeatureFn = ( - featureKey: AvailableFeature, + featureKey?: AvailableFeature, 
featureAvailableCallback?: () => void, options?: { guardOnCloud?: boolean @@ -60,6 +60,10 @@ export const upgradeModalLogic = kea([ (s) => [s.preflight, s.hasAvailableFeature], (preflight, hasAvailableFeature): GuardAvailableFeatureFn => { return (featureKey, featureAvailableCallback, options): boolean => { + if (!featureKey) { + featureAvailableCallback?.() + return true + } const { guardOnCloud = true, guardOnSelfHosted = true, diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts index cc26d0eff45fc..ce53ba46d5db8 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts @@ -7,7 +7,7 @@ import { hogql } from '~/queries/utils' import type { versionCheckerLogicType } from './versionCheckerLogicType' -const CHECK_INTERVAL_MS = 1000 * 60 * 60 // 6 hour +const CHECK_INTERVAL_MS = 1000 * 60 * 60 * 6 // 6 hours export type SDKVersion = { version: string diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 9d667db41b2ed..391c13b50a213 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -1,15 +1,23 @@ import { LemonSelectOptions } from '@posthog/lemon-ui' -import { ChartDisplayType, Region, SSOProvider } from '../types' +import { ChartDisplayCategory, ChartDisplayType, Region, SSOProvider } from '../types' + +// Sync with backend DISPLAY_TYPES_TO_CATEGORIES +export const DISPLAY_TYPES_TO_CATEGORIES: Record<ChartDisplayType, ChartDisplayCategory> = { + [ChartDisplayType.ActionsLineGraph]: ChartDisplayCategory.TimeSeries, + [ChartDisplayType.ActionsBar]: ChartDisplayCategory.TimeSeries, + [ChartDisplayType.ActionsAreaGraph]: ChartDisplayCategory.TimeSeries, + [ChartDisplayType.ActionsLineGraphCumulative]: ChartDisplayCategory.CumulativeTimeSeries, + [ChartDisplayType.BoldNumber]: ChartDisplayCategory.TotalValue, + [ChartDisplayType.ActionsPie]: ChartDisplayCategory.TotalValue, + [ChartDisplayType.ActionsBarValue]: ChartDisplayCategory.TotalValue, + [ChartDisplayType.ActionsTable]: ChartDisplayCategory.TotalValue, + [ChartDisplayType.WorldMap]: ChartDisplayCategory.TotalValue, +} +export const NON_TIME_SERIES_DISPLAY_TYPES = Object.entries(DISPLAY_TYPES_TO_CATEGORIES) + .filter(([, category]) => category === ChartDisplayCategory.TotalValue) + .map(([displayType]) => displayType as ChartDisplayType) -/** Display types which don't allow grouping by unit of time. Sync with backend NON_TIME_SERIES_DISPLAY_TYPES. */ -export const NON_TIME_SERIES_DISPLAY_TYPES = [ - ChartDisplayType.ActionsTable, - ChartDisplayType.ActionsPie, - ChartDisplayType.ActionsBarValue, - ChartDisplayType.WorldMap, - ChartDisplayType.BoldNumber, -] /** Display types for which `breakdown` is hidden and ignored. Sync with backend NON_BREAKDOWN_DISPLAY_TYPES. */ export const NON_BREAKDOWN_DISPLAY_TYPES = [ChartDisplayType.BoldNumber] /** Display types which only work with a single series.
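With DISPLAY_TYPES_TO_CATEGORIES as the single source of truth, per-chart checks become category lookups instead of hand-maintained lists. A small usage sketch, as it might sit alongside the mapping in constants.tsx (isTimeSeriesDisplay is a hypothetical helper, not part of the PR):

// Hypothetical helper on top of the mapping defined above
function isTimeSeriesDisplay(display: ChartDisplayType): boolean {
    const category = DISPLAY_TYPES_TO_CATEGORIES[display]
    return category === ChartDisplayCategory.TimeSeries || category === ChartDisplayCategory.CumulativeTimeSeries
}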
*/ @@ -99,6 +107,7 @@ export const INSTANTLY_AVAILABLE_PROPERTIES = [ 'distinct_id', ] export const MAX_EXPERIMENT_VARIANTS = 10 +export const EXPERIMENT_DEFAULT_DURATION = 14 // days // Event constants export const ACTION_TYPE = 'action_type' @@ -142,15 +151,13 @@ export const FEATURE_FLAGS = { DEBUG_REACT_RENDERS: 'debug-react-renders', // owner: @benjackwhite AUTO_ROLLBACK_FEATURE_FLAGS: 'auto-rollback-feature-flags', // owner: @EDsCODE ONBOARDING_V2_DEMO: 'onboarding-v2-demo', // owner: #team-growth - ROLE_BASED_ACCESS: 'role-based-access', // owner: #team-experiments, @liyiy QUERY_RUNNING_TIME: 'query_running_time', // owner: @mariusandra QUERY_TIMINGS: 'query-timings', // owner: @mariusandra QUERY_ASYNC: 'query-async', // owner: @webjunkie POSTHOG_3000_NAV: 'posthog-3000-nav', // owner: @Twixes - ENABLE_PROMPTS: 'enable-prompts', // owner: @lharries HEDGEHOG_MODE: 'hedgehog-mode', // owner: @benjackwhite HEDGEHOG_MODE_DEBUG: 'hedgehog-mode-debug', // owner: @benjackwhite - GENERIC_SIGNUP_BENEFITS: 'generic-signup-benefits', // experiment, owner: @raquelmsmith + SIGNUP_BENEFITS: 'signup-benefits', // experiment, owner: @zlwaterfield WEB_ANALYTICS: 'web-analytics', // owner @robbie-c #team-web-analytics WEB_ANALYTICS_SAMPLING: 'web-analytics-sampling', // owner @robbie-c #team-web-analytics HIGH_FREQUENCY_BATCH_EXPORTS: 'high-frequency-batch-exports', // owner: @tomasfarias @@ -165,6 +172,7 @@ export const FEATURE_FLAGS = { PRODUCT_SPECIFIC_ONBOARDING: 'product-specific-onboarding', // owner: @raquelmsmith REDIRECT_SIGNUPS_TO_INSTANCE: 'redirect-signups-to-instance', // owner: @raquelmsmith APPS_AND_EXPORTS_UI: 'apps-and-exports-ui', // owner: @benjackwhite + HOGQL_INSIGHTS: 'hogql-insights-preview', // owner: @mariusandra HOGQL_INSIGHTS_LIFECYCLE: 'hogql-insights-lifecycle', // owner: @mariusandra HOGQL_INSIGHTS_PATHS: 'hogql-insights-paths', // owner: @webjunkie HOGQL_INSIGHTS_RETENTION: 'hogql-insights-retention', // owner: @webjunkie @@ -175,25 +183,19 @@ export const FEATURE_FLAGS = { BI_VIZ: 'bi_viz', // owner: @Gilbert09 HOGQL_AUTOCOMPLETE: 'hogql-autocomplete', // owner: @Gilbert09 WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline - SURVEYS_RESULTS_VISUALIZATIONS: 'surveys-results-visualizations', // owner: @jurajmajerik - SURVEYS_PAYGATES: 'surveys-paygates', PERSONS_HOGQL_QUERY: 'persons-hogql-query', // owner: @mariusandra PIPELINE_UI: 'pipeline-ui', // owner: #team-pipeline SESSION_RECORDING_SAMPLING: 'session-recording-sampling', // owner: #team-replay PERSON_FEED_CANVAS: 'person-feed-canvas', // owner: #project-canvas - MULTI_PROJECT_FEATURE_FLAGS: 'multi-project-feature-flags', // owner: @jurajmajerik #team-feature-success FEATURE_FLAG_COHORT_CREATION: 'feature-flag-cohort-creation', // owner: @neilkakkar #team-feature-success INSIGHT_HORIZONTAL_CONTROLS: 'insight-horizontal-controls', // owner: @benjackwhite SURVEYS_WIDGETS: 'surveys-widgets', // owner: @liyiy - SCHEDULED_CHANGES_FEATURE_FLAGS: 'scheduled-changes-feature-flags', // owner: @jurajmajerik #team-feature-success - SESSION_REPLAY_MOBILE: 'session-replay-mobile', // owner: #team-replay INVITE_TEAM_MEMBER_ONBOARDING: 'invite-team-member-onboarding', // owner: @biancayang YEAR_IN_HOG: 'year-in-hog', // owner: #team-replay SESSION_REPLAY_EXPORT_MOBILE_DATA: 'session-replay-export-mobile-data', // owner: #team-replay DISCUSSIONS: 'discussions', // owner: #team-replay REDIRECT_INSIGHT_CREATION_PRODUCT_ANALYTICS_ONBOARDING: 'redirect-insight-creation-product-analytics-onboarding', // owner: 
@biancayang SIDEPANEL_STATUS: 'sidepanel-status', // owner: @benjackwhite - NEW_FEATURE_FLAG_OPERATORS: 'new-feature-flag-operators', // owner: @neilkakkar AI_SESSION_SUMMARY: 'ai-session-summary', // owner: #team-replay AI_SESSION_PERMISSIONS: 'ai-session-permissions', // owner: #team-replay PRODUCT_INTRO_PAGES: 'product-intro-pages', // owner: @raquelmsmith @@ -211,6 +213,7 @@ export const FEATURE_FLAGS = { AUDIT_LOGS_ACCESS: 'audit-logs-access', // owner: #team-growth SUBSCRIBE_FROM_PAYGATE: 'subscribe-from-paygate', // owner: #team-growth REVERSE_PROXY_ONBOARDING: 'reverse-proxy-onboarding', // owner: @zlwaterfield + SESSION_REPLAY_MOBILE_ONBOARDING: 'session-replay-mobile-onboarding', // owner: #team-replay } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/customers/airbus.svg b/frontend/src/lib/customers/airbus.svg new file mode 100644 index 0000000000000..ff18cae1c8c0f --- /dev/null +++ b/frontend/src/lib/customers/airbus.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/lib/customers/hasura.svg b/frontend/src/lib/customers/hasura.svg new file mode 100644 index 0000000000000..1eb0373ecf1f4 --- /dev/null +++ b/frontend/src/lib/customers/hasura.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/lib/customers/staples.svg b/frontend/src/lib/customers/staples.svg new file mode 100644 index 0000000000000..0e1ff76715798 --- /dev/null +++ b/frontend/src/lib/customers/staples.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/lib/customers/y-combinator.svg b/frontend/src/lib/customers/y-combinator.svg new file mode 100644 index 0000000000000..1d19c5ff15d4a --- /dev/null +++ b/frontend/src/lib/customers/y-combinator.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx index 6664aac5c5fc6..245754ad1a090 100644 --- a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx @@ -386,7 +386,7 @@ WithDropdownToTheBottom.args = { ), placement: 'bottom', - sameWidth: true, + matchWidth: true, }, } @@ -404,7 +404,7 @@ WithVeryLongPopoverToTheBottom.args = { ), placement: 'bottom', - sameWidth: true, + matchWidth: true, }, } diff --git a/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx b/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx index 9e5240a275a68..86869bbf9bb4d 100644 --- a/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx +++ b/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx @@ -244,7 +244,7 @@ export function LemonInputSelect({ return ( > = forwardRef(function LemonProgress( - { size = 'medium', percent, strokeColor = 'var(--brand-blue)', children, className }, + { + size = 'medium', + percent, + bgColor = 'var(--bg-3000)', + strokeColor = 'var(--brand-blue)', + children, + className, + }, ref ): JSX.Element { const width = isNaN(percent) ? 
0 : Math.max(Math.min(percent, 100), 0) @@ -20,10 +28,12 @@ export const LemonProgress: React.FunctionComponent ({ ({ } : null } + tooltip={activeLeaf?.tooltip} {...buttonProps} > diff --git a/frontend/src/lib/lemon-ui/Popover/Popover.tsx b/frontend/src/lib/lemon-ui/Popover/Popover.tsx index 087ef32060d3b..956dcfaa8a010 100644 --- a/frontend/src/lib/lemon-ui/Popover/Popover.tsx +++ b/frontend/src/lib/lemon-ui/Popover/Popover.tsx @@ -40,8 +40,8 @@ export interface PopoverProps { fallbackPlacements?: Placement[] /** Whether the popover is actionable rather than just informative - actionable means a colored border. */ actionable?: boolean - /** Whether the popover's width should be synced with the children's width. */ - sameWidth?: boolean + /** Whether the popover's width should be synced with the children's width or bigger. */ + matchWidth?: boolean maxContentWidth?: boolean className?: string /** Whether default box padding should be applies. @default true */ @@ -89,7 +89,7 @@ export const Popover = React.forwardRef(function P className, padded = true, middleware, - sameWidth = false, + matchWidth = false, maxContentWidth = false, additionalRefs = [], closeParentPopoverOnClickInside = false, @@ -131,7 +131,10 @@ export const Popover = React.forwardRef(function P apply({ availableWidth, availableHeight, rects, elements: { floating } }) { floating.style.maxHeight = `${availableHeight}px` floating.style.maxWidth = `${availableWidth}px` - floating.style.width = sameWidth ? `${rects.reference.width}px` : 'initial' + floating.style.width = 'initial' + if (matchWidth) { + floating.style.minWidth = `${rects.reference.width}px` + } }, }), ...(showArrow ? [arrow({ element: arrowRef, padding: 8 })] : []), diff --git a/frontend/src/lib/logic/newPrompt/Prompt.tsx b/frontend/src/lib/logic/newPrompt/Prompt.tsx deleted file mode 100644 index 8392dfc95aa35..0000000000000 --- a/frontend/src/lib/logic/newPrompt/Prompt.tsx +++ /dev/null @@ -1,123 +0,0 @@ -import './prompt.scss' - -import { LemonButton, LemonModal } from '@posthog/lemon-ui' -import clsx from 'clsx' -import { useActions, useValues } from 'kea' -import { FallbackCoverImage } from 'lib/components/FallbackCoverImage/FallbackCoverImage' - -import { PromptButtonType, PromptFlag, PromptPayload } from '~/types' - -import { promptLogic } from './promptLogic' - -export function ModalPrompt({ - payload, - closePrompt, - openPromptFlag, - inline = false, -}: { - payload: PromptPayload - closePrompt: (promptFlag: PromptFlag, buttonType: PromptButtonType) => void - openPromptFlag: PromptFlag - inline?: boolean -}): JSX.Element { - return ( - closePrompt(openPromptFlag, 'secondary')} - footer={ - (payload.secondaryButtonText || payload.primaryButtonText) && ( -
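The Popover change above renames sameWidth to matchWidth and relaxes its behavior: instead of pinning the floating element to the reference's width, it only sets a minWidth, so a dropdown can grow wider than its trigger. A minimal sketch of the floating-ui size middleware involved (assuming @floating-ui/react; the apply body mirrors the hunk above):

import { size } from '@floating-ui/react'

const matchWidth = true // the renamed prop; defaults to false

// A floor on width rather than a fixed width
const sizeMiddleware = size({
    apply({ rects, elements: { floating } }) {
        floating.style.width = 'initial'
        if (matchWidth) {
            floating.style.minWidth = `${rects.reference.width}px`
        }
    },
})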
        - closePrompt(openPromptFlag, 'secondary')} type="secondary"> - {payload.secondaryButtonText || 'Dismiss'} - - {payload.primaryButtonText && ( - closePrompt(openPromptFlag, 'primary')} type="primary"> - {payload.primaryButtonText} - - )} -
        - ) - } - inline={inline} - > -
        -
        -
        - -
        -
        - {payload.title &&

        {payload.title}

        } - - {payload.body && ( -
        - )} -
        - - ) -} - -export function PopupPrompt({ - payload, - openPromptFlag, - closePrompt, - inline = false, -}: { - payload: PromptPayload - openPromptFlag: PromptFlag - closePrompt: (promptFlag: PromptFlag, buttonType: PromptButtonType) => void - inline?: boolean -}): JSX.Element { - return ( -
        - {payload.image && ( - - )} -
        - {payload.title &&

        {payload.title}

        } - {payload.body && ( -
        - )} -
        -
        -
        - {payload?.secondaryButtonText && ( - closePrompt(openPromptFlag, 'secondary')} type="secondary"> - {payload.secondaryButtonText} - - )} - {payload.primaryButtonText && ( - closePrompt(openPromptFlag, 'primary')} type="primary"> - {payload.primaryButtonText} - - )} -
        -
        -
        - ) -} - -export function Prompt(): JSX.Element { - const { payload, openPromptFlag } = useValues(promptLogic) - const { closePrompt } = useActions(promptLogic) - - if (!payload || !openPromptFlag) { - return <> - } - - if (payload.type === 'modal') { - return - } - - return -} diff --git a/frontend/src/lib/logic/newPrompt/prompt.scss b/frontend/src/lib/logic/newPrompt/prompt.scss deleted file mode 100644 index 86a2aad37d1cc..0000000000000 --- a/frontend/src/lib/logic/newPrompt/prompt.scss +++ /dev/null @@ -1,14 +0,0 @@ -.PromptPopup { - position: fixed; - right: 10px; - bottom: 10px; - z-index: 2000; - flex-direction: column; - min-width: 300px; - min-height: 100px; - padding-top: 5px; - background: white; - border: 1px solid #f0f0f0; - border-radius: 8px; - box-shadow: -6px 0 16px -8px rgb(0 0 0 / 8%), -9px 0 28px 0 rgb(0 0 0 / 5%), -12px 0 48px 16px rgb(0 0 0 / 3%); -} diff --git a/frontend/src/lib/logic/newPrompt/prompt.stories.tsx b/frontend/src/lib/logic/newPrompt/prompt.stories.tsx deleted file mode 100644 index 58eb6c9647db9..0000000000000 --- a/frontend/src/lib/logic/newPrompt/prompt.stories.tsx +++ /dev/null @@ -1,60 +0,0 @@ -import { Meta } from '@storybook/react' -import { useActions } from 'kea' -import BlankDashboardHog from 'public/blank-dashboard-hog.png' - -import { PromptFlag, PromptPayload } from '~/types' - -import { ModalPrompt, PopupPrompt, Prompt } from './Prompt' -import { promptLogic } from './promptLogic' - -const meta: Meta = { - title: 'Components/Prompts', - component: Prompt, -} -export default meta -export function ModalPrompt_(): JSX.Element { - // Ideally we'd instead mock the feature flag and payload but I couldn't get that to work - const payload = { - title: 'New hedgehog spotted!', - body: "We have exciting news, there's a new hedge hog that has arrived!.", - image: BlankDashboardHog, - type: 'modal', - primaryButtonText: 'Join the search!', - primaryButtonURL: 'https://google.com', - } as PromptPayload - const openPromptFlag = { - flag: 'new-hedgehog', - payload: payload, - showingPrompt: true, - } as PromptFlag - const { closePrompt } = useActions(promptLogic) - - return ( -
        - -
        - ) -} - -export function PopupPrompt_(): JSX.Element { - const payload = { - title: 'New hedgehog spotted!', - body: "We have exciting news, there's a new hedge hog that has arrived!.", - image: BlankDashboardHog, - type: 'popup', - primaryButtonText: 'Join the search!', - primaryButtonURL: 'https://google.com', - } as PromptPayload - const openPromptFlag = { - flag: 'new-hedgehog', - payload: payload, - showingPrompt: true, - } as PromptFlag - const { closePrompt } = useActions(promptLogic) - - return ( -
        - -
        - ) -} diff --git a/frontend/src/lib/logic/newPrompt/promptLogic.tsx b/frontend/src/lib/logic/newPrompt/promptLogic.tsx deleted file mode 100644 index 064fcff1c78cd..0000000000000 --- a/frontend/src/lib/logic/newPrompt/promptLogic.tsx +++ /dev/null @@ -1,179 +0,0 @@ -import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea' -import { router } from 'kea-router' -import posthog from 'posthog-js' - -import { PromptButtonType, PromptFlag, PromptPayload } from '~/types' - -import { featureFlagLogic } from '../featureFlagLogic' -import type { promptLogicType } from './promptLogicType' - -const PROMPT_PREFIX = 'prompt' -const LAST_SEEN = 'last-seen' -const MINIMUM_DAYS_BETWEEN_PROMPTS = 1 - -function getFeatureSessionStorageKey(featureFlagName: string): string { - return `${PROMPT_PREFIX}-${featureFlagName}` -} - -function getLastSeenSessionStorageKey(): string { - return `${PROMPT_PREFIX}-${LAST_SEEN}` -} - -function hasSeenPromptRecently(): boolean { - const lastSeenPopup = localStorage.getItem(getLastSeenSessionStorageKey()) - const lastSeenPopupDate = lastSeenPopup ? new Date(lastSeenPopup) : null - const oneDayAgo = new Date() - oneDayAgo.setDate(oneDayAgo.getDate() - MINIMUM_DAYS_BETWEEN_PROMPTS) - - let seenRecently = false - - if (lastSeenPopupDate && lastSeenPopupDate > oneDayAgo) { - seenRecently = true - } - return seenRecently -} - -function shouldShowPopup(featureFlagName: string): boolean { - // The feature flag should be disabled for the user once the prompt has been closed through the user properties - // This is a second check for shorter-term preventing of the prompt from showing - const flagShownBefore = localStorage.getItem(getFeatureSessionStorageKey(featureFlagName)) - - const seenRecently = hasSeenPromptRecently() - - return !flagShownBefore && !seenRecently -} - -function sendPopupEvent( - event: string, - promptFlag: PromptFlag, - buttonType: PromptButtonType | undefined = undefined -): void { - const properties = { - flagName: promptFlag.flag, - flagPayload: promptFlag.payload, - } - - if (buttonType) { - properties['buttonPressed'] = buttonType - } - - posthog.capture(event, properties) -} - -export const promptLogic = kea([ - path(['lib', 'logic', 'newPrompt', 'promptLogic']), - actions({ - closePrompt: (promptFlag: PromptFlag, buttonType: PromptButtonType) => ({ promptFlag, buttonType }), - setPromptFlags: (promptFlags: PromptFlag[]) => ({ promptFlags }), - searchForValidFlags: true, - setOpenPromptFlag: (promptFlag: PromptFlag) => ({ promptFlag }), - // hide the prompt without sending an event or setting the localstorage - // used for when the user navigates away from the page - hidePromptWithoutSaving: (promptFlag: PromptFlag) => ({ promptFlag }), - }), - connect({ - actions: [featureFlagLogic, ['setFeatureFlags'], router, ['locationChanged']], - }), - reducers({ - promptFlags: [ - [] as PromptFlag[], - { - setPromptFlags: (_, { promptFlags }) => promptFlags, - setOpenPromptFlag: (promptFlags, { promptFlag }) => { - return promptFlags.map((flag: PromptFlag) => { - if (flag.flag === promptFlag.flag) { - return { ...flag, showingPrompt: true } - } - return flag - }) - }, - closePrompt: (promptFlags) => { - return promptFlags.map((flag: PromptFlag) => { - return { ...flag, showingPrompt: false } - }) - }, - hidePromptWithoutSaving: (promptFlags, { promptFlag }) => { - return promptFlags.map((flag: PromptFlag) => { - if (flag.flag === promptFlag.flag) { - return { ...flag, showingPrompt: false } - } - return flag - }) - }, - }, - ], - }), 
- listeners(({ actions, values }) => ({ - // TODO: on url change, check if there's a prompt to show - setFeatureFlags: async ({ flags }, breakpoint) => { - await breakpoint(100) - const promptFlags: PromptFlag[] = [] - flags.forEach((flag: string) => { - if (flag.startsWith(PROMPT_PREFIX) && posthog.isFeatureEnabled(flag)) { - const payload = posthog.getFeatureFlagPayload(flag) as PromptPayload - if (!payload || !payload.type) { - // indicates that it's not a valid prompt - return - } - promptFlags.push({ - flag, - payload, - showingPrompt: false, - }) - } - }) - actions.setPromptFlags(promptFlags) - actions.searchForValidFlags() - }, - searchForValidFlags: async () => { - for (const promptFlag of values.promptFlags) { - if (!promptFlag.payload.url_match || window.location.href.match(promptFlag.payload.url_match)) { - if (shouldShowPopup(promptFlag.flag)) { - actions.setOpenPromptFlag(promptFlag) - return // only show one prompt at a time - } - } - } - }, - setOpenPromptFlag: async ({ promptFlag }, breakpoint) => { - await breakpoint(1000) - sendPopupEvent('Prompt shown', promptFlag) - }, - closePrompt: async ({ promptFlag, buttonType }) => { - if (promptFlag) { - sendPopupEvent('Prompt closed', promptFlag, buttonType) - localStorage.setItem(getFeatureSessionStorageKey(promptFlag.flag), new Date().toDateString()) - localStorage.setItem(getLastSeenSessionStorageKey(), new Date().toDateString()) - posthog.people.set({ ['$' + promptFlag.flag]: new Date().toDateString() }) - - if (promptFlag?.payload.primaryButtonURL && buttonType === 'primary') { - window.open(promptFlag.payload.primaryButtonURL, '_blank') - } - } - }, - locationChanged: async (_, breakpoint) => { - await breakpoint(100) - if (values.openPromptFlag && values.openPromptFlag.payload.url_match) { - if (!window.location.href.match(values.openPromptFlag.payload.url_match)) { - actions.hidePromptWithoutSaving(values.openPromptFlag) - } - } - - actions.searchForValidFlags() - }, - })), - selectors({ - openPromptFlag: [ - (s) => [s.promptFlags], - (promptFlags) => { - return promptFlags.find((flag: PromptFlag) => flag.showingPrompt) - }, - ], - payload: [ - (s) => [s.openPromptFlag], - (openPromptFlag: PromptFlag) => { - return openPromptFlag?.payload - }, - ], - }), -]) diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index e88617ddd7bad..069d1758d0e56 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -457,11 +457,6 @@ export const eventUsageLogic = kea([ reportInstanceSettingChange: (name: string, value: string | boolean | number) => ({ name, value }), reportAxisUnitsChanged: (properties: Record) => ({ ...properties }), reportTeamSettingChange: (name: string, value: any) => ({ name, value }), - reportActivationSideBarShown: ( - activeTasksCount: number, - completedTasksCount: number, - completionPercent: number - ) => ({ activeTasksCount, completedTasksCount, completionPercent }), reportActivationSideBarTaskClicked: (key: string) => ({ key }), reportBillingUpgradeClicked: (plan: string) => ({ plan }), reportRoleCreated: (role: string) => ({ role }), @@ -1092,13 +1087,6 @@ export const eventUsageLogic = kea([ value, }) }, - reportActivationSideBarShown: ({ activeTasksCount, completedTasksCount, completionPercent }) => { - posthog.capture('activation sidebar shown', { - active_tasks_count: activeTasksCount, - completed_tasks_count: completedTasksCount, - completion_percent_count: completionPercent, - }) - }, 
reportActivationSideBarTaskClicked: ({ key }) => { posthog.capture('activation sidebar task clicked', { key, diff --git a/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json b/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json index 638a1618d5fb2..f186af09cdff1 100644 --- a/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json +++ b/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json @@ -96,7 +96,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -136,7 +135,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -453,7 +451,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -493,7 +490,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -683,7 +679,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -723,7 +718,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -899,7 +893,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -939,7 +932,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -1119,7 +1111,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -1159,7 +1150,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -1422,7 +1412,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -1462,7 +1451,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -1800,7 +1788,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -1840,7 +1827,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -2146,7 +2132,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -2186,7 +2171,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -2542,7 +2526,6 @@ "sampling", "recordings-v2-recorder", 
"posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -2582,7 +2565,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -2758,7 +2740,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -2798,7 +2779,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -2975,7 +2955,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -3015,7 +2994,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -3371,7 +3349,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -3411,7 +3388,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -3588,7 +3564,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -3628,7 +3603,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -3818,7 +3792,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -3858,7 +3831,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -4036,7 +4008,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -4076,7 +4047,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -4254,7 +4224,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -4294,7 +4263,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -4470,7 +4438,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -4510,7 +4477,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -4879,7 +4845,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -4919,7 +4884,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": 
true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -5109,7 +5073,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -5149,7 +5112,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -5325,7 +5287,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -5365,7 +5326,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -5694,7 +5654,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -5734,7 +5693,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -5912,7 +5870,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -5952,7 +5909,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -6128,7 +6084,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -6168,7 +6123,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -6348,7 +6302,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -6388,7 +6341,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -6566,7 +6518,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -6606,7 +6557,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -6908,7 +6858,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -6948,7 +6897,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -7130,7 +7078,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -7170,7 +7117,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ 
-7346,7 +7292,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -7386,7 +7331,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -7564,7 +7508,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -7604,7 +7547,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -7890,7 +7832,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -7930,7 +7871,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -8108,7 +8048,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -8148,7 +8087,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -8326,7 +8264,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -8366,7 +8303,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -8544,7 +8480,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -8584,7 +8519,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -8761,7 +8695,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -8801,7 +8734,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -8979,7 +8911,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -9019,7 +8950,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -9197,7 +9127,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -9237,7 +9166,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -9415,7 +9343,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -9455,7 +9382,6 @@ 
"$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -9633,7 +9559,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -9673,7 +9598,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -9709,7 +9633,6 @@ }, "$referrer": "$direct", "$referring_domain": "$direct", - "$feature_flag": "enable-prompts", "$feature_flag_response": true, "token": "phc_IdfzBh09RdfsZyvdjYbq8ml2NR0AD0SnFqcUl4Itwwp", "$session_id": "188906a04ee2c10-0132aa872fb6f9-1c525634-384000-188906a04ef1cdc", @@ -9938,7 +9861,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -9978,7 +9900,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -10164,7 +10085,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -10204,7 +10124,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -10391,7 +10310,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -10431,7 +10349,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -10609,7 +10526,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -10649,7 +10565,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -10825,7 +10740,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -10865,7 +10779,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -11041,7 +10954,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -11081,7 +10993,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -11257,7 +11168,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -11297,7 +11207,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ 
-11584,7 +11493,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -11624,7 +11532,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -11805,7 +11712,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -11845,7 +11751,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -12021,7 +11926,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -12061,7 +11965,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -12239,7 +12142,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -12279,7 +12181,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -12457,7 +12358,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -12497,7 +12397,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -12675,7 +12574,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -12715,7 +12613,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -12893,7 +12790,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -12933,7 +12829,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -13110,7 +13005,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -13150,7 +13044,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -13328,7 +13221,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -13368,7 +13260,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -13546,7 +13437,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", 
"notebooks", @@ -13586,7 +13476,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -13764,7 +13653,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -13804,7 +13692,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -13840,7 +13727,6 @@ }, "$referrer": "$direct", "$referring_domain": "$direct", - "$feature_flag": "enable-prompts", "$feature_flag_response": true, "token": "phc_IdfzBh09RdfsZyvdjYbq8ml2NR0AD0SnFqcUl4Itwwp", "$session_id": "1889068fbce1060-03042ca0405c1f-1c525634-384000-1889068fbcf1371", @@ -13982,7 +13868,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -14022,7 +13907,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -14287,7 +14171,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -14327,7 +14210,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -14513,7 +14395,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -14553,7 +14434,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -14740,7 +14620,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -14780,7 +14659,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -14958,7 +14836,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -14998,7 +14875,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -15174,7 +15050,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -15214,7 +15089,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -15390,7 +15264,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -15430,7 +15303,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, 
"$feature/hogql": true, "$feature/notebooks": true, @@ -15606,7 +15478,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -15646,7 +15517,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -15828,7 +15698,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -15868,7 +15737,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -16151,7 +16019,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -16191,7 +16058,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -16367,7 +16233,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -16407,7 +16272,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -16588,7 +16452,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -16628,7 +16491,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -16806,7 +16668,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -16846,7 +16707,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -17024,7 +16884,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -17064,7 +16923,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -17242,7 +17100,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -17282,7 +17139,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -17459,7 +17315,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -17499,7 +17354,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -17677,7 +17531,6 @@ "sampling", "recordings-v2-recorder", 
"posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -17717,7 +17570,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -17895,7 +17747,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -17935,7 +17786,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -18113,7 +17963,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -18153,7 +18002,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -18189,7 +18037,6 @@ }, "$referrer": "$direct", "$referring_domain": "$direct", - "$feature_flag": "enable-prompts", "$feature_flag_response": true, "token": "phc_IdfzBh09RdfsZyvdjYbq8ml2NR0AD0SnFqcUl4Itwwp", "$session_id": "18890672ccd2bb9-07e026efe4e2a7-1c525634-384000-18890672cce22bc", @@ -18331,7 +18178,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -18371,7 +18217,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -18636,7 +18481,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -18676,7 +18520,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -18862,7 +18705,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -18902,7 +18744,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -19089,7 +18930,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -19129,7 +18969,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -19305,7 +19144,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -19345,7 +19183,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -19523,7 +19360,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -19563,7 +19399,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - 
"$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -19739,7 +19574,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -19779,7 +19613,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -19955,7 +19788,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -19995,7 +19827,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -20177,7 +20008,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -20217,7 +20047,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -20394,7 +20223,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -20434,7 +20262,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -20610,7 +20437,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -20650,7 +20476,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -21003,7 +20828,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -21043,7 +20867,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -21219,7 +21042,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -21259,7 +21081,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -21439,7 +21260,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -21479,7 +21299,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -21809,7 +21628,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -21849,7 +21667,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ 
-22031,7 +21848,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -22071,7 +21887,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -22363,7 +22178,6 @@ "sampling", "recordings-v2-recorder", "posthog-3000", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -22403,7 +22217,6 @@ "$feature/sampling": true, "$feature/recordings-v2-recorder": true, "$feature/posthog-3000": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index 3b4d5c2577a7e..e155aa67cf805 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -10,6 +10,7 @@ import { MOCK_PERSON_PROPERTIES, MOCK_SECOND_ORGANIZATION_MEMBER, } from 'lib/api.mock' +import { ResponseComposition, RestContext, RestRequest } from 'msw' import { getAvailableFeatures } from '~/mocks/features' import { SharingConfigurationType } from '~/types' @@ -25,6 +26,19 @@ export const toPaginatedResponse = (results: any[]): typeof EMPTY_PAGINATED_RESP previous: null, }) +// this really returns MaybePromise> +// but MSW doesn't export MaybePromise 🤷 +function posthogCORSResponse(req: RestRequest, res: ResponseComposition, ctx: RestContext): any { + return res( + ctx.status(200), + ctx.json('ok'), + // some of our tests try to make requests via posthog-js e.g. userLogic calls identify + // they have to have CORS allowed, or they pass but print noise to the console + ctx.set('Access-Control-Allow-Origin', req.referrer.length ? 
req.referrer : 'http://localhost'), + ctx.set('Access-Control-Allow-Credentials', 'true') + ) +} + export const defaultMocks: Mocks = { get: { '/api/projects/:team_id/activity_log/important_changes/': EMPTY_PAGINATED_RESPONSE, @@ -108,12 +122,13 @@ export const defaultMocks: Mocks = { }, }, post: { - 'https://us.i.posthog.com/e/': (): MockSignature => [200, 'ok'], - '/e/': (): MockSignature => [200, 'ok'], - 'https://us.i.posthog.com/decide/': (): MockSignature => [200, 'ok'], - '/decide/': (): MockSignature => [200, 'ok'], - 'https://us.i.posthog.com/engage/': (): MockSignature => [200, 'ok'], + 'https://us.i.posthog.com/e/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), + '/e/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), + 'https://us.i.posthog.com/decide/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), + '/decide/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), + 'https://us.i.posthog.com/engage/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), '/api/projects/:team_id/insights/:insight_id/viewed/': (): MockSignature => [201, null], + 'api/projects/:team_id/query': [200, { results: [] }], }, } export const handlers = mocksToHandlers(defaultMocks) diff --git a/frontend/src/models/personPropertiesModel.ts b/frontend/src/models/personPropertiesModel.ts deleted file mode 100644 index f319095c3ba81..0000000000000 --- a/frontend/src/models/personPropertiesModel.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { connect, events, kea, key, listeners, path, props, selectors } from 'kea' -import { loaders } from 'kea-loaders' -import { combineUrl, router } from 'kea-router' -import api from 'lib/api' -import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { dataWarehouseJoinsLogic } from 'scenes/data-warehouse/external/dataWarehouseJoinsLogic' -import { teamLogic } from 'scenes/teamLogic' - -import { updateListOfPropertyDefinitions } from '~/models/propertyDefinitionsModel' -import { PersonProperty, PropertyDefinition } from '~/types' - -import type { personPropertiesModelType } from './personPropertiesModelType' -import { PersonPropertiesModelProps } from './types' - -const WHITELISTED = ['/insights', '/events', '/sessions', '/dashboard', '/person'] - -export const personPropertiesModel = kea([ - props({} as PersonPropertiesModelProps), - path(['models', 'personPropertiesModel']), - key((props) => props.taxonomicFilterLogicKey), - connect({ - values: [ - teamLogic, - ['currentTeamId'], - dataWarehouseJoinsLogic, - ['columnsJoinedToPersons'], - featureFlagLogic, - ['featureFlags'], - ], - }), - loaders(({ values }) => ({ - personProperties: [ - [] as PersonProperty[], - { - loadPersonProperties: async () => { - const url = combineUrl(`api/projects/${values.currentTeamId}/property_definitions`, { - type: 'person', - properties: values.propertyAllowList?.[TaxonomicFilterGroupType.PersonProperties] - ? 
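The posthogCORSResponse helper above is an MSW resolver that turns the previously canned [200, 'ok'] tuples into responses carrying CORS headers. A minimal standalone sketch of the same pattern, assuming msw v1's rest API with a Node test server (the server wiring below is illustrative; the repo routes these handlers through mocksToHandlers instead):

import { ResponseComposition, rest, RestContext, RestRequest } from 'msw'
import { setupServer } from 'msw/node'

// Echo the caller's referrer as the allowed origin, falling back to localhost,
// so posthog-js requests fired from tests pass the browser CORS check quietly.
const corsOk = (req: RestRequest, res: ResponseComposition, ctx: RestContext): any =>
    res(
        ctx.status(200),
        ctx.json('ok'),
        ctx.set('Access-Control-Allow-Origin', req.referrer.length ? req.referrer : 'http://localhost'),
        ctx.set('Access-Control-Allow-Credentials', 'true')
    )

export const server = setupServer(
    rest.post('https://us.i.posthog.com/e/', corsOk),
    rest.post('https://us.i.posthog.com/decide/', corsOk)
)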
values.propertyAllowList[TaxonomicFilterGroupType.PersonProperties].join(',') - : undefined, - }).url - return (await api.get(url)).results - }, - }, - ], - })), - listeners(() => ({ - loadPersonPropertiesSuccess: ({ personProperties }) => { - updateListOfPropertyDefinitions( - personProperties as PropertyDefinition[], - TaxonomicFilterGroupType.PersonProperties - ) - }, - })), - selectors(() => ({ - combinedPersonProperties: [ - (s) => [s.personProperties, s.columnsJoinedToPersons, s.featureFlags], - (personProperties, columnsJoinedToPersons, featureFlags) => { - // Hack to make sure person properties only show data warehouse in specific instances for now - if ( - featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE] && - WHITELISTED.some((path) => router.values.location.pathname.includes(path)) - ) { - return [...personProperties, ...columnsJoinedToPersons] - } - return [...personProperties] - }, - ], - propertyAllowList: [ - () => [(_, props) => props.propertyAllowList], - (propertyAllowList) => propertyAllowList as PersonPropertiesModelProps['propertyAllowList'], - ], - })), - events(({ actions }) => ({ - afterMount: actions.loadPersonProperties, - })), -]) diff --git a/frontend/src/models/propertyDefinitionsModel.ts b/frontend/src/models/propertyDefinitionsModel.ts index b7bba27261714..338e60a5e956f 100644 --- a/frontend/src/models/propertyDefinitionsModel.ts +++ b/frontend/src/models/propertyDefinitionsModel.ts @@ -1,6 +1,6 @@ import { actions, kea, listeners, path, reducers, selectors } from 'kea' import api, { ApiMethodOptions } from 'lib/api' -import { TaxonomicFilterGroupType, TaxonomicFilterValue } from 'lib/components/TaxonomicFilter/types' +import { TaxonomicFilterValue } from 'lib/components/TaxonomicFilter/types' import { dayjs } from 'lib/dayjs' import { captureTimeToSeeData } from 'lib/internalMetrics' import { colonDelimitedDuration } from 'lib/utils' @@ -46,18 +46,6 @@ export const updatePropertyDefinitions = (propertyDefinitions: PropertyDefinitio propertyDefinitionsModel.findMounted()?.actions.updatePropertyDefinitions(propertyDefinitions) } -export const updateListOfPropertyDefinitions = ( - results: PropertyDefinition[], - groupType: TaxonomicFilterGroupType -): void => { - const propertyDefinitions: PropertyDefinition[] = results - const apiType = groupType === TaxonomicFilterGroupType.PersonProperties ? 
'person' : 'event' - const newPropertyDefinitions = Object.fromEntries( - propertyDefinitions.map((propertyDefinition) => [`${apiType}/${propertyDefinition.name}`, propertyDefinition]) - ) - updatePropertyDefinitions(newPropertyDefinitions) -} - export type PropValue = { id?: number name?: string | boolean diff --git a/frontend/src/models/types.ts b/frontend/src/models/types.ts deleted file mode 100644 index b3f4c22f60d4d..0000000000000 --- a/frontend/src/models/types.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' - -export interface PersonPropertiesModelProps { - propertyAllowList?: { [key in TaxonomicFilterGroupType]?: string[] } // only return properties in this list, currently only working for EventProperties and PersonProperties - taxonomicFilterLogicKey: string -} diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts index 55a417bdff91e..ead4227a3c793 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts @@ -39,6 +39,9 @@ describe('dataNodeLogic - query cancellation', () => { ) }, }, + delete: { + '/api/projects/:team_id/query/uuid-first': [200, {}], + }, }) }) afterEach(() => logic?.unmount()) diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index 50562340715af..62158a7a16bcc 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -384,7 +384,8 @@ export const dataNodeLogic = kea([ ], hogQLInsightsRetentionFlagEnabled: [ (s) => [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION], + (featureFlags) => + !!(featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION]), ], query: [(_, p) => [p.query], (query) => query], newQuery: [ diff --git a/frontend/src/queries/nodes/DataTable/SavedQueries.tsx b/frontend/src/queries/nodes/DataTable/SavedQueries.tsx index 5f10bef86273c..bec890d838bee 100644 --- a/frontend/src/queries/nodes/DataTable/SavedQueries.tsx +++ b/frontend/src/queries/nodes/DataTable/SavedQueries.tsx @@ -34,7 +34,7 @@ export function SavedQueries({ query, setQuery }: SavedQueriesProps): JSX.Elemen return ( ( { + it('handles empty properties', () => { + const properties = {} + + const result = cleanGlobalProperties(properties) + + expect(result).toEqual(undefined) + }) + + it('handles old style properties', () => { + const properties = { utm_medium__icontains: 'email' } + + const result = cleanGlobalProperties(properties) + + expect(result).toEqual({ + type: 'AND', + values: [ + { + type: 'AND', + values: [ + { + key: 'utm_medium', + operator: 'icontains', + type: 'event', + value: 'email', + }, + ], + }, + ], + }) + }) + + it('handles property filter lists', () => { + const properties = [{ key: 'id', type: 'cohort', value: 636, operator: null }] + + const result = cleanGlobalProperties(properties) + + expect(result).toEqual({ + type: 'AND', + values: [{ type: 'AND', values: [{ key: 'id', type: 'cohort', value: 636 }] }], + }) + }) + + it('handles property group filters', () => { + const properties = { + type: 'AND', + values: [{ type: 'AND', values: [{ key: 'id', type: 'cohort', value: 850, operator: null }] }], + } + + const result = 
cleanGlobalProperties(properties) + + expect(result).toEqual(properties) + }) + + it('handles property group filters values', () => { + const properties = { + type: 'AND', + values: [{ key: 'id', type: 'cohort', value: 850, operator: null }], + } + + const result = cleanGlobalProperties(properties) + + expect(result).toEqual({ + type: 'AND', + values: [ + { + type: 'AND', + values: [{ key: 'id', type: 'cohort', value: 850 }], + }, + ], + }) + }) +}) + +describe('cleanEntityProperties', () => { + it('handles empty properties', () => { + const properties = {} + + const result = cleanEntityProperties(properties) + + expect(result).toEqual(undefined) + }) + + it('handles old style properties', () => { + const properties = { utm_medium__icontains: 'email' } + + const result = cleanEntityProperties(properties) + + expect(result).toEqual([ + { + key: 'utm_medium', + operator: 'icontains', + type: 'event', + value: 'email', + }, + ]) + }) + + it('handles property filter lists', () => { + const properties = [ + { key: '$current_url', type: 'event', value: 'https://hedgebox.net/signup/', operator: 'exact' }, + ] + + const result = cleanEntityProperties(properties) + + expect(result).toEqual(properties) + }) + + it('handles property group values', () => { + const properties = { + type: 'AND', + values: [ + { + key: '$current_url', + operator: 'exact', + type: 'event', + value: 'https://hedgebox.net/signup/', + }, + ], + } + + const result = cleanEntityProperties(properties) + + expect(result).toEqual([ + { + key: '$current_url', + operator: 'exact', + type: 'event', + value: 'https://hedgebox.net/signup/', + }, + ]) + }) +}) diff --git a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts new file mode 100644 index 0000000000000..3505204aed08a --- /dev/null +++ b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts @@ -0,0 +1,180 @@ +import { + AnyPropertyFilter, + EventPropertyFilter, + PropertyFilterType, + PropertyGroupFilter, + PropertyGroupFilterValue, + PropertyOperator, +} from '~/types' + +/** Cleans properties of insights. These are either a simple list of property filters or a property group filter. The property group filter has + * a type (AND, OR) and a list of values that are property group filter values, which are either property group filter values or a simple list of + * property filters. 
+ */ +export const cleanGlobalProperties = ( + properties: Record | Record[] | undefined +): AnyPropertyFilter[] | PropertyGroupFilter | undefined => { + if ( + properties == undefined || + (Array.isArray(properties) && properties.length == 0) || + Object.keys(properties).length == 0 + ) { + // empty properties + return undefined + } else if (isOldStyleProperties(properties)) { + // old style properties + properties = transformOldStyleProperties(properties) + properties = { + type: 'AND', + values: [{ type: 'AND', values: properties }], + } + return cleanPropertyGroupFilter(properties) + } else if (Array.isArray(properties)) { + // list of property filters + properties = { + type: 'AND', + values: [{ type: 'AND', values: properties }], + } + return cleanPropertyGroupFilter(properties) + } else if ( + (properties['type'] === 'AND' || properties['type'] === 'OR') && + !properties['values'].some((property: any) => property['type'] === 'AND' || property['type'] === 'OR') + ) { + // property group filter value + properties = { + type: 'AND', + values: [properties], + } + return cleanPropertyGroupFilter(properties) + } else { + // property group filter + return cleanPropertyGroupFilter(properties) + } +} + +/** Cleans properties of entities i.e. event and action nodes. These are a simple list of property filters. */ +export const cleanEntityProperties = ( + properties: Record | Record[] | undefined +): AnyPropertyFilter[] | undefined => { + if ( + properties == undefined || + (Array.isArray(properties) && properties.length == 0) || + Object.keys(properties).length == 0 + ) { + // empty properties + return undefined + } else if (isOldStyleProperties(properties)) { + // old style properties + return transformOldStyleProperties(properties) + } else if (Array.isArray(properties)) { + // list of property filters + return properties.map(cleanProperty) + } else if ( + (properties['type'] === 'AND' || properties['type'] === 'OR') && + !properties['values'].some((property: any) => property['type'] === 'AND' || property['type'] === 'OR') + ) { + // property group filter value + return properties.values.map(cleanProperty) + } else { + throw new Error('Unexpected format of entity properties.') + } +} + +const cleanPropertyGroupFilter = (properties: Record): PropertyGroupFilter => { + properties['values'] = cleanPropertyGroupFilterValues(properties.values) + return properties as PropertyGroupFilter +} + +const cleanPropertyGroupFilterValues = ( + properties: (AnyPropertyFilter | PropertyGroupFilterValue)[] +): (AnyPropertyFilter | PropertyGroupFilterValue)[] => { + return properties.map(cleanPropertyGroupFilterValue) +} + +const cleanPropertyGroupFilterValue = ( + property: AnyPropertyFilter | PropertyGroupFilterValue +): AnyPropertyFilter | PropertyGroupFilterValue => { + if (property['type'] == 'AND' || property['type'] == 'OR') { + // property group filter value + property['values'] = cleanPropertyGroupFilterValues(property['values'] as PropertyGroupFilterValue[]) + return property + } else { + // property filter + return cleanProperty(property) + } +} + +const cleanProperty = (property: Record): AnyPropertyFilter => { + // fix type typo + if (property['type'] === 'events') { + property['type'] = 'event' + } + + // fix value key typo + if (property['values'] !== undefined && property['value'] === undefined) { + property['value'] = property['values'] + delete property['values'] + } + + // convert precalculated and static cohorts to cohorts + if (['precalculated-cohort', 
'static-cohort'].includes(property['type'])) { + property['type'] = 'cohort' + } + + // fix invalid property key for cohorts + if (property['type'] === 'cohort' && property['key'] !== 'id') { + property['key'] = 'id' + } + + // set a default operator for properties that support it, but don't have an operator set + if (isPropertyWithOperator(property) && property['operator'] === undefined) { + property['operator'] = 'exact' + } + + // remove the operator for properties that don't support it, but have it set + if (!isPropertyWithOperator(property) && property['operator'] !== undefined) { + delete property['operator'] + } + + // remove none from values + if (Array.isArray(property['value'])) { + property['value'] = property['value'].filter((x) => x !== null) + } + + // remove keys without concrete value + Object.keys(property).forEach((key) => { + if (property[key] === undefined) { + delete property[key] + } + }) + + return property +} + +const isPropertyWithOperator = (property: Record): boolean => { + return !['cohort', 'hogql'].includes(property['type']) +} + +// old style dict properties e.g. {"utm_medium__icontains": "email"} +const isOldStyleProperties = (properties: Record | Record[]): boolean => { + return ( + !Array.isArray(properties) && Object.keys(properties).length === 1 && !['AND', 'OR'].includes(properties.type) + ) +} + +const transformOldStyleProperties = ( + properties: Record | Record[] +): EventPropertyFilter[] => { + const key = Object.keys(properties)[0] + const value = Object.values(properties)[0] + const keySplit = key.split('__') + + return [ + { + key: keySplit[0], + value: value, + operator: keySplit.length > 1 ? (keySplit[1] as PropertyOperator) : PropertyOperator.Exact, + type: PropertyFilterType.Event, + }, + ] +} diff --git a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts index 12f1b95c0a10b..fea11606ec4b6 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts @@ -801,7 +801,12 @@ describe('filtersToQueryNode', () => { kind: NodeKind.LifecycleQuery, properties: { type: FilterLogicalOperator.And, - values: [], + values: [ + { + type: FilterLogicalOperator.And, + values: [], + }, + ], }, filterTestAccounts: true, dateRange: { @@ -1394,7 +1399,12 @@ describe('filtersToQueryNode', () => { }, properties: { type: FilterLogicalOperator.And, - values: [], + values: [ + { + type: FilterLogicalOperator.And, + values: [], + }, + ], }, } expect(result).toEqual(query) diff --git a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts index 40cd113a7d29d..b832a167813e2 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts @@ -41,10 +41,8 @@ import { } from '~/queries/utils' import { ActionFilter, - AnyPropertyFilter, BaseMathType, DataWarehouseFilter, - FilterLogicalOperator, FilterType, FunnelExclusionLegacy, FunnelsFilterType, @@ -53,14 +51,13 @@ import { InsightType, isDataWarehouseFilter, PathsFilterType, - PropertyFilterType, - PropertyGroupFilterValue, - PropertyOperator, RetentionEntity, RetentionFilterType, TrendsFilterType, } from '~/types' +import { cleanEntityProperties, cleanGlobalProperties } from './cleanProperties' + const reverseInsightMap: Record, InsightNodeKind> = { 
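transformOldStyleProperties in the new module above recovers the operator that the legacy dict format packed into the key itself. The split rule reduced to a standalone sketch (the helper name here is illustrative, not part of the module):

// 'utm_medium__icontains' => key 'utm_medium', operator 'icontains'; a bare key defaults to 'exact'
const splitOldStyleKey = (rawKey: string): { key: string; operator: string } => {
    const [key, operator = 'exact'] = rawKey.split('__')
    return { key, operator }
}

console.log(splitOldStyleKey('utm_medium__icontains')) // { key: 'utm_medium', operator: 'icontains' }
console.log(splitOldStyleKey('utm_source')) // { key: 'utm_source', operator: 'exact' }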
[InsightType.TRENDS]: NodeKind.TrendsQuery, [InsightType.FUNNELS]: NodeKind.FunnelsQuery, @@ -110,7 +107,7 @@ export const legacyEntityToNode = ( } if (includeProperties) { - shared = { ...shared, properties: cleanProperties(entity.properties) } as any + shared = { ...shared, properties: cleanEntityProperties(entity.properties) } as any } if (mathAvailability !== MathAvailability.None) { @@ -213,97 +210,6 @@ export const sanitizeRetentionEntity = (entity: RetentionEntity | undefined): Re return record } -const cleanProperties = (parentProperties: FilterType['properties']): InsightsQueryBase['properties'] => { - if (!parentProperties || !parentProperties.values) { - return parentProperties - } - - const processAnyPropertyFilter = (filter: AnyPropertyFilter): AnyPropertyFilter => { - if ( - filter.type === PropertyFilterType.Event || - filter.type === PropertyFilterType.Person || - filter.type === PropertyFilterType.Element || - filter.type === PropertyFilterType.Session || - filter.type === PropertyFilterType.Group || - filter.type === PropertyFilterType.Feature || - filter.type === PropertyFilterType.Recording - ) { - return { - ...filter, - operator: filter.operator ?? PropertyOperator.Exact, - } - } - - // Some saved insights have `"operator": null` defined in the properties, this - // breaks HogQL trends and Pydantic validation - if (filter.type === PropertyFilterType.Cohort) { - if ('operator' in filter) { - delete filter.operator - } - } - - return filter - } - - const processPropertyGroupFilterValue = ( - propertyGroupFilterValue: PropertyGroupFilterValue - ): PropertyGroupFilterValue => { - if (propertyGroupFilterValue.values?.length === 0 || !propertyGroupFilterValue.values) { - return propertyGroupFilterValue - } - - // Check whether the first values type is an AND or OR - const firstValueType = propertyGroupFilterValue.values[0].type - - if (firstValueType === FilterLogicalOperator.And || firstValueType === FilterLogicalOperator.Or) { - // propertyGroupFilterValue.values is PropertyGroupFilterValue[] - const values = (propertyGroupFilterValue.values as PropertyGroupFilterValue[]).map( - processPropertyGroupFilterValue - ) - - return { - ...propertyGroupFilterValue, - values, - } - } - - // propertyGroupFilterValue.values is AnyPropertyFilter[] - const values = (propertyGroupFilterValue.values as AnyPropertyFilter[]).map(processAnyPropertyFilter) - - return { - ...propertyGroupFilterValue, - values, - } - } - - if (Array.isArray(parentProperties)) { - // parentProperties is AnyPropertyFilter[] - return parentProperties.map(processAnyPropertyFilter) - } - - if ( - (parentProperties.type === FilterLogicalOperator.And || parentProperties.type === FilterLogicalOperator.Or) && - Array.isArray(parentProperties.values) && - parentProperties.values.some( - (value) => - typeof value !== 'object' || - (value.type !== FilterLogicalOperator.And && value.type !== FilterLogicalOperator.Or) - ) - ) { - return { - type: FilterLogicalOperator.And, - values: [processPropertyGroupFilterValue(parentProperties)], - } - } - - // parentProperties is PropertyGroupFilter - const values = parentProperties.values.map(processPropertyGroupFilterValue) - return { - ...parentProperties, - values, - } -} - export const filtersToQueryNode = (filters: Partial): InsightQueryNode => { const captureException = (message: string): void => { Sentry.captureException(new Error(message), { @@ -318,7 +224,7 @@ export const filtersToQueryNode = (filters: Partial): InsightQueryNo const query: InsightsQueryBase = { kind: 
reverseInsightMap[filters.insight], - properties: cleanProperties(filters.properties), + properties: cleanGlobalProperties(filters.properties), filterTestAccounts: filters.filter_test_accounts, } if (filters.sampling_factor) { diff --git a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx index 4136358dac1dc..fc65ff00a670d 100644 --- a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx +++ b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx @@ -78,7 +78,8 @@ export function TrendsSeries(): JSX.Element | null { mathAvailability={mathAvailability} propertiesTaxonomicGroupTypes={propertiesTaxonomicGroupTypes} actionsTaxonomicGroupTypes={ - featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE] && featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS] + featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE] && + (featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS]) ? [ TaxonomicFilterGroupType.Events, TaxonomicFilterGroupType.Actions, diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index 78778cec4322a..cc688988d845a 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -146,28 +146,22 @@ export async function query( let response: NonNullable const logParams: Record = {} const startTime = performance.now() - - const hogQLInsightsLifecycleFlagEnabled = Boolean( - featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE] - ) - const hogQLInsightsPathsFlagEnabled = Boolean( - featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_PATHS] - ) - const hogQLInsightsRetentionFlagEnabled = Boolean( - featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION] - ) - const hogQLInsightsTrendsFlagEnabled = Boolean( - featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS] - ) - const hogQLInsightsStickinessFlagEnabled = Boolean( - featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_STICKINESS] - ) - const hogQLInsightsFunnelsFlagEnabled = Boolean( - featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_FUNNELS] - ) - const hogQLInsightsLiveCompareEnabled = Boolean( - featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHT_LIVE_COMPARE] - ) + const allFlags = featureFlagLogic.findMounted()?.values.featureFlags ?? {} + + const hogQLInsightsFlagEnabled = Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHTS]) + const hogQLInsightsLifecycleFlagEnabled = + hogQLInsightsFlagEnabled || Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE]) + const hogQLInsightsPathsFlagEnabled = + hogQLInsightsFlagEnabled || Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_PATHS]) + const hogQLInsightsRetentionFlagEnabled = + hogQLInsightsFlagEnabled || Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION]) + const hogQLInsightsTrendsFlagEnabled = + hogQLInsightsFlagEnabled || Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS]) + const hogQLInsightsStickinessFlagEnabled = + hogQLInsightsFlagEnabled || Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_STICKINESS]) + const hogQLInsightsFunnelsFlagEnabled = + hogQLInsightsFlagEnabled || Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_FUNNELS]) + const hogQLInsightsLiveCompareEnabled = Boolean(allFlags[FEATURE_FLAGS.HOGQL_INSIGHT_LIVE_COMPARE]) async function fetchLegacyUrl(): Promise> { const response = await api.getResponse(legacyUrl!) 
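The rewritten block above collapses seven per-insight flag reads into one rule: a query kind runs on HogQL when either the HOGQL_INSIGHTS umbrella flag or its kind-specific flag is on. The rule as a pure helper, with flag keys shortened for illustration (the real code reads FEATURE_FLAGS constants):

type FeatureFlagsRecord = Record<string, boolean | string | undefined>

const hogqlEnabled = (flags: FeatureFlagsRecord, specificFlag: string): boolean =>
    Boolean(flags['hogql-insights']) || Boolean(flags[specificFlag])

// With only the umbrella flag set, every insight kind comes out enabled:
const flags: FeatureFlagsRecord = { 'hogql-insights': true }
console.log(hogqlEnabled(flags, 'hogql-insights-retention')) // true
console.log(hogqlEnabled(flags, 'hogql-insights-funnels')) // true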
@@ -220,7 +214,13 @@ export async function query( (hogQLInsightsFunnelsFlagEnabled && isFunnelsQuery(queryNode)) ) { if (hogQLInsightsLiveCompareEnabled) { - const legacyFunction = legacyUrl ? fetchLegacyUrl : fetchLegacyInsights + const legacyFunction = (): any => { + try { + return legacyUrl ? fetchLegacyUrl() : fetchLegacyInsights() + } catch (e) { + console.error('Error fetching legacy insights', e) + } + } let legacyResponse: any ;[response, legacyResponse] = await Promise.all([ executeQuery(queryNode, methodOptions, refresh, queryId), @@ -252,11 +252,17 @@ export async function query( res2 = res2[0]?.people.map((n: any) => n.id) res1 = res1.map((n: any) => n[0].id) // Sort, since the order of the results is not guaranteed + const bv = (v: any): string => + [null, 'null', 'none', '9007199254740990', 9007199254740990].includes(v) + ? '$$_posthog_breakdown_null_$$' + : ['Other', '9007199254740991', 9007199254740991].includes(v) + ? '$$_posthog_breakdown_other_$$' + : String(v) res1.sort((a: any, b: any) => - (a.breakdown_value ?? a.label ?? a).localeCompare(b.breakdown_value ?? b.label ?? b) + bv(a.breakdown_value ?? a.label ?? a).localeCompare(bv(b.breakdown_value ?? b.label ?? b)) ) res2.sort((a: any, b: any) => - (a.breakdown_value ?? a.label ?? a).localeCompare(b.breakdown_value ?? b.label ?? b) + bv(a.breakdown_value ?? a.label ?? a).localeCompare(bv(b.breakdown_value ?? b.label ?? b)) ) } diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 898d4e0974004..eeac4bb951269 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -501,14 +501,14 @@ "ChartDisplayType": { "enum": [ "ActionsLineGraph", - "ActionsLineGraphCumulative", + "ActionsBar", "ActionsAreaGraph", - "ActionsTable", + "ActionsLineGraphCumulative", + "BoldNumber", "ActionsPie", - "ActionsBar", "ActionsBarValue", - "WorldMap", - "BoldNumber" + "ActionsTable", + "WorldMap" ], "type": "string" }, @@ -949,6 +949,10 @@ }, "type": "object" }, + "DatetimeDay": { + "format": "date-time", + "type": "string" + }, "Day": { "type": "integer" }, @@ -2567,7 +2571,7 @@ "type": "string" }, "personsOnEventsMode": { - "enum": ["disabled", "v1_enabled", "v1_mixed", "v2_enabled"], + "enum": ["disabled", "v1_enabled", "v1_mixed", "v2_enabled", "v3_enabled"], "type": "string" } }, @@ -2788,6 +2792,9 @@ { "type": "string" }, + { + "$ref": "#/definitions/DatetimeDay" + }, { "$ref": "#/definitions/Day" } @@ -3453,9 +3460,6 @@ "operator": { "$ref": "#/definitions/PropertyOperator" }, - "table": { - "type": "string" - }, "type": { "const": "person", "description": "Person properties", @@ -3839,6 +3843,9 @@ { "type": "string" }, + { + "$ref": "#/definitions/DatetimeDay" + }, { "$ref": "#/definitions/Day" } diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 7eaa1734ea463..fc45ff6ecadcb 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -4,6 +4,7 @@ import { Breakdown, BreakdownKeyType, BreakdownType, + ChartDisplayCategory, ChartDisplayType, CountPerActorMathType, EventPropertyFilter, @@ -26,6 +27,8 @@ import { TrendsFilterType, } from '~/types' +export { ChartDisplayCategory } + // Type alias for number to be reflected as integer in json-schema. /** @asType integer */ type integer = number @@ -176,7 +179,7 @@ export interface DataNode extends Node { /** HogQL Query Options are automatically set per team. However, they can be overriden in the query. 
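The bv() normaliser in the live-compare branch above exists because legacy insights and HogQL insights encode missing and overflow breakdowns differently, so both result sets are mapped onto the HogQL sentinel strings before the order-insensitive comparison. A standalone sketch using the sentinel values from the diff:

// Map legacy null/'none' encodings and the numeric sentinels onto the HogQL markers
const NULLISH: unknown[] = [null, 'null', 'none', '9007199254740990', 9007199254740990]
const OTHER: unknown[] = ['Other', '9007199254740991', 9007199254740991]

const bv = (v: unknown): string =>
    NULLISH.includes(v)
        ? '$$_posthog_breakdown_null_$$'
        : OTHER.includes(v)
        ? '$$_posthog_breakdown_other_$$'
        : String(v)

// Legacy labels and HogQL sentinels now sort identically:
console.log(['Other', null, 'Chrome'].map(bv).sort())
// => ['$$_posthog_breakdown_null_$$', '$$_posthog_breakdown_other_$$', 'Chrome']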
*/ export interface HogQLQueryModifiers { - personsOnEventsMode?: 'disabled' | 'v1_enabled' | 'v1_mixed' | 'v2_enabled' + personsOnEventsMode?: 'disabled' | 'v1_enabled' | 'v1_mixed' | 'v2_enabled' | 'v3_enabled' personsArgMaxVersion?: 'auto' | 'v1' | 'v2' inCohortVia?: 'auto' | 'leftjoin' | 'subquery' | 'leftjoin_conjoined' materializationMode?: 'auto' | 'legacy_null_as_string' | 'legacy_null_as_null' | 'disabled' @@ -1179,9 +1182,13 @@ export interface FunnelCorrelationQuery { response?: FunnelCorrelationResponse } +/** @format date-time */ +export type DatetimeDay = string + export type BreakdownValueInt = integer export interface InsightActorsQueryOptionsResponse { - day?: { label: string; value: string | Day }[] + // eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents + day?: { label: string; value: string | DatetimeDay | Day }[] status?: { label: string; value: string }[] interval?: { label: string diff --git a/frontend/src/scenes/ResourcePermissionModal.tsx b/frontend/src/scenes/ResourcePermissionModal.tsx index 0735b3d7b5f7d..b7361519f398d 100644 --- a/frontend/src/scenes/ResourcePermissionModal.tsx +++ b/frontend/src/scenes/ResourcePermissionModal.tsx @@ -7,11 +7,7 @@ import { LemonTableColumns } from 'lib/lemon-ui/LemonTable' import { AccessLevel, Resource, RoleType } from '~/types' -import { - FormattedResourceLevel, - permissionsLogic, - ResourcePermissionMapping, -} from './settings/organization/Permissions/permissionsLogic' +import { permissionsLogic } from './settings/organization/Permissions/permissionsLogic' import { rolesLogic } from './settings/organization/Permissions/Roles/rolesLogic' import { urls } from './urls' @@ -88,7 +84,7 @@ export function ResourcePermission({ resourceType, canEdit, }: ResourcePermissionProps): JSX.Element { - const { allPermissions, shouldShowPermissionsTable } = useValues(permissionsLogic) + const { allPermissions } = useValues(permissionsLogic) const { roles: possibleRolesWithAccess } = useValues(rolesLogic) const resourceLevel = allPermissions.find((permission) => permission.resource === resourceType) // TODO: feature_flag_access_level should eventually be generic in this component @@ -112,7 +108,7 @@ export function ResourcePermission({ icon={ } - to={`${urls.settings('organization')}?tab=role_based_access`} + to={`${urls.settings('organization-rbac')}`} targetBlank size="small" noPadding @@ -166,33 +162,7 @@ export function ResourcePermission({ return ( <> - {!shouldShowPermissionsTable && ( - <> - {resourceLevel && } - - - )} - {shouldShowPermissionsTable && } - {!shouldShowPermissionsTable && ( - <> -
        Roles
        - {roles.length > 0 ? ( -
        - {roles.map((role) => { - return ( - deleteAssociatedRole(roleId)} - /> - ) - })} -
        - ) : ( -
        No roles added yet
        - )} - - )} + {canEdit && ( <>
        Custom edit roles
        @@ -217,61 +187,3 @@ export function ResourcePermission({ ) } - -function OrganizationResourcePermissionLabel({ - resourceLevel, -}: { - resourceLevel: FormattedResourceLevel -}): JSX.Element { - return ( - <> - } - to={`${urls.settings('organization')}?tab=role_based_access`} - targetBlank - size="small" - noPadding - className="ml-1" - /> - } - > -
        Organization default
        -
        - {ResourcePermissionMapping[resourceLevel.access_level]} - - ) -} - -function OrganizationResourcePermissionRoles({ roles }: { roles: RoleType[] }): JSX.Element { - return ( - <> -
        Roles with edit access
        -
        - {roles.map((role) => ( - - {role.name}{' '} - - ))} -
        - - ) -} - -function RoleRow({ role, deleteRole }: { role: RoleType; deleteRole?: (roleId: RoleType['id']) => void }): JSX.Element { - return ( -
        - {role.name} - {deleteRole && ( - } - onClick={() => deleteRole(role.id)} - tooltip="Remove role from permission" - tooltipPlacement="bottom-start" - size="small" - /> - )} -
        - ) -} diff --git a/frontend/src/scenes/actions/ActionEdit.tsx b/frontend/src/scenes/actions/ActionEdit.tsx index 9da6187358372..40623d01883fc 100644 --- a/frontend/src/scenes/actions/ActionEdit.tsx +++ b/frontend/src/scenes/actions/ActionEdit.tsx @@ -15,7 +15,6 @@ import { Spinner } from 'lib/lemon-ui/Spinner/Spinner' import { compactNumber, uuid } from 'lib/utils' import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' -import { userLogic } from 'scenes/userLogic' import { tagsModel } from '~/models/tagsModel' import { ActionStepType, AvailableFeature } from '~/types' @@ -32,7 +31,6 @@ export function ActionEdit({ action: loadedAction, id }: ActionEditLogicProps): const { action, actionLoading, actionCount, actionCountLoading } = useValues(logic) const { submitAction, deleteAction } = useActions(logic) const { currentTeam } = useValues(teamLogic) - const { hasAvailableFeature } = useValues(userLogic) const { tags } = useValues(tagsModel) const slackEnabled = currentTeam?.slack_incoming_webhook @@ -96,7 +94,7 @@ export function ActionEdit({ action: loadedAction, id }: ActionEditLogicProps): className="action-description" compactButtons maxLength={600} // No limit on backend model, but enforce shortish description - paywall={!hasAvailableFeature(AvailableFeature.INGESTION_TAXONOMY)} + paywallFeature={AvailableFeature.INGESTION_TAXONOMY} /> )} diff --git a/frontend/src/scenes/authentication/signup/SignupContainer.tsx b/frontend/src/scenes/authentication/signup/SignupContainer.tsx index bee5d9ddeaf3b..285602454fc10 100644 --- a/frontend/src/scenes/authentication/signup/SignupContainer.tsx +++ b/frontend/src/scenes/authentication/signup/SignupContainer.tsx @@ -2,15 +2,21 @@ import { IconCheckCircle } from '@posthog/icons' import { useValues } from 'kea' import { router } from 'kea-router' import { BridgePage } from 'lib/components/BridgePage/BridgePage' +import { CustomerLogo } from 'lib/components/CustomerLogo' import { CLOUD_HOSTNAMES, FEATURE_FLAGS } from 'lib/constants' import { Link } from 'lib/lemon-ui/Link' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { featureFlagLogic, FeatureFlagsSet } from 'lib/logic/featureFlagLogic' +import { ReactNode } from 'react' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { SceneExport } from 'scenes/sceneTypes' import { userLogic } from 'scenes/userLogic' import { Region } from '~/types' +import airbus from '../../../lib/customers/airbus.svg' +import hasura from '../../../lib/customers/hasura.svg' +import staples from '../../../lib/customers/staples.svg' +import yCombinator from '../../../lib/customers/y-combinator.svg' import { SignupForm } from './signupForm/SignupForm' export const scene: SceneExport = { @@ -22,25 +28,21 @@ export function SignupContainer(): JSX.Element | null { const { user } = useValues(userLogic) const footerHighlights = { - cloud: ['Hosted & managed by PostHog', 'Pay per event, cancel anytime', 'Community, Slack & email support'], - selfHosted: [ - 'Fully featured product, unlimited events', - 'Data in your own infrastructure', - 'Community, Slack & email support', - ], + cloud: ['Hosted & managed by PostHog', 'Pay per event, cancel anytime', 'Fast and reliable support'], + selfHosted: ['Fully featured product, unlimited events', 'Data in your own infrastructure', 'Community forum'], } return !user ? ( +
        {footerHighlights[preflight?.cloud ? 'cloud' : 'selfHosted'].map((val, idx) => ( - +

        {val} - +

        ))} - +
        } sideLogo leftContainerContent={} @@ -50,59 +52,90 @@ export function SignupContainer(): JSX.Element | null { ) : null } +type ProductBenefit = { + benefit: string + description: string | ReactNode +} + +const getProductBenefits = (featureFlags: FeatureFlagsSet): ProductBenefit[] => { + const signupBenefitsFlag = featureFlags[FEATURE_FLAGS.SIGNUP_BENEFITS] + switch (signupBenefitsFlag) { + case 'generic-language': + return [ + { + benefit: 'Free usage every month - even on paid plans', + description: '1M free events, 5K free session recordings, and more. Every month. Forever.', + }, + { + benefit: 'Start collecting data immediately', + description: 'Integrate with developer-friendly APIs or a low-code web snippet.', + }, + { + benefit: 'Join industry leaders that run on PostHog', + description: + 'Airbus, Hasura, Y Combinator, Staples, and thousands more trust PostHog as their Product OS.', + }, + ] + case 'logos': + return [ + { + benefit: '1M events free every month', + description: 'Product analytics, feature flags, experiments, and more.', + }, + { + benefit: 'Start collecting events immediately', + description: 'Integrate with developer-friendly APIs or use our easy autocapture script.', + }, + { + benefit: 'Join industry leaders that run on PostHog', + description: ( +
        + {[airbus, hasura, yCombinator, staples].map((company, i) => ( + + + + ))} +
        + ), + }, + ] + default: + return [ + { + benefit: 'Free for 1M events every month', + description: 'Product analytics, feature flags, experiments, and more.', + }, + { + benefit: 'Start collecting events immediately', + description: 'Integrate with developer-friendly APIs or use our easy autocapture script.', + }, + { + benefit: 'Join industry leaders that run on PostHog', + description: + 'Airbus, Hasura, Y Combinator, Staples, and thousands more trust PostHog as their Product OS.', + }, + ] + } +} + export function SignupLeftContainer(): JSX.Element { const { preflight } = useValues(preflightLogic) const { featureFlags } = useValues(featureFlagLogic) - const showGenericSignupBenefits: boolean = featureFlags[FEATURE_FLAGS.GENERIC_SIGNUP_BENEFITS] === 'test' - const getRegionUrl = (region: string): string => { const { pathname, search, hash } = router.values.currentLocation return `https://${CLOUD_HOSTNAMES[region]}${pathname}${search}${hash}` } - const productBenefits: { - benefit: string - description: string - }[] = showGenericSignupBenefits - ? [ - { - benefit: 'Free usage every month - even on paid plans', - description: '1M free events, 5K free session recordings, and more. Every month. Forever.', - }, - { - benefit: 'Start collecting data immediately', - description: 'Integrate with developer-friendly APIs or low-code web snippet.', - }, - { - benefit: 'Join industry leaders that run on PostHog', - description: - 'ClickHouse, Airbus, Hasura, Y Combinator, and thousands more trust PostHog as their Product OS.', - }, - ] - : [ - { - benefit: 'Free for 1M events every month', - description: 'Product analytics, feature flags, experiments, and more.', - }, - { - benefit: 'Start collecting events immediately', - description: 'Integrate with developer-friendly APIs or use our easy autocapture script.', - }, - { - benefit: 'Join industry leaders that run on PostHog', - description: - 'ClickHouse, Airbus, Hasura, Y Combinator, and thousands more trust PostHog as their Product OS.', - }, - ] + const productBenefits = getProductBenefits(featureFlags) return ( <>
        {productBenefits.map((benefit, i) => ( -
        +
        - +

        {benefit.benefit}

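The SignupContainer change above moves the benefit copy into `getProductBenefits`, keyed off the `SIGNUP_BENEFITS` multivariate flag instead of the removed boolean `GENERIC_SIGNUP_BENEFITS === 'test'` check. A trimmed-down sketch of that switch, with stand-in types and shortened strings:

```ts
// Multivariate-flag content switch: the flag's variant value (not just
// on/off) selects the copy, and the default branch keeps the pre-experiment
// copy for control and unflagged users.
type Benefit = { benefit: string; description: string }

function benefitsForVariant(variant: string | boolean | undefined): Benefit[] {
    switch (variant) {
        case 'generic-language':
            return [{ benefit: 'Free usage every month', description: '1M free events. Forever.' }]
        case 'logos':
            return [{ benefit: '1M events free every month', description: 'Shown alongside customer logos.' }]
        default:
            return [{ benefit: 'Free for 1M events every month', description: 'The original copy.' }]
    }
}

console.log(benefitsForVariant(undefined)[0].benefit) // falls back to the original copy
```

Putting the control copy in the `default` branch means users outside the experiment, or with an unexpected variant value, fail safe to the existing experience.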
        diff --git a/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx b/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx index 1952c156a8fa2..a79bac3b9b0df 100644 --- a/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx +++ b/frontend/src/scenes/authentication/signup/signupForm/panels/SignupPanel2.tsx @@ -17,10 +17,10 @@ export function SignupPanel2(): JSX.Element | null { return (
        - + diff --git a/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts b/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts index 803f47ecad608..df031fe4f4ec6 100644 --- a/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts +++ b/frontend/src/scenes/authentication/signup/signupForm/signupLogic.ts @@ -6,6 +6,7 @@ import { urlToAction } from 'kea-router' import api from 'lib/api' import { CLOUD_HOSTNAMES, FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import posthog from 'posthog-js' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { urls } from 'scenes/urls' @@ -22,7 +23,7 @@ export interface AccountResponse { export interface SignupForm { email: string password: string - first_name: string + name: string organization_name: string role_at_organization: string referral_source: string @@ -78,19 +79,27 @@ export const signupLogic = kea([ alwaysShowErrors: true, showErrorsOnTouch: true, defaults: { - first_name: '', + name: '', organization_name: '', role_at_organization: '', referral_source: '', } as SignupForm, - errors: ({ first_name, organization_name }) => ({ - first_name: !first_name ? 'Please enter your name' : undefined, - organization_name: !organization_name ? 'Please enter your organization name' : undefined, + errors: ({ name }) => ({ + name: !name ? 'Please enter your name' : undefined, }), submit: async (payload, breakpoint) => { breakpoint() try { - const res = await api.create('api/signup/', { ...values.signupPanel1, ...payload }) + const res = await api.create('api/signup/', { + ...values.signupPanel1, + ...payload, + first_name: payload.name.split(' ')[0], + last_name: payload.name.split(' ')[1] || undefined, + organization_name: payload.organization_name || undefined, + }) + if (!payload.organization_name) { + posthog.capture('sign up organization name not provided') + } location.href = res.redirect_url || '/' } catch (e) { actions.setSignupPanel2ManualErrors({ @@ -142,7 +151,7 @@ export const signupLogic = kea([ email, }) actions.setSignupPanel2Values({ - first_name: 'X', + name: 'X', organization_name: 'Y', }) actions.submitSignupPanel2() diff --git a/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx b/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx index 7fbbc8cc29d69..a2a9f9968f82c 100644 --- a/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx +++ b/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx @@ -242,6 +242,15 @@ export function BatchExportsEditFields({ ]} /> + + + +
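Above, signupLogic collapses `first_name` into a single `name` field and derives the API's `first_name`/`last_name` at submit time; the same hunk makes `organization_name` optional and captures a `sign up organization name not provided` event when it is blank. A standalone sketch of the derivation:

```ts
// Sketch of the name handling added in signupLogic above: the single `name`
// field is split on spaces, and only the second token survives as last_name
// ("Mary Jane Watson" would submit last_name "Jane" and drop "Watson" --
// the hunk uses split(' ')[1], not a join of the remaining tokens).
function deriveNameFields(name: string): { first_name: string; last_name?: string } {
    const parts = name.split(' ')
    return {
        first_name: parts[0],
        last_name: parts[1] || undefined,
    }
}

console.log(deriveNameFields('Ada Lovelace')) // { first_name: 'Ada', last_name: 'Lovelace' }
console.log(deriveNameFields('Plato')) // { first_name: 'Plato', last_name: undefined }
```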
        @@ -375,17 +384,21 @@ export function BatchExportsEditFields({ - - Does your Postgres instance have a self-signed SSL certificate? - - - - - } - /> + {({ value, onChange }) => ( + + Does your Postgres instance have a self-signed SSL certificate? + + + + + } + checked={!!value} + onChange={onChange} + /> + )} diff --git a/frontend/src/scenes/batch_exports/BatchExports.stories.tsx b/frontend/src/scenes/batch_exports/BatchExports.stories.tsx index 0dd616c44982a..dbd6779cb208d 100644 --- a/frontend/src/scenes/batch_exports/BatchExports.stories.tsx +++ b/frontend/src/scenes/batch_exports/BatchExports.stories.tsx @@ -42,6 +42,7 @@ export default { include_events: [], encryption: null, kms_key_id: null, + file_format: 'JSONLines', }, }, start_at: null, diff --git a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts index bc86d1618fe4f..30c123256d81a 100644 --- a/frontend/src/scenes/batch_exports/batchExportEditLogic.ts +++ b/frontend/src/scenes/batch_exports/batchExportEditLogic.ts @@ -90,6 +90,7 @@ export const batchExportFormFields = ( aws_secret_access_key: isNew ? (!config.aws_secret_access_key ? 'This field is required' : '') : '', compression: '', encryption: '', + file_format: isNew ? (!config.file_format ? 'This field is required' : '') : '', kms_key_id: !config.kms_key_id && config.encryption == 'aws:kms' ? 'This field is required' : '', exclude_events: '', include_events: '', diff --git a/frontend/src/scenes/dashboard/DashboardHeader.tsx b/frontend/src/scenes/dashboard/DashboardHeader.tsx index 231b3516f4674..4c15d085d0317 100644 --- a/frontend/src/scenes/dashboard/DashboardHeader.tsx +++ b/frontend/src/scenes/dashboard/DashboardHeader.tsx @@ -308,7 +308,11 @@ export function DashboardHeader(): JSX.Element | null { multiline name="description" markdown - value={dashboard.description || ''} + value={ + (hasAvailableFeature(AvailableFeature.TEAM_COLLABORATION) && + dashboard.description) || + '' + } placeholder="Description (optional)" onSave={(value) => updateDashboard({ id: dashboard.id, description: value, allowUndo: true }) @@ -316,7 +320,7 @@ export function DashboardHeader(): JSX.Element | null { saveOnBlur={true} compactButtons mode={!canEditDashboard ? 
'view' : undefined} - paywall={!hasAvailableFeature(AvailableFeature.TEAM_COLLABORATION)} + paywallFeature={AvailableFeature.TEAM_COLLABORATION} /> )} {dashboard?.tags && ( diff --git a/frontend/src/scenes/data-management/definition/DefinitionView.tsx b/frontend/src/scenes/data-management/definition/DefinitionView.tsx index 93be7c2e0910c..f307d61f7d13a 100644 --- a/frontend/src/scenes/data-management/definition/DefinitionView.tsx +++ b/frontend/src/scenes/data-management/definition/DefinitionView.tsx @@ -17,7 +17,6 @@ import { definitionLogic, DefinitionLogicProps } from 'scenes/data-management/de import { EventDefinitionProperties } from 'scenes/data-management/events/EventDefinitionProperties' import { SceneExport } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' -import { userLogic } from 'scenes/userLogic' import { defaultDataTableColumns } from '~/queries/nodes/DataTable/utils' import { Query } from '~/queries/Query/Query' @@ -37,7 +36,6 @@ export function DefinitionView(props: DefinitionLogicProps = {}): JSX.Element { const { definition, definitionLoading, definitionMissing, hasTaxonomyFeatures, singular, isEvent, isProperty } = useValues(logic) const { deleteDefinition } = useActions(logic) - const { hasAvailableFeature } = useValues(userLogic) if (definitionLoading) { return @@ -146,7 +144,7 @@ export function DefinitionView(props: DefinitionLogicProps = {}): JSX.Element { className="definition-description" compactButtons maxLength={600} - paywall={!hasAvailableFeature(AvailableFeature.INGESTION_TAXONOMY)} + paywallFeature={AvailableFeature.INGESTION_TAXONOMY} /> ) } - if (definition.table) { - return ( - - - - ) - } - return ( diff --git a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx index 2116d2da6e74d..11f50ca4f0d27 100644 --- a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx +++ b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx @@ -1,6 +1,6 @@ import './ViewLinkModal.scss' -import { IconTrash } from '@posthog/icons' +import { IconCollapse, IconExpand, IconTrash } from '@posthog/icons' import { LemonButton, LemonDivider, @@ -35,7 +35,7 @@ export function ViewLinkModal(): JSX.Element { } isOpen={isJoinTableModalOpen} onClose={toggleJoinTableModal} - width={600} + width={700} > @@ -57,6 +57,7 @@ export function ViewLinkForm(): JSX.Element { selectedJoiningKey, sourceIsUsingHogQLExpression, joiningIsUsingHogQLExpression, + isViewLinkSubmitting, } = useValues(viewLinkLogic) const { selectJoiningTable, @@ -66,12 +67,13 @@ export function ViewLinkForm(): JSX.Element { selectSourceKey, selectJoiningKey, } = useActions(viewLinkLogic) + const [advancedSettingsExpanded, setAdvancedSettingsExpanded] = useState(false) return (
        -
        +
        Source Table {isNewJoin ? ( @@ -86,7 +88,7 @@ export function ViewLinkForm(): JSX.Element { selectedSourceTableName ?? '' )}
        -
        +
        Joining Table
        -
        -
        +
        +
        Source Table Key <> @@ -124,7 +126,7 @@ export function ViewLinkForm(): JSX.Element {
        -
        +
        Joining Table Key <> @@ -148,8 +150,22 @@ export function ViewLinkForm(): JSX.Element {
        {sqlCodeSnippet && ( - <> +
        + setAdvancedSettingsExpanded(!advancedSettingsExpanded)} + sideIcon={advancedSettingsExpanded ? : } + > +
        +

        Advanced settings

        +
        Customize how the fields are accessed
        +
        +
        +
        + )} + {sqlCodeSnippet && advancedSettingsExpanded && ( + <>
        Field Name @@ -185,7 +201,7 @@ export function ViewLinkForm(): JSX.Element { Close - + Save
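The data warehouse changes below replace the flat `externalTables` list with an `externalTablesBySourceType` grouping, so the table tree can nest each table under its source (Stripe, Hubspot, Zendesk). A simplified sketch of that bucketing, with stand-in types rather than the real DataWarehouse types:

```ts
// Bucket tables by source type; tables without an external source are
// skipped, mirroring the selector's external_data_source guard below.
interface ExternalTable {
    name: string
    sourceType?: string
}

function groupBySourceType(tables: ExternalTable[]): Record<string, ExternalTable[]> {
    return tables.reduce((acc: Record<string, ExternalTable[]>, table) => {
        if (table.sourceType) {
            if (!acc[table.sourceType]) {
                acc[table.sourceType] = []
            }
            acc[table.sourceType].push(table)
        }
        return acc
    }, {})
}

console.log(
    groupBySourceType([
        { name: 'charges', sourceType: 'Stripe' },
        { name: 'tickets', sourceType: 'Zendesk' },
        { name: 'events' }, // no external source -> omitted from the tree
    ])
)
```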
        diff --git a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx index 1c72b31bd011f..26c90d15f1600 100644 --- a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx +++ b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx @@ -5,6 +5,7 @@ import { DatabaseTableTree, TreeItem } from 'lib/components/DatabaseTableTree/Da import { EmptyMessage } from 'lib/components/EmptyMessage/EmptyMessage' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { humanFriendlyDetailedTime } from 'lib/utils' import { DatabaseTable } from 'scenes/data-management/database/DatabaseTable' import { urls } from 'scenes/urls' @@ -17,8 +18,16 @@ import { dataWarehouseSceneLogic } from './dataWarehouseSceneLogic' import SourceModal from './SourceModal' export const DataWarehouseTables = (): JSX.Element => { - const { isSourceModalOpen, externalTables, posthogTables, savedQueriesFormatted, allTables, selectedRow } = - useValues(dataWarehouseSceneLogic) + const { + isSourceModalOpen, + externalTablesBySourceType, + dataWarehouseLoading, + posthogTables, + savedQueriesFormatted, + allTables, + selectedRow, + dataWarehouseSavedQueriesLoading, + } = useValues(dataWarehouseSceneLogic) const { toggleSourceModal, selectRow, deleteDataWarehouseSavedQuery, deleteDataWarehouseTable } = useActions(dataWarehouseSceneLogic) const { featureFlags } = useValues(featureFlagLogic) @@ -63,12 +72,15 @@ export const DataWarehouseTables = (): JSX.Element => { } const treeItems = (): TreeItem[] => { - const items = [ + const items: TreeItem[] = [ { name: 'External', - items: externalTables.map((table) => ({ - table: table, - icon: , + items: Object.keys(externalTablesBySourceType).map((source_type) => ({ + name: source_type, + items: externalTablesBySourceType[source_type].map((table) => ({ + table: table, + icon: , + })), })), emptyLabel: ( @@ -82,6 +94,7 @@ export const DataWarehouseTables = (): JSX.Element => { ), + isLoading: dataWarehouseLoading, }, { name: 'PostHog', @@ -99,6 +112,8 @@ export const DataWarehouseTables = (): JSX.Element => { table: table, icon: , })), + emptyLabel: No views found, + isLoading: dataWarehouseSavedQueriesLoading, }) } @@ -108,7 +123,7 @@ export const DataWarehouseTables = (): JSX.Element => { return ( <>
        -
        +
        {selectedRow ? ( @@ -149,6 +164,19 @@ export const DataWarehouseTables = (): JSX.Element => {
        {selectedRow.type == DataWarehouseRowType.ExternalTable && (
        + <> + Last Synced At + + {selectedRow.payload.external_schema?.last_synced_at + ? humanFriendlyDetailedTime( + selectedRow.payload.external_schema?.last_synced_at, + 'MMMM DD, YYYY', + 'h:mm A' + ) + : 'Not yet synced'} + + + <> Files URL pattern {selectedRow.payload.url_pattern} diff --git a/frontend/src/scenes/data-warehouse/external/SourceModal.tsx b/frontend/src/scenes/data-warehouse/external/SourceModal.tsx index ae546ea9eae21..204b0ef71982e 100644 --- a/frontend/src/scenes/data-warehouse/external/SourceModal.tsx +++ b/frontend/src/scenes/data-warehouse/external/SourceModal.tsx @@ -5,6 +5,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import hubspotLogo from 'public/hubspot-logo.svg' import postgresLogo from 'public/postgres-logo.svg' import stripeLogo from 'public/stripe-logo.svg' +import zendeskLogo from 'public/zendesk-logo.png' import { DatawarehouseTableForm } from '../new_table/DataWarehouseTableForm' import PostgresSchemaForm from './forms/PostgresSchemaForm' @@ -103,6 +104,13 @@ function FirstStep(): JSX.Element { ) } + if (config.name === 'Zendesk') { + return ( + + Zendesk logo + + ) + } return <> } diff --git a/frontend/src/scenes/data-warehouse/external/dataWarehouseJoinsLogic.ts b/frontend/src/scenes/data-warehouse/external/dataWarehouseJoinsLogic.ts index 6c6a3af715664..b5f493b2d7f17 100644 --- a/frontend/src/scenes/data-warehouse/external/dataWarehouseJoinsLogic.ts +++ b/frontend/src/scenes/data-warehouse/external/dataWarehouseJoinsLogic.ts @@ -1,19 +1,13 @@ -import { afterMount, connect, kea, path, selectors } from 'kea' +import { afterMount, kea, path } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' -import { capitalizeFirstLetter } from 'lib/utils' -import { DatabaseSchemaQueryResponseField } from '~/queries/schema' -import { DataWarehouseViewLink, PropertyDefinition, PropertyType } from '~/types' +import { DataWarehouseViewLink } from '~/types' import type { dataWarehouseJoinsLogicType } from './dataWarehouseJoinsLogicType' -import { dataWarehouseSceneLogic } from './dataWarehouseSceneLogic' export const dataWarehouseJoinsLogic = kea([ path(['scenes', 'data-warehouse', 'external', 'dataWarehouseJoinsLogic']), - connect(() => ({ - values: [dataWarehouseSceneLogic, ['externalTablesMap']], - })), loaders({ joins: [ [] as DataWarehouseViewLink[], @@ -25,40 +19,6 @@ export const dataWarehouseJoinsLogic = kea([ }, ], }), - selectors({ - personTableJoins: [(s) => [s.joins], (joins) => joins.filter((join) => join.source_table_name === 'persons')], - tablesJoinedToPersons: [ - (s) => [s.externalTablesMap, s.personTableJoins], - (externalTablesMap, personTableJoins) => { - return personTableJoins.map((join: DataWarehouseViewLink) => { - // valid join should have a joining table name - const table = externalTablesMap[join.joining_table_name as string] - return { - table, - join, - } - }) - }, - ], - columnsJoinedToPersons: [ - (s) => [s.tablesJoinedToPersons], - (tablesJoinedToPersons) => { - return tablesJoinedToPersons.reduce((acc, { table, join }) => { - if (table) { - acc.push( - ...table.columns.map((column: DatabaseSchemaQueryResponseField) => ({ - id: column.key, - name: join.field_name + ': ' + column.key, - table: join.field_name, - property_type: capitalizeFirstLetter(column.type) as PropertyType, - })) - ) - } - return acc - }, [] as PropertyDefinition[]) - }, - ], - }), afterMount(({ actions }) => { actions.loadJoins() }), diff --git a/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts 
b/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts index db722a37777a1..b95ae5d8755b7 100644 --- a/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts +++ b/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.ts @@ -8,7 +8,13 @@ import { userLogic } from 'scenes/userLogic' import { DataWarehouseTable } from '~/types' import { dataWarehouseSavedQueriesLogic } from '../saved_queries/dataWarehouseSavedQueriesLogic' -import { DatabaseTableListRow, DataWarehouseRowType, DataWarehouseSceneTab, DataWarehouseTableType } from '../types' +import { + DatabaseTableListRow, + DataWarehouseExternalTableType, + DataWarehouseRowType, + DataWarehouseSceneTab, + DataWarehouseTableType, +} from '../types' import type { dataWarehouseSceneLogicType } from './dataWarehouseSceneLogicType' export const dataWarehouseSceneLogic = kea([ @@ -18,9 +24,9 @@ export const dataWarehouseSceneLogic = kea([ userLogic, ['user'], databaseTableListLogic, - ['filteredTables', 'dataWarehouse'], + ['filteredTables', 'dataWarehouse', 'dataWarehouseLoading'], dataWarehouseSavedQueriesLogic, - ['savedQueries'], + ['savedQueries', 'dataWarehouseSavedQueriesLoading'], featureFlagLogic, ['featureFlags'], ], @@ -132,6 +138,21 @@ export const dataWarehouseSceneLogic = kea([ return [...externalTables, ...posthogTables, ...savedQueriesFormatted] }, ], + externalTablesBySourceType: [ + (s) => [s.externalTables], + (externalTables): Record => { + return externalTables.reduce((acc: Record, table) => { + table = table as DataWarehouseExternalTableType + if (table.payload.external_data_source) { + if (!acc[table.payload.external_data_source.source_type]) { + acc[table.payload.external_data_source.source_type] = [] + } + acc[table.payload.external_data_source.source_type].push(table) + } + return acc + }, {}) + }, + ], }), listeners(({ actions }) => ({ deleteDataWarehouseSavedQuery: async (view) => { diff --git a/frontend/src/scenes/data-warehouse/external/sourceModalLogic.tsx b/frontend/src/scenes/data-warehouse/external/sourceModalLogic.tsx index d710ed397811e..38f7c9b543938 100644 --- a/frontend/src/scenes/data-warehouse/external/sourceModalLogic.tsx +++ b/frontend/src/scenes/data-warehouse/external/sourceModalLogic.tsx @@ -119,6 +119,38 @@ export const SOURCE_DETAILS: Record = { }, ], }, + Zendesk: { + name: 'Zendesk', + caption: ( + <> + Enter your Zendesk API key to automatically pull your Zendesk support data into the PostHog Data + warehouse. 
+ + ), + fields: [ + { + name: 'subdomain', + label: 'Zendesk Subdomain', + type: 'text', + required: true, + placeholder: '', + }, + { + name: 'api_key', + label: 'API Key', + type: 'text', + required: true, + placeholder: '', + }, + { + name: 'email_address', + label: 'Zendesk Email Address', + type: 'text', + required: true, + placeholder: '', + }, + ], + }, } export const sourceModalLogic = kea([ diff --git a/frontend/src/scenes/data-warehouse/types.ts b/frontend/src/scenes/data-warehouse/types.ts index 6f5f375d5ee2c..2b1bb3edef117 100644 --- a/frontend/src/scenes/data-warehouse/types.ts +++ b/frontend/src/scenes/data-warehouse/types.ts @@ -42,7 +42,7 @@ export interface DataWarehousePostHogTableType extends DataWarehouseTableBaseTyp payload: DatabaseTableListRow } -export interface DataWarehouseExternalTablType extends DataWarehouseTableBaseType { +export interface DataWarehouseExternalTableType extends DataWarehouseTableBaseType { type: DataWarehouseRowType.ExternalTable payload: DataWarehouseTable } @@ -54,7 +54,7 @@ export interface DataWarehouseViewType extends DataWarehouseTableBaseType { export type DataWarehouseTableType = | DataWarehousePostHogTableType - | DataWarehouseExternalTablType + | DataWarehouseExternalTableType | DataWarehouseViewType export enum DataWarehouseSceneTab { diff --git a/frontend/src/scenes/debug/HogQLDebug.tsx b/frontend/src/scenes/debug/HogQLDebug.tsx index 5defca51309b1..e2c4a08341d13 100644 --- a/frontend/src/scenes/debug/HogQLDebug.tsx +++ b/frontend/src/scenes/debug/HogQLDebug.tsx @@ -71,6 +71,7 @@ export function HogQLDebug({ query, setQuery, queryKey }: HogQLDebugProps): JSX. { value: 'v1_enabled', label: 'V1 Enabled' }, { value: 'v1_mixed', label: 'V1 Mixed' }, { value: 'v2_enabled', label: 'V2 Enabled' }, + { value: 'v3_enabled', label: 'V3 Enabled (Join)' }, ]} onChange={(value) => setQuery({ diff --git a/frontend/src/scenes/events/__mocks__/eventsQuery.json b/frontend/src/scenes/events/__mocks__/eventsQuery.json index 2ec1ff533774c..a55c2a0d29a14 100644 --- a/frontend/src/scenes/events/__mocks__/eventsQuery.json +++ b/frontend/src/scenes/events/__mocks__/eventsQuery.json @@ -150,7 +150,6 @@ "query_running_time", "recording-debugging", "sampling", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -189,7 +188,6 @@ "$feature/recording-debugging": true, "$feature/sampling": true, "$feature/recordings-v2-recorder": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -362,7 +360,6 @@ "query_running_time", "recording-debugging", "sampling", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -401,7 +398,6 @@ "$feature/recording-debugging": true, "$feature/sampling": true, "$feature/recordings-v2-recorder": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -574,7 +570,6 @@ "query_running_time", "recording-debugging", "sampling", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -613,7 +608,6 @@ "$feature/recording-debugging": true, "$feature/sampling": true, "$feature/recordings-v2-recorder": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -786,7 +780,6 @@ "query_running_time", "recording-debugging", "sampling", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -825,7 +818,6 @@ "$feature/recording-debugging": true, "$feature/sampling": true, 
"$feature/recordings-v2-recorder": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, @@ -1012,7 +1004,6 @@ "query_running_time", "recording-debugging", "sampling", - "enable-prompts", "feedback-scene", "hogql", "notebooks", @@ -1051,7 +1042,6 @@ "$feature/recording-debugging": true, "$feature/sampling": true, "$feature/recordings-v2-recorder": true, - "$feature/enable-prompts": true, "$feature/feedback-scene": true, "$feature/hogql": true, "$feature/notebooks": true, diff --git a/frontend/src/scenes/experiments/Experiment.scss b/frontend/src/scenes/experiments/Experiment.scss index e56c2b26b11d5..8d0d2c667d705 100644 --- a/frontend/src/scenes/experiments/Experiment.scss +++ b/frontend/src/scenes/experiments/Experiment.scss @@ -156,17 +156,6 @@ } } -.preview-conversion-goal-num { - flex-shrink: 0; - width: 24px; - height: 24px; - margin-right: 0.5rem; - font-weight: 700; - color: var(--primary-alt); - text-align: center; - background-color: var(--side); -} - .experiment-preview-row { padding-bottom: 1rem; margin-bottom: 1rem; @@ -193,3 +182,9 @@ text-transform: uppercase; letter-spacing: 0.5px; } + +.experiment-view { + .InsightViz .LemonTable__cell--sticky::before { + background: var(--bg-table); + } +} diff --git a/frontend/src/scenes/experiments/Experiment.tsx b/frontend/src/scenes/experiments/Experiment.tsx index 40dc9632a6180..4a01bcb120b33 100644 --- a/frontend/src/scenes/experiments/Experiment.tsx +++ b/frontend/src/scenes/experiments/Experiment.tsx @@ -840,7 +840,13 @@ export function Experiment(): JSX.Element { ) } -const ResetButton = ({ experiment, onConfirm }: { experiment: ExperimentType; onConfirm: () => void }): JSX.Element => { +export const ResetButton = ({ + experiment, + onConfirm, +}: { + experiment: ExperimentType + onConfirm: () => void +}): JSX.Element => { const onClickReset = (): void => { LemonDialog.open({ title: 'Reset this experiment?', diff --git a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx index b5d0f76e29e16..f4513affb6556 100644 --- a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx +++ b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx @@ -40,7 +40,7 @@ if (experimentFlagValue === '${variant}' ) { export function JSSnippet({ flagKey, variant }: SnippetProps): JSX.Element { return ( - <> +
        {`if (posthog.getFeatureFlag('${flagKey}') === '${variant}') { // Do something differently for this user @@ -49,11 +49,13 @@ export function JSSnippet({ flagKey, variant }: SnippetProps): JSX.Element { // so if something goes wrong with flag evaluation, you don't break your app. }`} - Test that it works +
        + Test that it works +
        {`posthog.featureFlags.override({'${flagKey}': '${variant}'})`} - +
        ) } diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx new file mode 100644 index 0000000000000..5a95c20edd972 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -0,0 +1,306 @@ +import './Experiment.scss' + +import { IconPlusSmall, IconTrash } from '@posthog/icons' +import { LemonDivider, LemonInput, LemonTextArea, Tooltip } from '@posthog/lemon-ui' +import { BindLogic, useActions, useValues } from 'kea' +import { Form, Group } from 'kea-forms' +import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' +import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' +import { IconChevronLeft } from 'lib/lemon-ui/icons' +import { LemonButton } from 'lib/lemon-ui/LemonButton' +import { LemonField } from 'lib/lemon-ui/LemonField' +import { LemonRadio } from 'lib/lemon-ui/LemonRadio' +import { capitalizeFirstLetter } from 'lib/utils' +import { useEffect } from 'react' +import { insightDataLogic } from 'scenes/insights/insightDataLogic' +import { insightLogic } from 'scenes/insights/insightLogic' + +import { Query } from '~/queries/Query/Query' +import { InsightType } from '~/types' + +import { EXPERIMENT_INSIGHT_ID } from './constants' +import { experimentLogic } from './experimentLogic' +import { ExperimentInsightCreator } from './MetricSelector' + +const StepInfo = (): JSX.Element => { + const { experiment } = useValues(experimentLogic) + const { addExperimentGroup, removeExperimentGroup, moveToNextFormStep } = useActions(experimentLogic) + + return ( +
        +
        +
        + + + + + + + + + +
        +
        +

        Variants

        +
        Add up to 9 variants to test against your control.
        + +
        +
        +

        Control

        +
        + + + + + + +
        +
        + Included automatically, cannot be edited or removed +
        +
        +
        +

        Test(s)

        + {experiment.parameters.feature_flag_variants?.map((_, index) => { + if (index === 0) { + return null + } + + return ( + +
        1 && 'mt-2'}`} + > + + + + +
        + {index !== 1 && ( + + } + onClick={() => removeExperimentGroup(index)} + /> + + )} +
        +
        +
        + ) + })} +
        + Alphanumeric, hyphens and underscores only +
        + {(experiment.parameters.feature_flag_variants.length ?? 0) < MAX_EXPERIMENT_VARIANTS && ( + addExperimentGroup()} + icon={} + data-attr="add-test-variant" + > + Add test variant + + )} +
        +
        +
        +
        + moveToNextFormStep()}> + Continue + +
        + ) +} + +const StepGoal = (): JSX.Element => { + const { experiment, exposureAndSampleSize, experimentInsightType, groupTypes, aggregationLabel } = + useValues(experimentLogic) + const { setExperiment, setNewExperimentInsight, createExperiment } = useActions(experimentLogic) + + // insightLogic + const logic = insightLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }) + const { insightProps } = useValues(logic) + + // insightDataLogic + const { query } = useValues(insightDataLogic(insightProps)) + + return ( +
        +
        + {groupTypes.size > 0 && ( +
        +

        Participant type

        +
+ This sets the default aggregation type for all metrics and feature flags. You can change this at + any time by updating the metric or feature flag.
        + + { + const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined + + setExperiment({ + parameters: { + ...experiment.parameters, + aggregation_group_type_index: groupTypeIndex ?? undefined, + }, + }) + setNewExperimentInsight() + }} + options={[ + { value: -1, label: 'Persons' }, + ...Array.from(groupTypes.values()).map((groupType) => ({ + value: groupType.group_type_index, + label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), + })), + ]} + /> +
        + )} +
        +

        Goal type

        + + { + val && + setNewExperimentInsight({ + insight: val, + properties: experiment?.filters?.properties, + }) + }} + options={[ + { + value: InsightType.FUNNELS, + label: ( +
        +
        Conversion funnel
        +
        + Track how many people complete a sequence of actions and/or events +
        +
        + ), + }, + { + value: InsightType.TRENDS, + label: ( +
        +
        Trend
        +
        + Track a cumulative total count of a specific event or action +
        +
        + ), + }, + ]} + /> +
        +
        +

        Goal criteria

        +
        + {experimentInsightType === InsightType.FUNNELS + ? "Create the funnel where you'd like to see an increased conversion rate." + : 'Create a trend goal to track change in a single metric.'} +
        + +
        + +
        +
        +
        +

        Goal preview

        +
        + + + +
        +
        +
        + { + const { exposure, sampleSize } = exposureAndSampleSize + createExperiment(true, exposure, sampleSize) + }} + > + Save as draft + +
        + ) +} + +export function ExperimentForm(): JSX.Element { + const { currentFormStep, props } = useValues(experimentLogic) + const { setCurrentFormStep } = useActions(experimentLogic) + + const stepComponents = { + 0: , + 1: , + } + const CurrentStepComponent = (currentFormStep && stepComponents[currentFormStep]) || + + useEffect(() => { + setCurrentFormStep(0) + }, []) + + return ( +
        + {currentFormStep > 0 && ( + } + type="secondary" + className="my-4" + onClick={() => { + setCurrentFormStep(currentFormStep - 1) + }} + > + Back + + )} + + {CurrentStepComponent} + +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentImplementationDetails.tsx b/frontend/src/scenes/experiments/ExperimentImplementationDetails.tsx index 8880b55f7eabc..8a836986ed37a 100644 --- a/frontend/src/scenes/experiments/ExperimentImplementationDetails.tsx +++ b/frontend/src/scenes/experiments/ExperimentImplementationDetails.tsx @@ -109,35 +109,49 @@ export function ExperimentImplementationDetails({ experiment }: ExperimentImplem } return ( -
        -
        Feature flag usage and implementation
        -
        -
        -
        - Variant group - ({ - value: variant.key, - label: variant.key, - }) - )} - /> +
        +

        Implementation

        +
        +
        +
        +
        + Variant group + ({ + value: variant.key, + label: variant.key, + }) + )} + /> +
        +
        + +
        - +
        + Implement your experiment in code +
        +
        + +
        + + + See the docs for more implementation information. +
        - Implement your experiment in code - - - - See the docs for more implementation information. -
        ) diff --git a/frontend/src/scenes/experiments/ExperimentNext.tsx b/frontend/src/scenes/experiments/ExperimentNext.tsx index e1891241ac816..01557833d7f80 100644 --- a/frontend/src/scenes/experiments/ExperimentNext.tsx +++ b/frontend/src/scenes/experiments/ExperimentNext.tsx @@ -1,330 +1,71 @@ import './Experiment.scss' -import { IconPlusSmall, IconTrash } from '@posthog/icons' -import { LemonDivider, LemonInput, LemonTextArea, Tooltip } from '@posthog/lemon-ui' -import { BindLogic, useActions, useValues } from 'kea' -import { Form, Group } from 'kea-forms' -import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' -import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' -import { IconChevronRight } from 'lib/lemon-ui/icons' -import { LemonButton } from 'lib/lemon-ui/LemonButton' -import { LemonField } from 'lib/lemon-ui/LemonField' -import { LemonRadio } from 'lib/lemon-ui/LemonRadio' -import { capitalizeFirstLetter } from 'lib/utils' -import React from 'react' -import { insightDataLogic } from 'scenes/insights/insightDataLogic' -import { insightLogic } from 'scenes/insights/insightLogic' +import { useActions, useValues } from 'kea' -import { Query } from '~/queries/Query/Query' -import { InsightType } from '~/types' - -import { EXPERIMENT_INSIGHT_ID } from './constants' +import { ExperimentForm } from './ExperimentForm' +import { ExperimentImplementationDetails } from './ExperimentImplementationDetails' import { experimentLogic } from './experimentLogic' -import { ExperimentInsightCreator } from './MetricSelector' - -const Header = (): JSX.Element => { - const { currentFormStep } = useValues(experimentLogic) - - const steps = ['Info', 'Goal'] - - return ( -
        -
        -

        New experiment

        -
        Measure the impact of changes against the baseline.
        -
        -
        -
        - {steps.map((step, index) => ( - - {index > 0 && } -
        - {step} -
        -
        - ))} -
        -
        -
        - ) -} - -const StepInfo = (): JSX.Element => { - const { experiment } = useValues(experimentLogic) - const { addExperimentGroup, removeExperimentGroup, moveToNextFormStep } = useActions(experimentLogic) - - return ( -
        -
        -
        - - - - - - - - - -
        -
        -

        Variants

        -
        Add up to 9 variants to test against your control.
        - -
        -
        -

        Control

        -
        - - - - - - -
        -
        - Included automatically, cannot be edited or removed -
        -
        -
        -

        Test(s)

        - {experiment.parameters.feature_flag_variants?.map((_, index) => { - if (index === 0) { - return null - } - - return ( - -
        1 && 'mt-2'}`} - > - - - - -
        - {index !== 1 && ( - - } - onClick={() => removeExperimentGroup(index)} - /> - - )} -
        -
        -
        - ) - })} -
        - Alphanumeric, hyphens and underscores only -
        - {(experiment.parameters.feature_flag_variants.length ?? 0) < MAX_EXPERIMENT_VARIANTS && ( - addExperimentGroup()} - icon={} - data-attr="add-test-variant" - > - Add test variant - - )} -
        -
        -
        -
        -
        - - moveToNextFormStep()}> - Continue - -
        -
        - ) -} - -const StepGoal = (): JSX.Element => { - const { experiment, exposureAndSampleSize, experimentInsightType, groupTypes, aggregationLabel } = +import { ExperimentLoader, ExperimentLoadingAnimation, PageHeaderCustom } from './ExperimentView/components' +import { DistributionTable } from './ExperimentView/DistributionTable' +import { ExperimentExposureModal, ExperimentGoalModal, Goal } from './ExperimentView/Goal' +import { Info } from './ExperimentView/Info' +import { NoResultsEmptyState } from './ExperimentView/NoResultsEmptyState' +import { Overview } from './ExperimentView/Overview' +import { ProgressBar } from './ExperimentView/ProgressBar' +import { ReleaseConditionsTable } from './ExperimentView/ReleaseConditionsTable' +import { Results } from './ExperimentView/Results' +import { SecondaryMetricsTable } from './ExperimentView/SecondaryMetricsTable' + +export function ExperimentView(): JSX.Element { + const { experiment, experimentLoading, experimentResultsLoading, experimentId, experimentResults } = useValues(experimentLogic) - const { setExperiment, setNewExperimentInsight, createExperiment } = useActions(experimentLogic) - // insightLogic - const logic = insightLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }) - const { insightProps } = useValues(logic) - - // insightDataLogic - const { query } = useValues(insightDataLogic(insightProps)) + const { updateExperimentSecondaryMetrics } = useActions(experimentLogic) return ( -
        -
        -
        -

        Participant type

        -
        - This sets default aggregation type for all metrics and feature flags. You can change this at any - time by updating the metric or feature flag. -
        - - { - const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined - - setExperiment({ - parameters: { - ...experiment.parameters, - aggregation_group_type_index: groupTypeIndex ?? undefined, - }, - }) - setNewExperimentInsight() - }} - options={[ - { value: -1, label: 'Persons' }, - ...Array.from(groupTypes.values()).map((groupType) => ({ - value: groupType.group_type_index, - label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), - })), - ]} - /> -
        -
        -

        Goal type

        - - { - val && - setNewExperimentInsight({ - insight: val, - properties: experiment?.filters?.properties, - }) - }} - options={[ - { - value: InsightType.FUNNELS, - label: ( -
        -
        Conversion funnel
        -
        - Track how many people complete a sequence of actions and/or events -
        -
        - ), - }, - { - value: InsightType.TRENDS, - label: ( -
        -
        Trend
        -
        - Track a cumulative total count of a specific event or action -
        -
        - ), - }, - ]} - /> -
        -
        -

        Goal criteria

        -
        - {experimentInsightType === InsightType.FUNNELS - ? "Create the funnel where you'd like to see an increased conversion rate." - : 'Create a trend goal to track change in a single metric.'} -
        - -
        - -
        -
        -
        -

        Goal preview

        -
        - - - -
        -
        -
        -
        - { - const { exposure, sampleSize } = exposureAndSampleSize - createExperiment(true, exposure, sampleSize) - }} - > - Create experiment - + <> + +
        + {experimentLoading ? ( + + ) : ( + <> + + {experimentResultsLoading ? ( + + ) : experimentResults && experimentResults.insight ? ( + <> + + + + + updateExperimentSecondaryMetrics(metrics)} + initialMetrics={experiment.secondary_metrics} + defaultAggregationType={experiment.parameters?.aggregation_group_type_index} + /> + + + + ) : ( + <> + + + {experiment.start_date && } + + )} + + + + )}
        -
        + ) } export function ExperimentNext(): JSX.Element { - const { experimentId, editingExistingExperiment, currentFormStep, props } = useValues(experimentLogic) + const { experimentId, editingExistingExperiment } = useValues(experimentLogic) - const stepComponents = { - 0: , - 1: , - } - const CurrentStepComponent = (currentFormStep && stepComponents[currentFormStep]) || - - return ( - <> - {experimentId === 'new' || editingExistingExperiment ? ( -
        -
        -
        - {CurrentStepComponent} -
        -
        - ) : ( -

        {`Experiment ${experimentId} draft/results`}

        - )} - - ) + return experimentId === 'new' || editingExistingExperiment ? : } diff --git a/frontend/src/scenes/experiments/ExperimentPreview.tsx b/frontend/src/scenes/experiments/ExperimentPreview.tsx index 3924eb67e5ace..6536294e95a58 100644 --- a/frontend/src/scenes/experiments/ExperimentPreview.tsx +++ b/frontend/src/scenes/experiments/ExperimentPreview.tsx @@ -444,7 +444,7 @@ export function MetricDisplay({ filters }: { filters?: FilterType }): JSX.Elemen .map((event: ActionFilterType, idx: number) => (
        -
        +
        {experimentInsightType === InsightType.FUNNELS ? (event.order || 0) + 1 : idx + 1}
        diff --git a/frontend/src/scenes/experiments/ExperimentResult.tsx b/frontend/src/scenes/experiments/ExperimentResult.tsx index 67633f8dd6f42..c978a5cd8c767 100644 --- a/frontend/src/scenes/experiments/ExperimentResult.tsx +++ b/frontend/src/scenes/experiments/ExperimentResult.tsx @@ -3,7 +3,6 @@ import './Experiment.scss' import { IconInfo } from '@posthog/icons' import { LemonTable, Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' -import { getSeriesColor } from 'lib/colors' import { EntityFilterInfo } from 'lib/components/EntityFilterInfo' import { FunnelLayout } from 'lib/constants' import { LemonProgress } from 'lib/lemon-ui/LemonProgress' @@ -16,10 +15,8 @@ import { ChartDisplayType, FilterType, FunnelVizType, InsightShortId, InsightTyp import { LoadingState } from './Experiment' import { experimentLogic } from './experimentLogic' +import { getExperimentInsightColour } from './utils' -export function getExperimentInsightColour(variantIndex: number | null): string { - return variantIndex !== null ? getSeriesColor(variantIndex) : 'var(--muted-3000)' -} interface ExperimentResultProps { secondaryMetricId?: number } diff --git a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx new file mode 100644 index 0000000000000..43b3c50ed614b --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx @@ -0,0 +1,66 @@ +import '../Experiment.scss' + +import { LemonTable, LemonTableColumns, Link } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { getSeriesColor } from 'lib/colors' +import { capitalizeFirstLetter } from 'lib/utils' +import { urls } from 'scenes/urls' + +import { MultivariateFlagVariant } from '~/types' + +import { experimentLogic } from '../experimentLogic' + +export function DistributionTable(): JSX.Element { + const { experiment } = useValues(experimentLogic) + + const columns: LemonTableColumns = [ + { + className: 'w-1/3', + key: 'key', + title: 'Variant', + render: function Key(_, item, index): JSX.Element { + return ( +
        +
        + {capitalizeFirstLetter(item.key)} +
        + ) + }, + }, + { + className: 'w-1/3', + key: 'rollout_percentage', + title: 'Rollout', + render: function Key(_, item): JSX.Element { + return
        {`${item.rollout_percentage}%`}
        + }, + }, + ] + + return ( +
        +
        +
        +

        Distribution

        +
        + +
        +
        + + Manage distribution + +
        +
        +
        + +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx new file mode 100644 index 0000000000000..d1406633d4e86 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx @@ -0,0 +1,238 @@ +import '../Experiment.scss' + +import { IconInfo } from '@posthog/icons' +import { LemonButton, LemonDivider, LemonModal, Tooltip } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { Field, Form } from 'kea-forms' +import { InsightLabel } from 'lib/components/InsightLabel' +import { PropertyFilterButton } from 'lib/components/PropertyFilters/components/PropertyFilterButton' + +import { ActionFilter as ActionFilterType, AnyPropertyFilter, Experiment, FilterType, InsightType } from '~/types' + +import { EXPERIMENT_EXPOSURE_INSIGHT_ID, EXPERIMENT_INSIGHT_ID } from '../constants' +import { experimentLogic } from '../experimentLogic' +import { MetricSelector } from '../MetricSelector' + +export function MetricDisplay({ filters }: { filters?: FilterType }): JSX.Element { + const experimentInsightType = filters?.insight || InsightType.TRENDS + + return ( + <> + {([...(filters?.events || []), ...(filters?.actions || [])] as ActionFilterType[]) + .sort((a, b) => (a.order || 0) - (b.order || 0)) + .map((event: ActionFilterType, idx: number) => ( +
        +
        +
        + {experimentInsightType === InsightType.FUNNELS ? (event.order || 0) + 1 : idx + 1} +
        + + + +
        +
        + {event.properties?.map((prop: AnyPropertyFilter) => ( + + ))} +
        +
        + ))} + + ) +} + +export function ExposureMetric({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { + const { experiment } = useValues(experimentLogic({ experimentId })) + const { openExperimentExposureModal, updateExperimentExposure } = useActions(experimentLogic({ experimentId })) + + return ( + <> +
        + Exposure metric + + + +
        + {experiment.parameters?.custom_exposure_filter ? ( + + ) : ( + Default via $feature_flag_called events + )} +
        + + + Change exposure metric + + {experiment.parameters?.custom_exposure_filter && ( + updateExperimentExposure(null)} + > + Reset exposure + + )} + +
        + + ) +} + +export function ExperimentGoalModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { + const { experiment, isExperimentGoalModalOpen, experimentLoading } = useValues(experimentLogic({ experimentId })) + const { closeExperimentGoalModal, updateExperimentGoal, setNewExperimentInsight } = useActions( + experimentLogic({ experimentId }) + ) + + return ( + + + Cancel + + { + updateExperimentGoal(experiment.filters) + }} + type="primary" + loading={experimentLoading} + data-attr="create-annotation-submit" + > + Save + +
        + } + > +
        + + + +
        + + ) +} + +export function ExperimentExposureModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { + const { experiment, isExperimentExposureModalOpen, experimentLoading } = useValues( + experimentLogic({ experimentId }) + ) + const { closeExperimentExposureModal, updateExperimentExposure, setExperimentExposureInsight } = useActions( + experimentLogic({ experimentId }) + ) + + return ( + + + Cancel + + { + if (experiment.parameters.custom_exposure_filter) { + updateExperimentExposure(experiment.parameters.custom_exposure_filter) + } + }} + type="primary" + loading={experimentLoading} + data-attr="create-annotation-submit" + > + Save + +
        + } + > +
        + + + +
        + + ) +} + +export function Goal(): JSX.Element { + const { experiment, experimentId, experimentInsightType, experimentMathAggregationForTrends } = + useValues(experimentLogic) + const { openExperimentGoalModal } = useActions(experimentLogic({ experimentId })) + + return ( +
        +

        Experiment goal

        +
        + This {experimentInsightType === InsightType.FUNNELS ? 'funnel' : 'trend'}{' '} + {experimentInsightType === InsightType.FUNNELS + ? 'experiment measures conversion through each step of the user journey.' + : 'experiment tracks the performance of a single metric.'} +
        +
        +
        +
        + {experimentInsightType === InsightType.FUNNELS ? 'Conversion goal steps' : 'Trend goal'} +
        + + + Change experiment goal + +
        + {experimentInsightType === InsightType.TRENDS && + !experimentMathAggregationForTrends(experiment.filters) && ( + <> + +
        +
        + +
        +
        + + )} +
        +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/Info.tsx b/frontend/src/scenes/experiments/ExperimentView/Info.tsx new file mode 100644 index 0000000000000..b11b938860eac --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/Info.tsx @@ -0,0 +1,87 @@ +import '../Experiment.scss' + +import { IconWarning } from '@posthog/icons' +import { Link, ProfilePicture, Tooltip } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { CopyToClipboardInline } from 'lib/components/CopyToClipboard' +import { TZLabel } from 'lib/components/TZLabel' +import { IconOpenInNew } from 'lib/lemon-ui/icons' +import { urls } from 'scenes/urls' + +import { ProgressStatus } from '~/types' + +import { StatusTag } from '../Experiment' +import { experimentLogic } from '../experimentLogic' +import { getExperimentStatus } from '../experimentsLogic' +import { ResultsTag } from './components' + +export function Info(): JSX.Element { + const { experiment } = useValues(experimentLogic) + const { created_by, created_at } = experiment + + if (!experiment.feature_flag) { + return <> + } + + return ( +
        +
        +
        +
        Status
        + +
        +
        +
        Significance
        + +
        + {experiment.feature_flag && ( +
        +
        + Feature flag +
        + {getExperimentStatus(experiment) === ProgressStatus.Running && + !experiment.feature_flag.active && ( + + + + )} + + {experiment.feature_flag.key} + + + + +
        + )} +
        + +
        +
        +
        +
        Created at
        + {created_at && } +
        +
        +
        Created by
        + {created_by && } +
        +
        +
        +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/NoResultsEmptyState.tsx b/frontend/src/scenes/experiments/ExperimentView/NoResultsEmptyState.tsx new file mode 100644 index 0000000000000..c4c021a3c382e --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/NoResultsEmptyState.tsx @@ -0,0 +1,33 @@ +import '../Experiment.scss' + +import { Empty } from 'antd' +import { useValues } from 'kea' + +import { experimentLogic } from '../experimentLogic' + +export function NoResultsEmptyState(): JSX.Element { + const { experimentResultsLoading, experimentResultCalculationError } = useValues(experimentLogic) + + if (experimentResultsLoading) { + return <> + } + + return ( +
        +

        Results

        +
        +
        + +

        There are no experiment results yet

        + {!!experimentResultCalculationError && ( +
        {experimentResultCalculationError}
        + )} +
+ Wait a bit longer for your users to be exposed to the experiment. Double-check your feature flag + implementation if you're still not seeing results. +
        +
        +
        +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/Overview.tsx b/frontend/src/scenes/experiments/ExperimentView/Overview.tsx new file mode 100644 index 0000000000000..76cc2136116d4 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/Overview.tsx @@ -0,0 +1,95 @@ +import '../Experiment.scss' + +import { LemonDivider } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { getSeriesColor } from 'lib/colors' +import { capitalizeFirstLetter } from 'lib/utils' + +import { InsightType } from '~/types' + +import { experimentLogic } from '../experimentLogic' + +export function Overview(): JSX.Element { + const { + experimentResults, + getIndexForVariant, + experimentInsightType, + sortedConversionRates, + highestProbabilityVariant, + areResultsSignificant, + } = useValues(experimentLogic) + + function SignificanceText(): JSX.Element { + return ( + <> + Your results are  + {`${areResultsSignificant ? 'significant' : 'not significant'}`}. + + ) + } + + if (experimentInsightType === InsightType.FUNNELS) { + const winningVariant = sortedConversionRates[0] + const secondBestVariant = sortedConversionRates[1] + const difference = winningVariant.conversionRate - secondBestVariant.conversionRate + + return ( +
        +

        Summary

        +
        +
+ {capitalizeFirstLetter(winningVariant.key)} +  is winning with a conversion rate  + + increase of {`${difference.toFixed(2)}`} + +  percentage points (vs  +
        + {capitalizeFirstLetter(secondBestVariant.key)} + ).  + +
        +
        + ) + } + + const index = getIndexForVariant(experimentResults, highestProbabilityVariant || '') + if (highestProbabilityVariant && index !== null && experimentResults) { + const { probability } = experimentResults + + return ( +
        +

        Overview

        + +
        +
        + {capitalizeFirstLetter(highestProbabilityVariant)} +  is winning with a  + + {`${(probability[highestProbabilityVariant] * 100).toFixed(2)}% probability`}  + + of being best.  + +
        +
        + ) + } + + return <> +} diff --git a/frontend/src/scenes/experiments/ExperimentView/ProgressBar.tsx b/frontend/src/scenes/experiments/ExperimentView/ProgressBar.tsx new file mode 100644 index 0000000000000..1cedbcf500d6c --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/ProgressBar.tsx @@ -0,0 +1,77 @@ +import '../Experiment.scss' + +import { useValues } from 'kea' +import { dayjs } from 'lib/dayjs' +import { LemonProgress } from 'lib/lemon-ui/LemonProgress' +import { humanFriendlyNumber } from 'lib/utils' + +import { FunnelStep, InsightType } from '~/types' + +import { experimentLogic } from '../experimentLogic' + +export function ProgressBar(): JSX.Element { + const { experiment, experimentResults, experimentInsightType } = useValues(experimentLogic) + + // Parameters for experiment results + // don't use creation variables in results + const funnelResultsPersonsTotal = + experimentInsightType === InsightType.FUNNELS && experimentResults?.insight + ? (experimentResults.insight as FunnelStep[][]).reduce( + (sum: number, variantResult: FunnelStep[]) => variantResult[0]?.count + sum, + 0 + ) + : 0 + + const experimentProgressPercent = + experimentInsightType === InsightType.FUNNELS + ? ((funnelResultsPersonsTotal || 0) / (experiment?.parameters?.recommended_sample_size || 1)) * 100 + : (dayjs().diff(experiment?.start_date, 'day') / (experiment?.parameters?.recommended_running_time || 1)) * + 100 + + return ( +
        +
        {`${ + experimentProgressPercent > 100 ? 100 : experimentProgressPercent.toFixed(2) + }% complete`}
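// A worked sketch of the experimentProgressPercent calculation above, with
// illustrative numbers (the real values come from experimentResults and
// experiment.parameters):
//
//   FUNNELS: participants so far / recommended sample size
//     (800 / 2000) * 100 === 40   // funnelResultsPersonsTotal sums each
//                                 // variant's first-step count
//   TRENDS: days elapsed since start / recommended running time
//     (7 / 14) * 100 === 50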
        + + {experimentInsightType === InsightType.TRENDS && experiment.start_date && ( +
        + {experiment.end_date ? ( +
        + Ran for {dayjs(experiment.end_date).diff(experiment.start_date, 'day')} days +
        + ) : ( +
        + {dayjs().diff(experiment.start_date, 'day')} days running +
        + )} +
        + Goal: {experiment?.parameters?.recommended_running_time ?? 'Unknown'} days +
        +
        + )} + {experimentInsightType === InsightType.FUNNELS && ( +
        + {experiment.end_date ? ( +
        + Saw {humanFriendlyNumber(funnelResultsPersonsTotal)} participants +
        + ) : ( +
        + {humanFriendlyNumber(funnelResultsPersonsTotal)} participants seen +
        + )} +
        + Goal: {humanFriendlyNumber(experiment?.parameters?.recommended_sample_size || 0)}{' '} + participants +
        +
        + )} +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx b/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx new file mode 100644 index 0000000000000..c0a4024e559f6 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx @@ -0,0 +1,77 @@ +import '../Experiment.scss' + +import { LemonTable, LemonTableColumns, LemonTag, Link } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { urls } from 'scenes/urls' + +import { groupsModel } from '~/models/groupsModel' +import { FeatureFlagGroupType } from '~/types' + +import { experimentLogic } from '../experimentLogic' + +export function ReleaseConditionsTable(): JSX.Element { + const { experiment } = useValues(experimentLogic) + const { aggregationLabel } = useValues(groupsModel) + + const columns: LemonTableColumns = [ + { + key: 'key', + title: '', + render: function Key(_, _item, index): JSX.Element { + return
        {`Set ${index + 1}`}
        + }, + }, + { + key: 'rollout_percentage', + title: 'Rollout', + render: function Key(_, item): JSX.Element { + const aggregationTargetName = + experiment.filters.aggregation_group_type_index != null + ? aggregationLabel(experiment.filters.aggregation_group_type_index).plural + : 'users' + + const releaseText = `${item.rollout_percentage}% of ${aggregationTargetName}` + + return ( +
        + {releaseText.startsWith('100% of') ? ( + {releaseText} + ) : ( + releaseText + )} +
        + ) + }, + }, + { + key: 'variant', + title: 'Override', + render: function Key(_, item): JSX.Element { + return
        {item.variant || '--'}
        + }, + }, + ] + + return ( +
        +
        +
        +

        Release conditions

        +
        + +
        +
        + + Manage release conditions + +
        +
        +
        + +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/Results.tsx b/frontend/src/scenes/experiments/ExperimentView/Results.tsx new file mode 100644 index 0000000000000..bd0662dfea042 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/Results.tsx @@ -0,0 +1,50 @@ +import '../Experiment.scss' + +import { useValues } from 'kea' + +import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { Query } from '~/queries/Query/Query' +import { NodeKind } from '~/queries/schema' +import { InsightShortId } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { transformResultFilters } from '../utils' +import { ResultsTag } from './components' +import { SummaryTable } from './SummaryTable' + +export function Results(): JSX.Element { + const { experimentResults } = useValues(experimentLogic) + + return ( +
        +
        +

        Results

        + +
        + + +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx new file mode 100644 index 0000000000000..ea9c7befcdd7f --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx @@ -0,0 +1,211 @@ +import '../Experiment.scss' + +import { IconPlus } from '@posthog/icons' +import { LemonButton, LemonInput, LemonModal, LemonTable, LemonTableColumns } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { Form } from 'kea-forms' +import { IconAreaChart } from 'lib/lemon-ui/icons' +import { LemonField } from 'lib/lemon-ui/LemonField' +import { capitalizeFirstLetter, humanFriendlyNumber } from 'lib/utils' + +import { InsightType } from '~/types' + +import { SECONDARY_METRIC_INSIGHT_ID } from '../constants' +import { experimentLogic, TabularSecondaryMetricResults } from '../experimentLogic' +import { MetricSelector } from '../MetricSelector' +import { secondaryMetricsLogic, SecondaryMetricsProps } from '../secondaryMetricsLogic' +import { getExperimentInsightColour } from '../utils' + +export function SecondaryMetricsTable({ + onMetricsChange, + initialMetrics, + experimentId, + defaultAggregationType, +}: SecondaryMetricsProps): JSX.Element { + const logic = secondaryMetricsLogic({ onMetricsChange, initialMetrics, experimentId, defaultAggregationType }) + const { metrics, isModalOpen, isSecondaryMetricModalSubmitting, existingModalSecondaryMetric, metricIdx } = + useValues(logic) + + const { + deleteMetric, + openModalToCreateSecondaryMetric, + openModalToEditSecondaryMetric, + closeModal, + saveSecondaryMetric, + setPreviewInsight, + } = useActions(logic) + + const { + secondaryMetricResultsLoading, + isExperimentRunning, + getIndexForVariant, + experiment, + experimentResults, + tabularSecondaryMetricResults, + } = useValues(experimentLogic({ experimentId })) + + const columns: LemonTableColumns = [ + { + key: 'variant', + title: 'Variant', + render: function Key(_, item: TabularSecondaryMetricResults): JSX.Element { + return ( +
        +
        + {capitalizeFirstLetter(item.variant)} +
        + ) + }, + }, + ] + + experiment.secondary_metrics?.forEach((metric, idx) => { + columns.push({ + key: `results_${idx}`, + title: ( + + } + onClick={() => openModalToEditSecondaryMetric(metric, idx)} + > + {capitalizeFirstLetter(metric.name)} + + + ), + render: function Key(_, item: TabularSecondaryMetricResults): JSX.Element { + return ( +
        + {item.results?.[idx].result ? ( + item.results[idx].insightType === InsightType.FUNNELS ? ( + <>{((item.results[idx].result as number) * 100).toFixed(1)}% + ) : ( + <>{humanFriendlyNumber(item.results[idx].result as number)} + ) + ) : ( + <>-- + )} +
        + ) + }, + }) + }) + + return ( + <> + + {existingModalSecondaryMetric && ( + deleteMetric(metricIdx)} + > + Delete + + )} +
        + + Cancel + + + {existingModalSecondaryMetric ? 'Save' : 'Create'} + +
        + + } + > +
        + + + + + + +
        +
        +
        +
        +
        +

        Secondary metrics

        + {metrics.length > 0 && ( +
        Click a metric name to compare variants on a graph.
        + )} +
        + +
        +
        + {metrics && metrics.length > 0 && metrics.length < 3 && isExperimentRunning && ( +
        + + Add metric + +
        + )} +
        +
        +
        + {metrics && metrics.length > 0 ? ( + + ) : ( +
        +
        + +
        + Add up to 3 secondary metrics to gauge side effects of your experiment. +
        + } + type="secondary" + size="small" + onClick={openModalToCreateSecondaryMetric} + > + Add metric + +
        +
        + )} +
        + + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx new file mode 100644 index 0000000000000..b6d4b95674c2c --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx @@ -0,0 +1,132 @@ +import '../Experiment.scss' + +import { IconInfo } from '@posthog/icons' +import { LemonTable, LemonTableColumns, Tooltip } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { getSeriesColor } from 'lib/colors' +import { EntityFilterInfo } from 'lib/components/EntityFilterInfo' +import { LemonProgress } from 'lib/lemon-ui/LemonProgress' +import { capitalizeFirstLetter } from 'lib/utils' + +import { FunnelExperimentVariant, InsightType, TrendExperimentVariant } from '~/types' + +import { experimentLogic } from '../experimentLogic' + +export function SummaryTable(): JSX.Element { + const { + experimentResults, + experimentInsightType, + exposureCountDataForVariant, + conversionRateForVariant, + sortedConversionRates, + experimentMathAggregationForTrends, + countDataForVariant, + areTrendResultsConfusing, + } = useValues(experimentLogic) + + if (!experimentResults) { + return <> + } + + const columns: LemonTableColumns = [ + { + key: 'variants', + title: 'Variant', + render: function Key(_, item, index): JSX.Element { + return ( +
        +
        + {capitalizeFirstLetter(item.key)} +
        + ) + }, + }, + ] + + if (experimentInsightType === InsightType.TRENDS) { + columns.push({ + key: 'counts', + title: ( +
        + {experimentResults.insight?.[0] && 'action' in experimentResults.insight[0] && ( + + )} + + {experimentMathAggregationForTrends(experimentResults?.filters) ? 'metric' : 'count'} + +
        + ), + render: function Key(_, item, index): JSX.Element { + return ( +
        + {countDataForVariant(experimentResults, item.key)}{' '} + {areTrendResultsConfusing && index === 0 && ( + + + + )} +
        + ) + }, + }) + columns.push({ + key: 'exposure', + title: 'Exposure', + render: function Key(_, item): JSX.Element { + return
        {exposureCountDataForVariant(experimentResults, item.key)}
        + }, + }) + } + + if (experimentInsightType === InsightType.FUNNELS) { + columns.push({ + key: 'conversionRate', + title: 'Conversion rate', + render: function Key(_, item): JSX.Element { + const isWinning = item.key === sortedConversionRates[0].key + return ( +
        {`${conversionRateForVariant( + experimentResults, + item.key + )}%`}
        + ) + }, + }) + } + + columns.push({ + key: 'winProbability', + title: 'Win probability', + render: function Key(_, item): JSX.Element { + const percentage = + experimentResults?.probability?.[item.key] != undefined && + experimentResults.probability?.[item.key] * 100 + + return ( + <> + {percentage ? ( + + + {percentage.toFixed(2)}% + + ) : ( + '--' + )} + + ) + }, + }) + + return ( +
        + +
        + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx new file mode 100644 index 0000000000000..1a22957925e68 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -0,0 +1,148 @@ +import '../Experiment.scss' + +import { LemonButton, LemonDivider, LemonTable, LemonTag, LemonTagType } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { AnimationType } from 'lib/animations/animations' +import { Animation } from 'lib/components/Animation/Animation' +import { PageHeader } from 'lib/components/PageHeader' +import { dayjs } from 'lib/dayjs' +import { More } from 'lib/lemon-ui/LemonButton/More' +import { useEffect, useState } from 'react' + +import { ResetButton } from '../Experiment' +import { experimentLogic } from '../experimentLogic' + +export function ResultsTag(): JSX.Element { + const { areResultsSignificant } = useValues(experimentLogic) + const result: { color: LemonTagType; label: string } = areResultsSignificant + ? { color: 'success', label: 'Significant' } + : { color: 'primary', label: 'Not significant' } + + return ( + + {result.label} + + ) +} + +export function ExperimentLoader(): JSX.Element { + return ( + + ) +} + +export function ExperimentLoadingAnimation(): JSX.Element { + function EllipsisAnimation(): JSX.Element { + const [ellipsis, setEllipsis] = useState('.') + + useEffect(() => { + let count = 1 + let direction = 1 + + const interval = setInterval(() => { + setEllipsis('.'.repeat(count)) + count += direction + + if (count === 3 || count === 1) { + direction *= -1 + } + }, 300) + + return () => clearInterval(interval) + }, []) + + return {ellipsis} + } + + return ( +
        + +
        + Fetching experiment results + +
        +
        + ) +} + +export function PageHeaderCustom(): JSX.Element { + const { experiment, isExperimentRunning } = useValues(experimentLogic) + const { + launchExperiment, + resetRunningExperiment, + endExperiment, + archiveExperiment, + setEditExperiment, + loadExperimentResults, + loadSecondaryMetricResults, + } = useActions(experimentLogic) + + return ( + + {experiment && !isExperimentRunning && ( +
        + setEditExperiment(true)}> + Edit + + launchExperiment()}> + Launch + +
        + )} + {experiment && isExperimentRunning && ( +
        + <> + + loadExperimentResults(true)} + fullWidth + data-attr="refresh-experiment" + > + Refresh experiment results + + loadSecondaryMetricResults(true)} + fullWidth + data-attr="refresh-secondary-metrics" + > + Refresh secondary metrics + + + } + /> + + + + {!experiment.end_date && ( + endExperiment()}> + Stop + + )} + {experiment?.end_date && + dayjs().isSameOrAfter(dayjs(experiment.end_date), 'day') && + !experiment.archived && ( + archiveExperiment()}> + Archive + + )} +
        + )} + + } + /> + ) +} diff --git a/frontend/src/scenes/experiments/MetricSelector.tsx b/frontend/src/scenes/experiments/MetricSelector.tsx index 4df25546fe8a9..fbfcd0617d61c 100644 --- a/frontend/src/scenes/experiments/MetricSelector.tsx +++ b/frontend/src/scenes/experiments/MetricSelector.tsx @@ -4,6 +4,7 @@ import { IconInfo } from '@posthog/icons' import { LemonSelect } from '@posthog/lemon-ui' import { BindLogic, useActions, useValues } from 'kea' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { EXPERIMENT_DEFAULT_DURATION } from 'lib/constants' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { Attribution } from 'scenes/insights/EditorFilters/AttributionFilter' @@ -23,8 +24,6 @@ import { Query } from '~/queries/Query/Query' import { FunnelsQuery, InsightQueryNode, TrendsQuery } from '~/queries/schema' import { EditorFilterProps, FilterType, InsightLogicProps, InsightShortId, InsightType } from '~/types' -import { DEFAULT_DURATION } from './experimentLogic' - export interface MetricSelectorProps { dashboardItemId: InsightShortId setPreviewInsight: (filters?: Partial) => void @@ -75,8 +74,8 @@ export function MetricSelector({ {showDateRangeBanner && ( - Preview insights are generated based on {DEFAULT_DURATION} days of data. This can cause a mismatch - between the preview and the actual results. + Preview insights are generated based on {EXPERIMENT_DEFAULT_DURATION} days of data. This can cause a + mismatch between the preview and the actual results. )} diff --git a/frontend/src/scenes/experiments/SecondaryMetricsTable.tsx b/frontend/src/scenes/experiments/SecondaryMetricsTable.tsx index fbaae05233582..c15b4f8293a3a 100644 --- a/frontend/src/scenes/experiments/SecondaryMetricsTable.tsx +++ b/frontend/src/scenes/experiments/SecondaryMetricsTable.tsx @@ -16,9 +16,9 @@ import { InsightType } from '~/types' import { SECONDARY_METRIC_INSIGHT_ID } from './constants' import { experimentLogic, TabularSecondaryMetricResults } from './experimentLogic' -import { getExperimentInsightColour } from './ExperimentResult' import { MetricSelector } from './MetricSelector' import { secondaryMetricsLogic, SecondaryMetricsProps } from './secondaryMetricsLogic' +import { getExperimentInsightColour } from './utils' export function SecondaryMetricsTable({ onMetricsChange, diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 35e617d41470e..d2154c637a2a6 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -4,7 +4,7 @@ import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import { router, urlToAction } from 'kea-router' import api from 'lib/api' -import { FunnelLayout } from 'lib/constants' +import { EXPERIMENT_DEFAULT_DURATION, FunnelLayout } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' import { Tooltip } from 'lib/lemon-ui/Tooltip' @@ -23,7 +23,7 @@ import { urls } from 'scenes/urls' import { groupsModel } from '~/models/groupsModel' import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { InsightVizNode } from '~/queries/schema' +import { FunnelsQuery, InsightVizNode, TrendsQuery } from '~/queries/schema' import { ActionFilter as 
ActionFilterType, Breadcrumb, @@ -47,8 +47,6 @@ import { EXPERIMENT_EXPOSURE_INSIGHT_ID, EXPERIMENT_INSIGHT_ID } from './constan import type { experimentLogicType } from './experimentLogicType' import { experimentsLogic } from './experimentsLogic' -export const DEFAULT_DURATION = 14 // days - const NEW_EXPERIMENT: Experiment = { id: 'new', name: '', @@ -358,7 +356,7 @@ export const experimentLogic = kea([ newInsightFilters = cleanFilters({ insight: InsightType.FUNNELS, funnel_viz_type: FunnelVizType.Steps, - date_from: dayjs().subtract(DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), layout: FunnelLayout.horizontal, aggregation_group_type_index: aggregationGroupTypeIndex, @@ -375,14 +373,23 @@ export const experimentLogic = kea([ : { events: [{ ...getDefaultEvent(), ...groupAggregation }] } newInsightFilters = cleanFilters({ insight: InsightType.TRENDS, - date_from: dayjs().subtract(DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), ...eventAddition, ...filters, }) } - actions.updateQuerySource(filtersToQueryNode(newInsightFilters)) + // This allows switching between insight types. It's necessary as `updateQuerySource` merges + // the new query with any existing query and that causes validation problems when there are + // unsupported properties in the now merged query. + const newQuery = filtersToQueryNode(newInsightFilters) + if (filters?.insight === InsightType.FUNNELS) { + ;(newQuery as TrendsQuery).trendsFilter = undefined + } else { + ;(newQuery as FunnelsQuery).funnelsFilter = undefined + } + actions.updateQuerySource(newQuery) }, // sync form value `filters` with query setQuery: ({ query }) => { @@ -391,7 +398,7 @@ export const experimentLogic = kea([ setExperimentExposureInsight: async ({ filters }) => { const newInsightFilters = cleanFilters({ insight: InsightType.TRENDS, - date_from: dayjs().subtract(DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_from: dayjs().subtract(EXPERIMENT_DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), ...filters, }) @@ -672,6 +679,16 @@ export const experimentLogic = kea([ return !!experiment?.start_date }, ], + isExperimentStopped: [ + (s) => [s.experiment], + (experiment): boolean => { + return ( + !!experiment?.end_date && + dayjs().isSameOrAfter(dayjs(experiment.end_date), 'day') && + !experiment.archived + ) + }, + ], breadcrumbs: [ (s) => [s.experiment, s.experimentId], (experiment, experimentId): Breadcrumb[] => [ @@ -801,7 +818,11 @@ export const experimentLogic = kea([ return parseFloat( ( 4 / - Math.pow(Math.sqrt(lambda1 / DEFAULT_DURATION) - Math.sqrt(lambda2 / DEFAULT_DURATION), 2) + Math.pow( + Math.sqrt(lambda1 / EXPERIMENT_DEFAULT_DURATION) - + Math.sqrt(lambda2 / EXPERIMENT_DEFAULT_DURATION), + 2 + ) ).toFixed(1) ) }, @@ -809,7 +830,7 @@ export const experimentLogic = kea([ expectedRunningTime: [ () => [], () => - (entrants: number, sampleSize: number, duration: number = DEFAULT_DURATION): number => { + (entrants: number, sampleSize: number, duration: number = EXPERIMENT_DEFAULT_DURATION): number => { // recommended people / (actual people / day) = expected days return parseFloat((sampleSize / (entrants / duration)).toFixed(1)) }, @@ -1014,13 +1035,29 @@ export 
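// A worked example of the recommended-sample-size formula above, a two-sample
// Poisson-rate approximation; the lambda values are illustrative:
//
//   lambda1 = 28, lambda2 = 42, EXPERIMENT_DEFAULT_DURATION = 14
//   4 / (sqrt(28 / 14) - sqrt(42 / 14))^2
//     = 4 / (1.414 - 1.732)^2
//     = 4 / 0.101
//     ≈ 39.6  -> toFixed(1) gives the value surfaced to the user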
const experimentLogic = kea([ return variantsWithResults }, ], + sortedConversionRates: [ + (s) => [s.experimentResults, s.variants, s.conversionRateForVariant], + ( + experimentResults: any, + variants: any, + conversionRateForVariant: any + ): { key: string; conversionRate: number; index: number }[] => { + const conversionRates = [] + for (let index = 0; index < variants.length; index++) { + const variant = variants[index].key + const conversionRate = parseFloat(conversionRateForVariant(experimentResults, variant)) + conversionRates.push({ key: variant, conversionRate, index }) + } + return conversionRates.sort((a, b) => b.conversionRate - a.conversionRate) + }, + ], }), forms(({ actions, values }) => ({ experiment: { options: { showErrorsOnTouch: true }, defaults: { ...NEW_EXPERIMENT } as Experiment, errors: ({ name, feature_flag_key, parameters }) => ({ - name: !name && 'You have to enter a name.', + name: !name && 'Please enter a name', feature_flag_key: validateFeatureFlagKey(feature_flag_key), parameters: { feature_flag_variants: parameters.feature_flag_variants?.map(({ key }) => ({ diff --git a/frontend/src/scenes/experiments/secondaryMetricsLogic.ts b/frontend/src/scenes/experiments/secondaryMetricsLogic.ts index d3b04d4a29c38..a12bc0f4a7547 100644 --- a/frontend/src/scenes/experiments/secondaryMetricsLogic.ts +++ b/frontend/src/scenes/experiments/secondaryMetricsLogic.ts @@ -10,7 +10,7 @@ import { teamLogic } from 'scenes/teamLogic' import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { InsightVizNode } from '~/queries/schema' +import { FunnelsQuery, InsightVizNode, TrendsQuery } from '~/queries/schema' import { Experiment, FilterType, FunnelVizType, InsightType, SecondaryExperimentMetric } from '~/types' import { SECONDARY_METRIC_INSIGHT_ID } from './constants' @@ -162,7 +162,16 @@ export const secondaryMetricsLogic = kea([ }) } - actions.updateQuerySource(filtersToQueryNode(newInsightFilters)) + // This allows switching between insight types. It's necessary as `updateQuerySource` merges + // the new query with any existing query and that causes validation problems when there are + // unsupported properties in the now merged query. + const newQuery = filtersToQueryNode(newInsightFilters) + if (filters?.insight === InsightType.FUNNELS) { + ;(newQuery as TrendsQuery).trendsFilter = undefined + } else { + ;(newQuery as FunnelsQuery).funnelsFilter = undefined + } + actions.updateQuerySource(newQuery) }, // sync form value `filters` with query setQuery: ({ query }) => { diff --git a/frontend/src/scenes/experiments/utils.ts b/frontend/src/scenes/experiments/utils.ts new file mode 100644 index 0000000000000..90d7b2c64f44b --- /dev/null +++ b/frontend/src/scenes/experiments/utils.ts @@ -0,0 +1,19 @@ +import { getSeriesColor } from 'lib/colors' +import { FunnelLayout } from 'lib/constants' + +import { ChartDisplayType, FilterType, FunnelVizType, InsightType } from '~/types' + +export function getExperimentInsightColour(variantIndex: number | null): string { + return variantIndex !== null ? 
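// Illustrative output of the sortedConversionRates selector added above: sorted
// descending, so [0] is the winning variant and [1] the runner-up, as consumed by
// the Overview and SummaryTable components earlier in this diff:
//
//   [
//       { key: 'test', conversionRate: 34.2, index: 1 },
//       { key: 'control', conversionRate: 28.9, index: 0 },
//   ]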
getSeriesColor(variantIndex) : 'var(--muted-3000)' +} + +export const transformResultFilters = (filters: Partial): Partial => ({ + ...filters, + ...(filters.insight === InsightType.FUNNELS && { + layout: FunnelLayout.vertical, + funnel_viz_type: FunnelVizType.Steps, + }), + ...(filters.insight === InsightType.TRENDS && { + display: ChartDisplayType.ActionsLineGraphCumulative, + }), +}) diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index 0905db1420fa4..f99e578887799 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -174,17 +174,13 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { key: FeatureFlagsTab.USAGE, content: , }) - } - if (featureFlags[FEATURE_FLAGS.MULTI_PROJECT_FEATURE_FLAGS]) { tabs.push({ label: 'Projects', key: FeatureFlagsTab.PROJECTS, content: , }) - } - if (featureFlags[FEATURE_FLAGS.SCHEDULED_CHANGES_FEATURE_FLAGS]) { tabs.push({ label: 'Schedule', key: FeatureFlagsTab.SCHEDULE, @@ -220,7 +216,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { }) } - if (featureFlags[FEATURE_FLAGS.ROLE_BASED_ACCESS] && featureFlag.can_edit) { + if (featureFlag.can_edit) { tabs.push({ label: 'Permissions', key: FeatureFlagsTab.PERMISSIONS, @@ -431,29 +427,27 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { {featureFlags[FEATURE_FLAGS.AUTO_ROLLBACK_FEATURE_FLAGS] && ( )} - {featureFlags[FEATURE_FLAGS.ROLE_BASED_ACCESS] && ( -
        -

        Permissions

        - -
        - - setRolesToAdd(roleIds)} - rolesToAdd={rolesToAdd} - addableRoles={addableRoles} - addableRolesLoading={unfilteredAddableRolesLoading} - onAdd={() => addAssociatedRoles()} - roles={derivedRoles} - deleteAssociatedRole={(id) => - deleteAssociatedRole({ roleId: id }) - } - canEdit={featureFlag.can_edit} - /> - -
        +
        +

        Permissions

        + +
        + + setRolesToAdd(roleIds)} + rolesToAdd={rolesToAdd} + addableRoles={addableRoles} + addableRolesLoading={unfilteredAddableRolesLoading} + onAdd={() => addAssociatedRoles()} + roles={derivedRoles} + deleteAssociatedRole={(id) => + deleteAssociatedRole({ roleId: id }) + } + canEdit={featureFlag.can_edit} + /> +
        - )} +
        )} @@ -572,15 +566,13 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { { editFeatureFlag(true) }} - disabled={!featureFlag.can_edit} > Edit diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx index 389e0e2e4f271..75e3b9a47a6e6 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx @@ -8,7 +8,7 @@ import { router } from 'kea-router' import { allOperatorsToHumanName } from 'lib/components/DefinitionPopover/utils' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' import { isPropertyFilterWithOperator } from 'lib/components/PropertyFilters/utils' -import { FEATURE_FLAGS, INSTANTLY_AVAILABLE_PROPERTIES } from 'lib/constants' +import { INSTANTLY_AVAILABLE_PROPERTIES } from 'lib/constants' import { groupsAccessLogic, GroupsAccessStatus } from 'lib/introductions/groupsAccessLogic' import { GroupsIntroductionOption } from 'lib/introductions/GroupsIntroductionOption' import { IconErrorOutline, IconOpenInNew, IconSubArrowRight } from 'lib/lemon-ui/icons' @@ -60,7 +60,6 @@ export function FeatureFlagReleaseConditions({ affectedUsers, totalUsers, filtersTaxonomicOptions, - enabledFeatures, aggregationTargetName, } = useValues(releaseConditionsLogic) @@ -233,7 +232,7 @@ export function FeatureFlagReleaseConditions({ taxonomicFilterOptionsFromProp={filtersTaxonomicOptions} hasRowOperator={false} sendAllKeyUpdates - allowRelativeDateOptions={!!enabledFeatures[FEATURE_FLAGS.NEW_FEATURE_FLAG_OPERATORS]} + allowRelativeDateOptions errorMessages={ propertySelectErrors?.[index]?.properties?.some((message) => !!message.value) ? propertySelectErrors[index].properties?.map((message, index) => { diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts index f0007c648cc28..dab0d6f408993 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts @@ -2,8 +2,6 @@ import { actions, afterMount, connect, kea, key, listeners, path, props, propsCh import { subscriptions } from 'kea-subscriptions' import api from 'lib/api' import { TaxonomicFilterGroupType, TaxonomicFilterProps } from 'lib/components/TaxonomicFilter/types' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic as enabledFeaturesLogic } from 'lib/logic/featureFlagLogic' import { objectsEqual } from 'lib/utils' import { groupsModel } from '~/models/groupsModel' @@ -32,14 +30,7 @@ export const featureFlagReleaseConditionsLogic = kea id ?? 
'unknown'), connect({ - values: [ - teamLogic, - ['currentTeamId'], - groupsModel, - ['groupTypes', 'aggregationLabel'], - enabledFeaturesLogic, - ['featureFlags as enabledFeatures'], - ], + values: [teamLogic, ['currentTeamId'], groupsModel, ['groupTypes', 'aggregationLabel']], }), actions({ setFilters: (filters: FeatureFlagFilters) => ({ filters }), @@ -210,35 +201,28 @@ export const featureFlagReleaseConditionsLogic = kea [s.filters, s.groupTypes, s.enabledFeatures], - (filters, groupTypes, enabledFeatures): TaxonomicFilterGroupType[] => { - const baseGroupTypes = [] - const additionalGroupTypes = [] - const newFlagOperatorsEnabled = enabledFeatures[FEATURE_FLAGS.NEW_FEATURE_FLAG_OPERATORS] + (s) => [s.filters, s.groupTypes], + (filters, groupTypes): TaxonomicFilterGroupType[] => { + const targetGroupTypes = [] const targetGroup = filters?.aggregation_group_type_index != null ? groupTypes.get(filters.aggregation_group_type_index as GroupTypeIndex) : undefined if (targetGroup) { - baseGroupTypes.push( + targetGroupTypes.push( `${TaxonomicFilterGroupType.GroupsPrefix}_${targetGroup?.group_type_index}` as unknown as TaxonomicFilterGroupType ) - if (newFlagOperatorsEnabled) { - additionalGroupTypes.push( - `${TaxonomicFilterGroupType.GroupNamesPrefix}_${filters.aggregation_group_type_index}` as unknown as TaxonomicFilterGroupType - ) - } + targetGroupTypes.push( + `${TaxonomicFilterGroupType.GroupNamesPrefix}_${filters.aggregation_group_type_index}` as unknown as TaxonomicFilterGroupType + ) } else { - baseGroupTypes.push(TaxonomicFilterGroupType.PersonProperties) - baseGroupTypes.push(TaxonomicFilterGroupType.Cohorts) - - if (newFlagOperatorsEnabled) { - additionalGroupTypes.push(TaxonomicFilterGroupType.Metadata) - } + targetGroupTypes.push(TaxonomicFilterGroupType.PersonProperties) + targetGroupTypes.push(TaxonomicFilterGroupType.Cohorts) + targetGroupTypes.push(TaxonomicFilterGroupType.Metadata) } - return [...baseGroupTypes, ...additionalGroupTypes] + return targetGroupTypes }, ], aggregationTargetName: [ diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 7cca0a0924172..c4e6842aff7e5 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -110,7 +110,7 @@ const EMPTY_MULTIVARIATE_OPTIONS: MultivariateFlagOptions = { /** Check whether a string is a valid feature flag key. If not, a reason string is returned - otherwise undefined. */ export function validateFeatureFlagKey(key: string): string | undefined { return !key - ? 'You need to set a key' + ? 'Please set a key' : !key.match?.(/^([A-z]|[a-z]|[0-9]|-|_)+$/) ? 'Only letters, numbers, hyphens (-) & underscores (_) are allowed.' 
: undefined diff --git a/frontend/src/scenes/funnels/funnelCorrelationFeedbackLogic.test.ts b/frontend/src/scenes/funnels/funnelCorrelationFeedbackLogic.test.ts index b88946646be5c..0ac26e5540b5e 100644 --- a/frontend/src/scenes/funnels/funnelCorrelationFeedbackLogic.test.ts +++ b/frontend/src/scenes/funnels/funnelCorrelationFeedbackLogic.test.ts @@ -1,6 +1,7 @@ import { expectLogic } from 'kea-test-utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import posthog from 'posthog-js' +import { teamLogic } from 'scenes/teamLogic' import { useAvailableFeatures } from '~/mocks/features' import { initKeaTests } from '~/test/init' @@ -14,6 +15,7 @@ describe('funnelCorrelationFeedbackLogic', () => { beforeEach(() => { useAvailableFeatures([AvailableFeature.CORRELATION_ANALYSIS]) initKeaTests(false) + teamLogic.mount() }) const defaultProps: InsightLogicProps = { diff --git a/frontend/src/scenes/funnels/funnelDataLogic.ts b/frontend/src/scenes/funnels/funnelDataLogic.ts index 5a1e708eb6b59..3080f0db0560f 100644 --- a/frontend/src/scenes/funnels/funnelDataLogic.ts +++ b/frontend/src/scenes/funnels/funnelDataLogic.ts @@ -85,7 +85,9 @@ export const funnelDataLogic = kea([ hogQLInsightsFunnelsFlagEnabled: [ (s) => [s.featureFlags], (featureFlags): boolean => { - return !!featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_FUNNELS] + return !!( + featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_FUNNELS] + ) }, ], querySource: [ diff --git a/frontend/src/scenes/insights/EditorFilters/PathsEventTypes.tsx b/frontend/src/scenes/insights/EditorFilters/PathsEventTypes.tsx index 71fe12ce8d68d..48bbd016595ee 100644 --- a/frontend/src/scenes/insights/EditorFilters/PathsEventTypes.tsx +++ b/frontend/src/scenes/insights/EditorFilters/PathsEventTypes.tsx @@ -60,7 +60,7 @@ export function PathsEventsTypes({ insightProps }: EditorFilterProps): JSX.Eleme fullWidth type="secondary" dropdown={{ - sameWidth: true, + matchWidth: true, closeOnClickInside: false, overlay: options.map((option) => ( )} {canEditInsight ? 
( diff --git a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx index b27d271f53d24..8ebb237640060 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx +++ b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx @@ -7,13 +7,22 @@ import { IconPlusSmall } from '@posthog/icons' import clsx from 'clsx' import { BindLogic, useActions, useValues } from 'kea' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { DISPLAY_TYPES_TO_CATEGORIES as DISPLAY_TYPES_TO_CATEGORY } from 'lib/constants' import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton' import { verticalSortableListCollisionDetection } from 'lib/sortable' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import React, { useEffect } from 'react' import { RenameModal } from 'scenes/insights/filters/ActionFilter/RenameModal' +import { isTrendsFilter } from 'scenes/insights/sharedUtils' -import { ActionFilter as ActionFilterType, FilterType, FunnelExclusionLegacy, InsightType, Optional } from '~/types' +import { + ActionFilter as ActionFilterType, + ChartDisplayType, + FilterType, + FunnelExclusionLegacy, + InsightType, + Optional, +} from '~/types' import { teamLogic } from '../../../teamLogic' import { ActionFilterRow, MathAvailability } from './ActionFilterRow/ActionFilterRow' @@ -147,6 +156,9 @@ export const ActionFilter = React.forwardRef( mathAvailability, customRowSuffix, hasBreakdown: !!filters.breakdown, + trendsDisplayCategory: isTrendsFilter(filters) + ? DISPLAY_TYPES_TO_CATEGORY[filters.display || ChartDisplayType.ActionsLineGraph] + : null, actionsTaxonomicGroupTypes, propertiesTaxonomicGroupTypes, propertyFiltersPopover, diff --git a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx index 3f6d31c9489b2..0cb3eaeb086b3 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx +++ b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx @@ -3,7 +3,7 @@ import './ActionFilterRow.scss' import { DraggableSyntheticListeners } from '@dnd-kit/core' import { useSortable } from '@dnd-kit/sortable' import { CSS } from '@dnd-kit/utilities' -import { IconCopy, IconFilter, IconPencil, IconTrash } from '@posthog/icons' +import { IconCopy, IconFilter, IconPencil, IconTrash, IconWarning } from '@posthog/icons' import { LemonSelect, LemonSelectOption, LemonSelectOptions } from '@posthog/lemon-ui' import { BuiltLogic, useActions, useValues } from 'kea' import { EntityFilterInfo } from 'lib/components/EntityFilterInfo' @@ -39,6 +39,7 @@ import { ActionFilter, ActionFilter as ActionFilterType, BaseMathType, + ChartDisplayCategory, CountPerActorMathType, EntityType, EntityTypes, @@ -115,6 +116,7 @@ export interface ActionFilterRowProps { renameRowButton, deleteButton, }: Record) => JSX.Element // build your own row given these components + trendsDisplayCategory: ChartDisplayCategory | null } export function ActionFilterRow({ @@ -142,6 +144,7 @@ export function ActionFilterRow({ disabled = false, readOnly = false, renderRow, + trendsDisplayCategory, }: ActionFilterRowProps): JSX.Element { const { entityFilterVisible } = useValues(logic) const { @@ -228,6 +231,7 @@ export function ActionFilterRow({ fullWidth groupType={filter.type as TaxonomicFilterGroupType} 
value={getValue(value, filter)} + filter={filter} onChange={(changedValue, taxonomicGroupType, item) => { const groupType = taxonomicFilterGroupTypeToEntityType(taxonomicGroupType) if (groupType === EntityTypes.DATA_WAREHOUSE) { @@ -376,6 +380,7 @@ export function ActionFilterRow({ disabled={readOnly} style={{ maxWidth: '100%', width: 'initial' }} mathAvailability={mathAvailability} + trendsDisplayCategory={trendsDisplayCategory} /> {mathDefinitions[math || BaseMathType.TotalCount]?.category === MathCategory.PropertyValue && ( @@ -513,6 +518,7 @@ interface MathSelectorProps { disabled?: boolean disabledReason?: string onMathSelect: (index: number, value: any) => any + trendsDisplayCategory: ChartDisplayCategory | null style?: React.CSSProperties } @@ -524,11 +530,14 @@ function isCountPerActorMath(math: string | undefined): math is CountPerActorMat return !!math && math in COUNT_PER_ACTOR_MATH_DEFINITIONS } +const TRAILING_MATH_TYPES = new Set([BaseMathType.WeeklyActiveUsers, BaseMathType.MonthlyActiveUsers]) + function useMathSelectorOptions({ math, index, mathAvailability, onMathSelect, + trendsDisplayCategory, }: MathSelectorProps): LemonSelectOptions { const mountedInsightDataLogic = insightDataLogic.findMounted() const query = mountedInsightDataLogic?.values?.query @@ -549,19 +558,33 @@ function useMathSelectorOptions({ mathAvailability != MathAvailability.ActorsOnly ? staticMathDefinitions : staticActorsOnlyMathDefinitions ) .filter(([key]) => { - if (!isStickiness) { - return true + if (isStickiness) { + // Remove WAU and MAU from stickiness insights + return !TRAILING_MATH_TYPES.has(key) + } + return true + }) + .map(([key, definition]) => { + const shouldWarnAboutTrailingMath = + TRAILING_MATH_TYPES.has(key) && trendsDisplayCategory === ChartDisplayCategory.TotalValue + return { + value: key, + icon: shouldWarnAboutTrailingMath ? : undefined, + label: definition.name, + tooltip: !shouldWarnAboutTrailingMath ? ( + definition.description + ) : ( + <> +

        {definition.description}

        + + In total value insights, it's usually not clear what date range "{definition.name}" refers + to. For full clarity, we recommend using "Unique users" here instead. + + + ), + 'data-attr': `math-${key}-${index}`, } - - // Remove WAU and MAU from stickiness insights - return key !== BaseMathType.WeeklyActiveUsers && key !== BaseMathType.MonthlyActiveUsers }) - .map(([key, definition]) => ({ - value: key, - label: definition.name, - tooltip: definition.description, - 'data-attr': `math-${key}-${index}`, - })) if (mathAvailability !== MathAvailability.ActorsOnly) { options.splice(1, 0, { @@ -579,7 +602,6 @@ function useMathSelectorOptions({ options={Object.entries(COUNT_PER_ACTOR_MATH_DEFINITIONS).map(([key, definition]) => ({ value: key, label: definition.shortName, - tooltip: definition.description, 'data-attr': `math-${key}-${index}`, }))} onClick={(e) => e.stopPropagation()} diff --git a/frontend/src/scenes/insights/insightDataLogic.ts b/frontend/src/scenes/insights/insightDataLogic.ts index 78ed36c747f90..822bfe41dcffb 100644 --- a/frontend/src/scenes/insights/insightDataLogic.ts +++ b/frontend/src/scenes/insights/insightDataLogic.ts @@ -145,6 +145,7 @@ export const insightDataLogic = kea([ } const maintainLegacy = !( + featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS] || (featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE] && isLifecycleQuery(sourceQuery)) || (featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_PATHS] && isPathsQuery(sourceQuery)) || (featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION] && isRetentionQuery(sourceQuery)) || diff --git a/frontend/src/scenes/insights/insightVizDataLogic.test.ts b/frontend/src/scenes/insights/insightVizDataLogic.test.ts index d1de2cd7b8af5..a0a535c7e1686 100644 --- a/frontend/src/scenes/insights/insightVizDataLogic.test.ts +++ b/frontend/src/scenes/insights/insightVizDataLogic.test.ts @@ -23,6 +23,7 @@ describe('insightVizDataLogic', () => { useMocks({ get: { '/api/projects/:team_id/insights/trend': [], + '/api/projects/:team_id/insights/': { results: [{}] }, }, }) initKeaTests() diff --git a/frontend/src/scenes/insights/views/BoldNumber/BoldNumber.tsx b/frontend/src/scenes/insights/views/BoldNumber/BoldNumber.tsx index 3642648298642..7f76c110bd55f 100644 --- a/frontend/src/scenes/insights/views/BoldNumber/BoldNumber.tsx +++ b/frontend/src/scenes/insights/views/BoldNumber/BoldNumber.tsx @@ -99,7 +99,7 @@ export function BoldNumber({ showPersonsModal = true }: ChartParams): JSX.Elemen const resultSeries = insightData?.result?.[0] as TrendResult | undefined const isTrendsQueryWithFeatureFlagOn = - featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS] && + (featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS]) && isTrends && query && isInsightVizNode(query) && @@ -120,6 +120,10 @@ export function BoldNumber({ showPersonsModal = true }: ChartParams): JSX.Elemen kind: NodeKind.InsightActorsQuery, source: query.source, }, + additionalSelect: { + value_at_data_point: 'event_count', + matched_recordings: 'matched_recordings', + }, }) } else if (resultSeries.persons?.url) { openPersonsModal({ @@ -174,7 +178,7 @@ function BoldNumberComparison({ showPersonsModal }: Pick setVisible(false)} diff --git a/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.tsx b/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.tsx index d426ca87f525c..c6d037c3d1792 100644 --- a/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.tsx +++ 
b/frontend/src/scenes/insights/views/InsightsTable/InsightsTable.tsx @@ -3,6 +3,7 @@ import './InsightsTable.scss' import { useActions, useValues } from 'kea' import { getSeriesColor } from 'lib/colors' import { LemonTable, LemonTableColumn } from 'lib/lemon-ui/LemonTable' +import { compare as compareFn } from 'natural-orderby' import { insightLogic } from 'scenes/insights/insightLogic' import { insightSceneLogic } from 'scenes/insights/insightSceneLogic' import { isTrendsFilter } from 'scenes/insights/sharedUtils' @@ -157,7 +158,7 @@ export function InsightsTable({ } const labelA = formatItemBreakdownLabel(a) const labelB = formatItemBreakdownLabel(b) - return labelA.localeCompare(labelB) + return compareFn()(labelA, labelB) }, }) if (isTrends && display === ChartDisplayType.WorldMap) { diff --git a/frontend/src/scenes/notebooks/Nodes/utils.test.tsx b/frontend/src/scenes/notebooks/Nodes/utils.test.tsx index af46f229b2cd8..09cfe7c1ceebd 100644 --- a/frontend/src/scenes/notebooks/Nodes/utils.test.tsx +++ b/frontend/src/scenes/notebooks/Nodes/utils.test.tsx @@ -1,6 +1,6 @@ import { NodeViewProps } from '@tiptap/core' import { useSyncedAttributes } from './utils' -import { renderHook, act } from '@testing-library/react-hooks' +import { renderHook, act } from '@testing-library/react' describe('notebook node utils', () => { jest.useFakeTimers() diff --git a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx index 50358e1f18537..aab8e816e5db5 100644 --- a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx +++ b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx @@ -212,7 +212,7 @@ export function NotebookSelectPopover({
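// Why natural-orderby for breakdown labels (change above): localeCompare sorts
// lexicographically, while compare() is numerically aware. A minimal sketch:
//
//   import { compare } from 'natural-orderby'
//   ;['item 10', 'item 2', 'item 1'].sort((a, b) => a.localeCompare(b))
//   // -> ['item 1', 'item 10', 'item 2']
//   ;['item 10', 'item 2', 'item 1'].sort(compare())
//   // -> ['item 1', 'item 2', 'item 10']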
        } - sameWidth={false} + matchWidth={false} actionable visible={!!showPopover} onVisibilityChange={(visible) => setShowPopover(visible)} diff --git a/frontend/src/scenes/onboarding/Onboarding.tsx b/frontend/src/scenes/onboarding/Onboarding.tsx index 52efdd5d47f0a..adcd6d3476b4d 100644 --- a/frontend/src/scenes/onboarding/Onboarding.tsx +++ b/frontend/src/scenes/onboarding/Onboarding.tsx @@ -1,11 +1,13 @@ import { useActions, useValues } from 'kea' -import { SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' +import { FEATURE_FLAGS, SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { useEffect, useState } from 'react' +import { AndroidInstructions } from 'scenes/onboarding/sdks/session-replay' import { SceneExport } from 'scenes/sceneTypes' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' -import { AvailableFeature, ProductKey } from '~/types' +import { AvailableFeature, ProductKey, SDKKey } from '~/types' import { OnboardingBillingStep } from './OnboardingBillingStep' import { OnboardingInviteTeammates } from './OnboardingInviteTeammates' @@ -108,6 +110,9 @@ const SessionReplayOnboarding = (): JSX.Element => { const { hasAvailableFeature } = useValues(userLogic) const { currentTeam } = useValues(teamLogic) + const { featureFlags } = useValues(featureFlagLogic) + const hasAndroidOnBoarding = !!featureFlags[FEATURE_FLAGS.SESSION_REPLAY_MOBILE_ONBOARDING] + const configOptions: ProductConfigOption[] = [ { type: 'toggle', @@ -139,11 +144,16 @@ const SessionReplayOnboarding = (): JSX.Element => { }) } + const sdkInstructionMap = SessionReplaySDKInstructions + if (hasAndroidOnBoarding) { + sdkInstructionMap[SDKKey.ANDROID] = AndroidInstructions + } + return ( @@ -151,6 +161,7 @@ const SessionReplayOnboarding = (): JSX.Element => { ) } + const FeatureFlagsOnboarding = (): JSX.Element => { return ( diff --git a/frontend/src/scenes/onboarding/OnboardingReverseProxy.tsx b/frontend/src/scenes/onboarding/OnboardingReverseProxy.tsx index 362ce6c34a2ce..98b027127e92b 100644 --- a/frontend/src/scenes/onboarding/OnboardingReverseProxy.tsx +++ b/frontend/src/scenes/onboarding/OnboardingReverseProxy.tsx @@ -46,12 +46,7 @@ const proxyDocs = [ export const OnboardingReverseProxy = ({ stepKey }: { stepKey: OnboardingStepKey }): JSX.Element => { return ( - +

        A reverse proxy allows you to send events to PostHog Cloud using your own domain.

        diff --git a/frontend/src/scenes/onboarding/onboardingLogic.tsx b/frontend/src/scenes/onboarding/onboardingLogic.tsx index 4c8b35542109e..b53b14afc6b62 100644 --- a/frontend/src/scenes/onboarding/onboardingLogic.tsx +++ b/frontend/src/scenes/onboarding/onboardingLogic.tsx @@ -316,9 +316,9 @@ export const onboardingLogic = kea([ actionToUrl(({ values }) => ({ setStepKey: ({ stepKey }) => { if (stepKey) { - return [`/onboarding/${values.productKey}`, { step: stepKey }] + return [`/onboarding/${values.productKey}`, { ...router.values.searchParams, step: stepKey }] } else { - return [`/onboarding/${values.productKey}`] + return [`/onboarding/${values.productKey}`, router.values.searchParams] } }, goToNextStep: () => { @@ -327,9 +327,12 @@ export const onboardingLogic = kea([ ) const nextStep = values.allOnboardingSteps[currentStepIndex + 1] if (nextStep) { - return [`/onboarding/${values.productKey}`, { step: nextStep.props.stepKey }] + return [ + `/onboarding/${values.productKey}`, + { ...router.values.searchParams, step: nextStep.props.stepKey }, + ] } else { - return [`/onboarding/${values.productKey}`] + return [`/onboarding/${values.productKey}`, router.values.searchParams] } }, goToPreviousStep: () => { @@ -338,9 +341,12 @@ export const onboardingLogic = kea([ ) const previousStep = values.allOnboardingSteps[currentStepIndex - 1] if (previousStep) { - return [`/onboarding/${values.productKey}`, { step: previousStep.props.stepKey }] + return [ + `/onboarding/${values.productKey}`, + { ...router.values.searchParams, step: previousStep.props.stepKey }, + ] } else { - return [`/onboarding/${values.productKey}`] + return [`/onboarding/${values.productKey}`, router.values.searchParams] } }, updateCurrentTeamSuccess(val) { diff --git a/frontend/src/scenes/onboarding/sdks/SDKs.tsx b/frontend/src/scenes/onboarding/sdks/SDKs.tsx index 33555a1f17ca9..610caee92ee4f 100644 --- a/frontend/src/scenes/onboarding/sdks/SDKs.tsx +++ b/frontend/src/scenes/onboarding/sdks/SDKs.tsx @@ -118,6 +118,7 @@ export function SDKs({ {sdks?.map((sdk) => ( setSelectedSDK(sdk) : undefined} fullWidth diff --git a/frontend/src/scenes/onboarding/sdks/product-analytics/android.tsx b/frontend/src/scenes/onboarding/sdks/product-analytics/android.tsx index 365f8685a8a74..b6a1fb3c9520f 100644 --- a/frontend/src/scenes/onboarding/sdks/product-analytics/android.tsx +++ b/frontend/src/scenes/onboarding/sdks/product-analytics/android.tsx @@ -1,4 +1,12 @@ import { CodeSnippet, Language } from 'lib/components/CodeSnippet' +import { FlaggedFeature } from 'lib/components/FlaggedFeature' +import { FEATURE_FLAGS } from 'lib/constants' +import { LemonTag } from 'lib/lemon-ui/LemonTag' +import { Link } from 'lib/lemon-ui/Link' +import { OnboardingStepKey } from 'scenes/onboarding/onboardingLogic' +import { urls } from 'scenes/urls' + +import { SDKKey } from '~/types' import { SDKInstallAndroidInstructions } from '../sdk-install-instructions' @@ -6,12 +14,31 @@ function AndroidCaptureSnippet(): JSX.Element { return {`PostHog.capture(event = "test-event")`} } +function AdvertiseAndroidReplay(): JSX.Element { + return ( +

        +

        + Session Replay for Android NEW +

        +
        + Session replay is now in beta for Android.{' '} + + Learn how to set it up + +
        +
        + ) +} + export function ProductAnalyticsAndroidInstructions(): JSX.Element { return ( <>

        Send an Event

        + + + ) } diff --git a/frontend/src/scenes/onboarding/sdks/sdk-install-instructions/android.tsx b/frontend/src/scenes/onboarding/sdks/sdk-install-instructions/android.tsx index ff740be34f4fd..103a87f183508 100644 --- a/frontend/src/scenes/onboarding/sdks/sdk-install-instructions/android.tsx +++ b/frontend/src/scenes/onboarding/sdks/sdk-install-instructions/android.tsx @@ -1,8 +1,14 @@ +import { Link } from '@posthog/lemon-ui' import { useValues } from 'kea' import { CodeSnippet, Language } from 'lib/components/CodeSnippet' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { apiHostOrigin } from 'lib/utils/apiHost' import { teamLogic } from 'scenes/teamLogic' +export interface AndroidSetupProps { + includeReplay?: boolean +} + function AndroidInstallSnippet(): JSX.Element { return ( @@ -13,7 +19,7 @@ function AndroidInstallSnippet(): JSX.Element { ) } -function AndroidSetupSnippet(): JSX.Element { +function AndroidSetupSnippet({ includeReplay }: AndroidSetupProps): JSX.Element { const { currentTeam } = useValues(teamLogic) return ( @@ -33,6 +39,18 @@ function AndroidSetupSnippet(): JSX.Element { apiKey = POSTHOG_API_KEY, host = POSTHOG_HOST ) + ${ + includeReplay + ? ` + // check https://posthog.com/docs/session-replay/mobile#installation + // for more config and to learn about how we capture sessions on mobile + // and what to expect + config.sessionReplay = true + // choose whether to mask images or text + config.sessionReplayConfig.maskAllImages = false + config.sessionReplayConfig.maskAllTextInputs = true` + : '' + } // Setup PostHog with the given Context and Config PostHogAndroid.setup(this, config) @@ -41,13 +59,24 @@ function AndroidSetupSnippet(): JSX.Element { ) } -export function SDKInstallAndroidInstructions(): JSX.Element { +export function SDKInstallAndroidInstructions(props: AndroidSetupProps): JSX.Element { return ( <> + {props.includeReplay ? ( + + 🚧 NOTE: Mobile recording is + currently in beta. We are keen to gather as much feedback as possible so if you try this out please + let us know. You can send feedback via the{' '} + + in-app support panel + {' '} + or one of our other support options. + + ) : null}

             <h3>Install</h3>
             <AndroidInstallSnippet />
             <h3>Configure</h3>
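// ---------------------------------------------------------------------------
// The AndroidSetupSnippet hunk above builds one Kotlin snippet string and
// splices the session-replay block in only when includeReplay is set. A
// standalone sketch of that composition, assuming only what the hunk shows
// (PostHogAndroidConfig / PostHogAndroid.setup belong to the rendered Kotlin
// snippet, not to this frontend code):
function androidSetupSnippet(includeReplay: boolean): string {
    const replayBlock = includeReplay
        ? `
    // check https://posthog.com/docs/session-replay/mobile#installation
    config.sessionReplay = true
    config.sessionReplayConfig.maskAllImages = false
    config.sessionReplayConfig.maskAllTextInputs = true`
        : ''
    return `val config = PostHogAndroidConfig(apiKey = POSTHOG_API_KEY, host = POSTHOG_HOST)${replayBlock}
PostHogAndroid.setup(this, config)`
}
// With includeReplay unset, product-analytics onboarding keeps the short,
// replay-free snippet; the session-replay variant presumably passes the prop.
// ---------------------------------------------------------------------------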
        - + ) } diff --git a/frontend/src/scenes/onboarding/sdks/sdksLogic.tsx b/frontend/src/scenes/onboarding/sdks/sdksLogic.tsx index df4a13d8adaf1..a46984ee8f897 100644 --- a/frontend/src/scenes/onboarding/sdks/sdksLogic.tsx +++ b/frontend/src/scenes/onboarding/sdks/sdksLogic.tsx @@ -1,5 +1,6 @@ import { actions, afterMount, connect, events, kea, listeners, path, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' +import { urlToAction } from 'kea-router' import api from 'lib/api' import { LemonSelectOptions } from 'lib/lemon-ui/LemonSelect/LemonSelect' @@ -11,7 +12,7 @@ import { onboardingLogic } from '../onboardingLogic' import { allSDKs } from './allSDKs' import type { sdksLogicType } from './sdksLogicType' -/* +/* To add SDK instructions for your product: 1. If needed, add a new ProductKey enum value in ~/types.ts 2. Create a folder in this directory for your product @@ -118,14 +119,16 @@ export const sdksLogic = kea([ loadSnippetEvents: async () => { const query: HogQLQuery = { kind: NodeKind.HogQLQuery, - query: hogql`SELECT properties.$lib_version AS lib_version, max(timestamp) AS latest_timestamp, count(lib_version) as count - FROM events - WHERE timestamp >= now() - INTERVAL 3 DAY - AND timestamp <= now() - AND properties.$lib = 'web' - GROUP BY lib_version - ORDER BY latest_timestamp DESC - limit 10`, + query: hogql`SELECT properties.$lib_version AS lib_version, + max(timestamp) AS latest_timestamp, + count(lib_version) as count + FROM events + WHERE timestamp >= now() - INTERVAL 3 DAY + AND timestamp <= now() + AND properties.$lib = 'web' + GROUP BY lib_version + ORDER BY latest_timestamp DESC + limit 10`, } const res = await api.query(query) @@ -188,4 +191,12 @@ export const sdksLogic = kea([ afterMount(({ actions }) => { actions.loadSnippetEvents() }), + urlToAction(({ actions }) => ({ + '/onboarding/:productKey': (_productKey, { sdk }) => { + const matchedSDK = allSDKs.find((s) => s.key === sdk) + if (matchedSDK) { + actions.setSelectedSDK(matchedSDK) + } + }, + })), ]) diff --git a/frontend/src/scenes/onboarding/sdks/session-replay/SessionReplaySDKInstructions.tsx b/frontend/src/scenes/onboarding/sdks/session-replay/SessionReplaySDKInstructions.tsx index 7e43a06b7faba..16db14dbd1d85 100644 --- a/frontend/src/scenes/onboarding/sdks/session-replay/SessionReplaySDKInstructions.tsx +++ b/frontend/src/scenes/onboarding/sdks/session-replay/SessionReplaySDKInstructions.tsx @@ -7,4 +7,6 @@ export const SessionReplaySDKInstructions: SDKInstructionsMap = { [SDKKey.HTML_SNIPPET]: HTMLSnippetInstructions, [SDKKey.NEXT_JS]: NextJSInstructions, [SDKKey.REACT]: ReactInstructions, + // added by feature flag in Onboarding.tsx until released + //[SDKKey.ANDROID]: AndroidInstructions, } diff --git a/frontend/src/scenes/onboarding/sdks/session-replay/android.tsx b/frontend/src/scenes/onboarding/sdks/session-replay/android.tsx new file mode 100644 index 0000000000000..4afb1dc91ce60 --- /dev/null +++ b/frontend/src/scenes/onboarding/sdks/session-replay/android.tsx @@ -0,0 +1,11 @@ +import { SDKInstallAndroidInstructions } from '../sdk-install-instructions' +import { SessionReplayFinalSteps } from '../shared-snippets' + +export function AndroidInstructions(): JSX.Element { + return ( + <> + + + + ) +} diff --git a/frontend/src/scenes/onboarding/sdks/session-replay/index.tsx b/frontend/src/scenes/onboarding/sdks/session-replay/index.tsx index bee13a5ce58bb..1ef01349747b4 100644 --- a/frontend/src/scenes/onboarding/sdks/session-replay/index.tsx +++ 
b/frontend/src/scenes/onboarding/sdks/session-replay/index.tsx @@ -1,3 +1,4 @@ +export * from './android' export * from './html-snippet' export * from './js-web' export * from './next-js' diff --git a/frontend/src/scenes/paths/pathsDataLogic.test.ts b/frontend/src/scenes/paths/pathsDataLogic.test.ts index 99e97de3b031f..e22ec58c79aae 100644 --- a/frontend/src/scenes/paths/pathsDataLogic.test.ts +++ b/frontend/src/scenes/paths/pathsDataLogic.test.ts @@ -1,6 +1,7 @@ import { expectLogic } from 'kea-test-utils' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { pathsDataLogic } from 'scenes/paths/pathsDataLogic' +import { teamLogic } from 'scenes/teamLogic' import { initKeaTests } from '~/test/init' import { InsightLogicProps, InsightType, PathType } from '~/types' @@ -25,6 +26,7 @@ async function initPathsDataLogic(): Promise { describe('pathsDataLogic', () => { beforeEach(async () => { initKeaTests(false) + teamLogic.mount() await initPathsDataLogic() }) diff --git a/frontend/src/scenes/paths/pathsDataLogic.ts b/frontend/src/scenes/paths/pathsDataLogic.ts index 750490efc64f4..67a2c3b7e9dd9 100644 --- a/frontend/src/scenes/paths/pathsDataLogic.ts +++ b/frontend/src/scenes/paths/pathsDataLogic.ts @@ -111,7 +111,8 @@ export const pathsDataLogic = kea([ ], hogQLInsightsPathsFlagEnabled: [ (s) => [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_PATHS], + (featureFlags) => + !!(featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_PATHS]), ], }), diff --git a/frontend/src/scenes/retention/retentionModalLogic.ts b/frontend/src/scenes/retention/retentionModalLogic.ts index 9aba029d17c2c..3310d730a1699 100644 --- a/frontend/src/scenes/retention/retentionModalLogic.ts +++ b/frontend/src/scenes/retention/retentionModalLogic.ts @@ -67,7 +67,11 @@ export const retentionModalLogic = kea([ exploreUrl: [ (s) => [s.actorsQuery, s.featureFlags], (actorsQuery, featureFlags): string | null => { - if (!actorsQuery || !featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION]) { + if ( + !actorsQuery || + (!featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS] && + !featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION]) + ) { return null } const query: DataTableNode = { diff --git a/frontend/src/scenes/retention/retentionPeopleLogic.ts b/frontend/src/scenes/retention/retentionPeopleLogic.ts index 35daa1f0bd176..7bfd81aafe341 100644 --- a/frontend/src/scenes/retention/retentionPeopleLogic.ts +++ b/frontend/src/scenes/retention/retentionPeopleLogic.ts @@ -18,7 +18,10 @@ import type { retentionPeopleLogicType } from './retentionPeopleLogicType' const DEFAULT_RETENTION_LOGIC_KEY = 'default_retention_key' const hogQLInsightsRetentionFlagEnabled = (): boolean => - Boolean(featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION]) + Boolean( + featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS] || + featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS_RETENTION] + ) export const retentionPeopleLogic = kea([ props({} as InsightLogicProps), diff --git a/frontend/src/scenes/sceneLogic.ts b/frontend/src/scenes/sceneLogic.ts index 998726e131ec5..77b24b6407325 100644 --- a/frontend/src/scenes/sceneLogic.ts +++ b/frontend/src/scenes/sceneLogic.ts @@ -202,7 +202,7 @@ export const sceneLogic = kea([ !teamLogic.values.currentTeam.is_demo && !removeProjectIdIfPresent(location.pathname).startsWith(urls.onboarding('')) && 
!removeProjectIdIfPresent(location.pathname).startsWith(urls.products()) && - !removeProjectIdIfPresent(location.pathname).startsWith(urls.settings()) + !removeProjectIdIfPresent(location.pathname).startsWith('/settings') ) { const allProductUrls = Object.values(productUrlMapping).flat() if ( diff --git a/frontend/src/scenes/session-recordings/errors/SessionRecordingErrors.tsx b/frontend/src/scenes/session-recordings/errors/SessionRecordingErrors.tsx index 56eac1213d91a..cb6ef67cc5b3d 100644 --- a/frontend/src/scenes/session-recordings/errors/SessionRecordingErrors.tsx +++ b/frontend/src/scenes/session-recordings/errors/SessionRecordingErrors.tsx @@ -1,7 +1,9 @@ import { IconFeatures } from '@posthog/icons' -import { LemonButton, LemonTable, LemonTabs, Spinner } from '@posthog/lemon-ui' +import { LemonButton, LemonTable, LemonTabs } from '@posthog/lemon-ui' +import { captureException } from '@sentry/react' import { useActions, useValues } from 'kea' import { JSONViewer } from 'lib/components/JSONViewer' +import { Sparkline } from 'lib/lemon-ui/Sparkline' import { useState } from 'react' import { urls } from 'scenes/urls' @@ -14,13 +16,9 @@ const MAX_TITLE_LENGTH = 75 export function SessionRecordingErrors(): JSX.Element { const { openSessionPlayer } = useActions(sessionPlayerModalLogic) const { errors, errorsLoading } = useValues(sessionRecordingErrorsLogic) - const { loadErrorClusters } = useActions(sessionRecordingErrorsLogic) + const { loadErrorClusters, createPlaylist } = useActions(sessionRecordingErrorsLogic) - if (errorsLoading) { - return - } - - if (!errors) { + if (!errors && !errorsLoading) { return ( } onClick={() => loadErrorClusters()}> Automagically find errors @@ -36,7 +34,7 @@ export function SessionRecordingErrors(): JSX.Element { title: 'Error', dataIndex: 'cluster', render: (_, cluster) => { - const displayTitle = parseTitle(cluster.sample.error) + const displayTitle = parseTitle(cluster.sample) return (
        {displayTitle} @@ -45,6 +43,17 @@ export function SessionRecordingErrors(): JSX.Element { }, width: '50%', }, + { + title: '', + render: (_, cluster) => { + return ( + + ) + }, + }, { title: 'Occurrences', dataIndex: 'occurrences', @@ -68,23 +77,41 @@ export function SessionRecordingErrors(): JSX.Element { title: 'Actions', render: function Render(_, cluster) { return ( - { - e.preventDefault() - openSessionPlayer({ id: cluster.sample.session_id }) - }} - className="p-2 whitespace-nowrap" - type="primary" - > - Watch example - +
        + { + e.preventDefault() + openSessionPlayer({ id: cluster.session_ids[0] }) + }} + className="whitespace-nowrap" + type="primary" + > + Watch example + + { + createPlaylist( + `Examples of '${parseTitle(cluster.sample)}'`, + cluster.session_ids + ) + }} + className="whitespace-nowrap" + type="secondary" + tooltip="Create a playlist of recordings containing this issue" + > + Create playlist + +
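// ---------------------------------------------------------------------------
// The "Create playlist" button above dispatches createPlaylist(name,
// session_ids); the sessionRecordingErrorsLogic hunk below handles it in a
// kea listener. A standalone sketch of that flow, with the api surface
// reduced to hypothetical stand-ins:
interface PlaylistApi {
    createPlaylist: (name: string) => Promise<{ short_id: string } | null>
    addRecordingToPlaylist: (playlistShortId: string, sessionId: string) => Promise<void>
}

async function createPlaylistFromCluster(
    api: PlaylistApi,
    name: string,
    sessionIds: string[]
): Promise<string | null> {
    const playlist = await api.createPlaylist(name)
    if (!playlist) {
        return null
    }
    // Mirror the listener: only the first 10 sample sessions are attached
    const samples = sessionIds.slice(0, 10)
    await Promise.all(samples.map((id) => api.addRecordingToPlaylist(playlist.short_id, id)))
    return playlist.short_id // the real listener then routes to urls.replayPlaylist(short_id)
}
// ---------------------------------------------------------------------------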
        ) }, }, ]} - dataSource={errors} - expandable={{ expandedRowRender: (cluster) => }} + loading={errorsLoading} + dataSource={errors || []} + expandable={{ + expandedRowRender: (cluster) => , + }} /> @@ -136,5 +163,20 @@ function parseTitle(error: string): string { input = error } - return input.split('\n')[0].trim().substring(0, MAX_TITLE_LENGTH) + if (!input) { + return error + } + + try { + // TRICKY - after json parsing we might not have a string, + // since the JSON parser will helpfully convert to other types too e.g. have seen objects here + if (typeof input !== 'string') { + input = JSON.stringify(input) + } + + return input.split('\n')[0].trim().substring(0, MAX_TITLE_LENGTH) || error + } catch (e) { + captureException(e, { extra: { error }, tags: { feature: 'replay/error-clustering' } }) + return error + } } diff --git a/frontend/src/scenes/session-recordings/errors/sessionRecordingErrorsLogic.ts b/frontend/src/scenes/session-recordings/errors/sessionRecordingErrorsLogic.ts index 45b887fd33cbb..49de62c7bf5c4 100644 --- a/frontend/src/scenes/session-recordings/errors/sessionRecordingErrorsLogic.ts +++ b/frontend/src/scenes/session-recordings/errors/sessionRecordingErrorsLogic.ts @@ -1,13 +1,19 @@ -import { afterMount, kea, path } from 'kea' +import { actions, afterMount, kea, listeners, path } from 'kea' import { loaders } from 'kea-loaders' +import { router } from 'kea-router' import api from 'lib/api' +import { urls } from 'scenes/urls' import { ErrorClusterResponse } from '~/types' +import { createPlaylist } from '../playlist/playlistUtils' import type { sessionRecordingErrorsLogicType } from './sessionRecordingErrorsLogicType' export const sessionRecordingErrorsLogic = kea([ path(['scenes', 'session-recordings', 'detail', 'sessionRecordingErrorsLogic']), + actions({ + createPlaylist: (name: string, sessionIds: string[]) => ({ name, sessionIds }), + }), loaders(() => ({ errors: [ null as ErrorClusterResponse, @@ -19,7 +25,19 @@ export const sessionRecordingErrorsLogic = kea( }, ], })), + listeners(() => ({ + createPlaylist: async ({ name, sessionIds }) => { + const playlist = await createPlaylist({ name: name }) + if (playlist) { + const samples = sessionIds.slice(0, 10) + await Promise.all( + samples.map((sessionId) => api.recordings.addRecordingToPlaylist(playlist.short_id, sessionId)) + ) + router.actions.push(urls.replayPlaylist(playlist.short_id)) + } + }, + })), afterMount(({ actions }) => { actions.loadErrorClusters(false) }), diff --git a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts index fb01f15b9a0cc..b6f547603114b 100644 --- a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts +++ b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackLogic.ts @@ -3,7 +3,6 @@ import { eventWithTime } from '@rrweb/types' import { BuiltLogic, connect, kea, listeners, path, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import { beforeUnload } from 'kea-router' -import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { uuid } from 'lib/utils' @@ -178,11 +177,7 @@ export const sessionRecordingFilePlaybackLogic = kea diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx 
b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx index 93b272ede828b..bf3fd911e604d 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx @@ -174,6 +174,7 @@ export function PlayerInspectorControls({ onClose }: { onClose: () => void }): J
        setTimestampMode(timestampMode === 'absolute' ? 'relative' : 'absolute')} tooltipPlacement="left" @@ -191,14 +192,15 @@ export function PlayerInspectorControls({ onClose }: { onClose: () => void }): J { - // If the user has syncScrolling on but it is paused due to interacting with the Inspector, we want to resume it + // If the user has syncScrolling on, but it is paused due to interacting with the Inspector, we want to resume it if (syncScroll && syncScrollingPaused) { setSyncScrollPaused(false) } else { - // Otherwise we are just toggling the settting + // Otherwise we are just toggling the setting setSyncScroll(!syncScroll) } }} diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx index 8442cdd4a28aa..5741c225a66a0 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx @@ -1,7 +1,8 @@ -import { LemonButton, LemonDivider, LemonTabs, LemonTag, LemonTagType, Link } from '@posthog/lemon-ui' +import { LemonButton, LemonDivider, LemonTabs, LemonTag, LemonTagType } from '@posthog/lemon-ui' import clsx from 'clsx' import { CodeSnippet, Language } from 'lib/components/CodeSnippet' import { Dayjs, dayjs } from 'lib/dayjs' +import { Link } from 'lib/lemon-ui/Link' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { humanFriendlyMilliseconds, humanizeBytes, isURL } from 'lib/utils' import { Fragment, useState } from 'react' diff --git a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts index 236cc3b5b8dc5..4f0bf12fc81cd 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts @@ -2,6 +2,7 @@ import { expectLogic } from 'kea-test-utils' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { playerInspectorLogic } from 'scenes/session-recordings/player/inspector/playerInspectorLogic' +import { useMocks } from '~/mocks/jest' import { initKeaTests } from '~/test/init' const playerLogicProps = { sessionRecordingId: '1', playerKey: 'playlist' } @@ -10,6 +11,11 @@ describe('playerInspectorLogic', () => { let logic: ReturnType beforeEach(() => { + useMocks({ + get: { + 'api/projects/:team_id/session_recordings/1/': {}, + }, + }) initKeaTests() featureFlagLogic.mount() logic = playerInspectorLogic(playerLogicProps) diff --git a/frontend/src/scenes/session-recordings/player/rrweb/index.ts b/frontend/src/scenes/session-recordings/player/rrweb/index.ts index 13efdc344b9cc..2739d32adfeca 100644 --- a/frontend/src/scenes/session-recordings/player/rrweb/index.ts +++ b/frontend/src/scenes/session-recordings/player/rrweb/index.ts @@ -1,5 +1,8 @@ import { playerConfig, ReplayPlugin } from 'rrweb/typings/types' +export const PLACEHOLDER_SVG_DATA_IMAGE_URL = + 'url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg==");' + const PROXY_URL = 
'https://replay.ph-proxy.com' as const export const CorsPlugin: ReplayPlugin & { @@ -65,7 +68,5 @@ export const CorsPlugin: ReplayPlugin & { export const COMMON_REPLAYER_CONFIG: Partial = { triggerFocus: false, - insertStyleRules: [ - `.ph-no-capture { background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg=="); }`, - ], + insertStyleRules: [`.ph-no-capture { background-image: ${PLACEHOLDER_SVG_DATA_IMAGE_URL} }`], } diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index 8d630aa10acbb..0aff48444a7e9 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -67,9 +67,10 @@ function isRecordingSnapshot(x: unknown): x is RecordingSnapshot { export const parseEncodedSnapshots = async ( items: (RecordingSnapshot | EncodedRecordingSnapshot | string)[], sessionId: string, - withMobileTransformer: boolean + // this is only kept so that we can export the untransformed data for debugging + withMobileTransformer: boolean = true ): Promise => { - if (!postHogEEModule && withMobileTransformer) { + if (!postHogEEModule) { postHogEEModule = await posthogEE() } const lineCount = items.length @@ -239,11 +240,7 @@ async function processEncodedResponse( let untransformed: RecordingSnapshot[] | null = null const transformed = deduplicateSnapshots( - await parseEncodedSnapshots( - encodedResponse, - props.sessionRecordingId, - !!featureFlags[FEATURE_FLAGS.SESSION_REPLAY_MOBILE] - ), + await parseEncodedSnapshots(encodedResponse, props.sessionRecordingId), existingData?.snapshots ?? [] ) diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx index 22dd5881ae378..3c2a9842c0dbc 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx @@ -6,7 +6,6 @@ import clsx from 'clsx' import { range } from 'd3' import { BindLogic, useActions, useValues } from 'kea' import { EmptyMessage } from 'lib/components/EmptyMessage/EmptyMessage' -import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { FEATURE_FLAGS } from 'lib/constants' import { useResizeBreakpoints } from 'lib/hooks/useResizeObserver' @@ -58,12 +57,10 @@ function UnusableEventsWarning(props: { unusableEventsInFilter: string[] }): JSX the Web SDK - - ,{' '} - - the Android SDK - - + ,{' '} + + the Android SDK +

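// ---------------------------------------------------------------------------
// Context for the rrweb hunk earlier in this patch: the inline base64 SVG is
// hoisted into PLACEHOLDER_SVG_DATA_IMAGE_URL so the `.ph-no-capture` masking
// rule is built from a named constant instead of a long inline literal. A
// sketch of the resulting rule assembly (the data URL is elided here):
const PLACEHOLDER_SVG: string = 'url("data:image/svg+xml;base64,...")'
const insertStyleRules: string[] = [`.ph-no-capture { background-image: ${PLACEHOLDER_SVG} }`]
// Any element the SDK marked as .ph-no-capture replays as the placeholder
// tile rather than its real contents.
// ---------------------------------------------------------------------------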
        ) diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx index bda13153d9ccb..961b0b54fa246 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistTroubleshooting.tsx @@ -1,7 +1,5 @@ import { LemonDivider, Link } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' -import { FlaggedFeature } from 'lib/components/FlaggedFeature' -import { FEATURE_FLAGS } from 'lib/constants' import { playerSettingsLogic } from '../player/playerSettingsLogic' import { sessionRecordingsPlaylistLogic } from './sessionRecordingsPlaylistLogic' @@ -21,9 +19,7 @@ export const SessionRecordingsPlaylistTroubleshooting = (): JSX.Element => {

-            <FlaggedFeature flag={FEATURE_FLAGS.SESSION_REPLAY_MOBILE}>
-                <h3>All recording sources:</h3>
-            </FlaggedFeature>
+            <h3>All recording sources:</h3>
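// ---------------------------------------------------------------------------
// Besides removing FlaggedFeature gates here, this patch widens every HogQL
// insight gate from one flag to "umbrella OR specific", e.g. HOGQL_INSIGHTS ||
// HOGQL_INSIGHTS_PATHS in pathsDataLogic. A minimal sketch of that pattern
// (the string keys are illustrative, not the real FEATURE_FLAGS constants):
type FeatureFlags = Record<string, boolean | string | undefined>

function hogqlInsightEnabled(flags: FeatureFlags, specificFlag: string): boolean {
    // The umbrella flag turns all HogQL insight types on at once, while the
    // specific flag still allows rolling out a single insight type alone.
    return !!(flags['hogql-insights'] || flags[specificFlag])
}
// ---------------------------------------------------------------------------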
          {otherRecordings.length > 0 && hideViewedRecordings && (
        • Viewed recordings hidden.{' '} @@ -42,10 +38,8 @@ export const SessionRecordingsPlaylistTroubleshooting = (): JSX.Element => { They are outside the retention period
                 •{' '}
-                <FlaggedFeature flag={FEATURE_FLAGS.SESSION_REPLAY_MOBILE}>
-                    <Link /* link target elided in the source */>
-                        Web recordings
-                    </Link>
-                </FlaggedFeature>
+                <Link /* link target elided in the source */>
+                    Web recordings
+                </Link>
        • { ], }, + 'api/projects/:team/property_definitions/seen_together': { $pageview: true }, + '/api/projects/:team/session_recordings': (req) => { const { searchParams } = req.url if ( diff --git a/frontend/src/scenes/settings/SettingsMap.tsx b/frontend/src/scenes/settings/SettingsMap.tsx index c24e8930cf3ec..7951844c7f581 100644 --- a/frontend/src/scenes/settings/SettingsMap.tsx +++ b/frontend/src/scenes/settings/SettingsMap.tsx @@ -298,7 +298,6 @@ export const SettingsMap: SettingSection[] = [ level: 'organization', id: 'organization-rbac', title: 'Role-based access', - flag: 'ROLE_BASED_ACCESS', settings: [ { id: 'organization-rbac', diff --git a/frontend/src/scenes/settings/organization/Permissions/permissionsLogic.tsx b/frontend/src/scenes/settings/organization/Permissions/permissionsLogic.tsx index 654b5b05caaa5..666f2b700bd7a 100644 --- a/frontend/src/scenes/settings/organization/Permissions/permissionsLogic.tsx +++ b/frontend/src/scenes/settings/organization/Permissions/permissionsLogic.tsx @@ -2,8 +2,6 @@ import { lemonToast } from '@posthog/lemon-ui' import { actions, afterMount, connect, kea, listeners, path, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { AccessLevel, OrganizationResourcePermissionType, Resource, RoleType } from '~/types' @@ -34,7 +32,7 @@ const ResourceAccessLevelMapping: Record = { export const permissionsLogic = kea([ path(['scenes', 'organization', 'Settings', 'Permissions', 'permissionsLogic']), connect({ - values: [featureFlagLogic, ['featureFlags'], rolesLogic, ['roles']], + values: [rolesLogic, ['roles']], actions: [rolesLogic, ['updateRole']], }), actions({ @@ -123,10 +121,6 @@ export const permissionsLogic = kea([ ) }, ], - shouldShowPermissionsTable: [ - (s) => [s.featureFlags], - (featureFlags) => featureFlags[FEATURE_FLAGS.ROLE_BASED_ACCESS] === 'control', - ], resourceRolesAccess: [ (s) => [s.allPermissions, s.roles], (permissions, roles) => { diff --git a/frontend/src/scenes/settings/user/UserDetails.tsx b/frontend/src/scenes/settings/user/UserDetails.tsx index 587b54161b91d..b9ffad870537d 100644 --- a/frontend/src/scenes/settings/user/UserDetails.tsx +++ b/frontend/src/scenes/settings/user/UserDetails.tsx @@ -19,16 +19,25 @@ export function UserDetails(): JSX.Element { maxWidth: '28rem', }} > - + - + + + + + @@ -274,10 +271,8 @@ export function SurveyResult({ disableEventsTable }: { disableEventsTable?: bool <>
                     <div>{surveyNPSScore}</div>
                     <div>Total NPS Score</div>
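// ---------------------------------------------------------------------------
// The SurveyResult hunk here renders surveyNPSScore. For context, a sketch of
// the standard NPS formula (the textbook definition, not code from this
// patch): promoters rate 9-10, detractors 0-6, and the score is the
// percentage-point difference between the two groups.
function npsScore(ratings: number[]): number {
    if (ratings.length === 0) {
        return 0
    }
    const promoters = ratings.filter((r) => r >= 9).length
    const detractors = ratings.filter((r) => r <= 6).length
    return ((promoters - detractors) / ratings.length) * 100
}
// npsScore([10, 9, 8, 6, 10]) -> (3 - 1) / 5 * 100 = 40
// ---------------------------------------------------------------------------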
          - {featureFlags[FEATURE_FLAGS.SURVEYS_RESULTS_VISUALIZATIONS] && ( - // TODO: rework this to show nps scores over time - - )} + {/* TODO: rework this to show nps scores over time */} + )} ([ path(['scenes', 'surveys', 'surveysLogic']), connect(() => ({ - values: [ - userLogic, - ['hasAvailableFeature'], - teamLogic, - ['currentTeam', 'currentTeamLoading'], - featureFlagLogic, - ['featureFlags'], - ], + values: [userLogic, ['hasAvailableFeature'], teamLogic, ['currentTeam', 'currentTeamLoading']], actions: [teamLogic, ['loadCurrentTeam']], })), actions({ @@ -151,21 +142,17 @@ export const surveysLogic = kea([ }, ], ], - payGateFlagOn: [(s) => [s.featureFlags], (featureFlags) => featureFlags[FEATURE_FLAGS.SURVEYS_PAYGATES]], surveysStylingAvailable: [ - (s) => [s.hasAvailableFeature, s.payGateFlagOn], - (hasAvailableFeature, payGateFlagOn) => - !payGateFlagOn || (payGateFlagOn && hasAvailableFeature(AvailableFeature.SURVEYS_STYLING)), + (s) => [s.hasAvailableFeature], + (hasAvailableFeature) => hasAvailableFeature(AvailableFeature.SURVEYS_STYLING), ], surveysHTMLAvailable: [ - (s) => [s.hasAvailableFeature, s.payGateFlagOn], - (hasAvailableFeature, payGateFlagOn) => - !payGateFlagOn || (payGateFlagOn && hasAvailableFeature(AvailableFeature.SURVEYS_TEXT_HTML)), + (s) => [s.hasAvailableFeature], + (hasAvailableFeature) => hasAvailableFeature(AvailableFeature.SURVEYS_TEXT_HTML), ], surveysMultipleQuestionsAvailable: [ - (s) => [s.hasAvailableFeature, s.payGateFlagOn], - (hasAvailableFeature, payGateFlagOn) => - !payGateFlagOn || (payGateFlagOn && hasAvailableFeature(AvailableFeature.SURVEYS_MULTIPLE_QUESTIONS)), + (s) => [s.hasAvailableFeature], + (hasAvailableFeature) => hasAvailableFeature(AvailableFeature.SURVEYS_MULTIPLE_QUESTIONS), ], showSurveysDisabledBanner: [ (s) => [s.currentTeam, s.currentTeamLoading, s.surveys], diff --git a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx index 5240c916f2218..0fd8a8b8cc931 100644 --- a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx +++ b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx @@ -369,6 +369,7 @@ export function ActorRow({ actor, onOpenRecording, propertiesTimelineFilter }: A onClick={onOpenRecordingClick} sideIcon={matchedRecordings.length === 1 ? : null} type="secondary" + status={matchedRecordings.length > 1 ? 'alt' : undefined} size="small" > {matchedRecordings.length > 1 ? 
`${matchedRecordings.length} recordings` : 'View recording'} diff --git a/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts b/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts index 70958019ed94c..f2666ba43f58f 100644 --- a/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts +++ b/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts @@ -1,5 +1,6 @@ import { expectLogic } from 'kea-test-utils' +import { useMocks } from '~/mocks/jest' import { initKeaTests } from '~/test/init' import { personsModalLogic } from './personsModalLogic' @@ -8,6 +9,11 @@ describe('personsModalLogic', () => { let logic: ReturnType beforeEach(() => { + useMocks({ + get: { + 'api/projects/:team_id/persons/trends': {}, + }, + }) initKeaTests() }) diff --git a/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx b/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx index 03743f0c4dd29..54c935d954b99 100644 --- a/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx +++ b/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx @@ -76,7 +76,7 @@ export function ActionsHorizontalBar({ showPersonsModal = true }: ChartParams): }, [indexedResults]) const isTrendsQueryWithFeatureFlagOn = - featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS] && + (featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS]) && isTrends && query && isInsightVizNode(query) && @@ -115,6 +115,10 @@ export function ActionsHorizontalBar({ showPersonsModal = true }: ChartParams): kind: NodeKind.InsightActorsQuery, source: query.source, }, + additionalSelect: { + value_at_data_point: 'event_count', + matched_recordings: 'matched_recordings', + }, }) } else if (selectedUrl) { openPersonsModal({ diff --git a/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx b/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx index b930608587e22..f2484752ce038 100644 --- a/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx +++ b/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx @@ -55,21 +55,21 @@ export function ActionsLineGraph({ [] const isLifecycleQueryWithFeatureFlagOn = - featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE] && + (featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE]) && isLifecycle && query && isInsightVizNode(query) && isLifecycleQuery(query.source) const isStickinessQueryWithFeatureFlagOn = - featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_STICKINESS] && + (featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_STICKINESS]) && isStickiness && query && isInsightVizNode(query) && isStickinessQuery(query.source) const isTrendsQueryWithFeatureFlagOn = - featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS] && + (featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS]) && isTrends && query && isInsightVizNode(query) && @@ -152,6 +152,10 @@ export function ActionsLineGraph({ breakdown: dataset.breakdown_value, compare: dataset.compare_label, }, + additionalSelect: { + value_at_data_point: 'event_count', + matched_recordings: 'matched_recordings', + }, }) } else { const datasetUrls = urlsForDatasets( diff --git a/frontend/src/scenes/trends/viz/ActionsPie.tsx b/frontend/src/scenes/trends/viz/ActionsPie.tsx index 86804b11e487f..36ff7166bc574 100644 --- a/frontend/src/scenes/trends/viz/ActionsPie.tsx +++ b/frontend/src/scenes/trends/viz/ActionsPie.tsx @@ -57,7 +57,7 @@ export function ActionsPie({ const showAggregation = 
!pieChartVizOptions?.hideAggregation const isTrendsQueryWithFeatureFlagOn = - featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS] && + (featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS] || featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS]) && isTrends && query && isInsightVizNode(query) && @@ -118,6 +118,10 @@ export function ActionsPie({ kind: NodeKind.InsightActorsQuery, source: query.source, }, + additionalSelect: { + value_at_data_point: 'event_count', + matched_recordings: 'matched_recordings', + }, }) } else if (selectedUrl) { openPersonsModal({ diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 943ebbaa80bb2..13262c0eb3656 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -16,6 +16,7 @@ import { PipelineTab, ProductKey, ReplayTabs, + SDKKey, } from '~/types' import { OnboardingStepKey } from './onboarding/onboardingLogic' @@ -175,8 +176,10 @@ export const urls = { `/verify_email${userUuid ? `/${userUuid}` : ''}${token ? `/${token}` : ''}`, inviteSignup: (id: string): string => `/signup/${id}`, products: (): string => '/products', - onboarding: (productKey: string, stepKey?: OnboardingStepKey): string => - `/onboarding/${productKey}${stepKey ? '?step=' + stepKey : ''}`, + onboarding: (productKey: string, stepKey?: OnboardingStepKey, sdk?: SDKKey): string => + `/onboarding/${productKey}${stepKey ? '?step=' + stepKey : ''}${ + sdk && stepKey ? '&sdk=' + sdk : sdk ? '?sdk=' + sdk : '' + }`, // Cloud only organizationBilling: (products?: ProductKey[]): string => `/organization/billing${products && products.length ? `?products=${products.join(',')}` : ''}`, diff --git a/frontend/src/scenes/userLogic.ts b/frontend/src/scenes/userLogic.ts index 1dfd3390dc87f..8e8ccbe98ce5d 100644 --- a/frontend/src/scenes/userLogic.ts +++ b/frontend/src/scenes/userLogic.ts @@ -39,7 +39,7 @@ export const userLogic = kea([ : null, email: !email ? 'You need to have an email.' - : first_name.length > 254 + : email.length > 254 ? 'This email is too long. Please keep it under 255 characters.' : null, }), @@ -98,10 +98,12 @@ export const userLogic = kea([ { loadUserSuccess: (_, { user }) => ({ first_name: user?.first_name || '', + last_name: user?.last_name || '', email: user?.email || '', }), updateUserSuccess: (_, { user }) => ({ first_name: user?.first_name || '', + last_name: user?.last_name || '', email: user?.email || '', }), }, diff --git a/frontend/src/toolbar/actions/ActionsListView.tsx b/frontend/src/toolbar/actions/ActionsListView.tsx index 6054c0bc65241..fb999ed600949 100644 --- a/frontend/src/toolbar/actions/ActionsListView.tsx +++ b/frontend/src/toolbar/actions/ActionsListView.tsx @@ -1,7 +1,6 @@ import { Link } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { Spinner } from 'lib/lemon-ui/Spinner' -import { useEffect } from 'react' import { actionsLogic } from '~/toolbar/actions/actionsLogic' import { actionsTabLogic } from '~/toolbar/actions/actionsTabLogic' @@ -13,13 +12,8 @@ interface ActionsListViewProps { export function ActionsListView({ actions }: ActionsListViewProps): JSX.Element { const { allActionsLoading, searchTerm } = useValues(actionsLogic) - const { getActions } = useActions(actionsLogic) const { selectAction } = useActions(actionsTabLogic) - useEffect(() => { - getActions() - }, []) - return (
          {actions.length ? ( diff --git a/frontend/src/toolbar/actions/ActionsToolbarMenu.tsx b/frontend/src/toolbar/actions/ActionsToolbarMenu.tsx index ecbb3800c9260..87ff37b719ee8 100644 --- a/frontend/src/toolbar/actions/ActionsToolbarMenu.tsx +++ b/frontend/src/toolbar/actions/ActionsToolbarMenu.tsx @@ -5,6 +5,7 @@ import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonInput } from 'lib/lemon-ui/LemonInput' import { Link } from 'lib/lemon-ui/Link' import { Spinner } from 'lib/lemon-ui/Spinner' +import { useEffect } from 'react' import { urls } from 'scenes/urls' import { ActionsEditingToolbarMenu } from '~/toolbar/actions/ActionsEditingToolbarMenu' @@ -16,13 +17,17 @@ import { toolbarConfigLogic } from '~/toolbar/toolbarConfigLogic' const ActionsListToolbarMenu = (): JSX.Element => { const { searchTerm } = useValues(actionsLogic) - const { setSearchTerm } = useActions(actionsLogic) + const { setSearchTerm, getActions } = useActions(actionsLogic) const { newAction } = useActions(actionsTabLogic) const { allActions, sortedActions, allActionsLoading } = useValues(actionsLogic) const { apiURL } = useValues(toolbarConfigLogic) + useEffect(() => { + getActions() + }, []) + return ( diff --git a/frontend/src/toolbar/bar/Toolbar.scss b/frontend/src/toolbar/bar/Toolbar.scss index ff83b7c9d922a..ec98167332fbd 100644 --- a/frontend/src/toolbar/bar/Toolbar.scss +++ b/frontend/src/toolbar/bar/Toolbar.scss @@ -144,8 +144,4 @@ transform: var(--toolbar-translate) scale(0); } } - - &--unauthenticated { - width: calc(5rem + 1px); // Account for border - } } diff --git a/frontend/src/toolbar/bar/Toolbar.tsx b/frontend/src/toolbar/bar/Toolbar.tsx index f255ccc26800d..8ed031f904d86 100644 --- a/frontend/src/toolbar/bar/Toolbar.tsx +++ b/frontend/src/toolbar/bar/Toolbar.tsx @@ -77,15 +77,18 @@ function MoreMenu(): JSX.Element { } maxContentWidth={true} > - } title="More options" /> + + + ) } -export function ToolbarInfoMenu(): JSX.Element { +export function ToolbarInfoMenu(): JSX.Element | null { const ref = useRef(null) const { visibleMenu, isDragging, menuProperties, minimized, isBlurred } = useValues(toolbarLogic) const { setMenu } = useActions(toolbarLogic) + const { isAuthenticated } = useValues(toolbarConfigLogic) const content = minimized ? null : visibleMenu === 'flags' ? ( @@ -102,6 +105,10 @@ export function ToolbarInfoMenu(): JSX.Element { return () => setMenu(null) }, [ref.current]) + if (!isAuthenticated) { + return null + } + return (
          } onClick={isAuthenticated ? toggleMinimized : authenticate} title={isAuthenticated ? 'Minimize' : 'Authenticate the PostHog Toolbar'} titleMinimized={isAuthenticated ? 'Expand the toolbar' : 'Authenticate the PostHog Toolbar'} - /> + > + + {isAuthenticated ? ( <> - } menuId="inspect" /> - } menuId="heatmap" /> - } menuId="actions" /> - } menuId="flags" title="Feature flags" /> + + + + + + + + + + + + - ) : null} + ) : ( + + Authenticate + + )}
          diff --git a/frontend/src/toolbar/bar/ToolbarButton.scss b/frontend/src/toolbar/bar/ToolbarButton.scss index 0d0bb666fa540..ce480f3fbab35 100644 --- a/frontend/src/toolbar/bar/ToolbarButton.scss +++ b/frontend/src/toolbar/bar/ToolbarButton.scss @@ -15,6 +15,8 @@ width: 2rem; height: 2rem; min-height: var(--lemon-button-height); + margin: 0.25rem; + font-weight: 600; color: var(--muted-alt); appearance: none !important; // Important as this gets overridden by Ant styles... cursor: pointer; @@ -43,4 +45,13 @@ } } } + + &--flex { + flex-grow: 1; + width: auto; + + button { + width: 100%; + } + } } diff --git a/frontend/src/toolbar/bar/ToolbarButton.tsx b/frontend/src/toolbar/bar/ToolbarButton.tsx index add0e5f2580ce..f5dfc755be469 100644 --- a/frontend/src/toolbar/bar/ToolbarButton.tsx +++ b/frontend/src/toolbar/bar/ToolbarButton.tsx @@ -10,17 +10,18 @@ import React from 'react' import { MenuState, toolbarLogic } from './toolbarLogic' export type ToolbarButtonProps = { - icon: React.ReactElement | null + children: React.ReactNode onClick?: () => void title?: string titleMinimized?: JSX.Element | string menuId?: MenuState + flex?: boolean } export const ToolbarButton: FunctionComponent = React.forwardRef< HTMLDivElement, ToolbarButtonProps ->(({ icon, title, onClick, titleMinimized, menuId, ...props }, ref): JSX.Element => { +>(({ children, title, onClick, titleMinimized, menuId, flex, ...props }, ref): JSX.Element => { const { visibleMenu, minimized, isDragging } = useValues(toolbarLogic) const { setVisibleMenu } = useActions(toolbarLogic) @@ -54,9 +55,13 @@ export const ToolbarButton: FunctionComponent = React.forwar } const theButton = ( -
          +
          ) diff --git a/frontend/src/toolbar/elements/heatmapLogic.ts b/frontend/src/toolbar/elements/heatmapLogic.ts index 8bb148b4f6adc..45ec141630420 100644 --- a/frontend/src/toolbar/elements/heatmapLogic.ts +++ b/frontend/src/toolbar/elements/heatmapLogic.ts @@ -24,7 +24,7 @@ const emptyElementsStatsPages: PaginatedResponse = { export const heatmapLogic = kea([ path(['toolbar', 'elements', 'heatmapLogic']), connect({ - values: [toolbarConfigLogic, ['apiURL'], currentPageLogic, ['href', 'wildcardHref']], + values: [currentPageLogic, ['href', 'wildcardHref']], actions: [currentPageLogic, ['setHref', 'setWildcardHref']], }), actions({ @@ -112,11 +112,8 @@ export const heatmapLogic = kea([ ], ...values.heatmapFilter, } - const includeEventsParams = '&include=$autocapture&include=$rageclick' - defaultUrl = `${values.apiURL}/api/element/stats/${encodeParams( - { ...params, paginate_response: true }, - '?' - )}${includeEventsParams}` + + defaultUrl = `/api/element/stats/${encodeParams({ ...params, paginate_response: true }, '?')}` } // toolbar fetch collapses queryparams but this URL has multiple with the same name @@ -124,7 +121,7 @@ export const heatmapLogic = kea([ url || defaultUrl, 'GET', undefined, - url ? 'use-as-provided' : 'only-add-token' + url ? 'use-as-provided' : 'full' ) if (response.status === 403) { diff --git a/frontend/src/toolbar/flags/flagsToolbarLogic.ts b/frontend/src/toolbar/flags/flagsToolbarLogic.ts index 60c1f568f45a6..e1f41cabca73c 100644 --- a/frontend/src/toolbar/flags/flagsToolbarLogic.ts +++ b/frontend/src/toolbar/flags/flagsToolbarLogic.ts @@ -40,11 +40,6 @@ export const flagsToolbarLogic = kea([ `/api/projects/@current/feature_flags/my_flags${encodeParams(params, '?')}` ) - if (response.status >= 400) { - toolbarConfigLogic.actions.tokenExpired() - return [] - } - breakpoint() if (!response.ok) { return [] diff --git a/frontend/src/toolbar/toolbarConfigLogic.ts b/frontend/src/toolbar/toolbarConfigLogic.ts index 1b4638b8f39f8..c0650d7552d55 100644 --- a/frontend/src/toolbar/toolbarConfigLogic.ts +++ b/frontend/src/toolbar/toolbarConfigLogic.ts @@ -6,7 +6,7 @@ import { posthog } from '~/toolbar/posthog' import { ToolbarProps } from '~/types' import type { toolbarConfigLogicType } from './toolbarConfigLogicType' -import { clearSessionToolbarToken } from './utils' +import { LOCALSTORAGE_KEY } from './utils' export const toolbarConfigLogic = kea([ path(['toolbar', 'toolbarConfigLogic']), @@ -19,48 +19,68 @@ export const toolbarConfigLogic = kea([ clearUserIntent: true, showButton: true, hideButton: true, + persistConfig: true, }), reducers(({ props }) => ({ - rawApiURL: [props.apiURL as string], - rawJsURL: [(props.jsURL || props.apiURL) as string], - temporaryToken: [props.temporaryToken || null, { logout: () => null, tokenExpired: () => null }], + // TRICKY: We cache a copy of the props. This allows us to connect the logic without passing the props in - only the top level caller has to do this. + props: [props], + temporaryToken: [ + props.temporaryToken || null, + { logout: () => null, tokenExpired: () => null, authenticate: () => null }, + ], actionId: [props.actionId || null, { logout: () => null, clearUserIntent: () => null }], userIntent: [props.userIntent || null, { logout: () => null, clearUserIntent: () => null }], - source: [props.source || null, { logout: () => null }], buttonVisible: [true, { showButton: () => true, hideButton: () => false, logout: () => false }], - dataAttributes: [props.dataAttributes || []], - posthog: [props.posthog ?? 
null], })), selectors({ - apiURL: [(s) => [s.rawApiURL], (apiURL) => `${apiURL.endsWith('/') ? apiURL.replace(/\/+$/, '') : apiURL}`], + posthog: [(s) => [s.props], (props) => props.posthog ?? null], + apiURL: [ + (s) => [s.props], + (props: ToolbarProps) => `${props.apiURL?.endsWith('/') ? props.apiURL.replace(/\/+$/, '') : props.apiURL}`, + ], jsURL: [ - (s) => [s.rawJsURL, s.apiURL], - (rawJsURL, apiUrl) => - `${rawJsURL ? (rawJsURL.endsWith('/') ? rawJsURL.replace(/\/+$/, '') : rawJsURL) : apiUrl}`, + (s) => [s.props, s.apiURL], + (props: ToolbarProps, apiUrl) => + `${props.jsURL ? (props.jsURL.endsWith('/') ? props.jsURL.replace(/\/+$/, '') : props.jsURL) : apiUrl}`, ], + dataAttributes: [(s) => [s.props], (props): string[] => props.dataAttributes ?? []], isAuthenticated: [(s) => [s.temporaryToken], (temporaryToken) => !!temporaryToken], }), - listeners(({ values }) => ({ + listeners(({ values, actions }) => ({ authenticate: () => { posthog.capture('toolbar authenticate', { is_authenticated: values.isAuthenticated }) const encodedUrl = encodeURIComponent(window.location.href) + actions.persistConfig() window.location.href = `${values.apiURL}/authorize_and_redirect/?redirect=${encodedUrl}` - clearSessionToolbarToken() }, logout: () => { posthog.capture('toolbar logout') - clearSessionToolbarToken() + localStorage.removeItem(LOCALSTORAGE_KEY) }, tokenExpired: () => { posthog.capture('toolbar token expired') console.warn('PostHog Toolbar API token expired. Clearing session.') - if (values.source !== 'localstorage') { + if (values.props.source !== 'localstorage') { lemonToast.error('PostHog Toolbar API token expired.') } - clearSessionToolbarToken() + actions.persistConfig() + }, + + persistConfig: () => { + // Most params we don't change, only those that we may have modified during the session + const toolbarParams: ToolbarProps = { + ...values.props, + temporaryToken: values.temporaryToken ?? undefined, + actionId: values.actionId ?? undefined, + userIntent: values.userIntent ?? 
undefined, + posthog: undefined, + featureFlags: undefined, + } + + localStorage.setItem(LOCALSTORAGE_KEY, JSON.stringify(toolbarParams)) }, })), @@ -83,12 +103,10 @@ export async function toolbarFetch( /* allows caller to control how the provided URL is altered before use if "full" then the payload and URL are taken apart and reconstructed - if "only-add-token" the URL is unchanged, the payload is not used - but the temporary token is added to the URL if "use-as-provided" then the URL is used as-is, and the payload is not used this is because the heatmapLogic needs more control over how the query parameters are constructed */ - urlConstruction: 'full' | 'only-add-token' | 'use-as-provided' = 'full' + urlConstruction: 'full' | 'use-as-provided' = 'full' ): Promise { const temporaryToken = toolbarConfigLogic.findMounted()?.values.temporaryToken const apiURL = toolbarConfigLogic.findMounted()?.values.apiURL @@ -96,8 +114,6 @@ export async function toolbarFetch( let fullUrl: string if (urlConstruction === 'use-as-provided') { fullUrl = url - } else if (urlConstruction === 'only-add-token') { - fullUrl = `${url}&temporary_token=${temporaryToken}` } else { const { pathname, searchParams } = combineUrl(url) const params = { ...searchParams, temporary_token: temporaryToken } @@ -119,10 +135,12 @@ export async function toolbarFetch( }) if (response.status === 403) { const responseData = await response.json() - // Do not try to authenticate if the user has no project access altogether - if (responseData.detail !== "You don't have access to the project.") { + if (responseData.detail === "You don't have access to the project.") { toolbarConfigLogic.actions.authenticate() } } + if (response.status == 401) { + toolbarConfigLogic.actions.tokenExpired() + } return response } diff --git a/frontend/src/toolbar/toolbarLogic.ts b/frontend/src/toolbar/toolbarLogic.ts deleted file mode 100644 index d5183a6734f20..0000000000000 --- a/frontend/src/toolbar/toolbarLogic.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { actions, afterMount, kea, listeners, path, props, reducers, selectors } from 'kea' -import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' - -import { actionsTabLogic } from '~/toolbar/actions/actionsTabLogic' -import { posthog } from '~/toolbar/posthog' -import { clearSessionToolbarToken } from '~/toolbar/utils' -import { ToolbarProps } from '~/types' - -import type { toolbarLogicType } from './toolbarLogicType' - -export const toolbarLogic = kea([ - path(['toolbar', 'toolbarLogic']), - props({} as ToolbarProps), - - actions({ - authenticate: true, - logout: true, - tokenExpired: true, - processUserIntent: true, - clearUserIntent: true, - showButton: true, - hideButton: true, - }), - - reducers(({ props }) => ({ - rawApiURL: [props.apiURL as string], - rawJsURL: [(props.jsURL || props.apiURL) as string], - temporaryToken: [props.temporaryToken || null, { logout: () => null, tokenExpired: () => null }], - actionId: [props.actionId || null, { logout: () => null, clearUserIntent: () => null }], - userIntent: [props.userIntent || null, { logout: () => null, clearUserIntent: () => null }], - source: [props.source || null, { logout: () => null }], - buttonVisible: [true, { showButton: () => true, hideButton: () => false, logout: () => false }], - dataAttributes: [props.dataAttributes || []], - posthog: [props.posthog ?? null], - })), - - selectors({ - apiURL: [(s) => [s.rawApiURL], (apiURL) => `${apiURL.endsWith('/') ? 
apiURL.replace(/\/+$/, '') : apiURL}`], - jsURL: [ - (s) => [s.rawJsURL, s.apiURL], - (rawJsURL, apiUrl) => - `${rawJsURL ? (rawJsURL.endsWith('/') ? rawJsURL.replace(/\/+$/, '') : rawJsURL) : apiUrl}`, - ], - isAuthenticated: [(s) => [s.temporaryToken], (temporaryToken) => !!temporaryToken], - }), - - listeners(({ values, props }) => ({ - authenticate: () => { - posthog.capture('toolbar authenticate', { is_authenticated: values.isAuthenticated }) - const encodedUrl = encodeURIComponent(window.location.href) - window.location.href = `${values.apiURL}/authorize_and_redirect/?redirect=${encodedUrl}` - clearSessionToolbarToken() - }, - logout: () => { - posthog.capture('toolbar logout') - clearSessionToolbarToken() - }, - tokenExpired: () => { - posthog.capture('toolbar token expired') - console.warn('PostHog Toolbar API token expired. Clearing session.') - if (values.source !== 'localstorage') { - lemonToast.error('PostHog Toolbar API token expired.') - } - clearSessionToolbarToken() - }, - processUserIntent: () => { - if (props.userIntent === 'add-action' || props.userIntent === 'edit-action') { - actionsTabLogic.actions.showButtonActions() - // the right view will next be opened in `actionsTabLogic` on `getActionsSuccess` - } - }, - })), - - afterMount(({ props, actions, values }) => { - if (props.instrument) { - const distinctId = props.distinctId - if (distinctId) { - posthog.identify(distinctId, props.userEmail ? { email: props.userEmail } : {}) - } - posthog.optIn() - } - if (props.userIntent) { - actions.processUserIntent() - } - posthog.capture('toolbar loaded', { is_authenticated: values.isAuthenticated }) - }), -]) diff --git a/frontend/src/toolbar/utils.ts b/frontend/src/toolbar/utils.ts index 20441d2d385a9..9e2e67b56231f 100644 --- a/frontend/src/toolbar/utils.ts +++ b/frontend/src/toolbar/utils.ts @@ -7,6 +7,7 @@ import { ActionStepForm, BoxColor, ElementRect } from '~/toolbar/types' import { ActionStepType, StringMatching } from '~/types' export const TOOLBAR_ID = '__POSTHOG_TOOLBAR__' +export const LOCALSTORAGE_KEY = '_postHogToolbarParams' export function getSafeText(el: HTMLElement): string { if (!el.childNodes || !el.childNodes.length) { @@ -328,14 +329,6 @@ export function stepToDatabaseFormat(step: ActionStepForm): ActionStepType { } } -export function clearSessionToolbarToken(): void { - window.sessionStorage?.removeItem('_postHogToolbarParams') - window.localStorage?.removeItem('_postHogToolbarParams') - // keeping these around for compatibility, should be eventually removed - window.sessionStorage?.removeItem('_postHogEditorParams') - window.localStorage?.removeItem('_postHogEditorParams') -} - export function getRectForElement(element: HTMLElement): ElementRect { const elements = [elementToAreaRect(element)] diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 2aaaba4809a87..a583fe34c26d2 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -672,7 +672,6 @@ export interface EventPropertyFilter extends BasePropertyFilter { export interface PersonPropertyFilter extends BasePropertyFilter { type: PropertyFilterType.Person operator: PropertyOperator - table?: string } export interface DataWarehousePropertyFilter extends BasePropertyFilter { @@ -903,8 +902,10 @@ export interface SessionRecordingsResponse { export type ErrorCluster = { cluster: number - sample: { session_id: string; error: string } + sample: string occurrences: number + session_ids: string[] + sparkline: Record unique_sessions: number viewed: number } @@ -1836,14 +1837,19 @@ export 
interface DatedAnnotationType extends Omit export enum ChartDisplayType { ActionsLineGraph = 'ActionsLineGraph', - ActionsLineGraphCumulative = 'ActionsLineGraphCumulative', + ActionsBar = 'ActionsBar', ActionsAreaGraph = 'ActionsAreaGraph', - ActionsTable = 'ActionsTable', + ActionsLineGraphCumulative = 'ActionsLineGraphCumulative', + BoldNumber = 'BoldNumber', ActionsPie = 'ActionsPie', - ActionsBar = 'ActionsBar', ActionsBarValue = 'ActionsBarValue', + ActionsTable = 'ActionsTable', WorldMap = 'WorldMap', - BoldNumber = 'BoldNumber', +} +export enum ChartDisplayCategory { + TimeSeries = 'TimeSeries', + CumulativeTimeSeries = 'CumulativeTimeSeries', + TotalValue = 'TotalValue', } export type BreakdownType = 'cohort' | 'person' | 'event' | 'group' | 'session' | 'hogql' | 'data_warehouse' @@ -2806,9 +2812,6 @@ export interface PropertyDefinition { verified?: boolean verified_at?: string verified_by?: string - - // For Data warehouse person properties - table?: string } export enum PropertyDefinitionState { @@ -2821,10 +2824,9 @@ export enum PropertyDefinitionState { export type Definition = EventDefinition | PropertyDefinition export interface PersonProperty { - id: string | number + id: number name: string count: number - table?: string } export type GroupTypeIndex = 0 | 1 | 2 | 3 | 4 @@ -3040,7 +3042,7 @@ interface BreadcrumbBase { /** Symbol, e.g. a lettermark or a profile picture. */ symbol?: React.ReactNode /** Whether to show a custom popover */ - popover?: Pick + popover?: Pick } interface LinkBreadcrumb extends BreadcrumbBase { /** Path to link to. */ @@ -3542,7 +3544,7 @@ export interface DataWarehouseViewLink { created_at?: string | null } -export type ExternalDataSourceType = 'Stripe' | 'Hubspot' | 'Postgres' +export type ExternalDataSourceType = 'Stripe' | 'Hubspot' | 'Postgres' | 'Zendesk' export interface ExternalDataSourceCreatePayload { source_type: ExternalDataSourceType @@ -3595,6 +3597,7 @@ export type BatchExportDestinationS3 = { encryption: string | null kms_key_id: string | null endpoint_url: string | null + file_format: string } } diff --git a/hogql_parser/HogQLParser.cpp b/hogql_parser/HogQLParser.cpp index f93862eec5c0d..de9386c795f09 100644 --- a/hogql_parser/HogQLParser.cpp +++ b/hogql_parser/HogQLParser.cpp @@ -127,7 +127,7 @@ void hogqlparserParserInitialize() { } ); static const int32_t serializedATNSegment[] = { - 4,1,242,979,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2, + 4,1,242,972,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2, 7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7, 14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21,7, 21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26,2,27,7,27,2,28,7, @@ -171,319 +171,317 @@ void hogqlparserParserInitialize() { 5,36,535,8,36,10,36,12,36,538,9,36,1,37,1,37,1,37,3,37,543,8,37,1,37, 1,37,1,37,1,37,1,37,4,37,550,8,37,11,37,12,37,551,1,37,1,37,3,37,556, 8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37, - 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37, - 1,37,1,37,3,37,587,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37, - 1,37,1,37,1,37,1,37,1,37,1,37,3,37,604,8,37,1,37,1,37,1,37,1,37,1,37, - 1,37,1,37,1,37,1,37,1,37,3,37,616,8,37,1,37,1,37,1,37,1,37,1,37,1,37, - 1,37,1,37,3,37,626,8,37,1,37,3,37,629,8,37,1,37,1,37,3,37,633,8,37,1, - 37,3,37,636,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1, - 37,3,37,649,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1, - 
37,1,37,1,37,1,37,1,37,3,37,666,8,37,1,37,1,37,3,37,670,8,37,1,37,1,37, - 1,37,1,37,3,37,676,8,37,1,37,1,37,1,37,1,37,1,37,3,37,683,8,37,1,37,1, - 37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,695,8,37,1,37,1,37,3, - 37,699,8,37,1,37,3,37,702,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37, - 711,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37, - 3,37,725,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37, - 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37, - 3,37,752,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,761,8,37,5,37,763, - 8,37,10,37,12,37,766,9,37,1,38,1,38,1,38,5,38,771,8,38,10,38,12,38,774, - 9,38,1,39,1,39,3,39,778,8,39,1,40,1,40,1,40,1,40,5,40,784,8,40,10,40, - 12,40,787,9,40,1,40,1,40,1,40,1,40,1,40,5,40,794,8,40,10,40,12,40,797, - 9,40,3,40,799,8,40,1,40,1,40,1,40,1,41,1,41,1,41,5,41,807,8,41,10,41, - 12,41,810,9,41,1,41,1,41,1,41,1,41,1,41,1,41,5,41,818,8,41,10,41,12,41, - 821,9,41,1,41,1,41,3,41,825,8,41,1,41,1,41,1,41,1,41,1,41,3,41,832,8, - 41,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,3,42,845,8, - 42,1,43,1,43,1,43,5,43,850,8,43,10,43,12,43,853,9,43,1,44,1,44,1,44,1, - 44,1,44,1,44,1,44,1,44,1,44,1,44,3,44,865,8,44,1,45,1,45,1,45,1,45,3, - 45,871,8,45,1,45,3,45,874,8,45,1,46,1,46,1,46,5,46,879,8,46,10,46,12, - 46,882,9,46,1,47,1,47,1,47,1,47,1,47,1,47,1,47,1,47,1,47,3,47,893,8,47, - 1,47,1,47,1,47,1,47,3,47,899,8,47,5,47,901,8,47,10,47,12,47,904,9,47, - 1,48,1,48,1,48,3,48,909,8,48,1,48,1,48,1,49,1,49,1,49,3,49,916,8,49,1, - 49,1,49,1,50,1,50,1,50,5,50,923,8,50,10,50,12,50,926,9,50,1,51,1,51,1, - 52,1,52,1,52,1,52,1,52,1,52,3,52,936,8,52,3,52,938,8,52,1,53,3,53,941, - 8,53,1,53,1,53,1,53,1,53,1,53,1,53,3,53,949,8,53,1,54,1,54,1,54,3,54, - 954,8,54,1,55,1,55,1,56,1,56,1,57,1,57,1,58,1,58,3,58,964,8,58,1,59,1, - 59,1,59,3,59,969,8,59,1,60,1,60,1,60,1,60,1,61,1,61,1,61,1,61,1,61,0, - 3,36,74,94,62,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38, - 40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76,78,80,82,84, - 86,88,90,92,94,96,98,100,102,104,106,108,110,112,114,116,118,120,122, - 0,16,2,0,32,32,141,141,2,0,84,84,96,96,3,0,4,4,8,8,12,12,4,0,4,4,7,8, - 12,12,147,147,2,0,96,96,140,140,2,0,4,4,8,8,2,0,11,11,42,43,2,0,62,62, - 93,93,2,0,133,133,143,143,3,0,17,17,95,95,170,170,2,0,79,79,98,98,1,0, - 197,198,2,0,208,208,228,228,8,0,37,37,76,76,108,108,110,110,132,132,145, - 145,185,185,190,190,13,0,2,24,26,36,38,75,77,81,83,107,109,109,111,112, - 114,115,117,130,133,144,146,184,186,189,191,192,4,0,36,36,62,62,77,77, - 91,91,1107,0,127,1,0,0,0,2,131,1,0,0,0,4,146,1,0,0,0,6,149,1,0,0,0,8, - 198,1,0,0,0,10,201,1,0,0,0,12,207,1,0,0,0,14,211,1,0,0,0,16,217,1,0,0, - 0,18,235,1,0,0,0,20,238,1,0,0,0,22,241,1,0,0,0,24,251,1,0,0,0,26,254, - 1,0,0,0,28,258,1,0,0,0,30,291,1,0,0,0,32,293,1,0,0,0,34,296,1,0,0,0,36, - 311,1,0,0,0,38,373,1,0,0,0,40,378,1,0,0,0,42,389,1,0,0,0,44,391,1,0,0, - 0,46,397,1,0,0,0,48,405,1,0,0,0,50,423,1,0,0,0,52,425,1,0,0,0,54,433, - 1,0,0,0,56,438,1,0,0,0,58,446,1,0,0,0,60,450,1,0,0,0,62,454,1,0,0,0,64, - 463,1,0,0,0,66,477,1,0,0,0,68,479,1,0,0,0,70,529,1,0,0,0,72,531,1,0,0, - 0,74,669,1,0,0,0,76,767,1,0,0,0,78,777,1,0,0,0,80,798,1,0,0,0,82,831, - 1,0,0,0,84,844,1,0,0,0,86,846,1,0,0,0,88,864,1,0,0,0,90,873,1,0,0,0,92, - 875,1,0,0,0,94,892,1,0,0,0,96,905,1,0,0,0,98,915,1,0,0,0,100,919,1,0, - 0,0,102,927,1,0,0,0,104,937,1,0,0,0,106,940,1,0,0,0,108,953,1,0,0,0,110, - 955,1,0,0,0,112,957,1,0,0,0,114,959,1,0,0,0,116,963,1,0,0,0,118,968,1, - 
0,0,0,120,970,1,0,0,0,122,974,1,0,0,0,124,128,3,2,1,0,125,128,3,6,3,0, - 126,128,3,82,41,0,127,124,1,0,0,0,127,125,1,0,0,0,127,126,1,0,0,0,128, - 129,1,0,0,0,129,130,5,0,0,1,130,1,1,0,0,0,131,137,3,4,2,0,132,133,5,176, - 0,0,133,134,5,4,0,0,134,136,3,4,2,0,135,132,1,0,0,0,136,139,1,0,0,0,137, - 135,1,0,0,0,137,138,1,0,0,0,138,3,1,0,0,0,139,137,1,0,0,0,140,147,3,6, - 3,0,141,142,5,220,0,0,142,143,3,2,1,0,143,144,5,236,0,0,144,147,1,0,0, - 0,145,147,3,122,61,0,146,140,1,0,0,0,146,141,1,0,0,0,146,145,1,0,0,0, - 147,5,1,0,0,0,148,150,3,8,4,0,149,148,1,0,0,0,149,150,1,0,0,0,150,151, - 1,0,0,0,151,153,5,146,0,0,152,154,5,49,0,0,153,152,1,0,0,0,153,154,1, - 0,0,0,154,156,1,0,0,0,155,157,3,10,5,0,156,155,1,0,0,0,156,157,1,0,0, - 0,157,158,1,0,0,0,158,160,3,72,36,0,159,161,3,12,6,0,160,159,1,0,0,0, - 160,161,1,0,0,0,161,163,1,0,0,0,162,164,3,14,7,0,163,162,1,0,0,0,163, - 164,1,0,0,0,164,166,1,0,0,0,165,167,3,18,9,0,166,165,1,0,0,0,166,167, - 1,0,0,0,167,169,1,0,0,0,168,170,3,20,10,0,169,168,1,0,0,0,169,170,1,0, - 0,0,170,172,1,0,0,0,171,173,3,22,11,0,172,171,1,0,0,0,172,173,1,0,0,0, - 173,176,1,0,0,0,174,175,5,189,0,0,175,177,7,0,0,0,176,174,1,0,0,0,176, - 177,1,0,0,0,177,180,1,0,0,0,178,179,5,189,0,0,179,181,5,169,0,0,180,178, - 1,0,0,0,180,181,1,0,0,0,181,183,1,0,0,0,182,184,3,24,12,0,183,182,1,0, - 0,0,183,184,1,0,0,0,184,186,1,0,0,0,185,187,3,16,8,0,186,185,1,0,0,0, - 186,187,1,0,0,0,187,189,1,0,0,0,188,190,3,26,13,0,189,188,1,0,0,0,189, - 190,1,0,0,0,190,193,1,0,0,0,191,194,3,30,15,0,192,194,3,32,16,0,193,191, - 1,0,0,0,193,192,1,0,0,0,193,194,1,0,0,0,194,196,1,0,0,0,195,197,3,34, - 17,0,196,195,1,0,0,0,196,197,1,0,0,0,197,7,1,0,0,0,198,199,5,189,0,0, - 199,200,3,86,43,0,200,9,1,0,0,0,201,202,5,168,0,0,202,205,5,198,0,0,203, - 204,5,189,0,0,204,206,5,164,0,0,205,203,1,0,0,0,205,206,1,0,0,0,206,11, - 1,0,0,0,207,208,5,68,0,0,208,209,3,36,18,0,209,13,1,0,0,0,210,212,7,1, - 0,0,211,210,1,0,0,0,211,212,1,0,0,0,212,213,1,0,0,0,213,214,5,9,0,0,214, - 215,5,90,0,0,215,216,3,72,36,0,216,15,1,0,0,0,217,218,5,188,0,0,218,219, - 3,118,59,0,219,220,5,10,0,0,220,221,5,220,0,0,221,222,3,56,28,0,222,232, - 5,236,0,0,223,224,5,206,0,0,224,225,3,118,59,0,225,226,5,10,0,0,226,227, - 5,220,0,0,227,228,3,56,28,0,228,229,5,236,0,0,229,231,1,0,0,0,230,223, - 1,0,0,0,231,234,1,0,0,0,232,230,1,0,0,0,232,233,1,0,0,0,233,17,1,0,0, - 0,234,232,1,0,0,0,235,236,5,129,0,0,236,237,3,74,37,0,237,19,1,0,0,0, - 238,239,5,187,0,0,239,240,3,74,37,0,240,21,1,0,0,0,241,242,5,73,0,0,242, - 249,5,18,0,0,243,244,7,0,0,0,244,245,5,220,0,0,245,246,3,72,36,0,246, - 247,5,236,0,0,247,250,1,0,0,0,248,250,3,72,36,0,249,243,1,0,0,0,249,248, - 1,0,0,0,250,23,1,0,0,0,251,252,5,74,0,0,252,253,3,74,37,0,253,25,1,0, - 0,0,254,255,5,122,0,0,255,256,5,18,0,0,256,257,3,46,23,0,257,27,1,0,0, - 0,258,259,5,122,0,0,259,260,5,18,0,0,260,261,3,72,36,0,261,29,1,0,0,0, - 262,263,5,99,0,0,263,266,3,74,37,0,264,265,5,206,0,0,265,267,3,74,37, - 0,266,264,1,0,0,0,266,267,1,0,0,0,267,272,1,0,0,0,268,269,5,189,0,0,269, - 273,5,164,0,0,270,271,5,18,0,0,271,273,3,72,36,0,272,268,1,0,0,0,272, - 270,1,0,0,0,272,273,1,0,0,0,273,292,1,0,0,0,274,275,5,99,0,0,275,278, - 3,74,37,0,276,277,5,189,0,0,277,279,5,164,0,0,278,276,1,0,0,0,278,279, - 1,0,0,0,279,280,1,0,0,0,280,281,5,118,0,0,281,282,3,74,37,0,282,292,1, - 0,0,0,283,284,5,99,0,0,284,285,3,74,37,0,285,286,5,118,0,0,286,289,3, - 74,37,0,287,288,5,18,0,0,288,290,3,72,36,0,289,287,1,0,0,0,289,290,1, - 0,0,0,290,292,1,0,0,0,291,262,1,0,0,0,291,274,1,0,0,0,291,283,1,0,0,0, - 
292,31,1,0,0,0,293,294,5,118,0,0,294,295,3,74,37,0,295,33,1,0,0,0,296, - 297,5,150,0,0,297,298,3,52,26,0,298,35,1,0,0,0,299,300,6,18,-1,0,300, - 302,3,94,47,0,301,303,5,61,0,0,302,301,1,0,0,0,302,303,1,0,0,0,303,305, - 1,0,0,0,304,306,3,44,22,0,305,304,1,0,0,0,305,306,1,0,0,0,306,312,1,0, - 0,0,307,308,5,220,0,0,308,309,3,36,18,0,309,310,5,236,0,0,310,312,1,0, - 0,0,311,299,1,0,0,0,311,307,1,0,0,0,312,327,1,0,0,0,313,314,10,3,0,0, - 314,315,3,40,20,0,315,316,3,36,18,4,316,326,1,0,0,0,317,319,10,4,0,0, - 318,320,3,38,19,0,319,318,1,0,0,0,319,320,1,0,0,0,320,321,1,0,0,0,321, - 322,5,90,0,0,322,323,3,36,18,0,323,324,3,42,21,0,324,326,1,0,0,0,325, - 313,1,0,0,0,325,317,1,0,0,0,326,329,1,0,0,0,327,325,1,0,0,0,327,328,1, - 0,0,0,328,37,1,0,0,0,329,327,1,0,0,0,330,332,7,2,0,0,331,330,1,0,0,0, - 331,332,1,0,0,0,332,333,1,0,0,0,333,340,5,84,0,0,334,336,5,84,0,0,335, - 337,7,2,0,0,336,335,1,0,0,0,336,337,1,0,0,0,337,340,1,0,0,0,338,340,7, - 2,0,0,339,331,1,0,0,0,339,334,1,0,0,0,339,338,1,0,0,0,340,374,1,0,0,0, - 341,343,7,3,0,0,342,341,1,0,0,0,342,343,1,0,0,0,343,344,1,0,0,0,344,346, - 7,4,0,0,345,347,5,123,0,0,346,345,1,0,0,0,346,347,1,0,0,0,347,356,1,0, - 0,0,348,350,7,4,0,0,349,351,5,123,0,0,350,349,1,0,0,0,350,351,1,0,0,0, - 351,353,1,0,0,0,352,354,7,3,0,0,353,352,1,0,0,0,353,354,1,0,0,0,354,356, - 1,0,0,0,355,342,1,0,0,0,355,348,1,0,0,0,356,374,1,0,0,0,357,359,7,5,0, - 0,358,357,1,0,0,0,358,359,1,0,0,0,359,360,1,0,0,0,360,362,5,69,0,0,361, - 363,5,123,0,0,362,361,1,0,0,0,362,363,1,0,0,0,363,372,1,0,0,0,364,366, - 5,69,0,0,365,367,5,123,0,0,366,365,1,0,0,0,366,367,1,0,0,0,367,369,1, - 0,0,0,368,370,7,5,0,0,369,368,1,0,0,0,369,370,1,0,0,0,370,372,1,0,0,0, - 371,358,1,0,0,0,371,364,1,0,0,0,372,374,1,0,0,0,373,339,1,0,0,0,373,355, - 1,0,0,0,373,371,1,0,0,0,374,39,1,0,0,0,375,376,5,31,0,0,376,379,5,90, - 0,0,377,379,5,206,0,0,378,375,1,0,0,0,378,377,1,0,0,0,379,41,1,0,0,0, - 380,381,5,119,0,0,381,390,3,72,36,0,382,383,5,179,0,0,383,384,5,220,0, - 0,384,385,3,72,36,0,385,386,5,236,0,0,386,390,1,0,0,0,387,388,5,179,0, - 0,388,390,3,72,36,0,389,380,1,0,0,0,389,382,1,0,0,0,389,387,1,0,0,0,390, - 43,1,0,0,0,391,392,5,144,0,0,392,395,3,50,25,0,393,394,5,118,0,0,394, - 396,3,50,25,0,395,393,1,0,0,0,395,396,1,0,0,0,396,45,1,0,0,0,397,402, - 3,48,24,0,398,399,5,206,0,0,399,401,3,48,24,0,400,398,1,0,0,0,401,404, - 1,0,0,0,402,400,1,0,0,0,402,403,1,0,0,0,403,47,1,0,0,0,404,402,1,0,0, - 0,405,407,3,74,37,0,406,408,7,6,0,0,407,406,1,0,0,0,407,408,1,0,0,0,408, - 411,1,0,0,0,409,410,5,117,0,0,410,412,7,7,0,0,411,409,1,0,0,0,411,412, - 1,0,0,0,412,415,1,0,0,0,413,414,5,26,0,0,414,416,5,200,0,0,415,413,1, - 0,0,0,415,416,1,0,0,0,416,49,1,0,0,0,417,424,3,122,61,0,418,421,3,106, - 53,0,419,420,5,238,0,0,420,422,3,106,53,0,421,419,1,0,0,0,421,422,1,0, - 0,0,422,424,1,0,0,0,423,417,1,0,0,0,423,418,1,0,0,0,424,51,1,0,0,0,425, - 430,3,54,27,0,426,427,5,206,0,0,427,429,3,54,27,0,428,426,1,0,0,0,429, - 432,1,0,0,0,430,428,1,0,0,0,430,431,1,0,0,0,431,53,1,0,0,0,432,430,1, - 0,0,0,433,434,3,118,59,0,434,435,5,212,0,0,435,436,3,108,54,0,436,55, - 1,0,0,0,437,439,3,58,29,0,438,437,1,0,0,0,438,439,1,0,0,0,439,441,1,0, - 0,0,440,442,3,60,30,0,441,440,1,0,0,0,441,442,1,0,0,0,442,444,1,0,0,0, - 443,445,3,62,31,0,444,443,1,0,0,0,444,445,1,0,0,0,445,57,1,0,0,0,446, - 447,5,126,0,0,447,448,5,18,0,0,448,449,3,72,36,0,449,59,1,0,0,0,450,451, - 5,122,0,0,451,452,5,18,0,0,452,453,3,46,23,0,453,61,1,0,0,0,454,455,7, - 8,0,0,455,456,3,64,32,0,456,63,1,0,0,0,457,464,3,66,33,0,458,459,5,16, - 
0,0,459,460,3,66,33,0,460,461,5,6,0,0,461,462,3,66,33,0,462,464,1,0,0, - 0,463,457,1,0,0,0,463,458,1,0,0,0,464,65,1,0,0,0,465,466,5,33,0,0,466, - 478,5,142,0,0,467,468,5,175,0,0,468,478,5,128,0,0,469,470,5,175,0,0,470, - 478,5,64,0,0,471,472,3,106,53,0,472,473,5,128,0,0,473,478,1,0,0,0,474, - 475,3,106,53,0,475,476,5,64,0,0,476,478,1,0,0,0,477,465,1,0,0,0,477,467, - 1,0,0,0,477,469,1,0,0,0,477,471,1,0,0,0,477,474,1,0,0,0,478,67,1,0,0, - 0,479,480,3,74,37,0,480,481,5,0,0,1,481,69,1,0,0,0,482,530,3,118,59,0, - 483,484,3,118,59,0,484,485,5,220,0,0,485,486,3,118,59,0,486,493,3,70, - 35,0,487,488,5,206,0,0,488,489,3,118,59,0,489,490,3,70,35,0,490,492,1, - 0,0,0,491,487,1,0,0,0,492,495,1,0,0,0,493,491,1,0,0,0,493,494,1,0,0,0, - 494,496,1,0,0,0,495,493,1,0,0,0,496,497,5,236,0,0,497,530,1,0,0,0,498, - 499,3,118,59,0,499,500,5,220,0,0,500,505,3,120,60,0,501,502,5,206,0,0, - 502,504,3,120,60,0,503,501,1,0,0,0,504,507,1,0,0,0,505,503,1,0,0,0,505, - 506,1,0,0,0,506,508,1,0,0,0,507,505,1,0,0,0,508,509,5,236,0,0,509,530, - 1,0,0,0,510,511,3,118,59,0,511,512,5,220,0,0,512,517,3,70,35,0,513,514, - 5,206,0,0,514,516,3,70,35,0,515,513,1,0,0,0,516,519,1,0,0,0,517,515,1, - 0,0,0,517,518,1,0,0,0,518,520,1,0,0,0,519,517,1,0,0,0,520,521,5,236,0, - 0,521,530,1,0,0,0,522,523,3,118,59,0,523,525,5,220,0,0,524,526,3,72,36, - 0,525,524,1,0,0,0,525,526,1,0,0,0,526,527,1,0,0,0,527,528,5,236,0,0,528, - 530,1,0,0,0,529,482,1,0,0,0,529,483,1,0,0,0,529,498,1,0,0,0,529,510,1, - 0,0,0,529,522,1,0,0,0,530,71,1,0,0,0,531,536,3,74,37,0,532,533,5,206, - 0,0,533,535,3,74,37,0,534,532,1,0,0,0,535,538,1,0,0,0,536,534,1,0,0,0, - 536,537,1,0,0,0,537,73,1,0,0,0,538,536,1,0,0,0,539,540,6,37,-1,0,540, - 542,5,19,0,0,541,543,3,74,37,0,542,541,1,0,0,0,542,543,1,0,0,0,543,549, - 1,0,0,0,544,545,5,186,0,0,545,546,3,74,37,0,546,547,5,163,0,0,547,548, - 3,74,37,0,548,550,1,0,0,0,549,544,1,0,0,0,550,551,1,0,0,0,551,549,1,0, - 0,0,551,552,1,0,0,0,552,555,1,0,0,0,553,554,5,52,0,0,554,556,3,74,37, - 0,555,553,1,0,0,0,555,556,1,0,0,0,556,557,1,0,0,0,557,558,5,53,0,0,558, - 670,1,0,0,0,559,560,5,20,0,0,560,561,5,220,0,0,561,562,3,74,37,0,562, - 563,5,10,0,0,563,564,3,70,35,0,564,565,5,236,0,0,565,670,1,0,0,0,566, - 567,5,36,0,0,567,670,5,200,0,0,568,569,5,59,0,0,569,570,5,220,0,0,570, - 571,3,110,55,0,571,572,5,68,0,0,572,573,3,74,37,0,573,574,5,236,0,0,574, - 670,1,0,0,0,575,576,5,86,0,0,576,577,3,74,37,0,577,578,3,110,55,0,578, - 670,1,0,0,0,579,580,5,155,0,0,580,581,5,220,0,0,581,582,3,74,37,0,582, - 583,5,68,0,0,583,586,3,74,37,0,584,585,5,65,0,0,585,587,3,74,37,0,586, - 584,1,0,0,0,586,587,1,0,0,0,587,588,1,0,0,0,588,589,5,236,0,0,589,670, - 1,0,0,0,590,591,5,166,0,0,591,670,5,200,0,0,592,593,5,171,0,0,593,594, - 5,220,0,0,594,595,7,9,0,0,595,596,5,200,0,0,596,597,5,68,0,0,597,598, - 3,74,37,0,598,599,5,236,0,0,599,670,1,0,0,0,600,601,3,118,59,0,601,603, - 5,220,0,0,602,604,3,72,36,0,603,602,1,0,0,0,603,604,1,0,0,0,604,605,1, - 0,0,0,605,606,5,236,0,0,606,607,1,0,0,0,607,608,5,125,0,0,608,609,5,220, - 0,0,609,610,3,56,28,0,610,611,5,236,0,0,611,670,1,0,0,0,612,613,3,118, - 59,0,613,615,5,220,0,0,614,616,3,72,36,0,615,614,1,0,0,0,615,616,1,0, - 0,0,616,617,1,0,0,0,617,618,5,236,0,0,618,619,1,0,0,0,619,620,5,125,0, - 0,620,621,3,118,59,0,621,670,1,0,0,0,622,628,3,118,59,0,623,625,5,220, - 0,0,624,626,3,72,36,0,625,624,1,0,0,0,625,626,1,0,0,0,626,627,1,0,0,0, - 627,629,5,236,0,0,628,623,1,0,0,0,628,629,1,0,0,0,629,630,1,0,0,0,630, - 632,5,220,0,0,631,633,5,49,0,0,632,631,1,0,0,0,632,633,1,0,0,0,633,635, - 
1,0,0,0,634,636,3,76,38,0,635,634,1,0,0,0,635,636,1,0,0,0,636,637,1,0, - 0,0,637,638,5,236,0,0,638,670,1,0,0,0,639,670,3,82,41,0,640,670,3,108, - 54,0,641,642,5,208,0,0,642,670,3,74,37,18,643,644,5,115,0,0,644,670,3, - 74,37,12,645,646,3,98,49,0,646,647,5,210,0,0,647,649,1,0,0,0,648,645, - 1,0,0,0,648,649,1,0,0,0,649,650,1,0,0,0,650,670,5,202,0,0,651,652,5,220, - 0,0,652,653,3,2,1,0,653,654,5,236,0,0,654,670,1,0,0,0,655,656,5,220,0, - 0,656,657,3,74,37,0,657,658,5,236,0,0,658,670,1,0,0,0,659,660,5,220,0, - 0,660,661,3,72,36,0,661,662,5,236,0,0,662,670,1,0,0,0,663,665,5,219,0, - 0,664,666,3,72,36,0,665,664,1,0,0,0,665,666,1,0,0,0,666,667,1,0,0,0,667, - 670,5,235,0,0,668,670,3,90,45,0,669,539,1,0,0,0,669,559,1,0,0,0,669,566, - 1,0,0,0,669,568,1,0,0,0,669,575,1,0,0,0,669,579,1,0,0,0,669,590,1,0,0, - 0,669,592,1,0,0,0,669,600,1,0,0,0,669,612,1,0,0,0,669,622,1,0,0,0,669, - 639,1,0,0,0,669,640,1,0,0,0,669,641,1,0,0,0,669,643,1,0,0,0,669,648,1, - 0,0,0,669,651,1,0,0,0,669,655,1,0,0,0,669,659,1,0,0,0,669,663,1,0,0,0, - 669,668,1,0,0,0,670,764,1,0,0,0,671,675,10,17,0,0,672,676,5,202,0,0,673, - 676,5,238,0,0,674,676,5,227,0,0,675,672,1,0,0,0,675,673,1,0,0,0,675,674, - 1,0,0,0,676,677,1,0,0,0,677,763,3,74,37,18,678,682,10,16,0,0,679,683, - 5,228,0,0,680,683,5,208,0,0,681,683,5,207,0,0,682,679,1,0,0,0,682,680, - 1,0,0,0,682,681,1,0,0,0,683,684,1,0,0,0,684,763,3,74,37,17,685,710,10, - 15,0,0,686,711,5,211,0,0,687,711,5,212,0,0,688,711,5,223,0,0,689,711, - 5,221,0,0,690,711,5,222,0,0,691,711,5,213,0,0,692,711,5,214,0,0,693,695, - 5,115,0,0,694,693,1,0,0,0,694,695,1,0,0,0,695,696,1,0,0,0,696,698,5,80, - 0,0,697,699,5,25,0,0,698,697,1,0,0,0,698,699,1,0,0,0,699,711,1,0,0,0, - 700,702,5,115,0,0,701,700,1,0,0,0,701,702,1,0,0,0,702,703,1,0,0,0,703, - 711,7,10,0,0,704,711,5,232,0,0,705,711,5,233,0,0,706,711,5,225,0,0,707, - 711,5,216,0,0,708,711,5,217,0,0,709,711,5,224,0,0,710,686,1,0,0,0,710, - 687,1,0,0,0,710,688,1,0,0,0,710,689,1,0,0,0,710,690,1,0,0,0,710,691,1, - 0,0,0,710,692,1,0,0,0,710,694,1,0,0,0,710,701,1,0,0,0,710,704,1,0,0,0, - 710,705,1,0,0,0,710,706,1,0,0,0,710,707,1,0,0,0,710,708,1,0,0,0,710,709, - 1,0,0,0,711,712,1,0,0,0,712,763,3,74,37,16,713,714,10,13,0,0,714,715, - 5,226,0,0,715,763,3,74,37,14,716,717,10,11,0,0,717,718,5,6,0,0,718,763, - 3,74,37,12,719,720,10,10,0,0,720,721,5,121,0,0,721,763,3,74,37,11,722, - 724,10,9,0,0,723,725,5,115,0,0,724,723,1,0,0,0,724,725,1,0,0,0,725,726, - 1,0,0,0,726,727,5,16,0,0,727,728,3,74,37,0,728,729,5,6,0,0,729,730,3, - 74,37,10,730,763,1,0,0,0,731,732,10,8,0,0,732,733,5,229,0,0,733,734,3, - 74,37,0,734,735,5,205,0,0,735,736,3,74,37,8,736,763,1,0,0,0,737,738,10, - 21,0,0,738,739,5,219,0,0,739,740,3,74,37,0,740,741,5,235,0,0,741,763, - 1,0,0,0,742,743,10,20,0,0,743,744,5,210,0,0,744,763,5,198,0,0,745,746, - 10,19,0,0,746,747,5,210,0,0,747,763,3,118,59,0,748,749,10,14,0,0,749, - 751,5,88,0,0,750,752,5,115,0,0,751,750,1,0,0,0,751,752,1,0,0,0,752,753, - 1,0,0,0,753,763,5,116,0,0,754,760,10,7,0,0,755,761,3,116,58,0,756,757, - 5,10,0,0,757,761,3,118,59,0,758,759,5,10,0,0,759,761,5,200,0,0,760,755, - 1,0,0,0,760,756,1,0,0,0,760,758,1,0,0,0,761,763,1,0,0,0,762,671,1,0,0, - 0,762,678,1,0,0,0,762,685,1,0,0,0,762,713,1,0,0,0,762,716,1,0,0,0,762, - 719,1,0,0,0,762,722,1,0,0,0,762,731,1,0,0,0,762,737,1,0,0,0,762,742,1, - 0,0,0,762,745,1,0,0,0,762,748,1,0,0,0,762,754,1,0,0,0,763,766,1,0,0,0, - 764,762,1,0,0,0,764,765,1,0,0,0,765,75,1,0,0,0,766,764,1,0,0,0,767,772, - 3,78,39,0,768,769,5,206,0,0,769,771,3,78,39,0,770,768,1,0,0,0,771,774, - 
1,0,0,0,772,770,1,0,0,0,772,773,1,0,0,0,773,77,1,0,0,0,774,772,1,0,0, - 0,775,778,3,80,40,0,776,778,3,74,37,0,777,775,1,0,0,0,777,776,1,0,0,0, - 778,79,1,0,0,0,779,780,5,220,0,0,780,785,3,118,59,0,781,782,5,206,0,0, - 782,784,3,118,59,0,783,781,1,0,0,0,784,787,1,0,0,0,785,783,1,0,0,0,785, - 786,1,0,0,0,786,788,1,0,0,0,787,785,1,0,0,0,788,789,5,236,0,0,789,799, - 1,0,0,0,790,795,3,118,59,0,791,792,5,206,0,0,792,794,3,118,59,0,793,791, - 1,0,0,0,794,797,1,0,0,0,795,793,1,0,0,0,795,796,1,0,0,0,796,799,1,0,0, - 0,797,795,1,0,0,0,798,779,1,0,0,0,798,790,1,0,0,0,799,800,1,0,0,0,800, - 801,5,201,0,0,801,802,3,74,37,0,802,81,1,0,0,0,803,804,5,222,0,0,804, - 808,3,118,59,0,805,807,3,84,42,0,806,805,1,0,0,0,807,810,1,0,0,0,808, - 806,1,0,0,0,808,809,1,0,0,0,809,811,1,0,0,0,810,808,1,0,0,0,811,812,5, - 238,0,0,812,813,5,214,0,0,813,832,1,0,0,0,814,815,5,222,0,0,815,819,3, - 118,59,0,816,818,3,84,42,0,817,816,1,0,0,0,818,821,1,0,0,0,819,817,1, - 0,0,0,819,820,1,0,0,0,820,822,1,0,0,0,821,819,1,0,0,0,822,824,5,214,0, - 0,823,825,3,82,41,0,824,823,1,0,0,0,824,825,1,0,0,0,825,826,1,0,0,0,826, - 827,5,222,0,0,827,828,5,238,0,0,828,829,3,118,59,0,829,830,5,214,0,0, - 830,832,1,0,0,0,831,803,1,0,0,0,831,814,1,0,0,0,832,83,1,0,0,0,833,834, - 3,118,59,0,834,835,5,212,0,0,835,836,5,200,0,0,836,845,1,0,0,0,837,838, - 3,118,59,0,838,839,5,212,0,0,839,840,5,218,0,0,840,841,3,74,37,0,841, - 842,5,234,0,0,842,845,1,0,0,0,843,845,3,118,59,0,844,833,1,0,0,0,844, - 837,1,0,0,0,844,843,1,0,0,0,845,85,1,0,0,0,846,851,3,88,44,0,847,848, - 5,206,0,0,848,850,3,88,44,0,849,847,1,0,0,0,850,853,1,0,0,0,851,849,1, - 0,0,0,851,852,1,0,0,0,852,87,1,0,0,0,853,851,1,0,0,0,854,855,3,118,59, - 0,855,856,5,10,0,0,856,857,5,220,0,0,857,858,3,2,1,0,858,859,5,236,0, - 0,859,865,1,0,0,0,860,861,3,74,37,0,861,862,5,10,0,0,862,863,3,118,59, - 0,863,865,1,0,0,0,864,854,1,0,0,0,864,860,1,0,0,0,865,89,1,0,0,0,866, - 874,3,122,61,0,867,868,3,98,49,0,868,869,5,210,0,0,869,871,1,0,0,0,870, - 867,1,0,0,0,870,871,1,0,0,0,871,872,1,0,0,0,872,874,3,92,46,0,873,866, - 1,0,0,0,873,870,1,0,0,0,874,91,1,0,0,0,875,880,3,118,59,0,876,877,5,210, - 0,0,877,879,3,118,59,0,878,876,1,0,0,0,879,882,1,0,0,0,880,878,1,0,0, - 0,880,881,1,0,0,0,881,93,1,0,0,0,882,880,1,0,0,0,883,884,6,47,-1,0,884, - 893,3,98,49,0,885,893,3,96,48,0,886,887,5,220,0,0,887,888,3,2,1,0,888, - 889,5,236,0,0,889,893,1,0,0,0,890,893,3,82,41,0,891,893,3,122,61,0,892, - 883,1,0,0,0,892,885,1,0,0,0,892,886,1,0,0,0,892,890,1,0,0,0,892,891,1, - 0,0,0,893,902,1,0,0,0,894,898,10,3,0,0,895,899,3,116,58,0,896,897,5,10, - 0,0,897,899,3,118,59,0,898,895,1,0,0,0,898,896,1,0,0,0,899,901,1,0,0, - 0,900,894,1,0,0,0,901,904,1,0,0,0,902,900,1,0,0,0,902,903,1,0,0,0,903, - 95,1,0,0,0,904,902,1,0,0,0,905,906,3,118,59,0,906,908,5,220,0,0,907,909, - 3,100,50,0,908,907,1,0,0,0,908,909,1,0,0,0,909,910,1,0,0,0,910,911,5, - 236,0,0,911,97,1,0,0,0,912,913,3,102,51,0,913,914,5,210,0,0,914,916,1, - 0,0,0,915,912,1,0,0,0,915,916,1,0,0,0,916,917,1,0,0,0,917,918,3,118,59, - 0,918,99,1,0,0,0,919,924,3,74,37,0,920,921,5,206,0,0,921,923,3,74,37, - 0,922,920,1,0,0,0,923,926,1,0,0,0,924,922,1,0,0,0,924,925,1,0,0,0,925, - 101,1,0,0,0,926,924,1,0,0,0,927,928,3,118,59,0,928,103,1,0,0,0,929,938, - 5,196,0,0,930,931,5,210,0,0,931,938,7,11,0,0,932,933,5,198,0,0,933,935, - 5,210,0,0,934,936,7,11,0,0,935,934,1,0,0,0,935,936,1,0,0,0,936,938,1, - 0,0,0,937,929,1,0,0,0,937,930,1,0,0,0,937,932,1,0,0,0,938,105,1,0,0,0, - 939,941,7,12,0,0,940,939,1,0,0,0,940,941,1,0,0,0,941,948,1,0,0,0,942, - 
949,3,104,52,0,943,949,5,197,0,0,944,949,5,198,0,0,945,949,5,199,0,0, - 946,949,5,82,0,0,947,949,5,113,0,0,948,942,1,0,0,0,948,943,1,0,0,0,948, - 944,1,0,0,0,948,945,1,0,0,0,948,946,1,0,0,0,948,947,1,0,0,0,949,107,1, - 0,0,0,950,954,3,106,53,0,951,954,5,200,0,0,952,954,5,116,0,0,953,950, - 1,0,0,0,953,951,1,0,0,0,953,952,1,0,0,0,954,109,1,0,0,0,955,956,7,13, - 0,0,956,111,1,0,0,0,957,958,7,14,0,0,958,113,1,0,0,0,959,960,7,15,0,0, - 960,115,1,0,0,0,961,964,5,195,0,0,962,964,3,114,57,0,963,961,1,0,0,0, - 963,962,1,0,0,0,964,117,1,0,0,0,965,969,5,195,0,0,966,969,3,110,55,0, - 967,969,3,112,56,0,968,965,1,0,0,0,968,966,1,0,0,0,968,967,1,0,0,0,969, - 119,1,0,0,0,970,971,5,200,0,0,971,972,5,212,0,0,972,973,3,106,53,0,973, - 121,1,0,0,0,974,975,5,218,0,0,975,976,3,118,59,0,976,977,5,234,0,0,977, - 123,1,0,0,0,120,127,137,146,149,153,156,160,163,166,169,172,176,180,183, - 186,189,193,196,205,211,232,249,266,272,278,289,291,302,305,311,319,325, - 327,331,336,339,342,346,350,353,355,358,362,366,369,371,373,378,389,395, - 402,407,411,415,421,423,430,438,441,444,463,477,493,505,517,525,529,536, - 542,551,555,586,603,615,625,628,632,635,648,665,669,675,682,694,698,701, - 710,724,751,760,762,764,772,777,785,795,798,808,819,824,831,844,851,864, - 870,873,880,892,898,902,908,915,924,935,937,940,948,953,963,968 + 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,580,8,37,1,37,1,37, + 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37, + 597,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,609,8, + 37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,619,8,37,1,37,3,37,622, + 8,37,1,37,1,37,3,37,626,8,37,1,37,3,37,629,8,37,1,37,1,37,1,37,1,37,1, + 37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,642,8,37,1,37,1,37,1,37,1,37,1, + 37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,659,8,37,1, + 37,1,37,3,37,663,8,37,1,37,1,37,1,37,1,37,3,37,669,8,37,1,37,1,37,1,37, + 1,37,1,37,3,37,676,8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37, + 1,37,3,37,688,8,37,1,37,1,37,3,37,692,8,37,1,37,3,37,695,8,37,1,37,1, + 37,1,37,1,37,1,37,1,37,1,37,3,37,704,8,37,1,37,1,37,1,37,1,37,1,37,1, + 37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,718,8,37,1,37,1,37,1,37,1,37,1, + 37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1, + 37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,745,8,37,1,37,1,37,1,37,1,37,1, + 37,1,37,1,37,3,37,754,8,37,5,37,756,8,37,10,37,12,37,759,9,37,1,38,1, + 38,1,38,5,38,764,8,38,10,38,12,38,767,9,38,1,39,1,39,3,39,771,8,39,1, + 40,1,40,1,40,1,40,5,40,777,8,40,10,40,12,40,780,9,40,1,40,1,40,1,40,1, + 40,1,40,5,40,787,8,40,10,40,12,40,790,9,40,3,40,792,8,40,1,40,1,40,1, + 40,1,41,1,41,1,41,5,41,800,8,41,10,41,12,41,803,9,41,1,41,1,41,1,41,1, + 41,1,41,1,41,5,41,811,8,41,10,41,12,41,814,9,41,1,41,1,41,3,41,818,8, + 41,1,41,1,41,1,41,1,41,1,41,3,41,825,8,41,1,42,1,42,1,42,1,42,1,42,1, + 42,1,42,1,42,1,42,1,42,1,42,3,42,838,8,42,1,43,1,43,1,43,5,43,843,8,43, + 10,43,12,43,846,9,43,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44, + 3,44,858,8,44,1,45,1,45,1,45,1,45,3,45,864,8,45,1,45,3,45,867,8,45,1, + 46,1,46,1,46,5,46,872,8,46,10,46,12,46,875,9,46,1,47,1,47,1,47,1,47,1, + 47,1,47,1,47,1,47,1,47,3,47,886,8,47,1,47,1,47,1,47,1,47,3,47,892,8,47, + 5,47,894,8,47,10,47,12,47,897,9,47,1,48,1,48,1,48,3,48,902,8,48,1,48, + 1,48,1,49,1,49,1,49,3,49,909,8,49,1,49,1,49,1,50,1,50,1,50,5,50,916,8, + 50,10,50,12,50,919,9,50,1,51,1,51,1,52,1,52,1,52,1,52,1,52,1,52,3,52, + 929,8,52,3,52,931,8,52,1,53,3,53,934,8,53,1,53,1,53,1,53,1,53,1,53,1, + 
53,3,53,942,8,53,1,54,1,54,1,54,3,54,947,8,54,1,55,1,55,1,56,1,56,1,57, + 1,57,1,58,1,58,3,58,957,8,58,1,59,1,59,1,59,3,59,962,8,59,1,60,1,60,1, + 60,1,60,1,61,1,61,1,61,1,61,1,61,0,3,36,74,94,62,0,2,4,6,8,10,12,14,16, + 18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62, + 64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,94,96,98,100,102,104,106, + 108,110,112,114,116,118,120,122,0,16,2,0,32,32,141,141,2,0,84,84,96,96, + 3,0,4,4,8,8,12,12,4,0,4,4,7,8,12,12,147,147,2,0,96,96,140,140,2,0,4,4, + 8,8,2,0,11,11,42,43,2,0,62,62,93,93,2,0,133,133,143,143,3,0,17,17,95, + 95,170,170,2,0,79,79,98,98,1,0,197,198,2,0,208,208,228,228,8,0,37,37, + 76,76,108,108,110,110,132,132,145,145,185,185,190,190,13,0,2,24,26,36, + 38,75,77,81,83,107,109,109,111,112,114,115,117,130,133,144,146,184,186, + 189,191,192,4,0,36,36,62,62,77,77,91,91,1099,0,127,1,0,0,0,2,131,1,0, + 0,0,4,146,1,0,0,0,6,149,1,0,0,0,8,198,1,0,0,0,10,201,1,0,0,0,12,207,1, + 0,0,0,14,211,1,0,0,0,16,217,1,0,0,0,18,235,1,0,0,0,20,238,1,0,0,0,22, + 241,1,0,0,0,24,251,1,0,0,0,26,254,1,0,0,0,28,258,1,0,0,0,30,291,1,0,0, + 0,32,293,1,0,0,0,34,296,1,0,0,0,36,311,1,0,0,0,38,373,1,0,0,0,40,378, + 1,0,0,0,42,389,1,0,0,0,44,391,1,0,0,0,46,397,1,0,0,0,48,405,1,0,0,0,50, + 423,1,0,0,0,52,425,1,0,0,0,54,433,1,0,0,0,56,438,1,0,0,0,58,446,1,0,0, + 0,60,450,1,0,0,0,62,454,1,0,0,0,64,463,1,0,0,0,66,477,1,0,0,0,68,479, + 1,0,0,0,70,529,1,0,0,0,72,531,1,0,0,0,74,662,1,0,0,0,76,760,1,0,0,0,78, + 770,1,0,0,0,80,791,1,0,0,0,82,824,1,0,0,0,84,837,1,0,0,0,86,839,1,0,0, + 0,88,857,1,0,0,0,90,866,1,0,0,0,92,868,1,0,0,0,94,885,1,0,0,0,96,898, + 1,0,0,0,98,908,1,0,0,0,100,912,1,0,0,0,102,920,1,0,0,0,104,930,1,0,0, + 0,106,933,1,0,0,0,108,946,1,0,0,0,110,948,1,0,0,0,112,950,1,0,0,0,114, + 952,1,0,0,0,116,956,1,0,0,0,118,961,1,0,0,0,120,963,1,0,0,0,122,967,1, + 0,0,0,124,128,3,2,1,0,125,128,3,6,3,0,126,128,3,82,41,0,127,124,1,0,0, + 0,127,125,1,0,0,0,127,126,1,0,0,0,128,129,1,0,0,0,129,130,5,0,0,1,130, + 1,1,0,0,0,131,137,3,4,2,0,132,133,5,176,0,0,133,134,5,4,0,0,134,136,3, + 4,2,0,135,132,1,0,0,0,136,139,1,0,0,0,137,135,1,0,0,0,137,138,1,0,0,0, + 138,3,1,0,0,0,139,137,1,0,0,0,140,147,3,6,3,0,141,142,5,220,0,0,142,143, + 3,2,1,0,143,144,5,236,0,0,144,147,1,0,0,0,145,147,3,122,61,0,146,140, + 1,0,0,0,146,141,1,0,0,0,146,145,1,0,0,0,147,5,1,0,0,0,148,150,3,8,4,0, + 149,148,1,0,0,0,149,150,1,0,0,0,150,151,1,0,0,0,151,153,5,146,0,0,152, + 154,5,49,0,0,153,152,1,0,0,0,153,154,1,0,0,0,154,156,1,0,0,0,155,157, + 3,10,5,0,156,155,1,0,0,0,156,157,1,0,0,0,157,158,1,0,0,0,158,160,3,72, + 36,0,159,161,3,12,6,0,160,159,1,0,0,0,160,161,1,0,0,0,161,163,1,0,0,0, + 162,164,3,14,7,0,163,162,1,0,0,0,163,164,1,0,0,0,164,166,1,0,0,0,165, + 167,3,18,9,0,166,165,1,0,0,0,166,167,1,0,0,0,167,169,1,0,0,0,168,170, + 3,20,10,0,169,168,1,0,0,0,169,170,1,0,0,0,170,172,1,0,0,0,171,173,3,22, + 11,0,172,171,1,0,0,0,172,173,1,0,0,0,173,176,1,0,0,0,174,175,5,189,0, + 0,175,177,7,0,0,0,176,174,1,0,0,0,176,177,1,0,0,0,177,180,1,0,0,0,178, + 179,5,189,0,0,179,181,5,169,0,0,180,178,1,0,0,0,180,181,1,0,0,0,181,183, + 1,0,0,0,182,184,3,24,12,0,183,182,1,0,0,0,183,184,1,0,0,0,184,186,1,0, + 0,0,185,187,3,16,8,0,186,185,1,0,0,0,186,187,1,0,0,0,187,189,1,0,0,0, + 188,190,3,26,13,0,189,188,1,0,0,0,189,190,1,0,0,0,190,193,1,0,0,0,191, + 194,3,30,15,0,192,194,3,32,16,0,193,191,1,0,0,0,193,192,1,0,0,0,193,194, + 1,0,0,0,194,196,1,0,0,0,195,197,3,34,17,0,196,195,1,0,0,0,196,197,1,0, + 0,0,197,7,1,0,0,0,198,199,5,189,0,0,199,200,3,86,43,0,200,9,1,0,0,0,201, + 
202,5,168,0,0,202,205,5,198,0,0,203,204,5,189,0,0,204,206,5,164,0,0,205, + 203,1,0,0,0,205,206,1,0,0,0,206,11,1,0,0,0,207,208,5,68,0,0,208,209,3, + 36,18,0,209,13,1,0,0,0,210,212,7,1,0,0,211,210,1,0,0,0,211,212,1,0,0, + 0,212,213,1,0,0,0,213,214,5,9,0,0,214,215,5,90,0,0,215,216,3,72,36,0, + 216,15,1,0,0,0,217,218,5,188,0,0,218,219,3,118,59,0,219,220,5,10,0,0, + 220,221,5,220,0,0,221,222,3,56,28,0,222,232,5,236,0,0,223,224,5,206,0, + 0,224,225,3,118,59,0,225,226,5,10,0,0,226,227,5,220,0,0,227,228,3,56, + 28,0,228,229,5,236,0,0,229,231,1,0,0,0,230,223,1,0,0,0,231,234,1,0,0, + 0,232,230,1,0,0,0,232,233,1,0,0,0,233,17,1,0,0,0,234,232,1,0,0,0,235, + 236,5,129,0,0,236,237,3,74,37,0,237,19,1,0,0,0,238,239,5,187,0,0,239, + 240,3,74,37,0,240,21,1,0,0,0,241,242,5,73,0,0,242,249,5,18,0,0,243,244, + 7,0,0,0,244,245,5,220,0,0,245,246,3,72,36,0,246,247,5,236,0,0,247,250, + 1,0,0,0,248,250,3,72,36,0,249,243,1,0,0,0,249,248,1,0,0,0,250,23,1,0, + 0,0,251,252,5,74,0,0,252,253,3,74,37,0,253,25,1,0,0,0,254,255,5,122,0, + 0,255,256,5,18,0,0,256,257,3,46,23,0,257,27,1,0,0,0,258,259,5,122,0,0, + 259,260,5,18,0,0,260,261,3,72,36,0,261,29,1,0,0,0,262,263,5,99,0,0,263, + 266,3,74,37,0,264,265,5,206,0,0,265,267,3,74,37,0,266,264,1,0,0,0,266, + 267,1,0,0,0,267,272,1,0,0,0,268,269,5,189,0,0,269,273,5,164,0,0,270,271, + 5,18,0,0,271,273,3,72,36,0,272,268,1,0,0,0,272,270,1,0,0,0,272,273,1, + 0,0,0,273,292,1,0,0,0,274,275,5,99,0,0,275,278,3,74,37,0,276,277,5,189, + 0,0,277,279,5,164,0,0,278,276,1,0,0,0,278,279,1,0,0,0,279,280,1,0,0,0, + 280,281,5,118,0,0,281,282,3,74,37,0,282,292,1,0,0,0,283,284,5,99,0,0, + 284,285,3,74,37,0,285,286,5,118,0,0,286,289,3,74,37,0,287,288,5,18,0, + 0,288,290,3,72,36,0,289,287,1,0,0,0,289,290,1,0,0,0,290,292,1,0,0,0,291, + 262,1,0,0,0,291,274,1,0,0,0,291,283,1,0,0,0,292,31,1,0,0,0,293,294,5, + 118,0,0,294,295,3,74,37,0,295,33,1,0,0,0,296,297,5,150,0,0,297,298,3, + 52,26,0,298,35,1,0,0,0,299,300,6,18,-1,0,300,302,3,94,47,0,301,303,5, + 61,0,0,302,301,1,0,0,0,302,303,1,0,0,0,303,305,1,0,0,0,304,306,3,44,22, + 0,305,304,1,0,0,0,305,306,1,0,0,0,306,312,1,0,0,0,307,308,5,220,0,0,308, + 309,3,36,18,0,309,310,5,236,0,0,310,312,1,0,0,0,311,299,1,0,0,0,311,307, + 1,0,0,0,312,327,1,0,0,0,313,314,10,3,0,0,314,315,3,40,20,0,315,316,3, + 36,18,4,316,326,1,0,0,0,317,319,10,4,0,0,318,320,3,38,19,0,319,318,1, + 0,0,0,319,320,1,0,0,0,320,321,1,0,0,0,321,322,5,90,0,0,322,323,3,36,18, + 0,323,324,3,42,21,0,324,326,1,0,0,0,325,313,1,0,0,0,325,317,1,0,0,0,326, + 329,1,0,0,0,327,325,1,0,0,0,327,328,1,0,0,0,328,37,1,0,0,0,329,327,1, + 0,0,0,330,332,7,2,0,0,331,330,1,0,0,0,331,332,1,0,0,0,332,333,1,0,0,0, + 333,340,5,84,0,0,334,336,5,84,0,0,335,337,7,2,0,0,336,335,1,0,0,0,336, + 337,1,0,0,0,337,340,1,0,0,0,338,340,7,2,0,0,339,331,1,0,0,0,339,334,1, + 0,0,0,339,338,1,0,0,0,340,374,1,0,0,0,341,343,7,3,0,0,342,341,1,0,0,0, + 342,343,1,0,0,0,343,344,1,0,0,0,344,346,7,4,0,0,345,347,5,123,0,0,346, + 345,1,0,0,0,346,347,1,0,0,0,347,356,1,0,0,0,348,350,7,4,0,0,349,351,5, + 123,0,0,350,349,1,0,0,0,350,351,1,0,0,0,351,353,1,0,0,0,352,354,7,3,0, + 0,353,352,1,0,0,0,353,354,1,0,0,0,354,356,1,0,0,0,355,342,1,0,0,0,355, + 348,1,0,0,0,356,374,1,0,0,0,357,359,7,5,0,0,358,357,1,0,0,0,358,359,1, + 0,0,0,359,360,1,0,0,0,360,362,5,69,0,0,361,363,5,123,0,0,362,361,1,0, + 0,0,362,363,1,0,0,0,363,372,1,0,0,0,364,366,5,69,0,0,365,367,5,123,0, + 0,366,365,1,0,0,0,366,367,1,0,0,0,367,369,1,0,0,0,368,370,7,5,0,0,369, + 368,1,0,0,0,369,370,1,0,0,0,370,372,1,0,0,0,371,358,1,0,0,0,371,364,1, + 
0,0,0,372,374,1,0,0,0,373,339,1,0,0,0,373,355,1,0,0,0,373,371,1,0,0,0, + 374,39,1,0,0,0,375,376,5,31,0,0,376,379,5,90,0,0,377,379,5,206,0,0,378, + 375,1,0,0,0,378,377,1,0,0,0,379,41,1,0,0,0,380,381,5,119,0,0,381,390, + 3,72,36,0,382,383,5,179,0,0,383,384,5,220,0,0,384,385,3,72,36,0,385,386, + 5,236,0,0,386,390,1,0,0,0,387,388,5,179,0,0,388,390,3,72,36,0,389,380, + 1,0,0,0,389,382,1,0,0,0,389,387,1,0,0,0,390,43,1,0,0,0,391,392,5,144, + 0,0,392,395,3,50,25,0,393,394,5,118,0,0,394,396,3,50,25,0,395,393,1,0, + 0,0,395,396,1,0,0,0,396,45,1,0,0,0,397,402,3,48,24,0,398,399,5,206,0, + 0,399,401,3,48,24,0,400,398,1,0,0,0,401,404,1,0,0,0,402,400,1,0,0,0,402, + 403,1,0,0,0,403,47,1,0,0,0,404,402,1,0,0,0,405,407,3,74,37,0,406,408, + 7,6,0,0,407,406,1,0,0,0,407,408,1,0,0,0,408,411,1,0,0,0,409,410,5,117, + 0,0,410,412,7,7,0,0,411,409,1,0,0,0,411,412,1,0,0,0,412,415,1,0,0,0,413, + 414,5,26,0,0,414,416,5,200,0,0,415,413,1,0,0,0,415,416,1,0,0,0,416,49, + 1,0,0,0,417,424,3,122,61,0,418,421,3,106,53,0,419,420,5,238,0,0,420,422, + 3,106,53,0,421,419,1,0,0,0,421,422,1,0,0,0,422,424,1,0,0,0,423,417,1, + 0,0,0,423,418,1,0,0,0,424,51,1,0,0,0,425,430,3,54,27,0,426,427,5,206, + 0,0,427,429,3,54,27,0,428,426,1,0,0,0,429,432,1,0,0,0,430,428,1,0,0,0, + 430,431,1,0,0,0,431,53,1,0,0,0,432,430,1,0,0,0,433,434,3,118,59,0,434, + 435,5,212,0,0,435,436,3,108,54,0,436,55,1,0,0,0,437,439,3,58,29,0,438, + 437,1,0,0,0,438,439,1,0,0,0,439,441,1,0,0,0,440,442,3,60,30,0,441,440, + 1,0,0,0,441,442,1,0,0,0,442,444,1,0,0,0,443,445,3,62,31,0,444,443,1,0, + 0,0,444,445,1,0,0,0,445,57,1,0,0,0,446,447,5,126,0,0,447,448,5,18,0,0, + 448,449,3,72,36,0,449,59,1,0,0,0,450,451,5,122,0,0,451,452,5,18,0,0,452, + 453,3,46,23,0,453,61,1,0,0,0,454,455,7,8,0,0,455,456,3,64,32,0,456,63, + 1,0,0,0,457,464,3,66,33,0,458,459,5,16,0,0,459,460,3,66,33,0,460,461, + 5,6,0,0,461,462,3,66,33,0,462,464,1,0,0,0,463,457,1,0,0,0,463,458,1,0, + 0,0,464,65,1,0,0,0,465,466,5,33,0,0,466,478,5,142,0,0,467,468,5,175,0, + 0,468,478,5,128,0,0,469,470,5,175,0,0,470,478,5,64,0,0,471,472,3,106, + 53,0,472,473,5,128,0,0,473,478,1,0,0,0,474,475,3,106,53,0,475,476,5,64, + 0,0,476,478,1,0,0,0,477,465,1,0,0,0,477,467,1,0,0,0,477,469,1,0,0,0,477, + 471,1,0,0,0,477,474,1,0,0,0,478,67,1,0,0,0,479,480,3,74,37,0,480,481, + 5,0,0,1,481,69,1,0,0,0,482,530,3,118,59,0,483,484,3,118,59,0,484,485, + 5,220,0,0,485,486,3,118,59,0,486,493,3,70,35,0,487,488,5,206,0,0,488, + 489,3,118,59,0,489,490,3,70,35,0,490,492,1,0,0,0,491,487,1,0,0,0,492, + 495,1,0,0,0,493,491,1,0,0,0,493,494,1,0,0,0,494,496,1,0,0,0,495,493,1, + 0,0,0,496,497,5,236,0,0,497,530,1,0,0,0,498,499,3,118,59,0,499,500,5, + 220,0,0,500,505,3,120,60,0,501,502,5,206,0,0,502,504,3,120,60,0,503,501, + 1,0,0,0,504,507,1,0,0,0,505,503,1,0,0,0,505,506,1,0,0,0,506,508,1,0,0, + 0,507,505,1,0,0,0,508,509,5,236,0,0,509,530,1,0,0,0,510,511,3,118,59, + 0,511,512,5,220,0,0,512,517,3,70,35,0,513,514,5,206,0,0,514,516,3,70, + 35,0,515,513,1,0,0,0,516,519,1,0,0,0,517,515,1,0,0,0,517,518,1,0,0,0, + 518,520,1,0,0,0,519,517,1,0,0,0,520,521,5,236,0,0,521,530,1,0,0,0,522, + 523,3,118,59,0,523,525,5,220,0,0,524,526,3,72,36,0,525,524,1,0,0,0,525, + 526,1,0,0,0,526,527,1,0,0,0,527,528,5,236,0,0,528,530,1,0,0,0,529,482, + 1,0,0,0,529,483,1,0,0,0,529,498,1,0,0,0,529,510,1,0,0,0,529,522,1,0,0, + 0,530,71,1,0,0,0,531,536,3,74,37,0,532,533,5,206,0,0,533,535,3,74,37, + 0,534,532,1,0,0,0,535,538,1,0,0,0,536,534,1,0,0,0,536,537,1,0,0,0,537, + 73,1,0,0,0,538,536,1,0,0,0,539,540,6,37,-1,0,540,542,5,19,0,0,541,543, + 
3,74,37,0,542,541,1,0,0,0,542,543,1,0,0,0,543,549,1,0,0,0,544,545,5,186, + 0,0,545,546,3,74,37,0,546,547,5,163,0,0,547,548,3,74,37,0,548,550,1,0, + 0,0,549,544,1,0,0,0,550,551,1,0,0,0,551,549,1,0,0,0,551,552,1,0,0,0,552, + 555,1,0,0,0,553,554,5,52,0,0,554,556,3,74,37,0,555,553,1,0,0,0,555,556, + 1,0,0,0,556,557,1,0,0,0,557,558,5,53,0,0,558,663,1,0,0,0,559,560,5,20, + 0,0,560,561,5,220,0,0,561,562,3,74,37,0,562,563,5,10,0,0,563,564,3,70, + 35,0,564,565,5,236,0,0,565,663,1,0,0,0,566,567,5,36,0,0,567,663,5,200, + 0,0,568,569,5,86,0,0,569,570,3,74,37,0,570,571,3,110,55,0,571,663,1,0, + 0,0,572,573,5,155,0,0,573,574,5,220,0,0,574,575,3,74,37,0,575,576,5,68, + 0,0,576,579,3,74,37,0,577,578,5,65,0,0,578,580,3,74,37,0,579,577,1,0, + 0,0,579,580,1,0,0,0,580,581,1,0,0,0,581,582,5,236,0,0,582,663,1,0,0,0, + 583,584,5,166,0,0,584,663,5,200,0,0,585,586,5,171,0,0,586,587,5,220,0, + 0,587,588,7,9,0,0,588,589,5,200,0,0,589,590,5,68,0,0,590,591,3,74,37, + 0,591,592,5,236,0,0,592,663,1,0,0,0,593,594,3,118,59,0,594,596,5,220, + 0,0,595,597,3,72,36,0,596,595,1,0,0,0,596,597,1,0,0,0,597,598,1,0,0,0, + 598,599,5,236,0,0,599,600,1,0,0,0,600,601,5,125,0,0,601,602,5,220,0,0, + 602,603,3,56,28,0,603,604,5,236,0,0,604,663,1,0,0,0,605,606,3,118,59, + 0,606,608,5,220,0,0,607,609,3,72,36,0,608,607,1,0,0,0,608,609,1,0,0,0, + 609,610,1,0,0,0,610,611,5,236,0,0,611,612,1,0,0,0,612,613,5,125,0,0,613, + 614,3,118,59,0,614,663,1,0,0,0,615,621,3,118,59,0,616,618,5,220,0,0,617, + 619,3,72,36,0,618,617,1,0,0,0,618,619,1,0,0,0,619,620,1,0,0,0,620,622, + 5,236,0,0,621,616,1,0,0,0,621,622,1,0,0,0,622,623,1,0,0,0,623,625,5,220, + 0,0,624,626,5,49,0,0,625,624,1,0,0,0,625,626,1,0,0,0,626,628,1,0,0,0, + 627,629,3,76,38,0,628,627,1,0,0,0,628,629,1,0,0,0,629,630,1,0,0,0,630, + 631,5,236,0,0,631,663,1,0,0,0,632,663,3,82,41,0,633,663,3,108,54,0,634, + 635,5,208,0,0,635,663,3,74,37,18,636,637,5,115,0,0,637,663,3,74,37,12, + 638,639,3,98,49,0,639,640,5,210,0,0,640,642,1,0,0,0,641,638,1,0,0,0,641, + 642,1,0,0,0,642,643,1,0,0,0,643,663,5,202,0,0,644,645,5,220,0,0,645,646, + 3,2,1,0,646,647,5,236,0,0,647,663,1,0,0,0,648,649,5,220,0,0,649,650,3, + 74,37,0,650,651,5,236,0,0,651,663,1,0,0,0,652,653,5,220,0,0,653,654,3, + 72,36,0,654,655,5,236,0,0,655,663,1,0,0,0,656,658,5,219,0,0,657,659,3, + 72,36,0,658,657,1,0,0,0,658,659,1,0,0,0,659,660,1,0,0,0,660,663,5,235, + 0,0,661,663,3,90,45,0,662,539,1,0,0,0,662,559,1,0,0,0,662,566,1,0,0,0, + 662,568,1,0,0,0,662,572,1,0,0,0,662,583,1,0,0,0,662,585,1,0,0,0,662,593, + 1,0,0,0,662,605,1,0,0,0,662,615,1,0,0,0,662,632,1,0,0,0,662,633,1,0,0, + 0,662,634,1,0,0,0,662,636,1,0,0,0,662,641,1,0,0,0,662,644,1,0,0,0,662, + 648,1,0,0,0,662,652,1,0,0,0,662,656,1,0,0,0,662,661,1,0,0,0,663,757,1, + 0,0,0,664,668,10,17,0,0,665,669,5,202,0,0,666,669,5,238,0,0,667,669,5, + 227,0,0,668,665,1,0,0,0,668,666,1,0,0,0,668,667,1,0,0,0,669,670,1,0,0, + 0,670,756,3,74,37,18,671,675,10,16,0,0,672,676,5,228,0,0,673,676,5,208, + 0,0,674,676,5,207,0,0,675,672,1,0,0,0,675,673,1,0,0,0,675,674,1,0,0,0, + 676,677,1,0,0,0,677,756,3,74,37,17,678,703,10,15,0,0,679,704,5,211,0, + 0,680,704,5,212,0,0,681,704,5,223,0,0,682,704,5,221,0,0,683,704,5,222, + 0,0,684,704,5,213,0,0,685,704,5,214,0,0,686,688,5,115,0,0,687,686,1,0, + 0,0,687,688,1,0,0,0,688,689,1,0,0,0,689,691,5,80,0,0,690,692,5,25,0,0, + 691,690,1,0,0,0,691,692,1,0,0,0,692,704,1,0,0,0,693,695,5,115,0,0,694, + 693,1,0,0,0,694,695,1,0,0,0,695,696,1,0,0,0,696,704,7,10,0,0,697,704, + 5,232,0,0,698,704,5,233,0,0,699,704,5,225,0,0,700,704,5,216,0,0,701,704, + 
5,217,0,0,702,704,5,224,0,0,703,679,1,0,0,0,703,680,1,0,0,0,703,681,1, + 0,0,0,703,682,1,0,0,0,703,683,1,0,0,0,703,684,1,0,0,0,703,685,1,0,0,0, + 703,687,1,0,0,0,703,694,1,0,0,0,703,697,1,0,0,0,703,698,1,0,0,0,703,699, + 1,0,0,0,703,700,1,0,0,0,703,701,1,0,0,0,703,702,1,0,0,0,704,705,1,0,0, + 0,705,756,3,74,37,16,706,707,10,13,0,0,707,708,5,226,0,0,708,756,3,74, + 37,14,709,710,10,11,0,0,710,711,5,6,0,0,711,756,3,74,37,12,712,713,10, + 10,0,0,713,714,5,121,0,0,714,756,3,74,37,11,715,717,10,9,0,0,716,718, + 5,115,0,0,717,716,1,0,0,0,717,718,1,0,0,0,718,719,1,0,0,0,719,720,5,16, + 0,0,720,721,3,74,37,0,721,722,5,6,0,0,722,723,3,74,37,10,723,756,1,0, + 0,0,724,725,10,8,0,0,725,726,5,229,0,0,726,727,3,74,37,0,727,728,5,205, + 0,0,728,729,3,74,37,8,729,756,1,0,0,0,730,731,10,21,0,0,731,732,5,219, + 0,0,732,733,3,74,37,0,733,734,5,235,0,0,734,756,1,0,0,0,735,736,10,20, + 0,0,736,737,5,210,0,0,737,756,5,198,0,0,738,739,10,19,0,0,739,740,5,210, + 0,0,740,756,3,118,59,0,741,742,10,14,0,0,742,744,5,88,0,0,743,745,5,115, + 0,0,744,743,1,0,0,0,744,745,1,0,0,0,745,746,1,0,0,0,746,756,5,116,0,0, + 747,753,10,7,0,0,748,754,3,116,58,0,749,750,5,10,0,0,750,754,3,118,59, + 0,751,752,5,10,0,0,752,754,5,200,0,0,753,748,1,0,0,0,753,749,1,0,0,0, + 753,751,1,0,0,0,754,756,1,0,0,0,755,664,1,0,0,0,755,671,1,0,0,0,755,678, + 1,0,0,0,755,706,1,0,0,0,755,709,1,0,0,0,755,712,1,0,0,0,755,715,1,0,0, + 0,755,724,1,0,0,0,755,730,1,0,0,0,755,735,1,0,0,0,755,738,1,0,0,0,755, + 741,1,0,0,0,755,747,1,0,0,0,756,759,1,0,0,0,757,755,1,0,0,0,757,758,1, + 0,0,0,758,75,1,0,0,0,759,757,1,0,0,0,760,765,3,78,39,0,761,762,5,206, + 0,0,762,764,3,78,39,0,763,761,1,0,0,0,764,767,1,0,0,0,765,763,1,0,0,0, + 765,766,1,0,0,0,766,77,1,0,0,0,767,765,1,0,0,0,768,771,3,80,40,0,769, + 771,3,74,37,0,770,768,1,0,0,0,770,769,1,0,0,0,771,79,1,0,0,0,772,773, + 5,220,0,0,773,778,3,118,59,0,774,775,5,206,0,0,775,777,3,118,59,0,776, + 774,1,0,0,0,777,780,1,0,0,0,778,776,1,0,0,0,778,779,1,0,0,0,779,781,1, + 0,0,0,780,778,1,0,0,0,781,782,5,236,0,0,782,792,1,0,0,0,783,788,3,118, + 59,0,784,785,5,206,0,0,785,787,3,118,59,0,786,784,1,0,0,0,787,790,1,0, + 0,0,788,786,1,0,0,0,788,789,1,0,0,0,789,792,1,0,0,0,790,788,1,0,0,0,791, + 772,1,0,0,0,791,783,1,0,0,0,792,793,1,0,0,0,793,794,5,201,0,0,794,795, + 3,74,37,0,795,81,1,0,0,0,796,797,5,222,0,0,797,801,3,118,59,0,798,800, + 3,84,42,0,799,798,1,0,0,0,800,803,1,0,0,0,801,799,1,0,0,0,801,802,1,0, + 0,0,802,804,1,0,0,0,803,801,1,0,0,0,804,805,5,238,0,0,805,806,5,214,0, + 0,806,825,1,0,0,0,807,808,5,222,0,0,808,812,3,118,59,0,809,811,3,84,42, + 0,810,809,1,0,0,0,811,814,1,0,0,0,812,810,1,0,0,0,812,813,1,0,0,0,813, + 815,1,0,0,0,814,812,1,0,0,0,815,817,5,214,0,0,816,818,3,82,41,0,817,816, + 1,0,0,0,817,818,1,0,0,0,818,819,1,0,0,0,819,820,5,222,0,0,820,821,5,238, + 0,0,821,822,3,118,59,0,822,823,5,214,0,0,823,825,1,0,0,0,824,796,1,0, + 0,0,824,807,1,0,0,0,825,83,1,0,0,0,826,827,3,118,59,0,827,828,5,212,0, + 0,828,829,5,200,0,0,829,838,1,0,0,0,830,831,3,118,59,0,831,832,5,212, + 0,0,832,833,5,218,0,0,833,834,3,74,37,0,834,835,5,234,0,0,835,838,1,0, + 0,0,836,838,3,118,59,0,837,826,1,0,0,0,837,830,1,0,0,0,837,836,1,0,0, + 0,838,85,1,0,0,0,839,844,3,88,44,0,840,841,5,206,0,0,841,843,3,88,44, + 0,842,840,1,0,0,0,843,846,1,0,0,0,844,842,1,0,0,0,844,845,1,0,0,0,845, + 87,1,0,0,0,846,844,1,0,0,0,847,848,3,118,59,0,848,849,5,10,0,0,849,850, + 5,220,0,0,850,851,3,2,1,0,851,852,5,236,0,0,852,858,1,0,0,0,853,854,3, + 74,37,0,854,855,5,10,0,0,855,856,3,118,59,0,856,858,1,0,0,0,857,847,1, + 
0,0,0,857,853,1,0,0,0,858,89,1,0,0,0,859,867,3,122,61,0,860,861,3,98,
+ 49,0,861,862,5,210,0,0,862,864,1,0,0,0,863,860,1,0,0,0,863,864,1,0,0,
+ 0,864,865,1,0,0,0,865,867,3,92,46,0,866,859,1,0,0,0,866,863,1,0,0,0,867,
+ 91,1,0,0,0,868,873,3,118,59,0,869,870,5,210,0,0,870,872,3,118,59,0,871,
+ 869,1,0,0,0,872,875,1,0,0,0,873,871,1,0,0,0,873,874,1,0,0,0,874,93,1,
+ 0,0,0,875,873,1,0,0,0,876,877,6,47,-1,0,877,886,3,98,49,0,878,886,3,96,
+ 48,0,879,880,5,220,0,0,880,881,3,2,1,0,881,882,5,236,0,0,882,886,1,0,
+ 0,0,883,886,3,82,41,0,884,886,3,122,61,0,885,876,1,0,0,0,885,878,1,0,
+ 0,0,885,879,1,0,0,0,885,883,1,0,0,0,885,884,1,0,0,0,886,895,1,0,0,0,887,
+ 891,10,3,0,0,888,892,3,116,58,0,889,890,5,10,0,0,890,892,3,118,59,0,891,
+ 888,1,0,0,0,891,889,1,0,0,0,892,894,1,0,0,0,893,887,1,0,0,0,894,897,1,
+ 0,0,0,895,893,1,0,0,0,895,896,1,0,0,0,896,95,1,0,0,0,897,895,1,0,0,0,
+ 898,899,3,118,59,0,899,901,5,220,0,0,900,902,3,100,50,0,901,900,1,0,0,
+ 0,901,902,1,0,0,0,902,903,1,0,0,0,903,904,5,236,0,0,904,97,1,0,0,0,905,
+ 906,3,102,51,0,906,907,5,210,0,0,907,909,1,0,0,0,908,905,1,0,0,0,908,
+ 909,1,0,0,0,909,910,1,0,0,0,910,911,3,118,59,0,911,99,1,0,0,0,912,917,
+ 3,74,37,0,913,914,5,206,0,0,914,916,3,74,37,0,915,913,1,0,0,0,916,919,
+ 1,0,0,0,917,915,1,0,0,0,917,918,1,0,0,0,918,101,1,0,0,0,919,917,1,0,0,
+ 0,920,921,3,118,59,0,921,103,1,0,0,0,922,931,5,196,0,0,923,924,5,210,
+ 0,0,924,931,7,11,0,0,925,926,5,198,0,0,926,928,5,210,0,0,927,929,7,11,
+ 0,0,928,927,1,0,0,0,928,929,1,0,0,0,929,931,1,0,0,0,930,922,1,0,0,0,930,
+ 923,1,0,0,0,930,925,1,0,0,0,931,105,1,0,0,0,932,934,7,12,0,0,933,932,
+ 1,0,0,0,933,934,1,0,0,0,934,941,1,0,0,0,935,942,3,104,52,0,936,942,5,
+ 197,0,0,937,942,5,198,0,0,938,942,5,199,0,0,939,942,5,82,0,0,940,942,
+ 5,113,0,0,941,935,1,0,0,0,941,936,1,0,0,0,941,937,1,0,0,0,941,938,1,0,
+ 0,0,941,939,1,0,0,0,941,940,1,0,0,0,942,107,1,0,0,0,943,947,3,106,53,
+ 0,944,947,5,200,0,0,945,947,5,116,0,0,946,943,1,0,0,0,946,944,1,0,0,0,
+ 946,945,1,0,0,0,947,109,1,0,0,0,948,949,7,13,0,0,949,111,1,0,0,0,950,
+ 951,7,14,0,0,951,113,1,0,0,0,952,953,7,15,0,0,953,115,1,0,0,0,954,957,
+ 5,195,0,0,955,957,3,114,57,0,956,954,1,0,0,0,956,955,1,0,0,0,957,117,
+ 1,0,0,0,958,962,5,195,0,0,959,962,3,110,55,0,960,962,3,112,56,0,961,958,
+ 1,0,0,0,961,959,1,0,0,0,961,960,1,0,0,0,962,119,1,0,0,0,963,964,5,200,
+ 0,0,964,965,5,212,0,0,965,966,3,106,53,0,966,121,1,0,0,0,967,968,5,218,
+ 0,0,968,969,3,118,59,0,969,970,5,234,0,0,970,123,1,0,0,0,120,127,137,
+ 146,149,153,156,160,163,166,169,172,176,180,183,186,189,193,196,205,211,
+ 232,249,266,272,278,289,291,302,305,311,319,325,327,331,336,339,342,346,
+ 350,353,355,358,362,366,369,371,373,378,389,395,402,407,411,415,421,423,
+ 430,438,441,444,463,477,493,505,517,525,529,536,542,551,555,579,596,608,
+ 618,621,625,628,641,658,662,668,675,687,691,694,703,717,744,753,755,757,
+ 765,770,778,788,791,801,812,817,824,837,844,857,863,866,873,885,891,895,
+ 901,908,917,928,930,933,941,946,956,961
 };
 staticData->serializedATN = antlr4::atn::SerializedATNView(serializedATNSegment, sizeof(serializedATNSegment) / sizeof(serializedATNSegment[0]));
@@ -4552,41 +4550,6 @@ std::any HogQLParser::ColumnExprAliasContext::accept(tree::ParseTreeVisitor *vis
   else
     return visitor->visitChildren(this);
 }
 
-//----------------- ColumnExprExtractContext ------------------------------------------------------------------
-
-tree::TerminalNode* HogQLParser::ColumnExprExtractContext::EXTRACT() {
-  return getToken(HogQLParser::EXTRACT, 0);
-}
-
-tree::TerminalNode* HogQLParser::ColumnExprExtractContext::LPAREN() {
-  return getToken(HogQLParser::LPAREN, 0);
-}
-
-HogQLParser::IntervalContext* HogQLParser::ColumnExprExtractContext::interval() {
-  return getRuleContext<HogQLParser::IntervalContext>(0);
-}
-
-tree::TerminalNode* HogQLParser::ColumnExprExtractContext::FROM() {
-  return getToken(HogQLParser::FROM, 0);
-}
-
-HogQLParser::ColumnExprContext* HogQLParser::ColumnExprExtractContext::columnExpr() {
-  return getRuleContext<HogQLParser::ColumnExprContext>(0);
-}
-
-tree::TerminalNode* HogQLParser::ColumnExprExtractContext::RPAREN() {
-  return getToken(HogQLParser::RPAREN, 0);
-}
-
-HogQLParser::ColumnExprExtractContext::ColumnExprExtractContext(ColumnExprContext *ctx) { copyFrom(ctx); }
-
-
-std::any HogQLParser::ColumnExprExtractContext::accept(tree::ParseTreeVisitor *visitor) {
-  if (auto parserVisitor = dynamic_cast<HogQLParserVisitor*>(visitor))
-    return parserVisitor->visitColumnExprExtract(this);
-  else
-    return visitor->visitChildren(this);
-}
 
 //----------------- ColumnExprNegateContext ------------------------------------------------------------------
 
 tree::TerminalNode* HogQLParser::ColumnExprNegateContext::DASH() {
@@ -5514,7 +5477,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
   try {
     size_t alt;
     enterOuterAlt(_localctx, 1);
-    setState(669);
+    setState(662);
     _errHandler->sync(this);
     switch (getInterpreter()->adaptivePredict(_input, 80, _ctx)) {
     case 1: {
@@ -5599,86 +5562,67 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
     }
 
     case 4: {
-      _localctx = _tracker.createInstance(_localctx);
+      _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
       setState(568);
-      match(HogQLParser::EXTRACT);
+      match(HogQLParser::INTERVAL);
       setState(569);
-      match(HogQLParser::LPAREN);
+      columnExpr(0);
       setState(570);
       interval();
-      setState(571);
-      match(HogQLParser::FROM);
-      setState(572);
-      columnExpr(0);
-      setState(573);
-      match(HogQLParser::RPAREN);
       break;
     }
 
     case 5: {
-      _localctx = _tracker.createInstance(_localctx);
-      _ctx = _localctx;
-      previousContext = _localctx;
-      setState(575);
-      match(HogQLParser::INTERVAL);
-      setState(576);
-      columnExpr(0);
-      setState(577);
-      interval();
-      break;
-    }
-
-    case 6: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(579);
+      setState(572);
       match(HogQLParser::SUBSTRING);
-      setState(580);
+      setState(573);
       match(HogQLParser::LPAREN);
-      setState(581);
+      setState(574);
       columnExpr(0);
-      setState(582);
+      setState(575);
       match(HogQLParser::FROM);
-      setState(583);
+      setState(576);
       columnExpr(0);
-      setState(586);
+      setState(579);
       _errHandler->sync(this);
 
       _la = _input->LA(1);
       if (_la == HogQLParser::FOR) {
-        setState(584);
+        setState(577);
         match(HogQLParser::FOR);
-        setState(585);
+        setState(578);
         columnExpr(0);
       }
-      setState(588);
+      setState(581);
       match(HogQLParser::RPAREN);
       break;
     }
 
-    case 7: {
+    case 6: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(590);
+      setState(583);
       match(HogQLParser::TIMESTAMP);
-      setState(591);
+      setState(584);
       match(HogQLParser::STRING_LITERAL);
       break;
    }
 
-    case 8: {
+    case 7: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(592);
+      setState(585);
       match(HogQLParser::TRIM);
-      setState(593);
+      setState(586);
       match(HogQLParser::LPAREN);
-      setState(594);
+      setState(587);
       _la = _input->LA(1);
       if (!(_la == HogQLParser::BOTH || _la == HogQLParser::LEADING || _la == HogQLParser::TRAILING)) {
         _errHandler->recoverInline(this);
@@ -5687,27 +5631,27 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         _errHandler->reportMatch(this);
         consume();
       }
-      setState(595);
+      setState(588);
       match(HogQLParser::STRING_LITERAL);
-      setState(596);
+      setState(589);
       match(HogQLParser::FROM);
-      setState(597);
+      setState(590);
       columnExpr(0);
-      setState(598);
+      setState(591);
       match(HogQLParser::RPAREN);
       break;
     }
 
-    case 9: {
+    case 8: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(600);
+      setState(593);
       identifier();
-      setState(601);
+      setState(594);
       match(HogQLParser::LPAREN);
-      setState(603);
+      setState(596);
       _errHandler->sync(this);
 
       _la = _input->LA(1);
@@ -5716,32 +5660,32 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         ((1ULL << (_la - 64)) & -1) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 128)) & -9) != 0) || ((((_la - 192) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 192)) & 70263309817) != 0)) {
-        setState(602);
+        setState(595);
         columnExprList();
       }
-      setState(605);
+      setState(598);
       match(HogQLParser::RPAREN);
-      setState(607);
+      setState(600);
       match(HogQLParser::OVER);
-      setState(608);
+      setState(601);
       match(HogQLParser::LPAREN);
-      setState(609);
+      setState(602);
       windowExpr();
-      setState(610);
+      setState(603);
       match(HogQLParser::RPAREN);
       break;
     }
 
-    case 10: {
+    case 9: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(612);
+      setState(605);
       identifier();
-      setState(613);
+      setState(606);
       match(HogQLParser::LPAREN);
-      setState(615);
+      setState(608);
       _errHandler->sync(this);
 
       _la = _input->LA(1);
@@ -5750,32 +5694,32 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         ((1ULL << (_la - 64)) & -1) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 128)) & -9) != 0) || ((((_la - 192) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 192)) & 70263309817) != 0)) {
-        setState(614);
+        setState(607);
         columnExprList();
       }
-      setState(617);
+      setState(610);
       match(HogQLParser::RPAREN);
-      setState(619);
+      setState(612);
       match(HogQLParser::OVER);
-      setState(620);
+      setState(613);
       identifier();
       break;
     }
 
-    case 11: {
+    case 10: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(622);
+      setState(615);
       identifier();
-      setState(628);
+      setState(621);
       _errHandler->sync(this);
       switch (getInterpreter()->adaptivePredict(_input, 75, _ctx)) {
       case 1: {
-        setState(623);
+        setState(616);
        match(HogQLParser::LPAREN);
-        setState(625);
+        setState(618);
        _errHandler->sync(this);

        _la = _input->LA(1);
@@ -5784,10 +5728,10 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         ((1ULL << (_la - 64)) & -1) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 128)) & -9) != 0) || ((((_la - 192) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 192)) & 70263309817) != 0)) {
-          setState(624);
+          setState(617);
          columnExprList();
        }
-        setState(627);
+        setState(620);
        match(HogQLParser::RPAREN);
        break;
      }
 
@@ -5795,14 +5739,14 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
       default:
         break;
       }
-      setState(630);
+      setState(623);
       match(HogQLParser::LPAREN);
-      setState(632);
+      setState(625);
       _errHandler->sync(this);
       switch (getInterpreter()->adaptivePredict(_input, 76, _ctx)) {
       case 1: {
-        setState(631);
+        setState(624);
        match(HogQLParser::DISTINCT);
        break;
      }
 
@@ -5810,7 +5754,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
       default:
         break;
       }
-      setState(635);
+      setState(628);
       _errHandler->sync(this);
 
       _la = _input->LA(1);
@@ -5819,59 +5763,59 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         ((1ULL << (_la - 64)) & -1) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 128)) & -9) != 0) || ((((_la - 192) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 192)) & 70263309817) != 0)) {
-        setState(634);
+        setState(627);
         columnArgList();
       }
-      setState(637);
+      setState(630);
       match(HogQLParser::RPAREN);
       break;
     }
 
-    case 12: {
+    case 11: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(639);
+      setState(632);
       hogqlxTagElement();
       break;
     }
 
-    case 13: {
+    case 12: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(640);
+      setState(633);
       literal();
       break;
     }
 
-    case 14: {
+    case 13: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(641);
+      setState(634);
       match(HogQLParser::DASH);
-      setState(642);
+      setState(635);
       columnExpr(18);
       break;
     }
 
-    case 15: {
+    case 14: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(643);
+      setState(636);
       match(HogQLParser::NOT);
-      setState(644);
+      setState(637);
       columnExpr(12);
       break;
     }
 
-    case 16: {
+    case 15: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(648);
+      setState(641);
       _errHandler->sync(this);
 
       _la = _input->LA(1);
@@ -5881,62 +5825,62 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         ((1ULL << (_la - 128)) & -9) != 0) || _la == HogQLParser::JSON_TRUE || _la == HogQLParser::IDENTIFIER) {
-        setState(645);
+        setState(638);
         tableIdentifier();
-        setState(646);
+        setState(639);
         match(HogQLParser::DOT);
       }
-      setState(650);
+      setState(643);
       match(HogQLParser::ASTERISK);
       break;
     }
 
-    case 17: {
+    case 16: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(651);
+      setState(644);
       match(HogQLParser::LPAREN);
-      setState(652);
+      setState(645);
       selectUnionStmt();
-      setState(653);
+      setState(646);
       match(HogQLParser::RPAREN);
       break;
     }
 
-    case 18: {
+    case 17: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(655);
+      setState(648);
       match(HogQLParser::LPAREN);
-      setState(656);
+      setState(649);
       columnExpr(0);
-      setState(657);
+      setState(650);
       match(HogQLParser::RPAREN);
       break;
     }
 
-    case 19: {
+    case 18: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(659);
+      setState(652);
       match(HogQLParser::LPAREN);
-      setState(660);
+      setState(653);
       columnExprList();
-      setState(661);
+      setState(654);
       match(HogQLParser::RPAREN);
       break;
     }
 
-    case 20: {
+    case 19: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(663);
+      setState(656);
       match(HogQLParser::LBRACKET);
-      setState(665);
+      setState(658);
       _errHandler->sync(this);
 
       _la = _input->LA(1);
@@ -5945,19 +5889,19 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         ((1ULL << (_la - 64)) & -1) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 128)) & -9) != 0) || ((((_la - 192) & ~ 0x3fULL) == 0) &&
         ((1ULL << (_la - 192)) & 70263309817) != 0)) {
-        setState(664);
+        setState(657);
         columnExprList();
       }
-      setState(667);
+      setState(660);
       match(HogQLParser::RBRACKET);
       break;
     }
 
-    case 21: {
+    case 20: {
       _localctx = _tracker.createInstance(_localctx);
       _ctx = _localctx;
       previousContext = _localctx;
-      setState(668);
+      setState(661);
       columnIdentifier();
       break;
     }
@@ -5966,7 +5910,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
       break;
     }
     _ctx->stop = _input->LT(-1);
-    setState(764);
+    setState(757);
    _errHandler->sync(this);
    alt = getInterpreter()->adaptivePredict(_input, 91, _ctx);
    while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) {
@@ -5974,7 +5918,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
       if (!_parseListeners.empty())
         triggerExitRuleEvent();
       previousContext = _localctx;
-      setState(762);
+      setState(755);
       _errHandler->sync(this);
       switch (getInterpreter()->adaptivePredict(_input, 90, _ctx)) {
       case 1: {
@@ -5982,26 +5926,26 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         _localctx = newContext;
         newContext->left = previousContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(671);
+        setState(664);
         if (!(precpred(_ctx, 17))) throw FailedPredicateException(this, "precpred(_ctx, 17)");
-        setState(675);
+        setState(668);
         _errHandler->sync(this);
         switch (_input->LA(1)) {
           case HogQLParser::ASTERISK: {
-            setState(672);
+            setState(665);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::ASTERISK);
            break;
          }
 
          case HogQLParser::SLASH: {
-            setState(673);
+            setState(666);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::SLASH);
            break;
          }
 
          case HogQLParser::PERCENT: {
-            setState(674);
+            setState(667);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::PERCENT);
            break;
          }
 
@@ -6009,7 +5953,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
           default:
             throw NoViableAltException(this);
         }
-        setState(677);
+        setState(670);
        antlrcpp::downCast(_localctx)->right = columnExpr(18);
        break;
      }
 
@@ -6019,26 +5963,26 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         _localctx = newContext;
         newContext->left = previousContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(678);
+        setState(671);
         if (!(precpred(_ctx, 16))) throw FailedPredicateException(this, "precpred(_ctx, 16)");
-        setState(682);
+        setState(675);
         _errHandler->sync(this);
         switch (_input->LA(1)) {
           case HogQLParser::PLUS: {
-            setState(679);
+            setState(672);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::PLUS);
            break;
          }
 
          case HogQLParser::DASH: {
-            setState(680);
+            setState(673);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::DASH);
            break;
          }
 
          case HogQLParser::CONCAT: {
-            setState(681);
+            setState(674);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::CONCAT);
            break;
          }
 
@@ -6046,7 +5990,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
           default:
             throw NoViableAltException(this);
         }
-        setState(684);
+        setState(677);
        antlrcpp::downCast(_localctx)->right = columnExpr(17);
        break;
      }
 
@@ -6056,86 +6000,86 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         _localctx = newContext;
         newContext->left = previousContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(685);
+        setState(678);
         if (!(precpred(_ctx, 15))) throw FailedPredicateException(this, "precpred(_ctx, 15)");
-        setState(710);
+        setState(703);
         _errHandler->sync(this);
         switch (getInterpreter()->adaptivePredict(_input, 86, _ctx)) {
         case 1: {
-          setState(686);
+          setState(679);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::EQ_DOUBLE);
          break;
        }
 
        case 2: {
-          setState(687);
+          setState(680);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::EQ_SINGLE);
          break;
        }
 
        case 3: {
-          setState(688);
+          setState(681);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT_EQ);
          break;
        }
 
        case 4: {
-          setState(689);
+          setState(682);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::LT_EQ);
          break;
        }
 
        case 5: {
-          setState(690);
+          setState(683);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::LT);
          break;
        }
 
        case 6: {
-          setState(691);
+          setState(684);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::GT_EQ);
          break;
        }
 
        case 7: {
-          setState(692);
+          setState(685);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::GT);
          break;
        }
 
        case 8: {
-          setState(694);
+          setState(687);
          _errHandler->sync(this);

          _la = _input->LA(1);
          if (_la == HogQLParser::NOT) {
-            setState(693);
+            setState(686);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT);
          }
-          setState(696);
+          setState(689);
          match(HogQLParser::IN);
-          setState(698);
+          setState(691);
          _errHandler->sync(this);

          _la = _input->LA(1);
          if (_la == HogQLParser::COHORT) {
-            setState(697);
+            setState(690);
            match(HogQLParser::COHORT);
          }
          break;
        }
 
        case 9: {
-          setState(701);
+          setState(694);
          _errHandler->sync(this);

          _la = _input->LA(1);
          if (_la == HogQLParser::NOT) {
-            setState(700);
+            setState(693);
            antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT);
          }
-          setState(703);
+          setState(696);
          _la = _input->LA(1);
          if (!(_la == HogQLParser::ILIKE
@@ -6150,37 +6094,37 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         }
 
         case 10: {
-          setState(704);
+          setState(697);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::REGEX_SINGLE);
          break;
        }
 
        case 11: {
-          setState(705);
+          setState(698);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::REGEX_DOUBLE);
          break;
        }
 
        case 12: {
-          setState(706);
+          setState(699);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT_REGEX);
          break;
        }
 
        case 13: {
-          setState(707);
+          setState(700);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::IREGEX_SINGLE);
          break;
        }
 
        case 14: {
-          setState(708);
+          setState(701);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::IREGEX_DOUBLE);
          break;
        }
 
        case 15: {
-          setState(709);
+          setState(702);
          antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT_IREGEX);
          break;
        }
 
@@ -6188,7 +6132,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         default:
           break;
         }
-        setState(712);
+        setState(705);
        antlrcpp::downCast(_localctx)->right = columnExpr(16);
        break;
      }
 
@@ -6197,12 +6141,12 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(713);
+        setState(706);
         if (!(precpred(_ctx, 13))) throw FailedPredicateException(this, "precpred(_ctx, 13)");
-        setState(714);
+        setState(707);
        match(HogQLParser::NULLISH);
-        setState(715);
+        setState(708);
        columnExpr(14);
        break;
      }
 
@@ -6211,12 +6155,12 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(716);
+        setState(709);
         if (!(precpred(_ctx, 11))) throw FailedPredicateException(this, "precpred(_ctx, 11)");
-        setState(717);
+        setState(710);
        match(HogQLParser::AND);
-        setState(718);
+        setState(711);
        columnExpr(12);
        break;
      }
 
@@ -6225,12 +6169,12 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(719);
+        setState(712);
         if (!(precpred(_ctx, 10))) throw FailedPredicateException(this, "precpred(_ctx, 10)");
-        setState(720);
+        setState(713);
        match(HogQLParser::OR);
-        setState(721);
+        setState(714);
        columnExpr(11);
        break;
      }
 
@@ -6239,24 +6183,24 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(722);
+        setState(715);
         if (!(precpred(_ctx, 9))) throw FailedPredicateException(this, "precpred(_ctx, 9)");
-        setState(724);
+        setState(717);
        _errHandler->sync(this);

        _la = _input->LA(1);
        if (_la == HogQLParser::NOT) {
-          setState(723);
+          setState(716);
          match(HogQLParser::NOT);
        }
-        setState(726);
+        setState(719);
        match(HogQLParser::BETWEEN);
-        setState(727);
+        setState(720);
        columnExpr(0);
-        setState(728);
+        setState(721);
        match(HogQLParser::AND);
-        setState(729);
+        setState(722);
        columnExpr(10);
        break;
      }
 
@@ -6265,16 +6209,16 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(731);
+        setState(724);
         if (!(precpred(_ctx, 8))) throw FailedPredicateException(this, "precpred(_ctx, 8)");
-        setState(732);
+        setState(725);
        match(HogQLParser::QUERY);
-        setState(733);
+        setState(726);
        columnExpr(0);
-        setState(734);
+        setState(727);
        match(HogQLParser::COLON);
-        setState(735);
+        setState(728);
        columnExpr(8);
        break;
      }
 
@@ -6283,14 +6227,14 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(737);
+        setState(730);
         if (!(precpred(_ctx, 21))) throw FailedPredicateException(this, "precpred(_ctx, 21)");
-        setState(738);
+        setState(731);
        match(HogQLParser::LBRACKET);
-        setState(739);
+        setState(732);
        columnExpr(0);
-        setState(740);
+        setState(733);
        match(HogQLParser::RBRACKET);
        break;
      }
 
@@ -6299,12 +6243,12 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(742);
+        setState(735);
         if (!(precpred(_ctx, 20))) throw FailedPredicateException(this, "precpred(_ctx, 20)");
-        setState(743);
+        setState(736);
        match(HogQLParser::DOT);
-        setState(744);
+        setState(737);
        match(HogQLParser::DECIMAL_LITERAL);
        break;
      }
 
@@ -6313,12 +6257,12 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) {
         auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState));
         _localctx = newContext;
         pushNewRecursionContext(newContext, startState, RuleColumnExpr);
-        setState(745);
+        setState(738);
        if (!(precpred(_ctx, 19))) throw
FailedPredicateException(this, "precpred(_ctx, 19)"); - setState(746); + setState(739); match(HogQLParser::DOT); - setState(747); + setState(740); identifier(); break; } @@ -6327,20 +6271,20 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(748); + setState(741); if (!(precpred(_ctx, 14))) throw FailedPredicateException(this, "precpred(_ctx, 14)"); - setState(749); + setState(742); match(HogQLParser::IS); - setState(751); + setState(744); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::NOT) { - setState(750); + setState(743); match(HogQLParser::NOT); } - setState(753); + setState(746); match(HogQLParser::NULL_SQL); break; } @@ -6349,30 +6293,30 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(754); + setState(747); if (!(precpred(_ctx, 7))) throw FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(760); + setState(753); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 89, _ctx)) { case 1: { - setState(755); + setState(748); alias(); break; } case 2: { - setState(756); + setState(749); match(HogQLParser::AS); - setState(757); + setState(750); identifier(); break; } case 3: { - setState(758); + setState(751); match(HogQLParser::AS); - setState(759); + setState(752); match(HogQLParser::STRING_LITERAL); break; } @@ -6387,7 +6331,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { break; } } - setState(766); + setState(759); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 91, _ctx); } @@ -6449,17 +6393,17 @@ HogQLParser::ColumnArgListContext* HogQLParser::columnArgList() { }); try { enterOuterAlt(_localctx, 1); - setState(767); + setState(760); columnArgExpr(); - setState(772); + setState(765); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(768); + setState(761); match(HogQLParser::COMMA); - setState(769); + setState(762); columnArgExpr(); - setState(774); + setState(767); _errHandler->sync(this); _la = _input->LA(1); } @@ -6513,19 +6457,19 @@ HogQLParser::ColumnArgExprContext* HogQLParser::columnArgExpr() { exitRule(); }); try { - setState(777); + setState(770); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 93, _ctx)) { case 1: { enterOuterAlt(_localctx, 1); - setState(775); + setState(768); columnLambdaExpr(); break; } case 2: { enterOuterAlt(_localctx, 2); - setState(776); + setState(769); columnExpr(0); break; } @@ -6609,27 +6553,27 @@ HogQLParser::ColumnLambdaExprContext* HogQLParser::columnLambdaExpr() { }); try { enterOuterAlt(_localctx, 1); - setState(798); + setState(791); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::LPAREN: { - setState(779); + setState(772); match(HogQLParser::LPAREN); - setState(780); + setState(773); identifier(); - setState(785); + setState(778); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(781); + setState(774); match(HogQLParser::COMMA); - setState(782); + setState(775); identifier(); - setState(787); + setState(780); _errHandler->sync(this); _la = _input->LA(1); } - 
setState(788); + setState(781); match(HogQLParser::RPAREN); break; } @@ -6821,17 +6765,17 @@ HogQLParser::ColumnLambdaExprContext* HogQLParser::columnLambdaExpr() { case HogQLParser::JSON_FALSE: case HogQLParser::JSON_TRUE: case HogQLParser::IDENTIFIER: { - setState(790); + setState(783); identifier(); - setState(795); + setState(788); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(791); + setState(784); match(HogQLParser::COMMA); - setState(792); + setState(785); identifier(); - setState(797); + setState(790); _errHandler->sync(this); _la = _input->LA(1); } @@ -6841,9 +6785,9 @@ HogQLParser::ColumnLambdaExprContext* HogQLParser::columnLambdaExpr() { default: throw NoViableAltException(this); } - setState(800); + setState(793); match(HogQLParser::ARROW); - setState(801); + setState(794); columnExpr(0); } @@ -6970,17 +6914,17 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { exitRule(); }); try { - setState(831); + setState(824); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 100, _ctx)) { case 1: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 1); - setState(803); + setState(796); match(HogQLParser::LT); - setState(804); + setState(797); identifier(); - setState(808); + setState(801); _errHandler->sync(this); _la = _input->LA(1); while ((((_la & ~ 0x3fULL) == 0) && @@ -6989,15 +6933,15 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { ((1ULL << (_la - 128)) & -9) != 0) || _la == HogQLParser::JSON_TRUE || _la == HogQLParser::IDENTIFIER) { - setState(805); + setState(798); hogqlxTagAttribute(); - setState(810); + setState(803); _errHandler->sync(this); _la = _input->LA(1); } - setState(811); + setState(804); match(HogQLParser::SLASH); - setState(812); + setState(805); match(HogQLParser::GT); break; } @@ -7005,11 +6949,11 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { case 2: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 2); - setState(814); + setState(807); match(HogQLParser::LT); - setState(815); + setState(808); identifier(); - setState(819); + setState(812); _errHandler->sync(this); _la = _input->LA(1); while ((((_la & ~ 0x3fULL) == 0) && @@ -7018,20 +6962,20 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { ((1ULL << (_la - 128)) & -9) != 0) || _la == HogQLParser::JSON_TRUE || _la == HogQLParser::IDENTIFIER) { - setState(816); + setState(809); hogqlxTagAttribute(); - setState(821); + setState(814); _errHandler->sync(this); _la = _input->LA(1); } - setState(822); + setState(815); match(HogQLParser::GT); - setState(824); + setState(817); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 99, _ctx)) { case 1: { - setState(823); + setState(816); hogqlxTagElement(); break; } @@ -7039,13 +6983,13 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { default: break; } - setState(826); + setState(819); match(HogQLParser::LT); - setState(827); + setState(820); match(HogQLParser::SLASH); - setState(828); + setState(821); identifier(); - setState(829); + setState(822); match(HogQLParser::GT); break; } @@ -7119,38 +7063,38 @@ HogQLParser::HogqlxTagAttributeContext* HogQLParser::hogqlxTagAttribute() { exitRule(); }); try { - setState(844); + setState(837); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 101, _ctx)) { case 1: { enterOuterAlt(_localctx, 1); - setState(833); + setState(826); 
identifier(); - setState(834); + setState(827); match(HogQLParser::EQ_SINGLE); - setState(835); + setState(828); match(HogQLParser::STRING_LITERAL); break; } case 2: { enterOuterAlt(_localctx, 2); - setState(837); + setState(830); identifier(); - setState(838); + setState(831); match(HogQLParser::EQ_SINGLE); - setState(839); + setState(832); match(HogQLParser::LBRACE); - setState(840); + setState(833); columnExpr(0); - setState(841); + setState(834); match(HogQLParser::RBRACE); break; } case 3: { enterOuterAlt(_localctx, 3); - setState(843); + setState(836); identifier(); break; } @@ -7218,17 +7162,17 @@ HogQLParser::WithExprListContext* HogQLParser::withExprList() { }); try { enterOuterAlt(_localctx, 1); - setState(846); + setState(839); withExpr(); - setState(851); + setState(844); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(847); + setState(840); match(HogQLParser::COMMA); - setState(848); + setState(841); withExpr(); - setState(853); + setState(846); _errHandler->sync(this); _la = _input->LA(1); } @@ -7324,21 +7268,21 @@ HogQLParser::WithExprContext* HogQLParser::withExpr() { exitRule(); }); try { - setState(864); + setState(857); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 103, _ctx)) { case 1: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 1); - setState(854); + setState(847); identifier(); - setState(855); + setState(848); match(HogQLParser::AS); - setState(856); + setState(849); match(HogQLParser::LPAREN); - setState(857); + setState(850); selectUnionStmt(); - setState(858); + setState(851); match(HogQLParser::RPAREN); break; } @@ -7346,11 +7290,11 @@ HogQLParser::WithExprContext* HogQLParser::withExpr() { case 2: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 2); - setState(860); + setState(853); columnExpr(0); - setState(861); + setState(854); match(HogQLParser::AS); - setState(862); + setState(855); identifier(); break; } @@ -7416,12 +7360,12 @@ HogQLParser::ColumnIdentifierContext* HogQLParser::columnIdentifier() { exitRule(); }); try { - setState(873); + setState(866); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::LBRACE: { enterOuterAlt(_localctx, 1); - setState(866); + setState(859); placeholder(); break; } @@ -7614,14 +7558,14 @@ HogQLParser::ColumnIdentifierContext* HogQLParser::columnIdentifier() { case HogQLParser::JSON_TRUE: case HogQLParser::IDENTIFIER: { enterOuterAlt(_localctx, 2); - setState(870); + setState(863); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 104, _ctx)) { case 1: { - setState(867); + setState(860); tableIdentifier(); - setState(868); + setState(861); match(HogQLParser::DOT); break; } @@ -7629,7 +7573,7 @@ HogQLParser::ColumnIdentifierContext* HogQLParser::columnIdentifier() { default: break; } - setState(872); + setState(865); nestedIdentifier(); break; } @@ -7697,19 +7641,19 @@ HogQLParser::NestedIdentifierContext* HogQLParser::nestedIdentifier() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(875); + setState(868); identifier(); - setState(880); + setState(873); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 106, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(876); + setState(869); match(HogQLParser::DOT); - setState(877); + setState(870); identifier(); } - setState(882); + setState(875); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 106, _ctx); } @@ 
-7875,7 +7819,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(892); + setState(885); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 107, _ctx)) { case 1: { @@ -7883,7 +7827,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _ctx = _localctx; previousContext = _localctx; - setState(884); + setState(877); tableIdentifier(); break; } @@ -7892,7 +7836,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(885); + setState(878); tableFunctionExpr(); break; } @@ -7901,11 +7845,11 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(886); + setState(879); match(HogQLParser::LPAREN); - setState(887); + setState(880); selectUnionStmt(); - setState(888); + setState(881); match(HogQLParser::RPAREN); break; } @@ -7914,7 +7858,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(890); + setState(883); hogqlxTagElement(); break; } @@ -7923,7 +7867,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(891); + setState(884); placeholder(); break; } @@ -7932,7 +7876,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { break; } _ctx->stop = _input->LT(-1); - setState(902); + setState(895); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 109, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { @@ -7943,10 +7887,10 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleTableExpr); - setState(894); + setState(887); if (!(precpred(_ctx, 3))) throw FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(898); + setState(891); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::DATE: @@ -7954,15 +7898,15 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { case HogQLParser::ID: case HogQLParser::KEY: case HogQLParser::IDENTIFIER: { - setState(895); + setState(888); alias(); break; } case HogQLParser::AS: { - setState(896); + setState(889); match(HogQLParser::AS); - setState(897); + setState(890); identifier(); break; } @@ -7971,7 +7915,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { throw NoViableAltException(this); } } - setState(904); + setState(897); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 109, _ctx); } @@ -8033,11 +7977,11 @@ HogQLParser::TableFunctionExprContext* HogQLParser::tableFunctionExpr() { }); try { enterOuterAlt(_localctx, 1); - setState(905); + setState(898); identifier(); - setState(906); + setState(899); match(HogQLParser::LPAREN); - setState(908); + setState(901); _errHandler->sync(this); _la = _input->LA(1); @@ -8046,10 +7990,10 @@ HogQLParser::TableFunctionExprContext* HogQLParser::tableFunctionExpr() { ((1ULL << (_la - 64)) & -1) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 
128)) & -9) != 0) || ((((_la - 192) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 192)) & 70263309817) != 0)) { - setState(907); + setState(900); tableArgList(); } - setState(910); + setState(903); match(HogQLParser::RPAREN); } @@ -8106,14 +8050,14 @@ HogQLParser::TableIdentifierContext* HogQLParser::tableIdentifier() { }); try { enterOuterAlt(_localctx, 1); - setState(915); + setState(908); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 111, _ctx)) { case 1: { - setState(912); + setState(905); databaseIdentifier(); - setState(913); + setState(906); match(HogQLParser::DOT); break; } @@ -8121,7 +8065,7 @@ HogQLParser::TableIdentifierContext* HogQLParser::tableIdentifier() { default: break; } - setState(917); + setState(910); identifier(); } @@ -8183,17 +8127,17 @@ HogQLParser::TableArgListContext* HogQLParser::tableArgList() { }); try { enterOuterAlt(_localctx, 1); - setState(919); + setState(912); columnExpr(0); - setState(924); + setState(917); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(920); + setState(913); match(HogQLParser::COMMA); - setState(921); + setState(914); columnExpr(0); - setState(926); + setState(919); _errHandler->sync(this); _la = _input->LA(1); } @@ -8244,7 +8188,7 @@ HogQLParser::DatabaseIdentifierContext* HogQLParser::databaseIdentifier() { }); try { enterOuterAlt(_localctx, 1); - setState(927); + setState(920); identifier(); } @@ -8309,21 +8253,21 @@ HogQLParser::FloatingLiteralContext* HogQLParser::floatingLiteral() { exitRule(); }); try { - setState(937); + setState(930); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::FLOATING_LITERAL: { enterOuterAlt(_localctx, 1); - setState(929); + setState(922); match(HogQLParser::FLOATING_LITERAL); break; } case HogQLParser::DOT: { enterOuterAlt(_localctx, 2); - setState(930); + setState(923); match(HogQLParser::DOT); - setState(931); + setState(924); _la = _input->LA(1); if (!(_la == HogQLParser::OCTAL_LITERAL @@ -8339,16 +8283,16 @@ HogQLParser::FloatingLiteralContext* HogQLParser::floatingLiteral() { case HogQLParser::DECIMAL_LITERAL: { enterOuterAlt(_localctx, 3); - setState(932); + setState(925); match(HogQLParser::DECIMAL_LITERAL); - setState(933); + setState(926); match(HogQLParser::DOT); - setState(935); + setState(928); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 113, _ctx)) { case 1: { - setState(934); + setState(927); _la = _input->LA(1); if (!(_la == HogQLParser::OCTAL_LITERAL @@ -8447,14 +8391,14 @@ HogQLParser::NumberLiteralContext* HogQLParser::numberLiteral() { }); try { enterOuterAlt(_localctx, 1); - setState(940); + setState(933); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::DASH || _la == HogQLParser::PLUS) { - setState(939); + setState(932); _la = _input->LA(1); if (!(_la == HogQLParser::DASH @@ -8466,41 +8410,41 @@ HogQLParser::NumberLiteralContext* HogQLParser::numberLiteral() { consume(); } } - setState(948); + setState(941); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 116, _ctx)) { case 1: { - setState(942); + setState(935); floatingLiteral(); break; } case 2: { - setState(943); + setState(936); match(HogQLParser::OCTAL_LITERAL); break; } case 3: { - setState(944); + setState(937); match(HogQLParser::DECIMAL_LITERAL); break; } case 4: { - setState(945); + setState(938); match(HogQLParser::HEXADECIMAL_LITERAL); break; } case 5: { - setState(946); + setState(939); match(HogQLParser::INF); break; } case 6: { - setState(947); + 
setState(940); match(HogQLParser::NAN_SQL); break; } @@ -8562,7 +8506,7 @@ HogQLParser::LiteralContext* HogQLParser::literal() { exitRule(); }); try { - setState(953); + setState(946); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::INF: @@ -8575,21 +8519,21 @@ HogQLParser::LiteralContext* HogQLParser::literal() { case HogQLParser::DOT: case HogQLParser::PLUS: { enterOuterAlt(_localctx, 1); - setState(950); + setState(943); numberLiteral(); break; } case HogQLParser::STRING_LITERAL: { enterOuterAlt(_localctx, 2); - setState(951); + setState(944); match(HogQLParser::STRING_LITERAL); break; } case HogQLParser::NULL_SQL: { enterOuterAlt(_localctx, 3); - setState(952); + setState(945); match(HogQLParser::NULL_SQL); break; } @@ -8673,7 +8617,7 @@ HogQLParser::IntervalContext* HogQLParser::interval() { }); try { enterOuterAlt(_localctx, 1); - setState(955); + setState(948); _la = _input->LA(1); if (!(_la == HogQLParser::DAY || ((((_la - 76) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 76)) & 72057615512764417) != 0) || ((((_la - 145) & ~ 0x3fULL) == 0) && @@ -9440,7 +9384,7 @@ HogQLParser::KeywordContext* HogQLParser::keyword() { }); try { enterOuterAlt(_localctx, 1); - setState(957); + setState(950); _la = _input->LA(1); if (!(((((_la - 2) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 2)) & -34368126977) != 0) || ((((_la - 66) & ~ 0x3fULL) == 0) && @@ -9512,7 +9456,7 @@ HogQLParser::KeywordForAliasContext* HogQLParser::keywordForAlias() { }); try { enterOuterAlt(_localctx, 1); - setState(959); + setState(952); _la = _input->LA(1); if (!(((((_la - 36) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 36)) & 36030996109328385) != 0))) { @@ -9572,12 +9516,12 @@ HogQLParser::AliasContext* HogQLParser::alias() { exitRule(); }); try { - setState(963); + setState(956); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::IDENTIFIER: { enterOuterAlt(_localctx, 1); - setState(961); + setState(954); match(HogQLParser::IDENTIFIER); break; } @@ -9587,7 +9531,7 @@ HogQLParser::AliasContext* HogQLParser::alias() { case HogQLParser::ID: case HogQLParser::KEY: { enterOuterAlt(_localctx, 2); - setState(962); + setState(955); keywordForAlias(); break; } @@ -9649,12 +9593,12 @@ HogQLParser::IdentifierContext* HogQLParser::identifier() { exitRule(); }); try { - setState(968); + setState(961); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::IDENTIFIER: { enterOuterAlt(_localctx, 1); - setState(965); + setState(958); match(HogQLParser::IDENTIFIER); break; } @@ -9668,7 +9612,7 @@ HogQLParser::IdentifierContext* HogQLParser::identifier() { case HogQLParser::WEEK: case HogQLParser::YEAR: { enterOuterAlt(_localctx, 2); - setState(966); + setState(959); interval(); break; } @@ -9852,7 +9796,7 @@ HogQLParser::IdentifierContext* HogQLParser::identifier() { case HogQLParser::JSON_FALSE: case HogQLParser::JSON_TRUE: { enterOuterAlt(_localctx, 3); - setState(967); + setState(960); keyword(); break; } @@ -9915,11 +9859,11 @@ HogQLParser::EnumValueContext* HogQLParser::enumValue() { }); try { enterOuterAlt(_localctx, 1); - setState(970); + setState(963); match(HogQLParser::STRING_LITERAL); - setState(971); + setState(964); match(HogQLParser::EQ_SINGLE); - setState(972); + setState(965); numberLiteral(); } @@ -9976,11 +9920,11 @@ HogQLParser::PlaceholderContext* HogQLParser::placeholder() { }); try { enterOuterAlt(_localctx, 1); - setState(974); + setState(967); match(HogQLParser::LBRACE); - setState(975); + setState(968); identifier(); - setState(976); + setState(969); match(HogQLParser::RBRACE); 
} diff --git a/hogql_parser/HogQLParser.h b/hogql_parser/HogQLParser.h index 49c38fad2be89..058a3332af50c 100644 --- a/hogql_parser/HogQLParser.h +++ b/hogql_parser/HogQLParser.h @@ -988,20 +988,6 @@ class HogQLParser : public antlr4::Parser { virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; }; - class ColumnExprExtractContext : public ColumnExprContext { - public: - ColumnExprExtractContext(ColumnExprContext *ctx); - - antlr4::tree::TerminalNode *EXTRACT(); - antlr4::tree::TerminalNode *LPAREN(); - IntervalContext *interval(); - antlr4::tree::TerminalNode *FROM(); - ColumnExprContext *columnExpr(); - antlr4::tree::TerminalNode *RPAREN(); - - virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; - }; - class ColumnExprNegateContext : public ColumnExprContext { public: ColumnExprNegateContext(ColumnExprContext *ctx); diff --git a/hogql_parser/HogQLParser.interp b/hogql_parser/HogQLParser.interp index 2b24b72a2819d..b159bc05eb424 100644 --- a/hogql_parser/HogQLParser.interp +++ b/hogql_parser/HogQLParser.interp @@ -554,4 +554,4 @@ placeholder atn: -[4, 1, 242, 979, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 3, 0, 128, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 136, 8, 1, 10, 1, 12, 1, 139, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 147, 8, 2, 1, 3, 3, 3, 150, 8, 3, 1, 3, 1, 3, 3, 3, 154, 8, 3, 1, 3, 3, 3, 157, 8, 3, 1, 3, 1, 3, 3, 3, 161, 8, 3, 1, 3, 3, 3, 164, 8, 3, 1, 3, 3, 3, 167, 8, 3, 1, 3, 3, 3, 170, 8, 3, 1, 3, 3, 3, 173, 8, 3, 1, 3, 1, 3, 3, 3, 177, 8, 3, 1, 3, 1, 3, 3, 3, 181, 8, 3, 1, 3, 3, 3, 184, 8, 3, 1, 3, 3, 3, 187, 8, 3, 1, 3, 3, 3, 190, 8, 3, 1, 3, 1, 3, 3, 3, 194, 8, 3, 1, 3, 3, 3, 197, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 206, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 3, 7, 212, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 231, 8, 8, 10, 8, 12, 8, 234, 9, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 250, 8, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 267, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 273, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 279, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 290, 8, 15, 3, 15, 292, 8, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 3, 18, 303, 8, 18, 1, 18, 3, 18, 306, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 312, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 320, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 326, 8, 18, 10, 18, 12, 18, 329, 9, 18, 1, 19, 3, 19, 332, 8, 19, 1, 19, 1, 19, 1, 19, 3, 19, 337, 8, 19, 1, 
19, 3, 19, 340, 8, 19, 1, 19, 3, 19, 343, 8, 19, 1, 19, 1, 19, 3, 19, 347, 8, 19, 1, 19, 1, 19, 3, 19, 351, 8, 19, 1, 19, 3, 19, 354, 8, 19, 3, 19, 356, 8, 19, 1, 19, 3, 19, 359, 8, 19, 1, 19, 1, 19, 3, 19, 363, 8, 19, 1, 19, 1, 19, 3, 19, 367, 8, 19, 1, 19, 3, 19, 370, 8, 19, 3, 19, 372, 8, 19, 3, 19, 374, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 379, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 390, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 3, 22, 396, 8, 22, 1, 23, 1, 23, 1, 23, 5, 23, 401, 8, 23, 10, 23, 12, 23, 404, 9, 23, 1, 24, 1, 24, 3, 24, 408, 8, 24, 1, 24, 1, 24, 3, 24, 412, 8, 24, 1, 24, 1, 24, 3, 24, 416, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 422, 8, 25, 3, 25, 424, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 429, 8, 26, 10, 26, 12, 26, 432, 9, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 439, 8, 28, 1, 28, 3, 28, 442, 8, 28, 1, 28, 3, 28, 445, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 464, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 478, 8, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 492, 8, 35, 10, 35, 12, 35, 495, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 504, 8, 35, 10, 35, 12, 35, 507, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 516, 8, 35, 10, 35, 12, 35, 519, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 526, 8, 35, 1, 35, 1, 35, 3, 35, 530, 8, 35, 1, 36, 1, 36, 1, 36, 5, 36, 535, 8, 36, 10, 36, 12, 36, 538, 9, 36, 1, 37, 1, 37, 1, 37, 3, 37, 543, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 550, 8, 37, 11, 37, 12, 37, 551, 1, 37, 1, 37, 3, 37, 556, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 587, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 604, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 616, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 626, 8, 37, 1, 37, 3, 37, 629, 8, 37, 1, 37, 1, 37, 3, 37, 633, 8, 37, 1, 37, 3, 37, 636, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 649, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 666, 8, 37, 1, 37, 1, 37, 3, 37, 670, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 676, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 683, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 695, 8, 37, 1, 37, 1, 37, 3, 37, 699, 8, 37, 1, 37, 3, 37, 702, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 711, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 725, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 752, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 761, 8, 37, 5, 37, 763, 8, 37, 10, 37, 12, 37, 766, 9, 37, 1, 38, 1, 38, 1, 38, 5, 38, 771, 8, 38, 10, 38, 12, 38, 774, 9, 38, 1, 39, 1, 39, 3, 39, 778, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 784, 8, 40, 10, 40, 12, 40, 787, 9, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 
794, 8, 40, 10, 40, 12, 40, 797, 9, 40, 3, 40, 799, 8, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 5, 41, 807, 8, 41, 10, 41, 12, 41, 810, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 5, 41, 818, 8, 41, 10, 41, 12, 41, 821, 9, 41, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 832, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 845, 8, 42, 1, 43, 1, 43, 1, 43, 5, 43, 850, 8, 43, 10, 43, 12, 43, 853, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 865, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 871, 8, 45, 1, 45, 3, 45, 874, 8, 45, 1, 46, 1, 46, 1, 46, 5, 46, 879, 8, 46, 10, 46, 12, 46, 882, 9, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 893, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 899, 8, 47, 5, 47, 901, 8, 47, 10, 47, 12, 47, 904, 9, 47, 1, 48, 1, 48, 1, 48, 3, 48, 909, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 3, 49, 916, 8, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 5, 50, 923, 8, 50, 10, 50, 12, 50, 926, 9, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 936, 8, 52, 3, 52, 938, 8, 52, 1, 53, 3, 53, 941, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 949, 8, 53, 1, 54, 1, 54, 1, 54, 3, 54, 954, 8, 54, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 3, 58, 964, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 969, 8, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 0, 3, 36, 74, 94, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 16, 2, 0, 32, 32, 141, 141, 2, 0, 84, 84, 96, 96, 3, 0, 4, 4, 8, 8, 12, 12, 4, 0, 4, 4, 7, 8, 12, 12, 147, 147, 2, 0, 96, 96, 140, 140, 2, 0, 4, 4, 8, 8, 2, 0, 11, 11, 42, 43, 2, 0, 62, 62, 93, 93, 2, 0, 133, 133, 143, 143, 3, 0, 17, 17, 95, 95, 170, 170, 2, 0, 79, 79, 98, 98, 1, 0, 197, 198, 2, 0, 208, 208, 228, 228, 8, 0, 37, 37, 76, 76, 108, 108, 110, 110, 132, 132, 145, 145, 185, 185, 190, 190, 13, 0, 2, 24, 26, 36, 38, 75, 77, 81, 83, 107, 109, 109, 111, 112, 114, 115, 117, 130, 133, 144, 146, 184, 186, 189, 191, 192, 4, 0, 36, 36, 62, 62, 77, 77, 91, 91, 1107, 0, 127, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 4, 146, 1, 0, 0, 0, 6, 149, 1, 0, 0, 0, 8, 198, 1, 0, 0, 0, 10, 201, 1, 0, 0, 0, 12, 207, 1, 0, 0, 0, 14, 211, 1, 0, 0, 0, 16, 217, 1, 0, 0, 0, 18, 235, 1, 0, 0, 0, 20, 238, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 251, 1, 0, 0, 0, 26, 254, 1, 0, 0, 0, 28, 258, 1, 0, 0, 0, 30, 291, 1, 0, 0, 0, 32, 293, 1, 0, 0, 0, 34, 296, 1, 0, 0, 0, 36, 311, 1, 0, 0, 0, 38, 373, 1, 0, 0, 0, 40, 378, 1, 0, 0, 0, 42, 389, 1, 0, 0, 0, 44, 391, 1, 0, 0, 0, 46, 397, 1, 0, 0, 0, 48, 405, 1, 0, 0, 0, 50, 423, 1, 0, 0, 0, 52, 425, 1, 0, 0, 0, 54, 433, 1, 0, 0, 0, 56, 438, 1, 0, 0, 0, 58, 446, 1, 0, 0, 0, 60, 450, 1, 0, 0, 0, 62, 454, 1, 0, 0, 0, 64, 463, 1, 0, 0, 0, 66, 477, 1, 0, 0, 0, 68, 479, 1, 0, 0, 0, 70, 529, 1, 0, 0, 0, 72, 531, 1, 0, 0, 0, 74, 669, 1, 0, 0, 0, 76, 767, 1, 0, 0, 0, 78, 777, 1, 0, 0, 0, 80, 798, 1, 0, 0, 0, 82, 831, 1, 0, 0, 0, 84, 844, 1, 0, 0, 0, 86, 846, 1, 0, 0, 0, 88, 864, 1, 0, 0, 0, 90, 873, 1, 0, 0, 0, 92, 875, 1, 0, 0, 0, 94, 892, 1, 0, 0, 0, 96, 905, 1, 0, 0, 0, 98, 915, 1, 0, 0, 0, 100, 919, 1, 0, 0, 0, 102, 927, 1, 0, 0, 0, 104, 937, 1, 0, 0, 0, 106, 940, 1, 0, 0, 0, 108, 953, 1, 0, 0, 0, 110, 955, 1, 0, 0, 0, 112, 957, 1, 0, 0, 0, 114, 959, 1, 0, 0, 0, 116, 963, 
1, 0, 0, 0, 118, 968, 1, 0, 0, 0, 120, 970, 1, 0, 0, 0, 122, 974, 1, 0, 0, 0, 124, 128, 3, 2, 1, 0, 125, 128, 3, 6, 3, 0, 126, 128, 3, 82, 41, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 129, 1, 0, 0, 0, 129, 130, 5, 0, 0, 1, 130, 1, 1, 0, 0, 0, 131, 137, 3, 4, 2, 0, 132, 133, 5, 176, 0, 0, 133, 134, 5, 4, 0, 0, 134, 136, 3, 4, 2, 0, 135, 132, 1, 0, 0, 0, 136, 139, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 3, 1, 0, 0, 0, 139, 137, 1, 0, 0, 0, 140, 147, 3, 6, 3, 0, 141, 142, 5, 220, 0, 0, 142, 143, 3, 2, 1, 0, 143, 144, 5, 236, 0, 0, 144, 147, 1, 0, 0, 0, 145, 147, 3, 122, 61, 0, 146, 140, 1, 0, 0, 0, 146, 141, 1, 0, 0, 0, 146, 145, 1, 0, 0, 0, 147, 5, 1, 0, 0, 0, 148, 150, 3, 8, 4, 0, 149, 148, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 151, 1, 0, 0, 0, 151, 153, 5, 146, 0, 0, 152, 154, 5, 49, 0, 0, 153, 152, 1, 0, 0, 0, 153, 154, 1, 0, 0, 0, 154, 156, 1, 0, 0, 0, 155, 157, 3, 10, 5, 0, 156, 155, 1, 0, 0, 0, 156, 157, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 160, 3, 72, 36, 0, 159, 161, 3, 12, 6, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 163, 1, 0, 0, 0, 162, 164, 3, 14, 7, 0, 163, 162, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 167, 3, 18, 9, 0, 166, 165, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 169, 1, 0, 0, 0, 168, 170, 3, 20, 10, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 172, 1, 0, 0, 0, 171, 173, 3, 22, 11, 0, 172, 171, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 175, 5, 189, 0, 0, 175, 177, 7, 0, 0, 0, 176, 174, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 180, 1, 0, 0, 0, 178, 179, 5, 189, 0, 0, 179, 181, 5, 169, 0, 0, 180, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 183, 1, 0, 0, 0, 182, 184, 3, 24, 12, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 187, 3, 16, 8, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 189, 1, 0, 0, 0, 188, 190, 3, 26, 13, 0, 189, 188, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 193, 1, 0, 0, 0, 191, 194, 3, 30, 15, 0, 192, 194, 3, 32, 16, 0, 193, 191, 1, 0, 0, 0, 193, 192, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 197, 3, 34, 17, 0, 196, 195, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 7, 1, 0, 0, 0, 198, 199, 5, 189, 0, 0, 199, 200, 3, 86, 43, 0, 200, 9, 1, 0, 0, 0, 201, 202, 5, 168, 0, 0, 202, 205, 5, 198, 0, 0, 203, 204, 5, 189, 0, 0, 204, 206, 5, 164, 0, 0, 205, 203, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 11, 1, 0, 0, 0, 207, 208, 5, 68, 0, 0, 208, 209, 3, 36, 18, 0, 209, 13, 1, 0, 0, 0, 210, 212, 7, 1, 0, 0, 211, 210, 1, 0, 0, 0, 211, 212, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 5, 9, 0, 0, 214, 215, 5, 90, 0, 0, 215, 216, 3, 72, 36, 0, 216, 15, 1, 0, 0, 0, 217, 218, 5, 188, 0, 0, 218, 219, 3, 118, 59, 0, 219, 220, 5, 10, 0, 0, 220, 221, 5, 220, 0, 0, 221, 222, 3, 56, 28, 0, 222, 232, 5, 236, 0, 0, 223, 224, 5, 206, 0, 0, 224, 225, 3, 118, 59, 0, 225, 226, 5, 10, 0, 0, 226, 227, 5, 220, 0, 0, 227, 228, 3, 56, 28, 0, 228, 229, 5, 236, 0, 0, 229, 231, 1, 0, 0, 0, 230, 223, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 17, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 235, 236, 5, 129, 0, 0, 236, 237, 3, 74, 37, 0, 237, 19, 1, 0, 0, 0, 238, 239, 5, 187, 0, 0, 239, 240, 3, 74, 37, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 73, 0, 0, 242, 249, 5, 18, 0, 0, 243, 244, 7, 0, 0, 0, 244, 245, 5, 220, 0, 0, 245, 246, 3, 72, 36, 0, 246, 247, 5, 236, 0, 0, 247, 250, 1, 0, 0, 0, 248, 250, 3, 72, 36, 0, 249, 243, 1, 0, 0, 0, 249, 248, 1, 0, 0, 0, 250, 23, 1, 0, 0, 0, 251, 252, 5, 74, 0, 0, 252, 253, 3, 74, 37, 0, 
253, 25, 1, 0, 0, 0, 254, 255, 5, 122, 0, 0, 255, 256, 5, 18, 0, 0, 256, 257, 3, 46, 23, 0, 257, 27, 1, 0, 0, 0, 258, 259, 5, 122, 0, 0, 259, 260, 5, 18, 0, 0, 260, 261, 3, 72, 36, 0, 261, 29, 1, 0, 0, 0, 262, 263, 5, 99, 0, 0, 263, 266, 3, 74, 37, 0, 264, 265, 5, 206, 0, 0, 265, 267, 3, 74, 37, 0, 266, 264, 1, 0, 0, 0, 266, 267, 1, 0, 0, 0, 267, 272, 1, 0, 0, 0, 268, 269, 5, 189, 0, 0, 269, 273, 5, 164, 0, 0, 270, 271, 5, 18, 0, 0, 271, 273, 3, 72, 36, 0, 272, 268, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 292, 1, 0, 0, 0, 274, 275, 5, 99, 0, 0, 275, 278, 3, 74, 37, 0, 276, 277, 5, 189, 0, 0, 277, 279, 5, 164, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 5, 118, 0, 0, 281, 282, 3, 74, 37, 0, 282, 292, 1, 0, 0, 0, 283, 284, 5, 99, 0, 0, 284, 285, 3, 74, 37, 0, 285, 286, 5, 118, 0, 0, 286, 289, 3, 74, 37, 0, 287, 288, 5, 18, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 292, 1, 0, 0, 0, 291, 262, 1, 0, 0, 0, 291, 274, 1, 0, 0, 0, 291, 283, 1, 0, 0, 0, 292, 31, 1, 0, 0, 0, 293, 294, 5, 118, 0, 0, 294, 295, 3, 74, 37, 0, 295, 33, 1, 0, 0, 0, 296, 297, 5, 150, 0, 0, 297, 298, 3, 52, 26, 0, 298, 35, 1, 0, 0, 0, 299, 300, 6, 18, -1, 0, 300, 302, 3, 94, 47, 0, 301, 303, 5, 61, 0, 0, 302, 301, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 305, 1, 0, 0, 0, 304, 306, 3, 44, 22, 0, 305, 304, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 312, 1, 0, 0, 0, 307, 308, 5, 220, 0, 0, 308, 309, 3, 36, 18, 0, 309, 310, 5, 236, 0, 0, 310, 312, 1, 0, 0, 0, 311, 299, 1, 0, 0, 0, 311, 307, 1, 0, 0, 0, 312, 327, 1, 0, 0, 0, 313, 314, 10, 3, 0, 0, 314, 315, 3, 40, 20, 0, 315, 316, 3, 36, 18, 4, 316, 326, 1, 0, 0, 0, 317, 319, 10, 4, 0, 0, 318, 320, 3, 38, 19, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 322, 5, 90, 0, 0, 322, 323, 3, 36, 18, 0, 323, 324, 3, 42, 21, 0, 324, 326, 1, 0, 0, 0, 325, 313, 1, 0, 0, 0, 325, 317, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 37, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 330, 332, 7, 2, 0, 0, 331, 330, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 340, 5, 84, 0, 0, 334, 336, 5, 84, 0, 0, 335, 337, 7, 2, 0, 0, 336, 335, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 337, 340, 1, 0, 0, 0, 338, 340, 7, 2, 0, 0, 339, 331, 1, 0, 0, 0, 339, 334, 1, 0, 0, 0, 339, 338, 1, 0, 0, 0, 340, 374, 1, 0, 0, 0, 341, 343, 7, 3, 0, 0, 342, 341, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 346, 7, 4, 0, 0, 345, 347, 5, 123, 0, 0, 346, 345, 1, 0, 0, 0, 346, 347, 1, 0, 0, 0, 347, 356, 1, 0, 0, 0, 348, 350, 7, 4, 0, 0, 349, 351, 5, 123, 0, 0, 350, 349, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 353, 1, 0, 0, 0, 352, 354, 7, 3, 0, 0, 353, 352, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 356, 1, 0, 0, 0, 355, 342, 1, 0, 0, 0, 355, 348, 1, 0, 0, 0, 356, 374, 1, 0, 0, 0, 357, 359, 7, 5, 0, 0, 358, 357, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 362, 5, 69, 0, 0, 361, 363, 5, 123, 0, 0, 362, 361, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 372, 1, 0, 0, 0, 364, 366, 5, 69, 0, 0, 365, 367, 5, 123, 0, 0, 366, 365, 1, 0, 0, 0, 366, 367, 1, 0, 0, 0, 367, 369, 1, 0, 0, 0, 368, 370, 7, 5, 0, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 372, 1, 0, 0, 0, 371, 358, 1, 0, 0, 0, 371, 364, 1, 0, 0, 0, 372, 374, 1, 0, 0, 0, 373, 339, 1, 0, 0, 0, 373, 355, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 39, 1, 0, 0, 0, 375, 376, 5, 31, 0, 0, 376, 379, 5, 90, 0, 0, 377, 379, 5, 206, 0, 0, 378, 375, 1, 0, 0, 0, 378, 377, 1, 0, 0, 0, 379, 41, 1, 0, 0, 0, 380, 381, 5, 
119, 0, 0, 381, 390, 3, 72, 36, 0, 382, 383, 5, 179, 0, 0, 383, 384, 5, 220, 0, 0, 384, 385, 3, 72, 36, 0, 385, 386, 5, 236, 0, 0, 386, 390, 1, 0, 0, 0, 387, 388, 5, 179, 0, 0, 388, 390, 3, 72, 36, 0, 389, 380, 1, 0, 0, 0, 389, 382, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 43, 1, 0, 0, 0, 391, 392, 5, 144, 0, 0, 392, 395, 3, 50, 25, 0, 393, 394, 5, 118, 0, 0, 394, 396, 3, 50, 25, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 45, 1, 0, 0, 0, 397, 402, 3, 48, 24, 0, 398, 399, 5, 206, 0, 0, 399, 401, 3, 48, 24, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 47, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 407, 3, 74, 37, 0, 406, 408, 7, 6, 0, 0, 407, 406, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 410, 5, 117, 0, 0, 410, 412, 7, 7, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 414, 5, 26, 0, 0, 414, 416, 5, 200, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 49, 1, 0, 0, 0, 417, 424, 3, 122, 61, 0, 418, 421, 3, 106, 53, 0, 419, 420, 5, 238, 0, 0, 420, 422, 3, 106, 53, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 417, 1, 0, 0, 0, 423, 418, 1, 0, 0, 0, 424, 51, 1, 0, 0, 0, 425, 430, 3, 54, 27, 0, 426, 427, 5, 206, 0, 0, 427, 429, 3, 54, 27, 0, 428, 426, 1, 0, 0, 0, 429, 432, 1, 0, 0, 0, 430, 428, 1, 0, 0, 0, 430, 431, 1, 0, 0, 0, 431, 53, 1, 0, 0, 0, 432, 430, 1, 0, 0, 0, 433, 434, 3, 118, 59, 0, 434, 435, 5, 212, 0, 0, 435, 436, 3, 108, 54, 0, 436, 55, 1, 0, 0, 0, 437, 439, 3, 58, 29, 0, 438, 437, 1, 0, 0, 0, 438, 439, 1, 0, 0, 0, 439, 441, 1, 0, 0, 0, 440, 442, 3, 60, 30, 0, 441, 440, 1, 0, 0, 0, 441, 442, 1, 0, 0, 0, 442, 444, 1, 0, 0, 0, 443, 445, 3, 62, 31, 0, 444, 443, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 57, 1, 0, 0, 0, 446, 447, 5, 126, 0, 0, 447, 448, 5, 18, 0, 0, 448, 449, 3, 72, 36, 0, 449, 59, 1, 0, 0, 0, 450, 451, 5, 122, 0, 0, 451, 452, 5, 18, 0, 0, 452, 453, 3, 46, 23, 0, 453, 61, 1, 0, 0, 0, 454, 455, 7, 8, 0, 0, 455, 456, 3, 64, 32, 0, 456, 63, 1, 0, 0, 0, 457, 464, 3, 66, 33, 0, 458, 459, 5, 16, 0, 0, 459, 460, 3, 66, 33, 0, 460, 461, 5, 6, 0, 0, 461, 462, 3, 66, 33, 0, 462, 464, 1, 0, 0, 0, 463, 457, 1, 0, 0, 0, 463, 458, 1, 0, 0, 0, 464, 65, 1, 0, 0, 0, 465, 466, 5, 33, 0, 0, 466, 478, 5, 142, 0, 0, 467, 468, 5, 175, 0, 0, 468, 478, 5, 128, 0, 0, 469, 470, 5, 175, 0, 0, 470, 478, 5, 64, 0, 0, 471, 472, 3, 106, 53, 0, 472, 473, 5, 128, 0, 0, 473, 478, 1, 0, 0, 0, 474, 475, 3, 106, 53, 0, 475, 476, 5, 64, 0, 0, 476, 478, 1, 0, 0, 0, 477, 465, 1, 0, 0, 0, 477, 467, 1, 0, 0, 0, 477, 469, 1, 0, 0, 0, 477, 471, 1, 0, 0, 0, 477, 474, 1, 0, 0, 0, 478, 67, 1, 0, 0, 0, 479, 480, 3, 74, 37, 0, 480, 481, 5, 0, 0, 1, 481, 69, 1, 0, 0, 0, 482, 530, 3, 118, 59, 0, 483, 484, 3, 118, 59, 0, 484, 485, 5, 220, 0, 0, 485, 486, 3, 118, 59, 0, 486, 493, 3, 70, 35, 0, 487, 488, 5, 206, 0, 0, 488, 489, 3, 118, 59, 0, 489, 490, 3, 70, 35, 0, 490, 492, 1, 0, 0, 0, 491, 487, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 497, 5, 236, 0, 0, 497, 530, 1, 0, 0, 0, 498, 499, 3, 118, 59, 0, 499, 500, 5, 220, 0, 0, 500, 505, 3, 120, 60, 0, 501, 502, 5, 206, 0, 0, 502, 504, 3, 120, 60, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 508, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 5, 236, 0, 0, 509, 530, 1, 0, 0, 0, 510, 511, 3, 118, 59, 0, 511, 512, 5, 220, 0, 0, 512, 517, 3, 70, 35, 0, 513, 514, 5, 206, 0, 0, 514, 516, 3, 70, 35, 0, 515, 513, 1, 0, 
0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 520, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 521, 5, 236, 0, 0, 521, 530, 1, 0, 0, 0, 522, 523, 3, 118, 59, 0, 523, 525, 5, 220, 0, 0, 524, 526, 3, 72, 36, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 5, 236, 0, 0, 528, 530, 1, 0, 0, 0, 529, 482, 1, 0, 0, 0, 529, 483, 1, 0, 0, 0, 529, 498, 1, 0, 0, 0, 529, 510, 1, 0, 0, 0, 529, 522, 1, 0, 0, 0, 530, 71, 1, 0, 0, 0, 531, 536, 3, 74, 37, 0, 532, 533, 5, 206, 0, 0, 533, 535, 3, 74, 37, 0, 534, 532, 1, 0, 0, 0, 535, 538, 1, 0, 0, 0, 536, 534, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 73, 1, 0, 0, 0, 538, 536, 1, 0, 0, 0, 539, 540, 6, 37, -1, 0, 540, 542, 5, 19, 0, 0, 541, 543, 3, 74, 37, 0, 542, 541, 1, 0, 0, 0, 542, 543, 1, 0, 0, 0, 543, 549, 1, 0, 0, 0, 544, 545, 5, 186, 0, 0, 545, 546, 3, 74, 37, 0, 546, 547, 5, 163, 0, 0, 547, 548, 3, 74, 37, 0, 548, 550, 1, 0, 0, 0, 549, 544, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 554, 5, 52, 0, 0, 554, 556, 3, 74, 37, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 5, 53, 0, 0, 558, 670, 1, 0, 0, 0, 559, 560, 5, 20, 0, 0, 560, 561, 5, 220, 0, 0, 561, 562, 3, 74, 37, 0, 562, 563, 5, 10, 0, 0, 563, 564, 3, 70, 35, 0, 564, 565, 5, 236, 0, 0, 565, 670, 1, 0, 0, 0, 566, 567, 5, 36, 0, 0, 567, 670, 5, 200, 0, 0, 568, 569, 5, 59, 0, 0, 569, 570, 5, 220, 0, 0, 570, 571, 3, 110, 55, 0, 571, 572, 5, 68, 0, 0, 572, 573, 3, 74, 37, 0, 573, 574, 5, 236, 0, 0, 574, 670, 1, 0, 0, 0, 575, 576, 5, 86, 0, 0, 576, 577, 3, 74, 37, 0, 577, 578, 3, 110, 55, 0, 578, 670, 1, 0, 0, 0, 579, 580, 5, 155, 0, 0, 580, 581, 5, 220, 0, 0, 581, 582, 3, 74, 37, 0, 582, 583, 5, 68, 0, 0, 583, 586, 3, 74, 37, 0, 584, 585, 5, 65, 0, 0, 585, 587, 3, 74, 37, 0, 586, 584, 1, 0, 0, 0, 586, 587, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 5, 236, 0, 0, 589, 670, 1, 0, 0, 0, 590, 591, 5, 166, 0, 0, 591, 670, 5, 200, 0, 0, 592, 593, 5, 171, 0, 0, 593, 594, 5, 220, 0, 0, 594, 595, 7, 9, 0, 0, 595, 596, 5, 200, 0, 0, 596, 597, 5, 68, 0, 0, 597, 598, 3, 74, 37, 0, 598, 599, 5, 236, 0, 0, 599, 670, 1, 0, 0, 0, 600, 601, 3, 118, 59, 0, 601, 603, 5, 220, 0, 0, 602, 604, 3, 72, 36, 0, 603, 602, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 606, 5, 236, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 5, 125, 0, 0, 608, 609, 5, 220, 0, 0, 609, 610, 3, 56, 28, 0, 610, 611, 5, 236, 0, 0, 611, 670, 1, 0, 0, 0, 612, 613, 3, 118, 59, 0, 613, 615, 5, 220, 0, 0, 614, 616, 3, 72, 36, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 5, 236, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 5, 125, 0, 0, 620, 621, 3, 118, 59, 0, 621, 670, 1, 0, 0, 0, 622, 628, 3, 118, 59, 0, 623, 625, 5, 220, 0, 0, 624, 626, 3, 72, 36, 0, 625, 624, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 629, 5, 236, 0, 0, 628, 623, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 632, 5, 220, 0, 0, 631, 633, 5, 49, 0, 0, 632, 631, 1, 0, 0, 0, 632, 633, 1, 0, 0, 0, 633, 635, 1, 0, 0, 0, 634, 636, 3, 76, 38, 0, 635, 634, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 637, 1, 0, 0, 0, 637, 638, 5, 236, 0, 0, 638, 670, 1, 0, 0, 0, 639, 670, 3, 82, 41, 0, 640, 670, 3, 108, 54, 0, 641, 642, 5, 208, 0, 0, 642, 670, 3, 74, 37, 18, 643, 644, 5, 115, 0, 0, 644, 670, 3, 74, 37, 12, 645, 646, 3, 98, 49, 0, 646, 647, 5, 210, 0, 0, 647, 649, 1, 0, 0, 0, 648, 645, 1, 0, 0, 0, 648, 649, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 670, 5, 202, 0, 0, 651, 652, 5, 220, 0, 0, 
652, 653, 3, 2, 1, 0, 653, 654, 5, 236, 0, 0, 654, 670, 1, 0, 0, 0, 655, 656, 5, 220, 0, 0, 656, 657, 3, 74, 37, 0, 657, 658, 5, 236, 0, 0, 658, 670, 1, 0, 0, 0, 659, 660, 5, 220, 0, 0, 660, 661, 3, 72, 36, 0, 661, 662, 5, 236, 0, 0, 662, 670, 1, 0, 0, 0, 663, 665, 5, 219, 0, 0, 664, 666, 3, 72, 36, 0, 665, 664, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 670, 5, 235, 0, 0, 668, 670, 3, 90, 45, 0, 669, 539, 1, 0, 0, 0, 669, 559, 1, 0, 0, 0, 669, 566, 1, 0, 0, 0, 669, 568, 1, 0, 0, 0, 669, 575, 1, 0, 0, 0, 669, 579, 1, 0, 0, 0, 669, 590, 1, 0, 0, 0, 669, 592, 1, 0, 0, 0, 669, 600, 1, 0, 0, 0, 669, 612, 1, 0, 0, 0, 669, 622, 1, 0, 0, 0, 669, 639, 1, 0, 0, 0, 669, 640, 1, 0, 0, 0, 669, 641, 1, 0, 0, 0, 669, 643, 1, 0, 0, 0, 669, 648, 1, 0, 0, 0, 669, 651, 1, 0, 0, 0, 669, 655, 1, 0, 0, 0, 669, 659, 1, 0, 0, 0, 669, 663, 1, 0, 0, 0, 669, 668, 1, 0, 0, 0, 670, 764, 1, 0, 0, 0, 671, 675, 10, 17, 0, 0, 672, 676, 5, 202, 0, 0, 673, 676, 5, 238, 0, 0, 674, 676, 5, 227, 0, 0, 675, 672, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 674, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 763, 3, 74, 37, 18, 678, 682, 10, 16, 0, 0, 679, 683, 5, 228, 0, 0, 680, 683, 5, 208, 0, 0, 681, 683, 5, 207, 0, 0, 682, 679, 1, 0, 0, 0, 682, 680, 1, 0, 0, 0, 682, 681, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 763, 3, 74, 37, 17, 685, 710, 10, 15, 0, 0, 686, 711, 5, 211, 0, 0, 687, 711, 5, 212, 0, 0, 688, 711, 5, 223, 0, 0, 689, 711, 5, 221, 0, 0, 690, 711, 5, 222, 0, 0, 691, 711, 5, 213, 0, 0, 692, 711, 5, 214, 0, 0, 693, 695, 5, 115, 0, 0, 694, 693, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 698, 5, 80, 0, 0, 697, 699, 5, 25, 0, 0, 698, 697, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 711, 1, 0, 0, 0, 700, 702, 5, 115, 0, 0, 701, 700, 1, 0, 0, 0, 701, 702, 1, 0, 0, 0, 702, 703, 1, 0, 0, 0, 703, 711, 7, 10, 0, 0, 704, 711, 5, 232, 0, 0, 705, 711, 5, 233, 0, 0, 706, 711, 5, 225, 0, 0, 707, 711, 5, 216, 0, 0, 708, 711, 5, 217, 0, 0, 709, 711, 5, 224, 0, 0, 710, 686, 1, 0, 0, 0, 710, 687, 1, 0, 0, 0, 710, 688, 1, 0, 0, 0, 710, 689, 1, 0, 0, 0, 710, 690, 1, 0, 0, 0, 710, 691, 1, 0, 0, 0, 710, 692, 1, 0, 0, 0, 710, 694, 1, 0, 0, 0, 710, 701, 1, 0, 0, 0, 710, 704, 1, 0, 0, 0, 710, 705, 1, 0, 0, 0, 710, 706, 1, 0, 0, 0, 710, 707, 1, 0, 0, 0, 710, 708, 1, 0, 0, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 763, 3, 74, 37, 16, 713, 714, 10, 13, 0, 0, 714, 715, 5, 226, 0, 0, 715, 763, 3, 74, 37, 14, 716, 717, 10, 11, 0, 0, 717, 718, 5, 6, 0, 0, 718, 763, 3, 74, 37, 12, 719, 720, 10, 10, 0, 0, 720, 721, 5, 121, 0, 0, 721, 763, 3, 74, 37, 11, 722, 724, 10, 9, 0, 0, 723, 725, 5, 115, 0, 0, 724, 723, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 727, 5, 16, 0, 0, 727, 728, 3, 74, 37, 0, 728, 729, 5, 6, 0, 0, 729, 730, 3, 74, 37, 10, 730, 763, 1, 0, 0, 0, 731, 732, 10, 8, 0, 0, 732, 733, 5, 229, 0, 0, 733, 734, 3, 74, 37, 0, 734, 735, 5, 205, 0, 0, 735, 736, 3, 74, 37, 8, 736, 763, 1, 0, 0, 0, 737, 738, 10, 21, 0, 0, 738, 739, 5, 219, 0, 0, 739, 740, 3, 74, 37, 0, 740, 741, 5, 235, 0, 0, 741, 763, 1, 0, 0, 0, 742, 743, 10, 20, 0, 0, 743, 744, 5, 210, 0, 0, 744, 763, 5, 198, 0, 0, 745, 746, 10, 19, 0, 0, 746, 747, 5, 210, 0, 0, 747, 763, 3, 118, 59, 0, 748, 749, 10, 14, 0, 0, 749, 751, 5, 88, 0, 0, 750, 752, 5, 115, 0, 0, 751, 750, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 753, 763, 5, 116, 0, 0, 754, 760, 10, 7, 0, 0, 755, 761, 3, 116, 58, 0, 756, 757, 5, 10, 0, 0, 757, 761, 3, 118, 59, 0, 758, 759, 5, 10, 0, 0, 759, 761, 5, 200, 0, 0, 760, 755, 1, 0, 0, 0, 760, 756, 1, 0, 0, 0, 760, 
758, 1, 0, 0, 0, 761, 763, 1, 0, 0, 0, 762, 671, 1, 0, 0, 0, 762, 678, 1, 0, 0, 0, 762, 685, 1, 0, 0, 0, 762, 713, 1, 0, 0, 0, 762, 716, 1, 0, 0, 0, 762, 719, 1, 0, 0, 0, 762, 722, 1, 0, 0, 0, 762, 731, 1, 0, 0, 0, 762, 737, 1, 0, 0, 0, 762, 742, 1, 0, 0, 0, 762, 745, 1, 0, 0, 0, 762, 748, 1, 0, 0, 0, 762, 754, 1, 0, 0, 0, 763, 766, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 75, 1, 0, 0, 0, 766, 764, 1, 0, 0, 0, 767, 772, 3, 78, 39, 0, 768, 769, 5, 206, 0, 0, 769, 771, 3, 78, 39, 0, 770, 768, 1, 0, 0, 0, 771, 774, 1, 0, 0, 0, 772, 770, 1, 0, 0, 0, 772, 773, 1, 0, 0, 0, 773, 77, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 775, 778, 3, 80, 40, 0, 776, 778, 3, 74, 37, 0, 777, 775, 1, 0, 0, 0, 777, 776, 1, 0, 0, 0, 778, 79, 1, 0, 0, 0, 779, 780, 5, 220, 0, 0, 780, 785, 3, 118, 59, 0, 781, 782, 5, 206, 0, 0, 782, 784, 3, 118, 59, 0, 783, 781, 1, 0, 0, 0, 784, 787, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 788, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 788, 789, 5, 236, 0, 0, 789, 799, 1, 0, 0, 0, 790, 795, 3, 118, 59, 0, 791, 792, 5, 206, 0, 0, 792, 794, 3, 118, 59, 0, 793, 791, 1, 0, 0, 0, 794, 797, 1, 0, 0, 0, 795, 793, 1, 0, 0, 0, 795, 796, 1, 0, 0, 0, 796, 799, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 798, 779, 1, 0, 0, 0, 798, 790, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 5, 201, 0, 0, 801, 802, 3, 74, 37, 0, 802, 81, 1, 0, 0, 0, 803, 804, 5, 222, 0, 0, 804, 808, 3, 118, 59, 0, 805, 807, 3, 84, 42, 0, 806, 805, 1, 0, 0, 0, 807, 810, 1, 0, 0, 0, 808, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 811, 1, 0, 0, 0, 810, 808, 1, 0, 0, 0, 811, 812, 5, 238, 0, 0, 812, 813, 5, 214, 0, 0, 813, 832, 1, 0, 0, 0, 814, 815, 5, 222, 0, 0, 815, 819, 3, 118, 59, 0, 816, 818, 3, 84, 42, 0, 817, 816, 1, 0, 0, 0, 818, 821, 1, 0, 0, 0, 819, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 822, 1, 0, 0, 0, 821, 819, 1, 0, 0, 0, 822, 824, 5, 214, 0, 0, 823, 825, 3, 82, 41, 0, 824, 823, 1, 0, 0, 0, 824, 825, 1, 0, 0, 0, 825, 826, 1, 0, 0, 0, 826, 827, 5, 222, 0, 0, 827, 828, 5, 238, 0, 0, 828, 829, 3, 118, 59, 0, 829, 830, 5, 214, 0, 0, 830, 832, 1, 0, 0, 0, 831, 803, 1, 0, 0, 0, 831, 814, 1, 0, 0, 0, 832, 83, 1, 0, 0, 0, 833, 834, 3, 118, 59, 0, 834, 835, 5, 212, 0, 0, 835, 836, 5, 200, 0, 0, 836, 845, 1, 0, 0, 0, 837, 838, 3, 118, 59, 0, 838, 839, 5, 212, 0, 0, 839, 840, 5, 218, 0, 0, 840, 841, 3, 74, 37, 0, 841, 842, 5, 234, 0, 0, 842, 845, 1, 0, 0, 0, 843, 845, 3, 118, 59, 0, 844, 833, 1, 0, 0, 0, 844, 837, 1, 0, 0, 0, 844, 843, 1, 0, 0, 0, 845, 85, 1, 0, 0, 0, 846, 851, 3, 88, 44, 0, 847, 848, 5, 206, 0, 0, 848, 850, 3, 88, 44, 0, 849, 847, 1, 0, 0, 0, 850, 853, 1, 0, 0, 0, 851, 849, 1, 0, 0, 0, 851, 852, 1, 0, 0, 0, 852, 87, 1, 0, 0, 0, 853, 851, 1, 0, 0, 0, 854, 855, 3, 118, 59, 0, 855, 856, 5, 10, 0, 0, 856, 857, 5, 220, 0, 0, 857, 858, 3, 2, 1, 0, 858, 859, 5, 236, 0, 0, 859, 865, 1, 0, 0, 0, 860, 861, 3, 74, 37, 0, 861, 862, 5, 10, 0, 0, 862, 863, 3, 118, 59, 0, 863, 865, 1, 0, 0, 0, 864, 854, 1, 0, 0, 0, 864, 860, 1, 0, 0, 0, 865, 89, 1, 0, 0, 0, 866, 874, 3, 122, 61, 0, 867, 868, 3, 98, 49, 0, 868, 869, 5, 210, 0, 0, 869, 871, 1, 0, 0, 0, 870, 867, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 872, 1, 0, 0, 0, 872, 874, 3, 92, 46, 0, 873, 866, 1, 0, 0, 0, 873, 870, 1, 0, 0, 0, 874, 91, 1, 0, 0, 0, 875, 880, 3, 118, 59, 0, 876, 877, 5, 210, 0, 0, 877, 879, 3, 118, 59, 0, 878, 876, 1, 0, 0, 0, 879, 882, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 93, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 883, 884, 6, 47, -1, 0, 884, 893, 3, 98, 49, 0, 885, 893, 3, 96, 48, 0, 886, 887, 5, 220, 0, 0, 887, 
888, 3, 2, 1, 0, 888, 889, 5, 236, 0, 0, 889, 893, 1, 0, 0, 0, 890, 893, 3, 82, 41, 0, 891, 893, 3, 122, 61, 0, 892, 883, 1, 0, 0, 0, 892, 885, 1, 0, 0, 0, 892, 886, 1, 0, 0, 0, 892, 890, 1, 0, 0, 0, 892, 891, 1, 0, 0, 0, 893, 902, 1, 0, 0, 0, 894, 898, 10, 3, 0, 0, 895, 899, 3, 116, 58, 0, 896, 897, 5, 10, 0, 0, 897, 899, 3, 118, 59, 0, 898, 895, 1, 0, 0, 0, 898, 896, 1, 0, 0, 0, 899, 901, 1, 0, 0, 0, 900, 894, 1, 0, 0, 0, 901, 904, 1, 0, 0, 0, 902, 900, 1, 0, 0, 0, 902, 903, 1, 0, 0, 0, 903, 95, 1, 0, 0, 0, 904, 902, 1, 0, 0, 0, 905, 906, 3, 118, 59, 0, 906, 908, 5, 220, 0, 0, 907, 909, 3, 100, 50, 0, 908, 907, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 911, 5, 236, 0, 0, 911, 97, 1, 0, 0, 0, 912, 913, 3, 102, 51, 0, 913, 914, 5, 210, 0, 0, 914, 916, 1, 0, 0, 0, 915, 912, 1, 0, 0, 0, 915, 916, 1, 0, 0, 0, 916, 917, 1, 0, 0, 0, 917, 918, 3, 118, 59, 0, 918, 99, 1, 0, 0, 0, 919, 924, 3, 74, 37, 0, 920, 921, 5, 206, 0, 0, 921, 923, 3, 74, 37, 0, 922, 920, 1, 0, 0, 0, 923, 926, 1, 0, 0, 0, 924, 922, 1, 0, 0, 0, 924, 925, 1, 0, 0, 0, 925, 101, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 927, 928, 3, 118, 59, 0, 928, 103, 1, 0, 0, 0, 929, 938, 5, 196, 0, 0, 930, 931, 5, 210, 0, 0, 931, 938, 7, 11, 0, 0, 932, 933, 5, 198, 0, 0, 933, 935, 5, 210, 0, 0, 934, 936, 7, 11, 0, 0, 935, 934, 1, 0, 0, 0, 935, 936, 1, 0, 0, 0, 936, 938, 1, 0, 0, 0, 937, 929, 1, 0, 0, 0, 937, 930, 1, 0, 0, 0, 937, 932, 1, 0, 0, 0, 938, 105, 1, 0, 0, 0, 939, 941, 7, 12, 0, 0, 940, 939, 1, 0, 0, 0, 940, 941, 1, 0, 0, 0, 941, 948, 1, 0, 0, 0, 942, 949, 3, 104, 52, 0, 943, 949, 5, 197, 0, 0, 944, 949, 5, 198, 0, 0, 945, 949, 5, 199, 0, 0, 946, 949, 5, 82, 0, 0, 947, 949, 5, 113, 0, 0, 948, 942, 1, 0, 0, 0, 948, 943, 1, 0, 0, 0, 948, 944, 1, 0, 0, 0, 948, 945, 1, 0, 0, 0, 948, 946, 1, 0, 0, 0, 948, 947, 1, 0, 0, 0, 949, 107, 1, 0, 0, 0, 950, 954, 3, 106, 53, 0, 951, 954, 5, 200, 0, 0, 952, 954, 5, 116, 0, 0, 953, 950, 1, 0, 0, 0, 953, 951, 1, 0, 0, 0, 953, 952, 1, 0, 0, 0, 954, 109, 1, 0, 0, 0, 955, 956, 7, 13, 0, 0, 956, 111, 1, 0, 0, 0, 957, 958, 7, 14, 0, 0, 958, 113, 1, 0, 0, 0, 959, 960, 7, 15, 0, 0, 960, 115, 1, 0, 0, 0, 961, 964, 5, 195, 0, 0, 962, 964, 3, 114, 57, 0, 963, 961, 1, 0, 0, 0, 963, 962, 1, 0, 0, 0, 964, 117, 1, 0, 0, 0, 965, 969, 5, 195, 0, 0, 966, 969, 3, 110, 55, 0, 967, 969, 3, 112, 56, 0, 968, 965, 1, 0, 0, 0, 968, 966, 1, 0, 0, 0, 968, 967, 1, 0, 0, 0, 969, 119, 1, 0, 0, 0, 970, 971, 5, 200, 0, 0, 971, 972, 5, 212, 0, 0, 972, 973, 3, 106, 53, 0, 973, 121, 1, 0, 0, 0, 974, 975, 5, 218, 0, 0, 975, 976, 3, 118, 59, 0, 976, 977, 5, 234, 0, 0, 977, 123, 1, 0, 0, 0, 120, 127, 137, 146, 149, 153, 156, 160, 163, 166, 169, 172, 176, 180, 183, 186, 189, 193, 196, 205, 211, 232, 249, 266, 272, 278, 289, 291, 302, 305, 311, 319, 325, 327, 331, 336, 339, 342, 346, 350, 353, 355, 358, 362, 366, 369, 371, 373, 378, 389, 395, 402, 407, 411, 415, 421, 423, 430, 438, 441, 444, 463, 477, 493, 505, 517, 525, 529, 536, 542, 551, 555, 586, 603, 615, 625, 628, 632, 635, 648, 665, 669, 675, 682, 694, 698, 701, 710, 724, 751, 760, 762, 764, 772, 777, 785, 795, 798, 808, 819, 824, 831, 844, 851, 864, 870, 873, 880, 892, 898, 902, 908, 915, 924, 935, 937, 940, 948, 953, 963, 968] \ No newline at end of file +[4, 1, 242, 972, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 
2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 3, 0, 128, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 136, 8, 1, 10, 1, 12, 1, 139, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 147, 8, 2, 1, 3, 3, 3, 150, 8, 3, 1, 3, 1, 3, 3, 3, 154, 8, 3, 1, 3, 3, 3, 157, 8, 3, 1, 3, 1, 3, 3, 3, 161, 8, 3, 1, 3, 3, 3, 164, 8, 3, 1, 3, 3, 3, 167, 8, 3, 1, 3, 3, 3, 170, 8, 3, 1, 3, 3, 3, 173, 8, 3, 1, 3, 1, 3, 3, 3, 177, 8, 3, 1, 3, 1, 3, 3, 3, 181, 8, 3, 1, 3, 3, 3, 184, 8, 3, 1, 3, 3, 3, 187, 8, 3, 1, 3, 3, 3, 190, 8, 3, 1, 3, 1, 3, 3, 3, 194, 8, 3, 1, 3, 3, 3, 197, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 206, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 3, 7, 212, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 231, 8, 8, 10, 8, 12, 8, 234, 9, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 250, 8, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 267, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 273, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 279, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 290, 8, 15, 3, 15, 292, 8, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 3, 18, 303, 8, 18, 1, 18, 3, 18, 306, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 312, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 320, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 326, 8, 18, 10, 18, 12, 18, 329, 9, 18, 1, 19, 3, 19, 332, 8, 19, 1, 19, 1, 19, 1, 19, 3, 19, 337, 8, 19, 1, 19, 3, 19, 340, 8, 19, 1, 19, 3, 19, 343, 8, 19, 1, 19, 1, 19, 3, 19, 347, 8, 19, 1, 19, 1, 19, 3, 19, 351, 8, 19, 1, 19, 3, 19, 354, 8, 19, 3, 19, 356, 8, 19, 1, 19, 3, 19, 359, 8, 19, 1, 19, 1, 19, 3, 19, 363, 8, 19, 1, 19, 1, 19, 3, 19, 367, 8, 19, 1, 19, 3, 19, 370, 8, 19, 3, 19, 372, 8, 19, 3, 19, 374, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 379, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 390, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 3, 22, 396, 8, 22, 1, 23, 1, 23, 1, 23, 5, 23, 401, 8, 23, 10, 23, 12, 23, 404, 9, 23, 1, 24, 1, 24, 3, 24, 408, 8, 24, 1, 24, 1, 24, 3, 24, 412, 8, 24, 1, 24, 1, 24, 3, 24, 416, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 422, 8, 25, 3, 25, 424, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 429, 8, 26, 10, 26, 12, 26, 432, 9, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 439, 8, 28, 1, 28, 3, 28, 442, 8, 28, 1, 28, 3, 28, 445, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 464, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 478, 8, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 492, 8, 35, 10, 35, 12, 35, 495, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 504, 8, 35, 10, 35, 12, 35, 507, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 516, 8, 35, 
10, 35, 12, 35, 519, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 526, 8, 35, 1, 35, 1, 35, 3, 35, 530, 8, 35, 1, 36, 1, 36, 1, 36, 5, 36, 535, 8, 36, 10, 36, 12, 36, 538, 9, 36, 1, 37, 1, 37, 1, 37, 3, 37, 543, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 550, 8, 37, 11, 37, 12, 37, 551, 1, 37, 1, 37, 3, 37, 556, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 580, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 597, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 609, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 619, 8, 37, 1, 37, 3, 37, 622, 8, 37, 1, 37, 1, 37, 3, 37, 626, 8, 37, 1, 37, 3, 37, 629, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 642, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 659, 8, 37, 1, 37, 1, 37, 3, 37, 663, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 669, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 676, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 688, 8, 37, 1, 37, 1, 37, 3, 37, 692, 8, 37, 1, 37, 3, 37, 695, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 704, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 718, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 745, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 754, 8, 37, 5, 37, 756, 8, 37, 10, 37, 12, 37, 759, 9, 37, 1, 38, 1, 38, 1, 38, 5, 38, 764, 8, 38, 10, 38, 12, 38, 767, 9, 38, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 777, 8, 40, 10, 40, 12, 40, 780, 9, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 787, 8, 40, 10, 40, 12, 40, 790, 9, 40, 3, 40, 792, 8, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 5, 41, 800, 8, 41, 10, 41, 12, 41, 803, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 5, 41, 811, 8, 41, 10, 41, 12, 41, 814, 9, 41, 1, 41, 1, 41, 3, 41, 818, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 838, 8, 42, 1, 43, 1, 43, 1, 43, 5, 43, 843, 8, 43, 10, 43, 12, 43, 846, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 858, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 864, 8, 45, 1, 45, 3, 45, 867, 8, 45, 1, 46, 1, 46, 1, 46, 5, 46, 872, 8, 46, 10, 46, 12, 46, 875, 9, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 886, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 892, 8, 47, 5, 47, 894, 8, 47, 10, 47, 12, 47, 897, 9, 47, 1, 48, 1, 48, 1, 48, 3, 48, 902, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 3, 49, 909, 8, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 5, 50, 916, 8, 50, 10, 50, 12, 50, 919, 9, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 929, 8, 52, 3, 52, 931, 8, 52, 1, 53, 3, 53, 934, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 942, 8, 53, 1, 54, 1, 54, 1, 54, 3, 54, 947, 8, 54, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 3, 58, 957, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 962, 8, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 0, 3, 
36, 74, 94, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 16, 2, 0, 32, 32, 141, 141, 2, 0, 84, 84, 96, 96, 3, 0, 4, 4, 8, 8, 12, 12, 4, 0, 4, 4, 7, 8, 12, 12, 147, 147, 2, 0, 96, 96, 140, 140, 2, 0, 4, 4, 8, 8, 2, 0, 11, 11, 42, 43, 2, 0, 62, 62, 93, 93, 2, 0, 133, 133, 143, 143, 3, 0, 17, 17, 95, 95, 170, 170, 2, 0, 79, 79, 98, 98, 1, 0, 197, 198, 2, 0, 208, 208, 228, 228, 8, 0, 37, 37, 76, 76, 108, 108, 110, 110, 132, 132, 145, 145, 185, 185, 190, 190, 13, 0, 2, 24, 26, 36, 38, 75, 77, 81, 83, 107, 109, 109, 111, 112, 114, 115, 117, 130, 133, 144, 146, 184, 186, 189, 191, 192, 4, 0, 36, 36, 62, 62, 77, 77, 91, 91, 1099, 0, 127, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 4, 146, 1, 0, 0, 0, 6, 149, 1, 0, 0, 0, 8, 198, 1, 0, 0, 0, 10, 201, 1, 0, 0, 0, 12, 207, 1, 0, 0, 0, 14, 211, 1, 0, 0, 0, 16, 217, 1, 0, 0, 0, 18, 235, 1, 0, 0, 0, 20, 238, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 251, 1, 0, 0, 0, 26, 254, 1, 0, 0, 0, 28, 258, 1, 0, 0, 0, 30, 291, 1, 0, 0, 0, 32, 293, 1, 0, 0, 0, 34, 296, 1, 0, 0, 0, 36, 311, 1, 0, 0, 0, 38, 373, 1, 0, 0, 0, 40, 378, 1, 0, 0, 0, 42, 389, 1, 0, 0, 0, 44, 391, 1, 0, 0, 0, 46, 397, 1, 0, 0, 0, 48, 405, 1, 0, 0, 0, 50, 423, 1, 0, 0, 0, 52, 425, 1, 0, 0, 0, 54, 433, 1, 0, 0, 0, 56, 438, 1, 0, 0, 0, 58, 446, 1, 0, 0, 0, 60, 450, 1, 0, 0, 0, 62, 454, 1, 0, 0, 0, 64, 463, 1, 0, 0, 0, 66, 477, 1, 0, 0, 0, 68, 479, 1, 0, 0, 0, 70, 529, 1, 0, 0, 0, 72, 531, 1, 0, 0, 0, 74, 662, 1, 0, 0, 0, 76, 760, 1, 0, 0, 0, 78, 770, 1, 0, 0, 0, 80, 791, 1, 0, 0, 0, 82, 824, 1, 0, 0, 0, 84, 837, 1, 0, 0, 0, 86, 839, 1, 0, 0, 0, 88, 857, 1, 0, 0, 0, 90, 866, 1, 0, 0, 0, 92, 868, 1, 0, 0, 0, 94, 885, 1, 0, 0, 0, 96, 898, 1, 0, 0, 0, 98, 908, 1, 0, 0, 0, 100, 912, 1, 0, 0, 0, 102, 920, 1, 0, 0, 0, 104, 930, 1, 0, 0, 0, 106, 933, 1, 0, 0, 0, 108, 946, 1, 0, 0, 0, 110, 948, 1, 0, 0, 0, 112, 950, 1, 0, 0, 0, 114, 952, 1, 0, 0, 0, 116, 956, 1, 0, 0, 0, 118, 961, 1, 0, 0, 0, 120, 963, 1, 0, 0, 0, 122, 967, 1, 0, 0, 0, 124, 128, 3, 2, 1, 0, 125, 128, 3, 6, 3, 0, 126, 128, 3, 82, 41, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 129, 1, 0, 0, 0, 129, 130, 5, 0, 0, 1, 130, 1, 1, 0, 0, 0, 131, 137, 3, 4, 2, 0, 132, 133, 5, 176, 0, 0, 133, 134, 5, 4, 0, 0, 134, 136, 3, 4, 2, 0, 135, 132, 1, 0, 0, 0, 136, 139, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 3, 1, 0, 0, 0, 139, 137, 1, 0, 0, 0, 140, 147, 3, 6, 3, 0, 141, 142, 5, 220, 0, 0, 142, 143, 3, 2, 1, 0, 143, 144, 5, 236, 0, 0, 144, 147, 1, 0, 0, 0, 145, 147, 3, 122, 61, 0, 146, 140, 1, 0, 0, 0, 146, 141, 1, 0, 0, 0, 146, 145, 1, 0, 0, 0, 147, 5, 1, 0, 0, 0, 148, 150, 3, 8, 4, 0, 149, 148, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 151, 1, 0, 0, 0, 151, 153, 5, 146, 0, 0, 152, 154, 5, 49, 0, 0, 153, 152, 1, 0, 0, 0, 153, 154, 1, 0, 0, 0, 154, 156, 1, 0, 0, 0, 155, 157, 3, 10, 5, 0, 156, 155, 1, 0, 0, 0, 156, 157, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 160, 3, 72, 36, 0, 159, 161, 3, 12, 6, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 163, 1, 0, 0, 0, 162, 164, 3, 14, 7, 0, 163, 162, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 167, 3, 18, 9, 0, 166, 165, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 169, 1, 0, 0, 0, 168, 170, 3, 20, 10, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 172, 1, 0, 0, 0, 171, 173, 3, 22, 11, 0, 172, 171, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 176, 1, 0, 
0, 0, 174, 175, 5, 189, 0, 0, 175, 177, 7, 0, 0, 0, 176, 174, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 180, 1, 0, 0, 0, 178, 179, 5, 189, 0, 0, 179, 181, 5, 169, 0, 0, 180, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 183, 1, 0, 0, 0, 182, 184, 3, 24, 12, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 187, 3, 16, 8, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 189, 1, 0, 0, 0, 188, 190, 3, 26, 13, 0, 189, 188, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 193, 1, 0, 0, 0, 191, 194, 3, 30, 15, 0, 192, 194, 3, 32, 16, 0, 193, 191, 1, 0, 0, 0, 193, 192, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 197, 3, 34, 17, 0, 196, 195, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 7, 1, 0, 0, 0, 198, 199, 5, 189, 0, 0, 199, 200, 3, 86, 43, 0, 200, 9, 1, 0, 0, 0, 201, 202, 5, 168, 0, 0, 202, 205, 5, 198, 0, 0, 203, 204, 5, 189, 0, 0, 204, 206, 5, 164, 0, 0, 205, 203, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 11, 1, 0, 0, 0, 207, 208, 5, 68, 0, 0, 208, 209, 3, 36, 18, 0, 209, 13, 1, 0, 0, 0, 210, 212, 7, 1, 0, 0, 211, 210, 1, 0, 0, 0, 211, 212, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 5, 9, 0, 0, 214, 215, 5, 90, 0, 0, 215, 216, 3, 72, 36, 0, 216, 15, 1, 0, 0, 0, 217, 218, 5, 188, 0, 0, 218, 219, 3, 118, 59, 0, 219, 220, 5, 10, 0, 0, 220, 221, 5, 220, 0, 0, 221, 222, 3, 56, 28, 0, 222, 232, 5, 236, 0, 0, 223, 224, 5, 206, 0, 0, 224, 225, 3, 118, 59, 0, 225, 226, 5, 10, 0, 0, 226, 227, 5, 220, 0, 0, 227, 228, 3, 56, 28, 0, 228, 229, 5, 236, 0, 0, 229, 231, 1, 0, 0, 0, 230, 223, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 17, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 235, 236, 5, 129, 0, 0, 236, 237, 3, 74, 37, 0, 237, 19, 1, 0, 0, 0, 238, 239, 5, 187, 0, 0, 239, 240, 3, 74, 37, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 73, 0, 0, 242, 249, 5, 18, 0, 0, 243, 244, 7, 0, 0, 0, 244, 245, 5, 220, 0, 0, 245, 246, 3, 72, 36, 0, 246, 247, 5, 236, 0, 0, 247, 250, 1, 0, 0, 0, 248, 250, 3, 72, 36, 0, 249, 243, 1, 0, 0, 0, 249, 248, 1, 0, 0, 0, 250, 23, 1, 0, 0, 0, 251, 252, 5, 74, 0, 0, 252, 253, 3, 74, 37, 0, 253, 25, 1, 0, 0, 0, 254, 255, 5, 122, 0, 0, 255, 256, 5, 18, 0, 0, 256, 257, 3, 46, 23, 0, 257, 27, 1, 0, 0, 0, 258, 259, 5, 122, 0, 0, 259, 260, 5, 18, 0, 0, 260, 261, 3, 72, 36, 0, 261, 29, 1, 0, 0, 0, 262, 263, 5, 99, 0, 0, 263, 266, 3, 74, 37, 0, 264, 265, 5, 206, 0, 0, 265, 267, 3, 74, 37, 0, 266, 264, 1, 0, 0, 0, 266, 267, 1, 0, 0, 0, 267, 272, 1, 0, 0, 0, 268, 269, 5, 189, 0, 0, 269, 273, 5, 164, 0, 0, 270, 271, 5, 18, 0, 0, 271, 273, 3, 72, 36, 0, 272, 268, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 292, 1, 0, 0, 0, 274, 275, 5, 99, 0, 0, 275, 278, 3, 74, 37, 0, 276, 277, 5, 189, 0, 0, 277, 279, 5, 164, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 5, 118, 0, 0, 281, 282, 3, 74, 37, 0, 282, 292, 1, 0, 0, 0, 283, 284, 5, 99, 0, 0, 284, 285, 3, 74, 37, 0, 285, 286, 5, 118, 0, 0, 286, 289, 3, 74, 37, 0, 287, 288, 5, 18, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 292, 1, 0, 0, 0, 291, 262, 1, 0, 0, 0, 291, 274, 1, 0, 0, 0, 291, 283, 1, 0, 0, 0, 292, 31, 1, 0, 0, 0, 293, 294, 5, 118, 0, 0, 294, 295, 3, 74, 37, 0, 295, 33, 1, 0, 0, 0, 296, 297, 5, 150, 0, 0, 297, 298, 3, 52, 26, 0, 298, 35, 1, 0, 0, 0, 299, 300, 6, 18, -1, 0, 300, 302, 3, 94, 47, 0, 301, 303, 5, 61, 0, 0, 302, 301, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 305, 1, 0, 0, 0, 304, 306, 3, 44, 22, 0, 305, 304, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 312, 1, 0, 0, 0, 307, 308, 5, 220, 0, 0, 308, 309, 3, 
36, 18, 0, 309, 310, 5, 236, 0, 0, 310, 312, 1, 0, 0, 0, 311, 299, 1, 0, 0, 0, 311, 307, 1, 0, 0, 0, 312, 327, 1, 0, 0, 0, 313, 314, 10, 3, 0, 0, 314, 315, 3, 40, 20, 0, 315, 316, 3, 36, 18, 4, 316, 326, 1, 0, 0, 0, 317, 319, 10, 4, 0, 0, 318, 320, 3, 38, 19, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 322, 5, 90, 0, 0, 322, 323, 3, 36, 18, 0, 323, 324, 3, 42, 21, 0, 324, 326, 1, 0, 0, 0, 325, 313, 1, 0, 0, 0, 325, 317, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 37, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 330, 332, 7, 2, 0, 0, 331, 330, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 340, 5, 84, 0, 0, 334, 336, 5, 84, 0, 0, 335, 337, 7, 2, 0, 0, 336, 335, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 337, 340, 1, 0, 0, 0, 338, 340, 7, 2, 0, 0, 339, 331, 1, 0, 0, 0, 339, 334, 1, 0, 0, 0, 339, 338, 1, 0, 0, 0, 340, 374, 1, 0, 0, 0, 341, 343, 7, 3, 0, 0, 342, 341, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 346, 7, 4, 0, 0, 345, 347, 5, 123, 0, 0, 346, 345, 1, 0, 0, 0, 346, 347, 1, 0, 0, 0, 347, 356, 1, 0, 0, 0, 348, 350, 7, 4, 0, 0, 349, 351, 5, 123, 0, 0, 350, 349, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 353, 1, 0, 0, 0, 352, 354, 7, 3, 0, 0, 353, 352, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 356, 1, 0, 0, 0, 355, 342, 1, 0, 0, 0, 355, 348, 1, 0, 0, 0, 356, 374, 1, 0, 0, 0, 357, 359, 7, 5, 0, 0, 358, 357, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 362, 5, 69, 0, 0, 361, 363, 5, 123, 0, 0, 362, 361, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 372, 1, 0, 0, 0, 364, 366, 5, 69, 0, 0, 365, 367, 5, 123, 0, 0, 366, 365, 1, 0, 0, 0, 366, 367, 1, 0, 0, 0, 367, 369, 1, 0, 0, 0, 368, 370, 7, 5, 0, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 372, 1, 0, 0, 0, 371, 358, 1, 0, 0, 0, 371, 364, 1, 0, 0, 0, 372, 374, 1, 0, 0, 0, 373, 339, 1, 0, 0, 0, 373, 355, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 39, 1, 0, 0, 0, 375, 376, 5, 31, 0, 0, 376, 379, 5, 90, 0, 0, 377, 379, 5, 206, 0, 0, 378, 375, 1, 0, 0, 0, 378, 377, 1, 0, 0, 0, 379, 41, 1, 0, 0, 0, 380, 381, 5, 119, 0, 0, 381, 390, 3, 72, 36, 0, 382, 383, 5, 179, 0, 0, 383, 384, 5, 220, 0, 0, 384, 385, 3, 72, 36, 0, 385, 386, 5, 236, 0, 0, 386, 390, 1, 0, 0, 0, 387, 388, 5, 179, 0, 0, 388, 390, 3, 72, 36, 0, 389, 380, 1, 0, 0, 0, 389, 382, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 43, 1, 0, 0, 0, 391, 392, 5, 144, 0, 0, 392, 395, 3, 50, 25, 0, 393, 394, 5, 118, 0, 0, 394, 396, 3, 50, 25, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 45, 1, 0, 0, 0, 397, 402, 3, 48, 24, 0, 398, 399, 5, 206, 0, 0, 399, 401, 3, 48, 24, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 47, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 407, 3, 74, 37, 0, 406, 408, 7, 6, 0, 0, 407, 406, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 410, 5, 117, 0, 0, 410, 412, 7, 7, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 414, 5, 26, 0, 0, 414, 416, 5, 200, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 49, 1, 0, 0, 0, 417, 424, 3, 122, 61, 0, 418, 421, 3, 106, 53, 0, 419, 420, 5, 238, 0, 0, 420, 422, 3, 106, 53, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 417, 1, 0, 0, 0, 423, 418, 1, 0, 0, 0, 424, 51, 1, 0, 0, 0, 425, 430, 3, 54, 27, 0, 426, 427, 5, 206, 0, 0, 427, 429, 3, 54, 27, 0, 428, 426, 1, 0, 0, 0, 429, 432, 1, 0, 0, 0, 430, 428, 1, 0, 0, 0, 430, 431, 1, 0, 0, 0, 431, 53, 1, 0, 0, 0, 432, 430, 1, 0, 0, 0, 433, 434, 3, 118, 59, 0, 434, 435, 5, 212, 0, 0, 435, 
436, 3, 108, 54, 0, 436, 55, 1, 0, 0, 0, 437, 439, 3, 58, 29, 0, 438, 437, 1, 0, 0, 0, 438, 439, 1, 0, 0, 0, 439, 441, 1, 0, 0, 0, 440, 442, 3, 60, 30, 0, 441, 440, 1, 0, 0, 0, 441, 442, 1, 0, 0, 0, 442, 444, 1, 0, 0, 0, 443, 445, 3, 62, 31, 0, 444, 443, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 57, 1, 0, 0, 0, 446, 447, 5, 126, 0, 0, 447, 448, 5, 18, 0, 0, 448, 449, 3, 72, 36, 0, 449, 59, 1, 0, 0, 0, 450, 451, 5, 122, 0, 0, 451, 452, 5, 18, 0, 0, 452, 453, 3, 46, 23, 0, 453, 61, 1, 0, 0, 0, 454, 455, 7, 8, 0, 0, 455, 456, 3, 64, 32, 0, 456, 63, 1, 0, 0, 0, 457, 464, 3, 66, 33, 0, 458, 459, 5, 16, 0, 0, 459, 460, 3, 66, 33, 0, 460, 461, 5, 6, 0, 0, 461, 462, 3, 66, 33, 0, 462, 464, 1, 0, 0, 0, 463, 457, 1, 0, 0, 0, 463, 458, 1, 0, 0, 0, 464, 65, 1, 0, 0, 0, 465, 466, 5, 33, 0, 0, 466, 478, 5, 142, 0, 0, 467, 468, 5, 175, 0, 0, 468, 478, 5, 128, 0, 0, 469, 470, 5, 175, 0, 0, 470, 478, 5, 64, 0, 0, 471, 472, 3, 106, 53, 0, 472, 473, 5, 128, 0, 0, 473, 478, 1, 0, 0, 0, 474, 475, 3, 106, 53, 0, 475, 476, 5, 64, 0, 0, 476, 478, 1, 0, 0, 0, 477, 465, 1, 0, 0, 0, 477, 467, 1, 0, 0, 0, 477, 469, 1, 0, 0, 0, 477, 471, 1, 0, 0, 0, 477, 474, 1, 0, 0, 0, 478, 67, 1, 0, 0, 0, 479, 480, 3, 74, 37, 0, 480, 481, 5, 0, 0, 1, 481, 69, 1, 0, 0, 0, 482, 530, 3, 118, 59, 0, 483, 484, 3, 118, 59, 0, 484, 485, 5, 220, 0, 0, 485, 486, 3, 118, 59, 0, 486, 493, 3, 70, 35, 0, 487, 488, 5, 206, 0, 0, 488, 489, 3, 118, 59, 0, 489, 490, 3, 70, 35, 0, 490, 492, 1, 0, 0, 0, 491, 487, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 497, 5, 236, 0, 0, 497, 530, 1, 0, 0, 0, 498, 499, 3, 118, 59, 0, 499, 500, 5, 220, 0, 0, 500, 505, 3, 120, 60, 0, 501, 502, 5, 206, 0, 0, 502, 504, 3, 120, 60, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 508, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 5, 236, 0, 0, 509, 530, 1, 0, 0, 0, 510, 511, 3, 118, 59, 0, 511, 512, 5, 220, 0, 0, 512, 517, 3, 70, 35, 0, 513, 514, 5, 206, 0, 0, 514, 516, 3, 70, 35, 0, 515, 513, 1, 0, 0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 520, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 521, 5, 236, 0, 0, 521, 530, 1, 0, 0, 0, 522, 523, 3, 118, 59, 0, 523, 525, 5, 220, 0, 0, 524, 526, 3, 72, 36, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 5, 236, 0, 0, 528, 530, 1, 0, 0, 0, 529, 482, 1, 0, 0, 0, 529, 483, 1, 0, 0, 0, 529, 498, 1, 0, 0, 0, 529, 510, 1, 0, 0, 0, 529, 522, 1, 0, 0, 0, 530, 71, 1, 0, 0, 0, 531, 536, 3, 74, 37, 0, 532, 533, 5, 206, 0, 0, 533, 535, 3, 74, 37, 0, 534, 532, 1, 0, 0, 0, 535, 538, 1, 0, 0, 0, 536, 534, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 73, 1, 0, 0, 0, 538, 536, 1, 0, 0, 0, 539, 540, 6, 37, -1, 0, 540, 542, 5, 19, 0, 0, 541, 543, 3, 74, 37, 0, 542, 541, 1, 0, 0, 0, 542, 543, 1, 0, 0, 0, 543, 549, 1, 0, 0, 0, 544, 545, 5, 186, 0, 0, 545, 546, 3, 74, 37, 0, 546, 547, 5, 163, 0, 0, 547, 548, 3, 74, 37, 0, 548, 550, 1, 0, 0, 0, 549, 544, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 554, 5, 52, 0, 0, 554, 556, 3, 74, 37, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 5, 53, 0, 0, 558, 663, 1, 0, 0, 0, 559, 560, 5, 20, 0, 0, 560, 561, 5, 220, 0, 0, 561, 562, 3, 74, 37, 0, 562, 563, 5, 10, 0, 0, 563, 564, 3, 70, 35, 0, 564, 565, 5, 236, 0, 0, 565, 663, 1, 0, 0, 0, 566, 567, 5, 36, 0, 0, 567, 663, 5, 200, 0, 0, 568, 569, 5, 86, 0, 0, 569, 570, 3, 74, 37, 0, 570, 571, 3, 
110, 55, 0, 571, 663, 1, 0, 0, 0, 572, 573, 5, 155, 0, 0, 573, 574, 5, 220, 0, 0, 574, 575, 3, 74, 37, 0, 575, 576, 5, 68, 0, 0, 576, 579, 3, 74, 37, 0, 577, 578, 5, 65, 0, 0, 578, 580, 3, 74, 37, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 5, 236, 0, 0, 582, 663, 1, 0, 0, 0, 583, 584, 5, 166, 0, 0, 584, 663, 5, 200, 0, 0, 585, 586, 5, 171, 0, 0, 586, 587, 5, 220, 0, 0, 587, 588, 7, 9, 0, 0, 588, 589, 5, 200, 0, 0, 589, 590, 5, 68, 0, 0, 590, 591, 3, 74, 37, 0, 591, 592, 5, 236, 0, 0, 592, 663, 1, 0, 0, 0, 593, 594, 3, 118, 59, 0, 594, 596, 5, 220, 0, 0, 595, 597, 3, 72, 36, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 5, 236, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 5, 125, 0, 0, 601, 602, 5, 220, 0, 0, 602, 603, 3, 56, 28, 0, 603, 604, 5, 236, 0, 0, 604, 663, 1, 0, 0, 0, 605, 606, 3, 118, 59, 0, 606, 608, 5, 220, 0, 0, 607, 609, 3, 72, 36, 0, 608, 607, 1, 0, 0, 0, 608, 609, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 611, 5, 236, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 5, 125, 0, 0, 613, 614, 3, 118, 59, 0, 614, 663, 1, 0, 0, 0, 615, 621, 3, 118, 59, 0, 616, 618, 5, 220, 0, 0, 617, 619, 3, 72, 36, 0, 618, 617, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 622, 5, 236, 0, 0, 621, 616, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 625, 5, 220, 0, 0, 624, 626, 5, 49, 0, 0, 625, 624, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 628, 1, 0, 0, 0, 627, 629, 3, 76, 38, 0, 628, 627, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 5, 236, 0, 0, 631, 663, 1, 0, 0, 0, 632, 663, 3, 82, 41, 0, 633, 663, 3, 108, 54, 0, 634, 635, 5, 208, 0, 0, 635, 663, 3, 74, 37, 18, 636, 637, 5, 115, 0, 0, 637, 663, 3, 74, 37, 12, 638, 639, 3, 98, 49, 0, 639, 640, 5, 210, 0, 0, 640, 642, 1, 0, 0, 0, 641, 638, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 663, 5, 202, 0, 0, 644, 645, 5, 220, 0, 0, 645, 646, 3, 2, 1, 0, 646, 647, 5, 236, 0, 0, 647, 663, 1, 0, 0, 0, 648, 649, 5, 220, 0, 0, 649, 650, 3, 74, 37, 0, 650, 651, 5, 236, 0, 0, 651, 663, 1, 0, 0, 0, 652, 653, 5, 220, 0, 0, 653, 654, 3, 72, 36, 0, 654, 655, 5, 236, 0, 0, 655, 663, 1, 0, 0, 0, 656, 658, 5, 219, 0, 0, 657, 659, 3, 72, 36, 0, 658, 657, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 663, 5, 235, 0, 0, 661, 663, 3, 90, 45, 0, 662, 539, 1, 0, 0, 0, 662, 559, 1, 0, 0, 0, 662, 566, 1, 0, 0, 0, 662, 568, 1, 0, 0, 0, 662, 572, 1, 0, 0, 0, 662, 583, 1, 0, 0, 0, 662, 585, 1, 0, 0, 0, 662, 593, 1, 0, 0, 0, 662, 605, 1, 0, 0, 0, 662, 615, 1, 0, 0, 0, 662, 632, 1, 0, 0, 0, 662, 633, 1, 0, 0, 0, 662, 634, 1, 0, 0, 0, 662, 636, 1, 0, 0, 0, 662, 641, 1, 0, 0, 0, 662, 644, 1, 0, 0, 0, 662, 648, 1, 0, 0, 0, 662, 652, 1, 0, 0, 0, 662, 656, 1, 0, 0, 0, 662, 661, 1, 0, 0, 0, 663, 757, 1, 0, 0, 0, 664, 668, 10, 17, 0, 0, 665, 669, 5, 202, 0, 0, 666, 669, 5, 238, 0, 0, 667, 669, 5, 227, 0, 0, 668, 665, 1, 0, 0, 0, 668, 666, 1, 0, 0, 0, 668, 667, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 756, 3, 74, 37, 18, 671, 675, 10, 16, 0, 0, 672, 676, 5, 228, 0, 0, 673, 676, 5, 208, 0, 0, 674, 676, 5, 207, 0, 0, 675, 672, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 674, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 756, 3, 74, 37, 17, 678, 703, 10, 15, 0, 0, 679, 704, 5, 211, 0, 0, 680, 704, 5, 212, 0, 0, 681, 704, 5, 223, 0, 0, 682, 704, 5, 221, 0, 0, 683, 704, 5, 222, 0, 0, 684, 704, 5, 213, 0, 0, 685, 704, 5, 214, 0, 0, 686, 688, 5, 115, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 689, 1, 0, 0, 0, 689, 691, 5, 80, 0, 0, 690, 692, 5, 25, 0, 0, 691, 690, 1, 
0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 704, 1, 0, 0, 0, 693, 695, 5, 115, 0, 0, 694, 693, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 704, 7, 10, 0, 0, 697, 704, 5, 232, 0, 0, 698, 704, 5, 233, 0, 0, 699, 704, 5, 225, 0, 0, 700, 704, 5, 216, 0, 0, 701, 704, 5, 217, 0, 0, 702, 704, 5, 224, 0, 0, 703, 679, 1, 0, 0, 0, 703, 680, 1, 0, 0, 0, 703, 681, 1, 0, 0, 0, 703, 682, 1, 0, 0, 0, 703, 683, 1, 0, 0, 0, 703, 684, 1, 0, 0, 0, 703, 685, 1, 0, 0, 0, 703, 687, 1, 0, 0, 0, 703, 694, 1, 0, 0, 0, 703, 697, 1, 0, 0, 0, 703, 698, 1, 0, 0, 0, 703, 699, 1, 0, 0, 0, 703, 700, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 756, 3, 74, 37, 16, 706, 707, 10, 13, 0, 0, 707, 708, 5, 226, 0, 0, 708, 756, 3, 74, 37, 14, 709, 710, 10, 11, 0, 0, 710, 711, 5, 6, 0, 0, 711, 756, 3, 74, 37, 12, 712, 713, 10, 10, 0, 0, 713, 714, 5, 121, 0, 0, 714, 756, 3, 74, 37, 11, 715, 717, 10, 9, 0, 0, 716, 718, 5, 115, 0, 0, 717, 716, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 720, 5, 16, 0, 0, 720, 721, 3, 74, 37, 0, 721, 722, 5, 6, 0, 0, 722, 723, 3, 74, 37, 10, 723, 756, 1, 0, 0, 0, 724, 725, 10, 8, 0, 0, 725, 726, 5, 229, 0, 0, 726, 727, 3, 74, 37, 0, 727, 728, 5, 205, 0, 0, 728, 729, 3, 74, 37, 8, 729, 756, 1, 0, 0, 0, 730, 731, 10, 21, 0, 0, 731, 732, 5, 219, 0, 0, 732, 733, 3, 74, 37, 0, 733, 734, 5, 235, 0, 0, 734, 756, 1, 0, 0, 0, 735, 736, 10, 20, 0, 0, 736, 737, 5, 210, 0, 0, 737, 756, 5, 198, 0, 0, 738, 739, 10, 19, 0, 0, 739, 740, 5, 210, 0, 0, 740, 756, 3, 118, 59, 0, 741, 742, 10, 14, 0, 0, 742, 744, 5, 88, 0, 0, 743, 745, 5, 115, 0, 0, 744, 743, 1, 0, 0, 0, 744, 745, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 756, 5, 116, 0, 0, 747, 753, 10, 7, 0, 0, 748, 754, 3, 116, 58, 0, 749, 750, 5, 10, 0, 0, 750, 754, 3, 118, 59, 0, 751, 752, 5, 10, 0, 0, 752, 754, 5, 200, 0, 0, 753, 748, 1, 0, 0, 0, 753, 749, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 756, 1, 0, 0, 0, 755, 664, 1, 0, 0, 0, 755, 671, 1, 0, 0, 0, 755, 678, 1, 0, 0, 0, 755, 706, 1, 0, 0, 0, 755, 709, 1, 0, 0, 0, 755, 712, 1, 0, 0, 0, 755, 715, 1, 0, 0, 0, 755, 724, 1, 0, 0, 0, 755, 730, 1, 0, 0, 0, 755, 735, 1, 0, 0, 0, 755, 738, 1, 0, 0, 0, 755, 741, 1, 0, 0, 0, 755, 747, 1, 0, 0, 0, 756, 759, 1, 0, 0, 0, 757, 755, 1, 0, 0, 0, 757, 758, 1, 0, 0, 0, 758, 75, 1, 0, 0, 0, 759, 757, 1, 0, 0, 0, 760, 765, 3, 78, 39, 0, 761, 762, 5, 206, 0, 0, 762, 764, 3, 78, 39, 0, 763, 761, 1, 0, 0, 0, 764, 767, 1, 0, 0, 0, 765, 763, 1, 0, 0, 0, 765, 766, 1, 0, 0, 0, 766, 77, 1, 0, 0, 0, 767, 765, 1, 0, 0, 0, 768, 771, 3, 80, 40, 0, 769, 771, 3, 74, 37, 0, 770, 768, 1, 0, 0, 0, 770, 769, 1, 0, 0, 0, 771, 79, 1, 0, 0, 0, 772, 773, 5, 220, 0, 0, 773, 778, 3, 118, 59, 0, 774, 775, 5, 206, 0, 0, 775, 777, 3, 118, 59, 0, 776, 774, 1, 0, 0, 0, 777, 780, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 781, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 781, 782, 5, 236, 0, 0, 782, 792, 1, 0, 0, 0, 783, 788, 3, 118, 59, 0, 784, 785, 5, 206, 0, 0, 785, 787, 3, 118, 59, 0, 786, 784, 1, 0, 0, 0, 787, 790, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 792, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 791, 772, 1, 0, 0, 0, 791, 783, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 794, 5, 201, 0, 0, 794, 795, 3, 74, 37, 0, 795, 81, 1, 0, 0, 0, 796, 797, 5, 222, 0, 0, 797, 801, 3, 118, 59, 0, 798, 800, 3, 84, 42, 0, 799, 798, 1, 0, 0, 0, 800, 803, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 801, 802, 1, 0, 0, 0, 802, 804, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 804, 805, 5, 238, 0, 0, 805, 806, 5, 214, 0, 0, 806, 825, 1, 0, 0, 0, 807, 808, 5, 222, 0, 
0, 808, 812, 3, 118, 59, 0, 809, 811, 3, 84, 42, 0, 810, 809, 1, 0, 0, 0, 811, 814, 1, 0, 0, 0, 812, 810, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 815, 1, 0, 0, 0, 814, 812, 1, 0, 0, 0, 815, 817, 5, 214, 0, 0, 816, 818, 3, 82, 41, 0, 817, 816, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 5, 222, 0, 0, 820, 821, 5, 238, 0, 0, 821, 822, 3, 118, 59, 0, 822, 823, 5, 214, 0, 0, 823, 825, 1, 0, 0, 0, 824, 796, 1, 0, 0, 0, 824, 807, 1, 0, 0, 0, 825, 83, 1, 0, 0, 0, 826, 827, 3, 118, 59, 0, 827, 828, 5, 212, 0, 0, 828, 829, 5, 200, 0, 0, 829, 838, 1, 0, 0, 0, 830, 831, 3, 118, 59, 0, 831, 832, 5, 212, 0, 0, 832, 833, 5, 218, 0, 0, 833, 834, 3, 74, 37, 0, 834, 835, 5, 234, 0, 0, 835, 838, 1, 0, 0, 0, 836, 838, 3, 118, 59, 0, 837, 826, 1, 0, 0, 0, 837, 830, 1, 0, 0, 0, 837, 836, 1, 0, 0, 0, 838, 85, 1, 0, 0, 0, 839, 844, 3, 88, 44, 0, 840, 841, 5, 206, 0, 0, 841, 843, 3, 88, 44, 0, 842, 840, 1, 0, 0, 0, 843, 846, 1, 0, 0, 0, 844, 842, 1, 0, 0, 0, 844, 845, 1, 0, 0, 0, 845, 87, 1, 0, 0, 0, 846, 844, 1, 0, 0, 0, 847, 848, 3, 118, 59, 0, 848, 849, 5, 10, 0, 0, 849, 850, 5, 220, 0, 0, 850, 851, 3, 2, 1, 0, 851, 852, 5, 236, 0, 0, 852, 858, 1, 0, 0, 0, 853, 854, 3, 74, 37, 0, 854, 855, 5, 10, 0, 0, 855, 856, 3, 118, 59, 0, 856, 858, 1, 0, 0, 0, 857, 847, 1, 0, 0, 0, 857, 853, 1, 0, 0, 0, 858, 89, 1, 0, 0, 0, 859, 867, 3, 122, 61, 0, 860, 861, 3, 98, 49, 0, 861, 862, 5, 210, 0, 0, 862, 864, 1, 0, 0, 0, 863, 860, 1, 0, 0, 0, 863, 864, 1, 0, 0, 0, 864, 865, 1, 0, 0, 0, 865, 867, 3, 92, 46, 0, 866, 859, 1, 0, 0, 0, 866, 863, 1, 0, 0, 0, 867, 91, 1, 0, 0, 0, 868, 873, 3, 118, 59, 0, 869, 870, 5, 210, 0, 0, 870, 872, 3, 118, 59, 0, 871, 869, 1, 0, 0, 0, 872, 875, 1, 0, 0, 0, 873, 871, 1, 0, 0, 0, 873, 874, 1, 0, 0, 0, 874, 93, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 876, 877, 6, 47, -1, 0, 877, 886, 3, 98, 49, 0, 878, 886, 3, 96, 48, 0, 879, 880, 5, 220, 0, 0, 880, 881, 3, 2, 1, 0, 881, 882, 5, 236, 0, 0, 882, 886, 1, 0, 0, 0, 883, 886, 3, 82, 41, 0, 884, 886, 3, 122, 61, 0, 885, 876, 1, 0, 0, 0, 885, 878, 1, 0, 0, 0, 885, 879, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 884, 1, 0, 0, 0, 886, 895, 1, 0, 0, 0, 887, 891, 10, 3, 0, 0, 888, 892, 3, 116, 58, 0, 889, 890, 5, 10, 0, 0, 890, 892, 3, 118, 59, 0, 891, 888, 1, 0, 0, 0, 891, 889, 1, 0, 0, 0, 892, 894, 1, 0, 0, 0, 893, 887, 1, 0, 0, 0, 894, 897, 1, 0, 0, 0, 895, 893, 1, 0, 0, 0, 895, 896, 1, 0, 0, 0, 896, 95, 1, 0, 0, 0, 897, 895, 1, 0, 0, 0, 898, 899, 3, 118, 59, 0, 899, 901, 5, 220, 0, 0, 900, 902, 3, 100, 50, 0, 901, 900, 1, 0, 0, 0, 901, 902, 1, 0, 0, 0, 902, 903, 1, 0, 0, 0, 903, 904, 5, 236, 0, 0, 904, 97, 1, 0, 0, 0, 905, 906, 3, 102, 51, 0, 906, 907, 5, 210, 0, 0, 907, 909, 1, 0, 0, 0, 908, 905, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 911, 3, 118, 59, 0, 911, 99, 1, 0, 0, 0, 912, 917, 3, 74, 37, 0, 913, 914, 5, 206, 0, 0, 914, 916, 3, 74, 37, 0, 915, 913, 1, 0, 0, 0, 916, 919, 1, 0, 0, 0, 917, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 101, 1, 0, 0, 0, 919, 917, 1, 0, 0, 0, 920, 921, 3, 118, 59, 0, 921, 103, 1, 0, 0, 0, 922, 931, 5, 196, 0, 0, 923, 924, 5, 210, 0, 0, 924, 931, 7, 11, 0, 0, 925, 926, 5, 198, 0, 0, 926, 928, 5, 210, 0, 0, 927, 929, 7, 11, 0, 0, 928, 927, 1, 0, 0, 0, 928, 929, 1, 0, 0, 0, 929, 931, 1, 0, 0, 0, 930, 922, 1, 0, 0, 0, 930, 923, 1, 0, 0, 0, 930, 925, 1, 0, 0, 0, 931, 105, 1, 0, 0, 0, 932, 934, 7, 12, 0, 0, 933, 932, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 941, 1, 0, 0, 0, 935, 942, 3, 104, 52, 0, 936, 942, 5, 197, 0, 0, 937, 942, 5, 198, 0, 0, 938, 942, 5, 199, 0, 0, 939, 942, 5, 82, 0, 0, 
940, 942, 5, 113, 0, 0, 941, 935, 1, 0, 0, 0, 941, 936, 1, 0, 0, 0, 941, 937, 1, 0, 0, 0, 941, 938, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 940, 1, 0, 0, 0, 942, 107, 1, 0, 0, 0, 943, 947, 3, 106, 53, 0, 944, 947, 5, 200, 0, 0, 945, 947, 5, 116, 0, 0, 946, 943, 1, 0, 0, 0, 946, 944, 1, 0, 0, 0, 946, 945, 1, 0, 0, 0, 947, 109, 1, 0, 0, 0, 948, 949, 7, 13, 0, 0, 949, 111, 1, 0, 0, 0, 950, 951, 7, 14, 0, 0, 951, 113, 1, 0, 0, 0, 952, 953, 7, 15, 0, 0, 953, 115, 1, 0, 0, 0, 954, 957, 5, 195, 0, 0, 955, 957, 3, 114, 57, 0, 956, 954, 1, 0, 0, 0, 956, 955, 1, 0, 0, 0, 957, 117, 1, 0, 0, 0, 958, 962, 5, 195, 0, 0, 959, 962, 3, 110, 55, 0, 960, 962, 3, 112, 56, 0, 961, 958, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 960, 1, 0, 0, 0, 962, 119, 1, 0, 0, 0, 963, 964, 5, 200, 0, 0, 964, 965, 5, 212, 0, 0, 965, 966, 3, 106, 53, 0, 966, 121, 1, 0, 0, 0, 967, 968, 5, 218, 0, 0, 968, 969, 3, 118, 59, 0, 969, 970, 5, 234, 0, 0, 970, 123, 1, 0, 0, 0, 120, 127, 137, 146, 149, 153, 156, 160, 163, 166, 169, 172, 176, 180, 183, 186, 189, 193, 196, 205, 211, 232, 249, 266, 272, 278, 289, 291, 302, 305, 311, 319, 325, 327, 331, 336, 339, 342, 346, 350, 353, 355, 358, 362, 366, 369, 371, 373, 378, 389, 395, 402, 407, 411, 415, 421, 423, 430, 438, 441, 444, 463, 477, 493, 505, 517, 525, 529, 536, 542, 551, 555, 579, 596, 608, 618, 621, 625, 628, 641, 658, 662, 668, 675, 687, 691, 694, 703, 717, 744, 753, 755, 757, 765, 770, 778, 788, 791, 801, 812, 817, 824, 837, 844, 857, 863, 866, 873, 885, 891, 895, 901, 908, 917, 928, 930, 933, 941, 946, 956, 961] \ No newline at end of file diff --git a/hogql_parser/HogQLParserBaseVisitor.h b/hogql_parser/HogQLParserBaseVisitor.h index 00e3cd4e391ad..9d8132683af38 100644 --- a/hogql_parser/HogQLParserBaseVisitor.h +++ b/hogql_parser/HogQLParserBaseVisitor.h @@ -211,10 +211,6 @@ class HogQLParserBaseVisitor : public HogQLParserVisitor { return visitChildren(ctx); } - virtual std::any visitColumnExprExtract(HogQLParser::ColumnExprExtractContext *ctx) override { - return visitChildren(ctx); - } - virtual std::any visitColumnExprNegate(HogQLParser::ColumnExprNegateContext *ctx) override { return visitChildren(ctx); } diff --git a/hogql_parser/HogQLParserVisitor.h b/hogql_parser/HogQLParserVisitor.h index 8355b14ae1ea4..72a7eb7aaddc6 100644 --- a/hogql_parser/HogQLParserVisitor.h +++ b/hogql_parser/HogQLParserVisitor.h @@ -117,8 +117,6 @@ class HogQLParserVisitor : public antlr4::tree::AbstractParseTreeVisitor { virtual std::any visitColumnExprAlias(HogQLParser::ColumnExprAliasContext *context) = 0; - virtual std::any visitColumnExprExtract(HogQLParser::ColumnExprExtractContext *context) = 0; - virtual std::any visitColumnExprNegate(HogQLParser::ColumnExprNegateContext *context) = 0; virtual std::any visitColumnExprSubquery(HogQLParser::ColumnExprSubqueryContext *context) = 0; diff --git a/hogql_parser/parser.cpp b/hogql_parser/parser.cpp index 1c16cbe1280c1..1ee0cc910f8f6 100644 --- a/hogql_parser/parser.cpp +++ b/hogql_parser/parser.cpp @@ -1082,8 +1082,6 @@ class HogQLParseTreeConverter : public HogQLParserBaseVisitor { RETURN_NEW_AST_NODE("Alias", "{s:N,s:s#}", "expr", expr, "alias", alias.data(), alias.size()); } - VISIT_UNSUPPORTED(ColumnExprExtract) - VISIT(ColumnExprNegate) { PyObject* left = build_ast_node("Constant", "{s:i}", "value", 0); if (!left) throw PyInternalException(); diff --git a/hogql_parser/setup.py b/hogql_parser/setup.py index 6a95b379acd90..25b732863871c 100644 --- a/hogql_parser/setup.py +++ b/hogql_parser/setup.py @@ -32,7 +32,7 @@ setup( name="hogql_parser", 
- version="1.0.3", + version="1.0.4", url="https://github.com/PostHog/posthog/tree/master/hogql_parser", author="PostHog Inc.", author_email="hey@posthog.com", diff --git a/latest_migrations.manifest b/latest_migrations.manifest index f232dbc8c186c..970d033f08d66 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0397_projects_backfill +posthog: 0399_batchexportrun_records_total_count sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 781ad2980830b..b0ce8847d444c 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -1,6 +1,14 @@ posthog/temporal/common/utils.py:0: error: Argument 1 to "abstractclassmethod" has incompatible type "Callable[[HeartbeatDetails, Any], Any]"; expected "Callable[[type[Never], Any], Any]" [arg-type] posthog/temporal/common/utils.py:0: note: This is likely because "from_activity" has named arguments: "cls". Consider marking them positional-only posthog/temporal/common/utils.py:0: error: Argument 2 to "__get__" of "classmethod" has incompatible type "type[HeartbeatType]"; expected "type[Never]" [arg-type] +posthog/temporal/data_imports/pipelines/zendesk/talk_api.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "str") [assignment] +posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "DateTime | Date | datetime | date | str | float | int | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] +posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "str | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] +posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "DateTime | Date | datetime | date | str | float | int | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] +posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "str | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] +posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "DateTime | Date | datetime | date | str | float | int | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] +posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Item "None" of "DateTime | None" has no attribute "int_timestamp" [union-attr] +posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "str | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] posthog/hogql/database/argmax.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] posthog/hogql/database/argmax.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/argmax.py:0: note: Consider using "Sequence" instead, which is covariant @@ -77,6 +85,7 @@ 
posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined] posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined] posthog/hogql/parser.py:0: error: Statement is unreachable [unreachable] posthog/hogql/database/schema/person_distinct_ids.py:0: error: Argument 1 to "select_from_person_distinct_ids_table" has incompatible type "dict[str, list[str]]"; expected "dict[str, list[str | int]]" [arg-type] +posthog/hogql/database/schema/person_distinct_id_overrides.py:0: error: Argument 1 to "select_from_person_distinct_id_overrides_table" has incompatible type "dict[str, list[str]]"; expected "dict[str, list[str | int]]" [arg-type] posthog/hogql/database/schema/cohort_people.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] posthog/hogql/database/schema/cohort_people.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/schema/cohort_people.py:0: note: Consider using "Sequence" instead, which is covariant @@ -97,6 +106,7 @@ posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fi posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] +posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] posthog/hogql/database/database.py:0: error: Incompatible types (expression has type "Literal['view', 'lazy_table']", TypedDict item "type" has type "Literal['integer', 'float', 'string', 'datetime', 'date', 'boolean', 'array', 'json', 'lazy_table', 'virtual_table', 'field_traverser', 'expression']") [typeddict-item] posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type] posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment] @@ -123,11 +133,6 @@ posthog/hogql_queries/utils/query_date_range.py:0: error: Incompatible default f posthog/hogql_queries/utils/query_date_range.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True posthog/hogql_queries/utils/query_date_range.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "IntervalType | None" has no attribute "name" [union-attr] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "filter" has incompatible type "Callable[[Any], bool]"; expected "Callable[[Any], TypeGuard[bool]]" [arg-type] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "filter" has incompatible type "Callable[[Any], bool]"; expected "Callable[[Any], TypeGuard[Any]]" [arg-type] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 2 to "filter" has incompatible type "Any | None"; expected "Iterable[Any]" [arg-type] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 2 to "map" has incompatible type "Any | None"; expected "Iterable[Any]" [arg-type] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 2 to "map" has incompatible type "Any | None"; expected "Iterable[Any]" [arg-type] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 4 has incompatible type "str": "Literal[0, 1, 2, 3, 4] | None"; expected "str": "str | None" [dict-item] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Item "None" of "Any | None" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "float" has incompatible type "Any | None"; expected "str | Buffer | SupportsFloat | SupportsIndex" [arg-type] @@ -243,7 +248,6 @@ posthog/hogql/resolver.py:0: error: Argument 1 to "join" of "str" has incompatib posthog/temporal/data_imports/external_data_job.py:0: error: Argument "team_id" has incompatible type "int"; expected "str" [arg-type] posthog/temporal/data_imports/external_data_job.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/external_data_job.py:0: error: Argument "team_id" has incompatible type "int"; expected "str" [arg-type] -posthog/temporal/data_imports/external_data_job.py:0: error: Argument 2 to "DataImportPipeline" has incompatible type "DltSource"; expected "DltResource" [arg-type] posthog/hogql/transforms/lazy_tables.py:0: error: Incompatible default for argument "context" (default has type "None", argument has type "HogQLContext") [assignment] posthog/hogql/transforms/lazy_tables.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True posthog/hogql/transforms/lazy_tables.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase @@ -339,20 +343,12 @@ posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is u posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr] posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_histogram_bin_count" [union-attr] posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown" [union-attr] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Argument 1 to "parse_expr" has incompatible type "str | float | list[str | float] | Any | None"; expected "str" [arg-type] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown" [union-attr] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Argument 1 to "parse_expr" has incompatible type "str | float | list[str | float] | Any | None"; expected "str" [arg-type] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Statement is unreachable [unreachable] posthog/hogql_queries/insights/trends/breakdown.py:0: error: Argument "exprs" to "Or" has incompatible type "list[CompareOperation]"; expected "list[Expr]" [arg-type] posthog/hogql_queries/insights/trends/breakdown.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql_queries/insights/trends/breakdown.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Incompatible types in assignment (expression has type "float", variable has type "int") [assignment] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Incompatible types in assignment (expression has type "float", variable has type "int") [assignment] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] -posthog/hogql_queries/insights/trends/breakdown.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] +posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr] +posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown" [union-attr] +posthog/hogql_queries/insights/trends/breakdown.py:0: error: Argument 1 to "parse_expr" has incompatible type "str | float | list[str | float] | Any | None"; expected "str" [arg-type] posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr] 
posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown" [union-attr] posthog/hogql_queries/insights/trends/breakdown.py:0: error: Argument "breakdown_field" to "get_properties_chain" has incompatible type "str | float | list[str | float] | Any | None"; expected "str" [arg-type] @@ -377,11 +373,11 @@ posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Signature posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: Superclass: posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: def to_actors_query(self) -> SelectQuery | SelectUnionQuery posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: Subclass: -posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: def to_actors_query(self, time_frame: str | int | None, series_index: int, breakdown_value: str | int | None = ..., compare: Compare | None = ...) -> SelectQuery | SelectUnionQuery +posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: def to_actors_query(self, time_frame: str | None, series_index: int, breakdown_value: str | int | None = ..., compare: Compare | None = ...) -> SelectQuery | SelectUnionQuery posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: Superclass: posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: def to_actors_query(self) -> SelectQuery | SelectUnionQuery posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: Subclass: -posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: def to_actors_query(self, time_frame: str | int | None, series_index: int, breakdown_value: str | int | None = ..., compare: Compare | None = ...) -> SelectQuery | SelectUnionQuery +posthog/hogql_queries/insights/trends/trends_query_runner.py:0: note: def to_actors_query(self, time_frame: str | None, series_index: int, breakdown_value: str | int | None = ..., compare: Compare | None = ...) 
-> SelectQuery | SelectUnionQuery posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Statement is unreachable [unreachable] posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Argument 1 to "_event_property" of "TrendsQueryRunner" has incompatible type "str | float | list[str | float] | None"; expected "str" [arg-type] posthog/hogql_queries/insights/retention_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "Call") [assignment] @@ -542,7 +538,6 @@ posthog/hogql_queries/insights/trends/test/test_aggregation_operations.py:0: err posthog/hogql_queries/insights/trends/test/test_aggregation_operations.py:0: error: Item "SelectQuery" of "SelectQuery | SelectUnionQuery | Field | Any | None" has no attribute "chain" [union-attr] posthog/hogql_queries/insights/trends/test/test_aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery | Field | Any | None" has no attribute "chain" [union-attr] posthog/hogql_queries/insights/trends/test/test_aggregation_operations.py:0: error: Item "None" of "SelectQuery | SelectUnionQuery | Field | Any | None" has no attribute "chain" [union-attr] -posthog/hogql_queries/insights/test/test_paginators.py:0: error: Argument 2 to "execute_hogql_query" of "HogQLHasMorePaginator" has incompatible type "SelectQuery | SelectUnionQuery"; expected "SelectQuery" [arg-type] posthog/hogql_queries/insights/test/test_paginators.py:0: error: Value of type "object" is not indexable [index] posthog/hogql_queries/insights/test/test_paginators.py:0: error: Value of type "object" is not indexable [index] posthog/hogql_queries/insights/test/test_paginators.py:0: error: Value of type "object" is not indexable [index] @@ -758,12 +753,8 @@ posthog/api/dashboards/dashboard_templates.py:0: error: Metaclass conflict: the ee/api/feature_flag_role_access.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Item "None" of "BatchExportRun | None" has no attribute "data_interval_start" [union-attr] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Item "None" of "BatchExportRun | None" has no attribute "data_interval_end" [union-attr] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Item "None" of "BatchExportRun | None" has no attribute "status" [union-attr] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Item "None" of "BatchExportRun | None" has no attribute "status" [union-attr] posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) 
[literal-required] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] diff --git a/package.json b/package.json index 770be74997198..ca948c3607d16 100644 --- a/package.json +++ b/package.json @@ -140,11 +140,12 @@ "maplibre-gl": "^3.5.1", "md5": "^2.3.0", "monaco-editor": "^0.39.0", + "natural-orderby": "^3.0.2", "papaparse": "^5.4.1", "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.116.3", + "posthog-js": "1.116.6", "posthog-js-lite": "2.5.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/plugin-server/functional_tests/api.ts b/plugin-server/functional_tests/api.ts index abbd770d7bb77..c6ff46bf5bf6d 100644 --- a/plugin-server/functional_tests/api.ts +++ b/plugin-server/functional_tests/api.ts @@ -106,6 +106,7 @@ export const capture = async ({ }) ), key: teamId ? teamId.toString() : '', + waitForAck: true, }) } diff --git a/plugin-server/functional_tests/jobs-consumer.test.ts b/plugin-server/functional_tests/jobs-consumer.test.ts index 30e2abd9af282..353bd3518397e 100644 --- a/plugin-server/functional_tests/jobs-consumer.test.ts +++ b/plugin-server/functional_tests/jobs-consumer.test.ts @@ -43,7 +43,7 @@ describe('dlq handling', () => { test.concurrent(`handles empty messages`, async () => { const key = uuidv4() - await produce({ topic: 'jobs', message: null, key }) + await produce({ topic: 'jobs', message: null, key, waitForAck: true }) await waitForExpect(() => { const messages = dlq.filter((message) => message.key?.toString() === key) @@ -54,7 +54,7 @@ describe('dlq handling', () => { test.concurrent(`handles invalid JSON`, async () => { const key = uuidv4() - await produce({ topic: 'jobs', message: Buffer.from('invalid json'), key }) + await produce({ topic: 'jobs', message: Buffer.from('invalid json'), key, waitForAck: true }) await waitForExpect(() => { const messages = dlq.filter((message) => message.key?.toString() === key) @@ -72,7 +72,7 @@ describe('dlq handling', () => { labels: { topic: 'jobs', partition: '0', groupId: 'jobs-inserter' }, }) - await produce({ topic: 'jobs', message: Buffer.from(''), key: '' }) + await produce({ topic: 'jobs', message: Buffer.from(''), key: '', waitForAck: true }) await waitForExpect(async () => { const metricAfter = await getMetric({ diff --git a/plugin-server/functional_tests/kafka.ts b/plugin-server/functional_tests/kafka.ts index c2ab7ac87a6ab..f431488b290ac 100644 --- a/plugin-server/functional_tests/kafka.ts +++ b/plugin-server/functional_tests/kafka.ts @@ -36,7 +36,17 @@ export async function createKafkaProducer() { return producer } -export async function produce({ topic, message, key }: { topic: string; message: Buffer | null; key: string }) { +export async function produce({ + topic, + message, + key, + waitForAck, +}: { + topic: string + message: Buffer | null + key: string + waitForAck: boolean +}) { producer = producer ?? 
(await createKafkaProducer()) - await defaultProduce({ producer, topic, value: message, key: Buffer.from(key) }) + await defaultProduce({ producer, topic, value: message, key: Buffer.from(key), waitForAck }) } diff --git a/plugin-server/functional_tests/scheduled-tasks-runner.test.ts b/plugin-server/functional_tests/scheduled-tasks-runner.test.ts index 3e3345245a644..48764ae7f90a7 100644 --- a/plugin-server/functional_tests/scheduled-tasks-runner.test.ts +++ b/plugin-server/functional_tests/scheduled-tasks-runner.test.ts @@ -43,7 +43,7 @@ describe('dlq handling', () => { test.concurrent(`handles empty messages`, async () => { const key = uuidv4() - await produce({ topic: 'scheduled_tasks', message: null, key }) + await produce({ topic: 'scheduled_tasks', message: null, key, waitForAck: true }) await waitForExpect(() => { const messages = dlq.filter((message) => message.key?.toString() === key) @@ -54,7 +54,7 @@ describe('dlq handling', () => { test.concurrent(`handles invalid JSON`, async () => { const key = uuidv4() - await produce({ topic: 'scheduled_tasks', message: Buffer.from('invalid json'), key }) + await produce({ topic: 'scheduled_tasks', message: Buffer.from('invalid json'), key, waitForAck: true }) await waitForExpect(() => { const messages = dlq.filter((message) => message.key?.toString() === key) @@ -69,6 +69,7 @@ describe('dlq handling', () => { topic: 'scheduled_tasks', message: Buffer.from(JSON.stringify({ taskType: 'invalidTaskType', pluginConfigId: 1 })), key, + waitForAck: true, }) await waitForExpect(() => { @@ -84,6 +85,7 @@ describe('dlq handling', () => { topic: 'scheduled_tasks', message: Buffer.from(JSON.stringify({ taskType: 'runEveryMinute', pluginConfigId: 'asdf' })), key, + waitForAck: true, }) await waitForExpect(() => { @@ -104,7 +106,7 @@ describe('dlq handling', () => { // NOTE: we don't actually care too much about the contents of the // message, just that it triggers the consumer to try to process it.
- await produce({ topic: 'scheduled_tasks', message: Buffer.from(''), key: '' }) + await produce({ topic: 'scheduled_tasks', message: Buffer.from(''), key: '', waitForAck: true }) await waitForExpect(async () => { const metricAfter = await getMetric({ diff --git a/plugin-server/functional_tests/session-recordings.test.ts b/plugin-server/functional_tests/session-recordings.test.ts index 62075bc6bd10f..783fbdbeb43cd 100644 --- a/plugin-server/functional_tests/session-recordings.test.ts +++ b/plugin-server/functional_tests/session-recordings.test.ts @@ -173,7 +173,12 @@ test.skip('consumer updates timestamp exported to prometheus', async () => { }, }) - await produce({ topic: KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS, message: Buffer.from(''), key: '' }) + await produce({ + topic: KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS, + message: Buffer.from(''), + key: '', + waitForAck: true, + }) await waitForExpect(async () => { const metricAfter = await getMetric({ @@ -245,6 +250,7 @@ test.skip(`handles message with no token or with token and no associated team_id topic: KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS, message: Buffer.from(JSON.stringify({ uuid: noTokenUuid, data: JSON.stringify({}) })), key: noTokenKey, + waitForAck: true, }) await produce({ topic: KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS, @@ -252,6 +258,7 @@ test.skip(`handles message with no token or with token and no associated team_id JSON.stringify({ uuid: noAssociatedTeamUuid, token: 'no associated team', data: JSON.stringify({}) }) ), key: noAssociatedTeamKey, + waitForAck: true, }) await capture(makeSessionMessage(teamId, 'should be ingested')) diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index 8e9b50afb9528..a6ee4e91a9b15 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -134,6 +134,7 @@ export function getDefaultConfig(): PluginsServerConfig { RUSTY_HOOK_FOR_TEAMS: '', RUSTY_HOOK_ROLLOUT_PERCENTAGE: 0, RUSTY_HOOK_URL: '', + CAPTURE_CONFIG_REDIS_HOST: null, STARTUP_PROFILE_DURATION_SECONDS: 300, // 5 minutes STARTUP_PROFILE_CPU: false, @@ -163,6 +164,9 @@ export function getDefaultConfig(): PluginsServerConfig { SESSION_RECORDING_DEBUG_PARTITION: undefined, SESSION_RECORDING_KAFKA_DEBUG: undefined, SESSION_RECORDING_MAX_PARALLEL_FLUSHES: 10, + SESSION_RECORDING_OVERFLOW_ENABLED: false, + SESSION_RECORDING_OVERFLOW_BUCKET_REPLENISH_RATE: 5_000_000, // 5MB/second uncompressed, sustained + SESSION_RECORDING_OVERFLOW_BUCKET_CAPACITY: 200_000_000, // 200MB burst } } diff --git a/plugin-server/src/kafka/producer.ts b/plugin-server/src/kafka/producer.ts index 7029a26c79fbd..062785f902bc4 100644 --- a/plugin-server/src/kafka/producer.ts +++ b/plugin-server/src/kafka/producer.ts @@ -7,6 +7,7 @@ import { NumberNullUndefined, ProducerGlobalConfig, } from 'node-rdkafka' +import { Summary } from 'prom-client' import { getSpan } from '../sentry' import { status } from '../utils/status' @@ -17,6 +18,13 @@ export type KafkaProducerConfig = { KAFKA_PRODUCER_QUEUE_BUFFERING_MAX_MESSAGES: number } +export const ingestEventKafkaProduceLatency = new Summary({ + name: 'ingest_event_kafka_produce_latency', + help: 'Wait time for individual Kafka produces', + labelNames: ['topic', 'waitForAck'], + percentiles: [0.5, 0.9, 0.95, 0.99], +}) + // Kafka production related functions using node-rdkafka. 
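The `Summary` added above follows prom-client's timer pattern: `labels()` selects a child metric and `startTimer()` returns a callback that records the elapsed seconds when invoked. A standalone sketch with an invented metric name:

```ts
import { Summary } from 'prom-client'

// Standalone sketch of the labels()/startTimer() pattern used by
// ingestEventKafkaProduceLatency below.
const exampleLatency = new Summary({
    name: 'example_produce_latency',
    help: 'Wait time for an example produce call',
    labelNames: ['waitForAck'],
    percentiles: [0.5, 0.9, 0.95, 0.99],
})

async function timedProduce(doProduce: () => Promise<void>, waitForAck: boolean): Promise<void> {
    const stop = exampleLatency.labels({ waitForAck: waitForAck.toString() }).startTimer()
    try {
        await doProduce()
    } finally {
        stop() // observes the seconds elapsed since startTimer()
    }
}
```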
export const createKafkaProducer = async (globalConfig: ProducerGlobalConfig, producerConfig: KafkaProducerConfig) => { const producer = new RdKafkaProducer({ @@ -71,18 +79,22 @@ export const produce = async ({ value, key, headers = [], - waitForAck = true, + waitForAck, }: { producer: RdKafkaProducer topic: string value: MessageValue key: MessageKey headers?: MessageHeader[] - waitForAck?: boolean + waitForAck: boolean }): Promise<number | null | undefined> => { status.debug('📤', 'Producing message', { topic: topic }) const produceSpan = getSpan()?.startChild({ op: 'kafka_produce' }) return await new Promise<number | null | undefined>((resolve, reject) => { + const produceTimer = ingestEventKafkaProduceLatency + .labels({ topic, waitForAck: waitForAck.toString() }) + .startTimer() + if (waitForAck) { producer.produce( topic, @@ -100,6 +112,7 @@ export const produce = async ({ resolve(offset) } + produceTimer() produceSpan?.finish() } ) @@ -112,6 +125,7 @@ export const produce = async ({ produceSpan?.finish() }) resolve(undefined) + produceTimer() } }) } diff --git a/plugin-server/src/main/graphile-worker/schedule.ts b/plugin-server/src/main/graphile-worker/schedule.ts index d50c672cea428..16435d02c0466 100644 --- a/plugin-server/src/main/graphile-worker/schedule.ts +++ b/plugin-server/src/main/graphile-worker/schedule.ts @@ -56,8 +56,11 @@ export async function runScheduledTasks( for (const pluginConfigId of server.pluginSchedule?.[taskType] || []) { status.info('⏲️', 'queueing_schedule_task', { taskType, pluginConfigId }) await server.kafkaProducer.queueMessage({ - topic: KAFKA_SCHEDULED_TASKS, - messages: [{ key: pluginConfigId.toString(), value: JSON.stringify({ taskType, pluginConfigId }) }], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [{ key: pluginConfigId.toString(), value: JSON.stringify({ taskType, pluginConfigId }) }], + }, + waitForAck: true, }) graphileScheduledTaskCounter.labels({ status: 'queued', task: taskType }).inc() } diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts index 749e41c18c335..588c2c92beb86 100644 --- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts @@ -15,6 +15,7 @@ import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics' import { ingestEventBatchingBatchCountSummary, ingestEventBatchingInputLengthSummary, + ingestEventEachBatchKafkaAckWait, ingestionOverflowingMessagesTotal, ingestionParallelism, ingestionParallelismPotential, @@ -41,7 +42,7 @@ type IngestionSplitBatch = { type IngestResult = { // Promises that the batch handler should await on before committing offsets, // contains the Kafka producer ACKs, to avoid blocking after every message. - promises?: Array<Promise<void>> + ackPromises?: Array<Promise<void>> } async function handleProcessingError( @@ -166,7 +167,7 @@ export async function eachBatchParallelIngestion( return await runner.runEventPipeline(pluginEvent) })) as IngestResult - result.promises?.forEach((promise) => + result.ackPromises?.forEach((promise) => processingPromises.push( promise.catch(async (error) => { await handleProcessingError(error, message, pluginEvent, queue) @@ -227,7 +228,9 @@ export async function eachBatchParallelIngestion( // impact the success. Delaying ACKs allows the producer to write in big batches for // better throughput and lower broker load.
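Schematically, the deferred-ACK batching described in the comment above works like this (a sketch with placeholder functions, not the actual pipeline code):

```ts
type IngestResult = { ackPromises?: Array<Promise<void>> }

// Placeholders standing in for the real pipeline and consumer plumbing.
declare function runEventPipeline(event: unknown): Promise<IngestResult>
declare function commitOffsets(): Promise<void>

async function processBatch(events: unknown[]): Promise<void> {
    const processingPromises: Array<Promise<void>> = []
    for (const event of events) {
        // Produces to Kafka without awaiting ACKs; the promises are collected.
        const result = await runEventPipeline(event)
        result.ackPromises?.forEach((p) => processingPromises.push(p))
    }
    // Single wait point per batch: all broker ACKs are awaited together,
    // right before the consumer commits its offsets.
    await Promise.all(processingPromises)
    await commitOffsets()
}
```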
const awaitSpan = transaction.startChild({ op: 'awaitACKs', data: { promiseCount: processingPromises.length } }) + const kafkaAckWaitMetric = ingestEventEachBatchKafkaAckWait.startTimer() await Promise.all(processingPromises) + kafkaAckWaitMetric() awaitSpan.finish() for (const message of messages) { diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/metrics.ts b/plugin-server/src/main/ingestion-queues/batch-processing/metrics.ts index 42c1b06a27b5d..60563b6cabaaa 100644 --- a/plugin-server/src/main/ingestion-queues/batch-processing/metrics.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/metrics.ts @@ -41,3 +41,9 @@ export const ingestEventBatchingBatchCountSummary = new Summary({ help: 'Number of batches of events', percentiles: [0.5, 0.9, 0.95, 0.99], }) + +export const ingestEventEachBatchKafkaAckWait = new Summary({ + name: 'ingest_event_each_batch_kafka_ack_wait', + help: 'Wait time for the batch of Kafka ACKs at the end of eachBatchParallelIngestion', + percentiles: [0.5, 0.9, 0.95, 0.99], +}) diff --git a/plugin-server/src/main/ingestion-queues/jobs-consumer.ts b/plugin-server/src/main/ingestion-queues/jobs-consumer.ts index 94549340da4fe..605a812068c51 100644 --- a/plugin-server/src/main/ingestion-queues/jobs-consumer.ts +++ b/plugin-server/src/main/ingestion-queues/jobs-consumer.ts @@ -54,8 +54,11 @@ export const startJobsConsumer = async ({ }) // TODO: handle resolving offsets asynchronously await producer.queueMessage({ - topic: KAFKA_JOBS_DLQ, - messages: [{ value: message.value, key: message.key }], + kafkaMessage: { + topic: KAFKA_JOBS_DLQ, + messages: [{ value: message.value, key: message.key }], + }, + waitForAck: true, }) resolveOffset(message.offset) continue @@ -71,8 +74,11 @@ export const startJobsConsumer = async ({ }) // TODO: handle resolving offsets asynchronously await producer.queueMessage({ - topic: KAFKA_JOBS_DLQ, - messages: [{ value: message.value, key: message.key }], + kafkaMessage: { + topic: KAFKA_JOBS_DLQ, + messages: [{ value: message.value, key: message.key }], + }, + waitForAck: true, }) resolveOffset(message.offset) continue diff --git a/plugin-server/src/main/ingestion-queues/scheduled-tasks-consumer.ts b/plugin-server/src/main/ingestion-queues/scheduled-tasks-consumer.ts index 3de544ce2d0a4..83ea62fdfdd6f 100644 --- a/plugin-server/src/main/ingestion-queues/scheduled-tasks-consumer.ts +++ b/plugin-server/src/main/ingestion-queues/scheduled-tasks-consumer.ts @@ -163,8 +163,11 @@ const getTasksFromBatch = async (batch: Batch, producer: KafkaProducerWrapper) = value: message.value, }) await producer.queueMessage({ - topic: KAFKA_SCHEDULED_TASKS_DLQ, - messages: [{ value: message.value, key: message.key }], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS_DLQ, + messages: [{ value: message.value, key: message.key }], + }, + waitForAck: true, }) continue } @@ -181,8 +184,11 @@ const getTasksFromBatch = async (batch: Batch, producer: KafkaProducerWrapper) = error: error.stack ?? 
error, }) await producer.queueMessage({ - topic: KAFKA_SCHEDULED_TASKS_DLQ, - messages: [{ value: message.value, key: message.key }], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS_DLQ, + messages: [{ value: message.value, key: message.key }], + }, + waitForAck: true, }) continue } @@ -190,8 +196,11 @@ const getTasksFromBatch = async (batch: Batch, producer: KafkaProducerWrapper) = if (!taskTypes.includes(task.taskType) || isNaN(task.pluginConfigId)) { status.warn('⚠️', `Invalid schema for partition ${batch.partition} offset ${message.offset}.`, task) await producer.queueMessage({ - topic: KAFKA_SCHEDULED_TASKS_DLQ, - messages: [{ value: message.value, key: message.key }], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS_DLQ, + messages: [{ value: message.value, key: message.key }], + }, + waitForAck: true, }) continue } diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts index 5729da5cb373e..1c581451e44ec 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts @@ -163,6 +163,7 @@ export class ConsoleLogsIngester { topic: KAFKA_LOG_ENTRIES, value: Buffer.from(JSON.stringify(cle)), key: event.session_id, + waitForAck: true, }) ) } catch (error) { diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/overflow-manager.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/overflow-manager.ts new file mode 100644 index 0000000000000..f7a10a6a63593 --- /dev/null +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/overflow-manager.ts @@ -0,0 +1,60 @@ +import { Redis } from 'ioredis' +import LRUCache from 'lru-cache' +import { Gauge } from 'prom-client' + +import { Limiter } from '../../../../utils/token-bucket' + +export const overflowTriggeredGauge = new Gauge({ + name: 'overflow_detection_triggered_total', + help: 'Number of entities that triggered overflow detection.', +}) + +/** + * OverflowManager handles consumer-side detection of hot partitions by + * accounting for data volumes per entity (a session_id, a distinct_id...) + * and maintains the Redis sorted set that capture reads to route messages. + * + * The first time the observed spike crosses the thresholds set via burstCapacity + * and replenishRate, the key is added to Redis and the metrics are incremented; subsequent + * calls return early until cooldownSeconds has elapsed. + */ +export class OverflowManager { + private limiter: Limiter + private triggered: LRUCache<string, boolean> + + constructor( + burstCapacity: number, + replenishRate: number, + private cooldownSeconds: number, + private redisKey: string, + private redisClient: Redis + ) { + this.limiter = new Limiter(burstCapacity, replenishRate) + this.triggered = new LRUCache<string, boolean>({ max: 1_000_000, maxAge: cooldownSeconds * 1000 }) + } + + public async observe(key: string, quantity: number, now?: number): Promise<void> { + if (this.triggered.has(key)) { + // Cooldown state, return early + return + } + if (this.limiter.consume(key, quantity, now)) { + // Not triggering overflow, return early + return + } + this.triggered.set(key, true) + overflowTriggeredGauge.inc(1) + + // Set the `NX` argument to not update existing entries: if a session already triggered overflow, + // its cooldown will not be extended after we restart the consumers.
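As a concrete illustration of the `NX` write described in this comment, here is the same sorted-set cooldown expressed as a standalone ioredis sketch (key and entity names are illustrative):

```ts
import { Redis } from 'ioredis'

// Sketch of the cooldown zset write: the score is an expiration timestamp in
// seconds, and 'NX' makes zadd a no-op for members already present, so an
// existing cooldown is never extended.
async function flagOverflow(redis: Redis, zsetKey: string, entity: string, cooldownSeconds: number) {
    const nowSeconds = Date.now() / 1000
    await redis.zadd(zsetKey, 'NX', nowSeconds + cooldownSeconds, entity)
    // Prune members whose cooldown expired more than an hour ago.
    await redis.zremrangebyscore(zsetKey, 0, nowSeconds - 3600)
}
```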
+ // The zset value is a timestamp in seconds. + const expiration = (now ?? Date.now()) / 1000 + this.cooldownSeconds + await this.redisClient.zadd(this.redisKey, 'NX', expiration, key) + + // Cleanup old entries with values expired more than one hour ago. + // We run the cleanup here because we assume this will only run a dozen times per day per region. + // If this code path becomes too hot, it should move to a singleton loop. + const expired = (now ?? Date.now()) / 1000 - 3600 + await this.redisClient.zremrangebyscore(this.redisKey, 0, expired) + } +} diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts index 632f695a158f5..029f28f20bb9a 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts @@ -171,6 +171,7 @@ export class ReplayEventsIngester { topic: KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, value: Buffer.from(JSON.stringify(replayRecord)), key: event.session_id, + waitForAck: true, }), ] } catch (error) { diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer.ts index 30aaab4a023d5..2e84d7826c002 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer.ts @@ -1,5 +1,6 @@ import { captureException } from '@sentry/node' import crypto from 'crypto' +import { Redis } from 'ioredis' import { mkdirSync, rmSync } from 'node:fs' import { CODES, features, KafkaConsumer, librdkafkaVersion, Message, TopicPartition } from 'node-rdkafka' import { Counter, Gauge, Histogram } from 'prom-client' @@ -20,6 +21,7 @@ import { addSentryBreadcrumbsEventListeners } from '../kafka-metrics' import { eventDroppedCounter } from '../metrics' import { ConsoleLogsIngester } from './services/console-logs-ingester' import { OffsetHighWaterMarker } from './services/offset-high-water-marker' +import { OverflowManager } from './services/overflow-manager' import { RealtimeManager } from './services/realtime-manager' import { ReplayEventsIngester } from './services/replay-events-ingester' import { BUCKETS_KB_WRITTEN, SessionManager } from './services/session-manager' @@ -41,6 +43,7 @@ require('@sentry/tracing') const KAFKA_CONSUMER_GROUP_ID = 'session-recordings-blob' const KAFKA_CONSUMER_SESSION_TIMEOUT_MS = 30000 const SHUTDOWN_FLUSH_TIMEOUT_MS = 30000 +const CAPTURE_OVERFLOW_REDIS_KEY = '@posthog/capture-overflow/replay' const gaugeSessionsHandled = new Gauge({ name: 'recording_blob_ingestion_session_manager_count', @@ -128,6 +131,7 @@ export class SessionRecordingIngester { sessionHighWaterMarker: OffsetHighWaterMarker persistentHighWaterMarker: OffsetHighWaterMarker realtimeManager: RealtimeManager + overflowDetection?: OverflowManager replayEventsIngester?: ReplayEventsIngester consoleLogsIngester?: ConsoleLogsIngester batchConsumer?: BatchConsumer @@ -147,7 +151,8 @@ export class SessionRecordingIngester { constructor( private globalServerConfig: PluginsServerConfig, private postgres: PostgresRouter, - private objectStorage: ObjectStorage + private objectStorage: ObjectStorage, + captureRedis: Redis | undefined ) { this.debugPartition = 
globalServerConfig.SESSION_RECORDING_DEBUG_PARTITION ? parseInt(globalServerConfig.SESSION_RECORDING_DEBUG_PARTITION) @@ -160,6 +165,16 @@ export class SessionRecordingIngester { this.realtimeManager = new RealtimeManager(this.redisPool, this.config) + if (globalServerConfig.SESSION_RECORDING_OVERFLOW_ENABLED && captureRedis) { + this.overflowDetection = new OverflowManager( + globalServerConfig.SESSION_RECORDING_OVERFLOW_BUCKET_CAPACITY, + globalServerConfig.SESSION_RECORDING_OVERFLOW_BUCKET_REPLENISH_RATE, + 24 * 3600, // One day, + CAPTURE_OVERFLOW_REDIS_KEY, + captureRedis + ) + } + // We create a hash of the cluster to use as a unique identifier for the high-water marks // This enables us to swap clusters without having to worry about resetting the high-water marks const kafkaClusterIdentifier = crypto.createHash('md5').update(this.config.KAFKA_HOSTS).digest('hex') @@ -240,6 +255,8 @@ export class SessionRecordingIngester { const { team_id, session_id } = event const key = `${team_id}-${session_id}` + // TODO: use this for session key too if it's safe to do so + const overflowKey = `${team_id}:${session_id}` const { partition, highOffset } = event.metadata if (this.debugPartition === partition) { @@ -291,7 +308,10 @@ export class SessionRecordingIngester { ) } - await this.sessions[key]?.add(event) + await Promise.allSettled([ + this.sessions[key]?.add(event), + this.overflowDetection?.observe(overflowKey, event.metadata.rawSize, event.metadata.timestamp), + ]) } public async handleEachBatch(messages: Message[], heartbeat: () => void): Promise { diff --git a/plugin-server/src/main/ingestion-queues/session-recording/types.ts b/plugin-server/src/main/ingestion-queues/session-recording/types.ts index 254e3f0897ee7..d61dadda9279e 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/types.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/types.ts @@ -6,6 +6,7 @@ export type IncomingRecordingMessage = { metadata: { topic: string partition: number + rawSize: number lowOffset: number highOffset: number timestamp: number diff --git a/plugin-server/src/main/ingestion-queues/session-recording/utils.ts b/plugin-server/src/main/ingestion-queues/session-recording/utils.ts index 4b4345d43b48d..2c5637726743e 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/utils.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/utils.ts @@ -225,6 +225,7 @@ export const parseKafkaMessage = async ( metadata: { partition: message.partition, topic: message.topic, + rawSize: message.size, lowOffset: message.offset, highOffset: message.offset, timestamp: message.timestamp, @@ -254,37 +255,34 @@ export const reduceRecordingMessages = (messages: IncomingRecordingMessage[]): I const reducedMessages: Record = {} for (const message of messages) { - const clonedMessage = cloneObject(message) - const key = `${clonedMessage.team_id}-${clonedMessage.session_id}` + const key = `${message.team_id}-${message.session_id}` if (!reducedMessages[key]) { - reducedMessages[key] = clonedMessage + reducedMessages[key] = cloneObject(message) } else { const existingMessage = reducedMessages[key] - for (const [windowId, events] of Object.entries(clonedMessage.eventsByWindowId)) { + for (const [windowId, events] of Object.entries(message.eventsByWindowId)) { if (existingMessage.eventsByWindowId[windowId]) { existingMessage.eventsByWindowId[windowId].push(...events) } else { existingMessage.eventsByWindowId[windowId] = events } } + existingMessage.metadata.rawSize += 
message.metadata.rawSize // Update the events ranges existingMessage.metadata.lowOffset = Math.min( existingMessage.metadata.lowOffset, - clonedMessage.metadata.lowOffset + message.metadata.lowOffset ) existingMessage.metadata.highOffset = Math.max( existingMessage.metadata.highOffset, - clonedMessage.metadata.highOffset + message.metadata.highOffset ) // Update the events ranges - existingMessage.eventsRange.start = Math.min( - existingMessage.eventsRange.start, - clonedMessage.eventsRange.start - ) - existingMessage.eventsRange.end = Math.max(existingMessage.eventsRange.end, clonedMessage.eventsRange.end) + existingMessage.eventsRange.start = Math.min(existingMessage.eventsRange.start, message.eventsRange.start) + existingMessage.eventsRange.end = Math.max(existingMessage.eventsRange.end, message.eventsRange.end) } } diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts index f3a4362db6f83..8c910e1857b06 100644 --- a/plugin-server/src/main/pluginsServer.ts +++ b/plugin-server/src/main/pluginsServer.ts @@ -18,7 +18,7 @@ import { cancelAllScheduledJobs } from '../utils/node-schedule' import { PeriodicTask } from '../utils/periodic-task' import { PubSub } from '../utils/pubsub' import { status } from '../utils/status' -import { delay } from '../utils/utils' +import { createRedisClient, delay } from '../utils/utils' import { AppMetrics } from '../worker/ingestion/app-metrics' import { OrganizationManager } from '../worker/ingestion/organization-manager' import { DeferredPersonOverrideWorker, FlatPersonOverrideWriter } from '../worker/ingestion/person-state' @@ -243,6 +243,12 @@ export async function startPluginsServer( // be super lightweight and ideally not do any IO. const healthChecks: { [service: string]: () => Promise<boolean> | boolean } = {} + // Create a dedicated single-connection redis client to this Redis: it's not relevant for hobby + // deploys, and cloud deploys don't have concurrent uses. We should abstract multi-Redis into a router util. + const captureRedis = serverConfig.CAPTURE_CONFIG_REDIS_HOST + ? await createRedisClient(serverConfig.CAPTURE_CONFIG_REDIS_HOST) + : undefined + try { // Based on the mode the plugin server was started, we start a number of // different services. Mostly this is reasonably obvious from the name.
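For context, the plugin server only writes the overflow zset; capture is expected to read it when routing replay traffic. That reader is not part of this diff, so the following is purely a hypothetical sketch of the consuming side:

```ts
import { Redis } from 'ioredis'

// Hypothetical capture-side check (not in this PR): a session stays routed
// to overflow while its score, an expiration timestamp in seconds, is still
// in the future.
async function isSessionInOverflow(redis: Redis, sessionKey: string): Promise<boolean> {
    const score = await redis.zscore('@posthog/capture-overflow/replay', sessionKey)
    return score !== null && Number(score) > Date.now() / 1000
}
```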
@@ -440,7 +446,7 @@ export async function startPluginsServer( throw new Error("Can't start session recording blob ingestion without object storage") } // NOTE: We intentionally pass in the original serverConfig as the ingester uses both kafkas - const ingester = new SessionRecordingIngester(serverConfig, postgres, s3) + const ingester = new SessionRecordingIngester(serverConfig, postgres, s3, captureRedis) await ingester.start() const batchConsumer = ingester.batchConsumer diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index b8eeb5b296a9e..db9350490bd70 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -207,6 +207,7 @@ export interface PluginsServerConfig { RUSTY_HOOK_URL: string SKIP_UPDATE_EVENT_AND_PROPERTIES_STEP: boolean PIPELINE_STEP_STALLED_LOG_TIMEOUT: number + CAPTURE_CONFIG_REDIS_HOST: string | null // Redis cluster to use to coordinate with capture (overflow, routing) // dump profiles to disk, covering the first N seconds of runtime STARTUP_PROFILE_DURATION_SECONDS: number @@ -230,6 +231,10 @@ export interface PluginsServerConfig { // a single partition which will output many more log messages to the console // useful when that partition is lagging unexpectedly SESSION_RECORDING_DEBUG_PARTITION: string | undefined + // overflow detection, updating Redis for capture to move the traffic away + SESSION_RECORDING_OVERFLOW_ENABLED: boolean + SESSION_RECORDING_OVERFLOW_BUCKET_CAPACITY: number + SESSION_RECORDING_OVERFLOW_BUCKET_REPLENISH_RATE: number // Dedicated infra values SESSION_RECORDING_KAFKA_HOSTS: string | undefined @@ -864,7 +869,6 @@ export interface EventPropertyFilter extends PropertyFilterWithOperator { /** Sync with posthog/frontend/src/types.ts */ export interface PersonPropertyFilter extends PropertyFilterWithOperator { type: 'person' - table?: string } /** Sync with posthog/frontend/src/types.ts */ diff --git a/plugin-server/src/utils/db/db.ts b/plugin-server/src/utils/db/db.ts index 2baa10671a91e..c7b6ce86a895a 100644 --- a/plugin-server/src/utils/db/db.ts +++ b/plugin-server/src/utils/db/db.ts @@ -707,7 +707,7 @@ export class DB { }) } - await this.kafkaProducer.queueMessages(kafkaMessages) + await this.kafkaProducer.queueMessages({ kafkaMessages, waitForAck: true }) return person } @@ -759,7 +759,7 @@ export class DB { if (tx) { kafkaMessages.push(message) } else { - await this.kafkaProducer.queueMessage(message) + await this.kafkaProducer.queueMessage({ kafkaMessage: message, waitForAck: true }) } status.debug( @@ -829,7 +829,7 @@ export class DB { public async addDistinctId(person: Person, distinctId: string): Promise { const kafkaMessages = await this.addDistinctIdPooled(person, distinctId) if (kafkaMessages.length) { - await this.kafkaProducer.queueMessages(kafkaMessages) + await this.kafkaProducer.queueMessages({ kafkaMessages, waitForAck: true }) } } @@ -1072,15 +1072,15 @@ export class DB { pluginLogEntryCounter.labels({ plugin_id: String(pluginConfig.plugin_id), source }).inc() try { - await this.kafkaProducer.queueSingleJsonMessage( - KAFKA_PLUGIN_LOG_ENTRIES, - parsedEntry.id, - parsedEntry, + await this.kafkaProducer.queueSingleJsonMessage({ + topic: KAFKA_PLUGIN_LOG_ENTRIES, + key: parsedEntry.id, + object: parsedEntry, // For logs, we relax our durability requirements a little and // do not wait for acks that Kafka has persisted the message to // disk. 
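The durability trade-off in that comment can be summarised as a sketch against the refactored `queueSingleJsonMessage` signature (defined in kafka-producer-wrapper.ts below; the import path and variable names here are illustrative):

```ts
import { KafkaProducerWrapper } from './kafka-producer-wrapper'

// Sketch: plugin log entries tolerate rare loss, so they skip the ACK wait,
// while state-changing messages elsewhere in this diff pass waitForAck: true.
async function queueLogEntry(producer: KafkaProducerWrapper, topic: string, id: string, entry: Record<string, any>) {
    await producer.queueSingleJsonMessage({
        topic,
        key: id,
        object: entry,
        waitForAck: false, // fire-and-forget for logs
    })
}
```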
- false - ) + waitForAck: false, + }) } catch (e) { captureException(e, { tags: { team_id: entry.pluginConfig.team_id } }) console.error('Failed to produce message', e, parsedEntry) @@ -1409,19 +1409,22 @@ export class DB { version: number ): Promise { await this.kafkaProducer.queueMessage({ - topic: KAFKA_GROUPS, - messages: [ - { - value: JSON.stringify({ - group_type_index: groupTypeIndex, - group_key: groupKey, - team_id: teamId, - group_properties: JSON.stringify(properties), - created_at: castTimestampOrNow(createdAt, TimestampFormat.ClickHouseSecondPrecision), - version, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_GROUPS, + messages: [ + { + value: JSON.stringify({ + group_type_index: groupTypeIndex, + group_key: groupKey, + team_id: teamId, + group_properties: JSON.stringify(properties), + created_at: castTimestampOrNow(createdAt, TimestampFormat.ClickHouseSecondPrecision), + version, + }), + }, + ], + }, + waitForAck: true, }) } diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts index 0a50533a1dbdb..098a44e7d4aa6 100644 --- a/plugin-server/src/utils/db/hub.ts +++ b/plugin-server/src/utils/db/hub.ts @@ -159,13 +159,16 @@ export async function createHub( // chained, and if we do not manage to produce then the chain will be // broken. await kafkaProducer.queueMessage({ - topic: KAFKA_JOBS, - messages: [ - { - value: Buffer.from(JSON.stringify(job)), - key: Buffer.from(job.pluginConfigTeam.toString()), - }, - ], + kafkaMessage: { + topic: KAFKA_JOBS, + messages: [ + { + value: Buffer.from(JSON.stringify(job)), + key: Buffer.from(job.pluginConfigTeam.toString()), + }, + ], + }, + waitForAck: true, }) } diff --git a/plugin-server/src/utils/db/kafka-producer-wrapper.ts b/plugin-server/src/utils/db/kafka-producer-wrapper.ts index 8f7cef4c06b30..0ea1e01c5099f 100644 --- a/plugin-server/src/utils/db/kafka-producer-wrapper.ts +++ b/plugin-server/src/utils/db/kafka-producer-wrapper.ts @@ -35,7 +35,7 @@ export class KafkaProducerWrapper { key: MessageKey topic: string headers?: MessageHeader[] - waitForAck?: boolean + waitForAck: boolean }): Promise { try { kafkaProducerMessagesQueuedCounter.labels({ topic_name: topic }).inc() @@ -66,7 +66,7 @@ export class KafkaProducerWrapper { } } - async queueMessage(kafkaMessage: ProducerRecord, waitForAck?: boolean) { + async queueMessage({ kafkaMessage, waitForAck }: { kafkaMessage: ProducerRecord; waitForAck: boolean }) { return await Promise.all( kafkaMessage.messages.map((message) => this.produce({ @@ -80,23 +80,34 @@ export class KafkaProducerWrapper { ) } - async queueMessages(kafkaMessages: ProducerRecord[], waitForAck?: boolean): Promise { - await Promise.all(kafkaMessages.map((message) => this.queueMessage(message, waitForAck))) + async queueMessages({ + kafkaMessages, + waitForAck, + }: { + kafkaMessages: ProducerRecord[] + waitForAck: boolean + }): Promise { + await Promise.all(kafkaMessages.map((kafkaMessage) => this.queueMessage({ kafkaMessage, waitForAck }))) } - async queueSingleJsonMessage( - topic: string, - key: Message['key'], - object: Record, - waitForAck?: boolean - ): Promise { - await this.queueMessage( - { + async queueSingleJsonMessage({ + topic, + key, + object, + waitForAck, + }: { + topic: string + key: Message['key'] + object: Record + waitForAck: boolean + }): Promise { + await this.queueMessage({ + kafkaMessage: { topic, messages: [{ key, value: JSON.stringify(object) }], }, - waitForAck - ) + waitForAck, + }) } public async flush() { diff --git a/plugin-server/src/utils/utils.ts 
b/plugin-server/src/utils/utils.ts index 87fbe9e9e0640..a49a5161b4b3a 100644 --- a/plugin-server/src/utils/utils.ts +++ b/plugin-server/src/utils/utils.ts @@ -339,8 +339,12 @@ export async function createRedis(serverConfig: PluginsServerConfig): Promise { + const redis = new Redis(url, { + ...options, maxRetriesPerRequest: -1, }) let errorCounter = 0 diff --git a/plugin-server/src/worker/ingestion/app-metrics.ts b/plugin-server/src/worker/ingestion/app-metrics.ts index d8f52a7401150..5e0a83c92ae31 100644 --- a/plugin-server/src/worker/ingestion/app-metrics.ts +++ b/plugin-server/src/worker/ingestion/app-metrics.ts @@ -183,8 +183,11 @@ export class AppMetrics { })) await this.kafkaProducer.queueMessage({ - topic: KAFKA_APP_METRICS, - messages: kafkaMessages, + kafkaMessage: { + topic: KAFKA_APP_METRICS, + messages: kafkaMessages, + }, + waitForAck: true, }) status.debug('🚽', `Finished flushing app metrics, took ${Date.now() - startTime}ms`) } diff --git a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts index 7d71548381b16..6ae2248513073 100644 --- a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts +++ b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts @@ -25,7 +25,7 @@ import { processPersonsStep } from './processPersonsStep' export type EventPipelineResult = { // Promises that the batch handler should await on before committing offsets, // contains the Kafka producer ACKs, to avoid blocking after every message. - promises?: Array> + ackPromises?: Array> // Only used in tests // TODO: update to test for side-effects of running the pipeline rather than // this return type. @@ -78,14 +78,14 @@ export class EventPipelineRunner { drop_cause: 'disallowed', }) .inc() - return this.registerLastStep('eventDisallowedStep', null, [event]) + return this.registerLastStep('eventDisallowedStep', [event]) } let result: EventPipelineResult const eventWithTeam = await this.runStep(populateTeamDataStep, [this, event], event.team_id || -1) if (eventWithTeam != null) { result = await this.runEventPipelineSteps(eventWithTeam) } else { - result = this.registerLastStep('populateTeamDataStep', null, [event]) + result = this.registerLastStep('populateTeamDataStep', [event]) } eventProcessedAndIngestedCounter.inc() return result @@ -120,7 +120,7 @@ export class EventPipelineRunner { const processedEvent = await this.runStep(pluginsProcessEventStep, [this, event], event.team_id) if (processedEvent == null) { - return this.registerLastStep('pluginsProcessEventStep', event.team_id, [event]) + return this.registerLastStep('pluginsProcessEventStep', [event]) } const [normalizedEvent, person] = await this.runStep(processPersonsStep, [this, processedEvent], event.team_id) @@ -132,17 +132,12 @@ export class EventPipelineRunner { event.team_id ) - return this.registerLastStep('createEventStep', event.team_id, [rawClickhouseEvent, person], [eventAck]) + return this.registerLastStep('createEventStep', [rawClickhouseEvent, person], [eventAck]) } - registerLastStep( - stepName: string, - teamId: number | null, - args: any[], - promises?: Array> - ): EventPipelineResult { + registerLastStep(stepName: string, args: any[], ackPromises?: Array>): EventPipelineResult { pipelineLastStepCounter.labels(stepName).inc() - return { promises: promises, lastStep: stepName, args } + return { ackPromises, lastStep: stepName, args } } protected runStep any>( @@ -218,7 +213,7 @@ export class EventPipelineRunner { teamId, 
`plugin_server_ingest_event:${currentStepName}` ) - await this.hub.db.kafkaProducer!.queueMessage(message) + await this.hub.db.kafkaProducer!.queueMessage({ kafkaMessage: message, waitForAck: true }) } catch (dlqError) { status.info('🔔', `Errored trying to add event to dead letter queue. Error: ${dlqError}`) Sentry.captureException(dlqError, { diff --git a/plugin-server/src/worker/ingestion/person-state.ts b/plugin-server/src/worker/ingestion/person-state.ts index b2356f3652662..525bbbf84c910 100644 --- a/plugin-server/src/worker/ingestion/person-state.ts +++ b/plugin-server/src/worker/ingestion/person-state.ts @@ -453,7 +453,7 @@ export class PersonState { olderCreatedAt, // Keep the oldest created_at (i.e. the first time we've seen either person) properties ) - await this.db.kafkaProducer.queueMessages(kafkaMessages) + await this.db.kafkaProducer.queueMessages({ kafkaMessages, waitForAck: true }) return mergedPerson } @@ -767,7 +767,7 @@ export class DeferredPersonOverrideWorker { // Postgres for some reason -- the same row state should be // generated each call, and the receiving ReplacingMergeTree will // ensure we keep only the latest version after all writes settle.) - await this.kafkaProducer.queueMessages(messages, true) + await this.kafkaProducer.queueMessages({ kafkaMessages: messages, waitForAck: true }) return rows.length } diff --git a/plugin-server/src/worker/ingestion/utils.ts b/plugin-server/src/worker/ingestion/utils.ts index c52ef4ebba78e..9488ee759581b 100644 --- a/plugin-server/src/worker/ingestion/utils.ts +++ b/plugin-server/src/worker/ingestion/utils.ts @@ -80,18 +80,21 @@ export async function captureIngestionWarning( const limiter_key = `${teamId}:${type}:${debounce?.key || ''}` if (!!debounce?.alwaysSend || IngestionWarningLimiter.consume(limiter_key, 1)) { await kafkaProducer.queueMessage({ - topic: KAFKA_INGESTION_WARNINGS, - messages: [ - { - value: JSON.stringify({ - team_id: teamId, - type: type, - source: 'plugin-server', - details: JSON.stringify(details), - timestamp: castTimestampOrNow(null, TimestampFormat.ClickHouse), - }), - }, - ], + kafkaMessage: { + topic: KAFKA_INGESTION_WARNINGS, + messages: [ + { + value: JSON.stringify({ + team_id: teamId, + type: type, + source: 'plugin-server', + details: JSON.stringify(details), + timestamp: castTimestampOrNow(null, TimestampFormat.ClickHouse), + }), + }, + ], + }, + waitForAck: true, }) } else { return Promise.resolve() diff --git a/plugin-server/src/worker/vm/extensions/posthog.ts b/plugin-server/src/worker/vm/extensions/posthog.ts index c7a0a7124c50d..34e9cb2befd1c 100644 --- a/plugin-server/src/worker/vm/extensions/posthog.ts +++ b/plugin-server/src/worker/vm/extensions/posthog.ts @@ -29,22 +29,25 @@ async function queueEvent(hub: Hub, pluginConfig: PluginConfig, data: InternalDa const partitionKey = partitionKeyHash.digest('hex') await hub.kafkaProducer.queueMessage({ - topic: hub.KAFKA_CONSUMPTION_TOPIC!, - messages: [ - { - key: partitionKey, - value: JSON.stringify({ - distinct_id: data.distinct_id, - ip: '', - site_url: '', - data: JSON.stringify(data), - team_id: pluginConfig.team_id, - now: data.timestamp, - sent_at: data.timestamp, - uuid: data.uuid, - } as RawEventMessage), - }, - ], + kafkaMessage: { + topic: hub.KAFKA_CONSUMPTION_TOPIC!, + messages: [ + { + key: partitionKey, + value: JSON.stringify({ + distinct_id: data.distinct_id, + ip: '', + site_url: '', + data: JSON.stringify(data), + team_id: pluginConfig.team_id, + now: data.timestamp, + sent_at: data.timestamp, + uuid: data.uuid, + } 
as RawEventMessage), + }, + ], + }, + waitForAck: true, }) } diff --git a/plugin-server/tests/main/db.test.ts b/plugin-server/tests/main/db.test.ts index 2adc7567c8a5d..14448f196f9be 100644 --- a/plugin-server/tests/main/db.test.ts +++ b/plugin-server/tests/main/db.test.ts @@ -367,9 +367,10 @@ describe('DB', () => { expect(updatedPerson.properties).toEqual({ c: 'aaa' }) // verify correct Kafka message was sent - expect(db.kafkaProducer!.queueMessage).toHaveBeenLastCalledWith( - generateKafkaPersonUpdateMessage(updatedPerson) - ) + expect(db.kafkaProducer!.queueMessage).toHaveBeenLastCalledWith({ + kafkaMessage: generateKafkaPersonUpdateMessage(updatedPerson), + waitForAck: true, + }) }) }) @@ -416,7 +417,7 @@ describe('DB', () => { await delayUntilEventIngested(fetchPersonsRows, 2) const kafkaMessages = await db.deletePerson(person) - await db.kafkaProducer.queueMessages(kafkaMessages) + await db.kafkaProducer.queueMessages({ kafkaMessages, waitForAck: true }) await db.kafkaProducer.flush() const persons = await delayUntilEventIngested(fetchPersonsRows, 3) diff --git a/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.test.ts b/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.test.ts index 851bb23e2ac14..774475a5b34aa 100644 --- a/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.test.ts +++ b/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.test.ts @@ -97,20 +97,23 @@ describe('eachBatchParallelIngestion with overflow consume', () => { expect(queue.pluginsServer.teamManager.getTeamForEvent).toHaveBeenCalledTimes(1) expect(consume).toHaveBeenCalledWith('1:ingestion_capacity_overflow:id', 1) expect(mockQueueMessage).toHaveBeenCalledWith({ - topic: 'clickhouse_ingestion_warnings_test', - messages: [ - { - value: JSON.stringify({ - team_id: 1, - type: 'ingestion_capacity_overflow', - source: 'plugin-server', - details: JSON.stringify({ - overflowDistinctId: 'id', + kafkaMessage: { + topic: 'clickhouse_ingestion_warnings_test', + messages: [ + { + value: JSON.stringify({ + team_id: 1, + type: 'ingestion_capacity_overflow', + source: 'plugin-server', + details: JSON.stringify({ + overflowDistinctId: 'id', + }), + timestamp: castTimestampOrNow(null, TimestampFormat.ClickHouse), }), - timestamp: castTimestampOrNow(null, TimestampFormat.ClickHouse), - }), - }, - ], + }, + ], + }, + waitForAck: true, }) // Event is processed diff --git a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts index 667c278d243f1..b0e61e62fdd66 100644 --- a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts +++ b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts @@ -408,7 +408,7 @@ describe('eachBatchX', () => { const batch = createBatch(captureEndpointEvent) runEventPipeline.mockImplementationOnce(() => Promise.resolve({ - promises: [Promise.resolve(), Promise.reject('deferred nopes out')], + ackPromises: [Promise.resolve(), Promise.reject('deferred nopes out')], }) ) const tokenBlockList = buildStringMatcher('another_token,more_token', false) diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap b/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap index 9962eb544bc6d..87ca515b22bd6 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap 
+++ b/plugin-server/tests/main/ingestion-queues/session-recording/__snapshots__/utils.test.ts.snap @@ -33,6 +33,7 @@ Array [ "highOffset": 3, "lowOffset": 1, "partition": 1, + "rawSize": 12, "timestamp": 1, "topic": "the_topic", }, @@ -59,6 +60,7 @@ Array [ "highOffset": 4, "lowOffset": 4, "partition": 1, + "rawSize": 30, "timestamp": 4, "topic": "the_topic", }, @@ -85,6 +87,7 @@ Array [ "highOffset": 5, "lowOffset": 5, "partition": 1, + "rawSize": 31, "timestamp": 5, "topic": "the_topic", }, @@ -130,6 +133,7 @@ Object { "highOffset": 1, "lowOffset": 1, "partition": 1, + "rawSize": 42, "timestamp": 1, "topic": "the_topic", }, diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/services/console-log-ingester.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/services/console-log-ingester.test.ts index 42dfb9e55b5c1..6698b40a8ca6a 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/services/console-log-ingester.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/services/console-log-ingester.test.ts @@ -80,6 +80,7 @@ describe('console log ingester', () => { timestamp: '1970-01-01 00:00:00.000', }) ), + waitForAck: true, }, ], ]) @@ -124,6 +125,7 @@ describe('console log ingester', () => { timestamp: '1970-01-01 00:00:00.000', }) ), + waitForAck: true, }, ], [ @@ -142,6 +144,7 @@ describe('console log ingester', () => { timestamp: '1970-01-01 00:00:00.000', }) ), + waitForAck: true, }, ], ]) @@ -181,6 +184,7 @@ describe('console log ingester', () => { timestamp: '1970-01-01 00:00:00.000', }) ), + waitForAck: true, }, ], ]) diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/services/overflow-manager.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/services/overflow-manager.test.ts new file mode 100644 index 0000000000000..875a7157dcf12 --- /dev/null +++ b/plugin-server/tests/main/ingestion-queues/session-recording/services/overflow-manager.test.ts @@ -0,0 +1,104 @@ +import { Redis } from 'ioredis' + +import { OverflowManager } from '../../../../../src/main/ingestion-queues/session-recording/services/overflow-manager' +import { Hub } from '../../../../../src/types' +import { createHub } from '../../../../../src/utils/db/hub' + +jest.mock('../../../../../src/utils/status') +jest.mock('../../../../../src/kafka/producer') + +const CAPTURE_OVERFLOW_REDIS_KEY = '@posthog/capture-overflow/replay' + +describe('overflow manager', () => { + let hub: Hub + let closeHub: () => Promise + let redis: Redis + let overflowManager: OverflowManager + + beforeAll(async () => { + ;[hub, closeHub] = await createHub() + redis = await hub.redisPool.acquire() + }) + beforeEach(async () => { + await redis.del(CAPTURE_OVERFLOW_REDIS_KEY) + overflowManager = new OverflowManager(10, 1, 3600, CAPTURE_OVERFLOW_REDIS_KEY, redis) + }) + + afterAll(async () => { + await redis.flushdb() + await hub.redisPool.release(redis) + await closeHub?.() + }) + + test('it does not trigger if several keys are under threshold', async () => { + await overflowManager.observe('key1', 8) + await overflowManager.observe('key2', 8) + await overflowManager.observe('key3', 8) + + expect(await redis.exists(CAPTURE_OVERFLOW_REDIS_KEY)).toEqual(0) + }) + + test('it triggers for hot keys', async () => { + await overflowManager.observe('key1', 4) + await overflowManager.observe('key1', 4) + await overflowManager.observe('key2', 8) + expect(await redis.exists(CAPTURE_OVERFLOW_REDIS_KEY)).toEqual(0) + + await 
overflowManager.observe('key1', 4) + expect(await redis.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1)).toEqual(['key1']) + }) + + test('it does not trigger twice when cooling down', async () => { + await overflowManager.observe('key1', 11) + expect(await redis.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1)).toEqual(['key1']) + + // Delete the key to confirm that OverflowManager is in cooldown for key1 and does not re-create it + await redis.del(CAPTURE_OVERFLOW_REDIS_KEY) + await overflowManager.observe('key1', 11) + expect(await redis.exists(CAPTURE_OVERFLOW_REDIS_KEY)).toEqual(0) + + // But it triggers for key2 + await overflowManager.observe('key2', 11) + expect(await redis.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1)).toEqual(['key2']) + }) + + test('it does not update existing values', async () => { + const timestamp = 1711280335000 + const oldTimestamp = timestamp / 1000 - 200 + await redis.zadd(CAPTURE_OVERFLOW_REDIS_KEY, oldTimestamp, 'key1') + + await overflowManager.observe('key1', 11, timestamp) + expect(await redis.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1, 'WITHSCORES')).toEqual([ + 'key1', + oldTimestamp.toString(), + ]) + }) + + test('it sets the expected expiration on new values', async () => { + const timestamp = 1711280335000 + const oldTimestamp = timestamp / 1000 - 200 + await redis.zadd(CAPTURE_OVERFLOW_REDIS_KEY, oldTimestamp, 'key1') + + const expectedExpiration = timestamp / 1000 + 3600 + await overflowManager.observe('key2', 11, timestamp) + expect(await redis.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1, 'WITHSCORES')).toEqual([ + 'key1', + oldTimestamp.toString(), + 'key2', + expectedExpiration.toString(), + ]) + }) + + test('it removes old values when adding one', async () => { + const timestamp = 1711280335000 + const oldTimestamp = timestamp / 1000 - 8000 + await redis.zadd(CAPTURE_OVERFLOW_REDIS_KEY, oldTimestamp, 'key1') + + const expectedExpiration = timestamp / 1000 + 3600 + await overflowManager.observe('key2', 11, timestamp) + expect(await redis.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1, 'WITHSCORES')).toEqual([ + 'key2', + expectedExpiration.toString(), + ]) + }) +}) diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer.test.ts index 18dc39c7e5b2e..730fe28f481ac 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer.test.ts @@ -1,6 +1,7 @@ import { randomUUID } from 'crypto' +import { Redis } from 'ioredis' import { mkdirSync, readdirSync, rmSync } from 'node:fs' -import { TopicPartition, TopicPartitionOffset } from 'node-rdkafka' +import { Message, TopicPartition, TopicPartitionOffset } from 'node-rdkafka' import path from 'path' import { waitForExpect } from '../../../../functional_tests/expectations' @@ -12,10 +13,14 @@ import { getFirstTeam, resetTestDatabase } from '../../../helpers/sql' import { createIncomingRecordingMessage, createKafkaMessage, createTP } from './fixtures' const SESSION_RECORDING_REDIS_PREFIX = '@posthog-tests/replay/' +const CAPTURE_OVERFLOW_REDIS_KEY = '@posthog/capture-overflow/replay' const config: PluginsServerConfig = { ...defaultConfig, SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION: true, + SESSION_RECORDING_OVERFLOW_ENABLED: true, + SESSION_RECORDING_OVERFLOW_BUCKET_CAPACITY: 1_000_000, // 1MB burst +
SESSION_RECORDING_OVERFLOW_BUCKET_REPLENISH_RATE: 1_000, // 1kB/s replenish SESSION_RECORDING_REDIS_PREFIX, } @@ -68,6 +73,7 @@ describe('ingester', () => { let teamToken = '' let mockOffsets: Record = {} let mockCommittedOffsets: Record = {} + let redisConn: Redis beforeAll(async () => { mkdirSync(path.join(config.SESSION_RECORDING_LOCAL_DIRECTORY, 'session-buffer-files'), { recursive: true }) @@ -103,9 +109,12 @@ describe('ingester', () => { ;[hub, closeHub] = await createHub() team = await getFirstTeam(hub) teamToken = team.api_token + redisConn = await hub.redisPool.acquire(0) + await redisConn.del(CAPTURE_OVERFLOW_REDIS_KEY) + await deleteKeysWithPrefix(hub) - ingester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage) + ingester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage, redisConn) await ingester.start() mockConsumer.assignments.mockImplementation(() => [createTP(0), createTP(1)]) @@ -113,6 +122,8 @@ describe('ingester', () => { afterEach(async () => { jest.setTimeout(10000) + await redisConn.del(CAPTURE_OVERFLOW_REDIS_KEY) + await hub.redisPool.release(redisConn) await deleteKeysWithPrefix(hub) await ingester.stop() await closeHub() @@ -128,7 +139,7 @@ describe('ingester', () => { await ingester.commitAllOffsets(ingester.partitionMetrics, Object.values(ingester.sessions)) } - const createMessage = (session_id: string, partition = 1) => { + const createMessage = (session_id: string, partition = 1, messageOverrides: Partial = {}) => { mockOffsets[partition] = mockOffsets[partition] ?? 0 mockOffsets[partition]++ @@ -137,6 +148,7 @@ describe('ingester', () => { { partition, offset: mockOffsets[partition], + ...messageOverrides, }, { $session_id: session_id, @@ -150,7 +162,7 @@ describe('ingester', () => { KAFKA_HOSTS: 'localhost:9092', } satisfies Partial as PluginsServerConfig - const ingester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage) + const ingester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage, undefined) expect(ingester['debugPartition']).toEqual(103) }) @@ -159,7 +171,7 @@ describe('ingester', () => { KAFKA_HOSTS: 'localhost:9092', } satisfies Partial as PluginsServerConfig - const ingester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage) + const ingester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage, undefined) expect(ingester['debugPartition']).toBeUndefined() }) @@ -424,7 +436,7 @@ describe('ingester', () => { jest.setTimeout(5000) // Increased to cover lock delay beforeEach(async () => { - otherIngester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage) + otherIngester = new SessionRecordingIngester(config, hub.postgres, hub.objectStorage, undefined) await otherIngester.start() }) @@ -561,6 +573,62 @@ describe('ingester', () => { }) }) + describe('overflow detection', () => { + const ingestBurst = async (count: number, size_bytes: number, timestamp_delta: number) => { + const first_timestamp = Date.now() - 2 * timestamp_delta * count + + // Because messages from the same batch are reduced into a single one, we call handleEachBatch + // with individual messages to have better control on the message timestamp + for (let n = 0; n < count; n++) { + const message = createMessage('sid1', 1, { + size: size_bytes, + timestamp: first_timestamp + n * timestamp_delta, + }) + await ingester.handleEachBatch([message], noop) + } + } + + it('should not trigger overflow if under threshold', async () => { + await 
ingestBurst(10, 100, 10) + expect(await redisConn.exists(CAPTURE_OVERFLOW_REDIS_KEY)).toEqual(0) + }) + + it('should trigger overflow during bursts', async () => { + const expected_expiration = Math.floor(Date.now() / 1000) + 24 * 3600 // 24 hours from now, in seconds + await ingestBurst(10, 150_000, 10) + + expect(await redisConn.exists(CAPTURE_OVERFLOW_REDIS_KEY)).toEqual(1) + expect( + await redisConn.zrangebyscore( + CAPTURE_OVERFLOW_REDIS_KEY, + expected_expiration - 10, + expected_expiration + 10 + ) + ).toEqual([`${team.id}:sid1`]) + }) + + it('should not trigger overflow during backfills', async () => { + await ingestBurst(10, 150_000, 150_000) + expect(await redisConn.exists(CAPTURE_OVERFLOW_REDIS_KEY)).toEqual(0) + }) + + it('should cleanup older entries when triggering', async () => { + await redisConn.zadd(CAPTURE_OVERFLOW_REDIS_KEY, 'NX', Date.now() / 1000 - 7000, 'expired:session') + await redisConn.zadd(CAPTURE_OVERFLOW_REDIS_KEY, 'NX', Date.now() / 1000 - 1000, 'not_expired:session') + expect(await redisConn.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1)).toEqual([ + 'expired:session', + 'not_expired:session', + ]) + + await ingestBurst(10, 150_000, 10) + expect(await redisConn.exists(CAPTURE_OVERFLOW_REDIS_KEY)).toEqual(1) + expect(await redisConn.zrange(CAPTURE_OVERFLOW_REDIS_KEY, 0, -1)).toEqual([ + 'not_expired:session', + `${team.id}:sid1`, + ]) + }) + }) + describe('lag reporting', () => { it('should return the latest offsets', async () => { mockConsumer.queryWatermarkOffsets.mockImplementation((_topic, partition, _timeout, cb) => { diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/utils.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/utils.test.ts index b8e6dc59284e7..c5a3851486d93 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/utils.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/utils.test.ts @@ -57,7 +57,7 @@ describe('session-recording utils', () => { }) ), timestamp: 1, - size: 1, + size: 42, topic: 'the_topic', offset: 1, partition: 1, @@ -257,7 +257,7 @@ describe('session-recording utils', () => { distinct_id: '1', eventsRange: { start: 1, end: 1 }, eventsByWindowId: { window_1: [{ timestamp: 1, type: 1, data: {} }] }, - metadata: { lowOffset: 1, highOffset: 1, partition: 1, timestamp: 1, topic: 'the_topic' }, + metadata: { lowOffset: 1, highOffset: 1, partition: 1, timestamp: 1, topic: 'the_topic', rawSize: 5 }, session_id: '1', team_id: 1, snapshot_source: null, @@ -266,7 +266,7 @@ describe('session-recording utils', () => { distinct_id: '1', eventsRange: { start: 2, end: 2 }, eventsByWindowId: { window_1: [{ timestamp: 2, type: 2, data: {} }] }, - metadata: { lowOffset: 2, highOffset: 2, partition: 1, timestamp: 2, topic: 'the_topic' }, + metadata: { lowOffset: 2, highOffset: 2, partition: 1, timestamp: 2, topic: 'the_topic', rawSize: 4 }, session_id: '1', team_id: 1, snapshot_source: null, @@ -276,7 +276,7 @@ describe('session-recording utils', () => { distinct_id: '1', eventsRange: { start: 3, end: 3 }, eventsByWindowId: { window_2: [{ timestamp: 3, type: 3, data: {} }] }, - metadata: { lowOffset: 3, highOffset: 3, partition: 1, timestamp: 3, topic: 'the_topic' }, + metadata: { lowOffset: 3, highOffset: 3, partition: 1, timestamp: 3, topic: 'the_topic', rawSize: 3 }, session_id: '1', team_id: 1, snapshot_source: null, @@ -286,7 +286,7 @@ describe('session-recording utils', () => { distinct_id: '1', eventsRange: { start: 4, end: 4 }, eventsByWindowId: { window_1: [{ 
timestamp: 4, type: 4, data: {} }] }, - metadata: { lowOffset: 4, highOffset: 4, partition: 1, timestamp: 4, topic: 'the_topic' }, + metadata: { lowOffset: 4, highOffset: 4, partition: 1, timestamp: 4, topic: 'the_topic', rawSize: 30 }, session_id: '1', team_id: 2, snapshot_source: null, @@ -296,7 +296,7 @@ describe('session-recording utils', () => { distinct_id: '1', eventsRange: { start: 5, end: 5 }, eventsByWindowId: { window_1: [{ timestamp: 5, type: 5, data: {} }] }, - metadata: { lowOffset: 5, highOffset: 5, partition: 1, timestamp: 5, topic: 'the_topic' }, + metadata: { lowOffset: 5, highOffset: 5, partition: 1, timestamp: 5, topic: 'the_topic', rawSize: 31 }, session_id: '2', team_id: 1, snapshot_source: null, diff --git a/plugin-server/tests/main/jobs/schedule.test.ts b/plugin-server/tests/main/jobs/schedule.test.ts index 150d171f97d3b..b6b52c892f23d 100644 --- a/plugin-server/tests/main/jobs/schedule.test.ts +++ b/plugin-server/tests/main/jobs/schedule.test.ts @@ -37,120 +37,147 @@ describe('Graphile Worker schedule', () => { } as any) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(1, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '1', - value: JSON.stringify({ - taskType: 'runEveryMinute', - pluginConfigId: 1, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '1', + value: JSON.stringify({ + taskType: 'runEveryMinute', + pluginConfigId: 1, + }), + }, + ], + }, + waitForAck: true, }) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(2, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '2', - value: JSON.stringify({ - taskType: 'runEveryMinute', - pluginConfigId: 2, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '2', + value: JSON.stringify({ + taskType: 'runEveryMinute', + pluginConfigId: 2, + }), + }, + ], + }, + waitForAck: true, }) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(3, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '3', - value: JSON.stringify({ - taskType: 'runEveryMinute', - pluginConfigId: 3, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '3', + value: JSON.stringify({ + taskType: 'runEveryMinute', + pluginConfigId: 3, + }), + }, + ], + }, + waitForAck: true, }) await runScheduledTasks(mockHubWithPluginSchedule, mockPiscina as any, 'runEveryHour', { job: { run_at: new Date() }, } as any) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(4, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '4', - value: JSON.stringify({ - taskType: 'runEveryHour', - pluginConfigId: 4, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '4', + value: JSON.stringify({ + taskType: 'runEveryHour', + pluginConfigId: 4, + }), + }, + ], + }, + waitForAck: true, }) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(5, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '5', - value: JSON.stringify({ - taskType: 'runEveryHour', - pluginConfigId: 5, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '5', + value: JSON.stringify({ + taskType: 'runEveryHour', + pluginConfigId: 5, + }), + }, + ], + }, + waitForAck: true, }) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(6, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - 
key: '6', - value: JSON.stringify({ - taskType: 'runEveryHour', - pluginConfigId: 6, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '6', + value: JSON.stringify({ + taskType: 'runEveryHour', + pluginConfigId: 6, + }), + }, + ], + }, + waitForAck: true, }) await runScheduledTasks(mockHubWithPluginSchedule, mockPiscina as any, 'runEveryDay', { job: { run_at: new Date() }, } as any) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(7, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '7', - value: JSON.stringify({ - taskType: 'runEveryDay', - pluginConfigId: 7, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '7', + value: JSON.stringify({ + taskType: 'runEveryDay', + pluginConfigId: 7, + }), + }, + ], + }, + waitForAck: true, }) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(8, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '8', - value: JSON.stringify({ - taskType: 'runEveryDay', - pluginConfigId: 8, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '8', + value: JSON.stringify({ + taskType: 'runEveryDay', + pluginConfigId: 8, + }), + }, + ], + }, + waitForAck: true, }) expect(mockHubWithPluginSchedule.kafkaProducer.queueMessage).toHaveBeenNthCalledWith(9, { - topic: KAFKA_SCHEDULED_TASKS, - messages: [ - { - key: '9', - value: JSON.stringify({ - taskType: 'runEveryDay', - pluginConfigId: 9, - }), - }, - ], + kafkaMessage: { + topic: KAFKA_SCHEDULED_TASKS, + messages: [ + { + key: '9', + value: JSON.stringify({ + taskType: 'runEveryDay', + pluginConfigId: 9, + }), + }, + ], + }, + waitForAck: true, }) }) }) diff --git a/plugin-server/tests/worker/console.test.ts b/plugin-server/tests/worker/console.test.ts index 18c3480989700..4535f10f6e327 100644 --- a/plugin-server/tests/worker/console.test.ts +++ b/plugin-server/tests/worker/console.test.ts @@ -42,10 +42,10 @@ describe('console extension', () => { await (console[typeMethod](...args) as unknown as Promise) expect(queueSingleJsonMessageSpy).toHaveBeenCalledTimes(1) - expect(queueSingleJsonMessageSpy).toHaveBeenCalledWith( - KAFKA_PLUGIN_LOG_ENTRIES, - expect.any(String), - { + expect(queueSingleJsonMessageSpy).toHaveBeenCalledWith({ + topic: KAFKA_PLUGIN_LOG_ENTRIES, + key: expect.any(String), + object: { source: PluginLogEntrySource.Console, type, id: expect.any(String), @@ -56,8 +56,8 @@ describe('console extension', () => { message: expectedFinalMessage, instance_id: hub.instanceId.toString(), }, - false - ) + waitForAck: false, + }) }) }) }) diff --git a/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap b/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap index 10cf219b7e43b..1894a82b49dbd 100644 --- a/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap +++ b/plugin-server/tests/worker/ingestion/__snapshots__/app-metrics.test.ts.snap @@ -4,12 +4,15 @@ exports[`AppMetrics() flush() flushes queued messages 1`] = ` Array [ Array [ Object { - "messages": Array [ - Object { - "value": "{\\"timestamp\\":\\"1970-01-01 00:16:40.000\\",\\"team_id\\":2,\\"plugin_config_id\\":2,\\"job_id\\":\\"000-000\\",\\"category\\":\\"processEvent\\",\\"successes\\":1,\\"successes_on_retry\\":0,\\"failures\\":0}", - }, - ], - "topic": "clickhouse_app_metrics_test", + "kafkaMessage": Object { + "messages": Array [ + Object { + "value": "{\\"timestamp\\":\\"1970-01-01 
00:16:40.000\\",\\"team_id\\":2,\\"plugin_config_id\\":2,\\"job_id\\":\\"000-000\\",\\"category\\":\\"processEvent\\",\\"successes\\":1,\\"successes_on_retry\\":0,\\"failures\\":0}", + }, + ], + "topic": "clickhouse_app_metrics_test", + }, + "waitForAck": true, }, ], ] diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts index 4bfc79f5e2379..364483f7c09a6 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts +++ b/plugin-server/tests/worker/ingestion/event-pipeline/runner.test.ts @@ -219,7 +219,9 @@ describe('EventPipelineRunner', () => { await runner.runEventPipeline(pipelineEvent) expect(hub.db.kafkaProducer.queueMessage).toHaveBeenCalledTimes(1) - expect(JSON.parse(hub.db.kafkaProducer.queueMessage.mock.calls[0][0].messages[0].value)).toMatchObject({ + expect( + JSON.parse(hub.db.kafkaProducer.queueMessage.mock.calls[0][0].kafkaMessage.messages[0].value) + ).toMatchObject({ team_id: 2, distinct_id: 'my_id', error: 'Event ingestion failed. Error: testError', diff --git a/plugin-server/tests/worker/ingestion/postgres-parity.test.ts b/plugin-server/tests/worker/ingestion/postgres-parity.test.ts index 5cdf1246c53f5..5c764e5809b40 100644 --- a/plugin-server/tests/worker/ingestion/postgres-parity.test.ts +++ b/plugin-server/tests/worker/ingestion/postgres-parity.test.ts @@ -339,7 +339,7 @@ describe('postgres parity', () => { // move distinct ids from person to to anotherPerson const kafkaMessages = await hub.db.moveDistinctIds(person, anotherPerson) - await hub.db!.kafkaProducer!.queueMessages(kafkaMessages) + await hub.db!.kafkaProducer!.queueMessages({ kafkaMessages, waitForAck: true }) await delayUntilEventIngested(() => hub.db.fetchDistinctIdValues(anotherPerson, Database.ClickHouse), 2) // it got added @@ -395,7 +395,7 @@ describe('postgres parity', () => { // delete person await hub.db.postgres.transaction(PostgresUse.COMMON_WRITE, '', async (client) => { const deletePersonMessage = await hub.db.deletePerson(person, client) - await hub.db!.kafkaProducer!.queueMessage(deletePersonMessage[0]) + await hub.db!.kafkaProducer!.queueMessage({ kafkaMessage: deletePersonMessage[0], waitForAck: true }) }) await delayUntilEventIngested(async () => diff --git a/plugin-server/tests/worker/vm.test.ts b/plugin-server/tests/worker/vm.test.ts index 7e3769de61328..5f1f727d4dbeb 100644 --- a/plugin-server/tests/worker/vm.test.ts +++ b/plugin-server/tests/worker/vm.test.ts @@ -689,10 +689,10 @@ describe('vm tests', () => { await vm.methods.processEvent!(event) expect(queueSingleJsonMessageSpy).toHaveBeenCalledTimes(1) - expect(queueSingleJsonMessageSpy).toHaveBeenCalledWith( - KAFKA_PLUGIN_LOG_ENTRIES, - expect.any(String), - { + expect(queueSingleJsonMessageSpy).toHaveBeenCalledWith({ + topic: KAFKA_PLUGIN_LOG_ENTRIES, + key: expect.any(String), + object: { id: expect.any(String), instance_id: hub.instanceId.toString(), message: 'logged event', @@ -703,8 +703,8 @@ describe('vm tests', () => { timestamp: expect.any(String), type: PluginLogEntryType.Log, }, - false - ) + waitForAck: false, + }) }) test('fetch', async () => { @@ -969,8 +969,8 @@ describe('vm tests', () => { expect(response).toBe('haha') expect(queueMessageSpy).toHaveBeenCalledTimes(1) - expect(queueMessageSpy.mock.calls[0][0].topic).toEqual(KAFKA_EVENTS_PLUGIN_INGESTION) - const parsedMessage = JSON.parse(queueMessageSpy.mock.calls[0][0].messages[0].value!.toString()) + 
expect(queueMessageSpy.mock.calls[0][0].kafkaMessage.topic).toEqual(KAFKA_EVENTS_PLUGIN_INGESTION) + const parsedMessage = JSON.parse(queueMessageSpy.mock.calls[0][0].kafkaMessage.messages[0].value!.toString()) expect(JSON.parse(parsedMessage.data)).toMatchObject({ distinct_id: 'plugin-id-60', event: 'my-new-event', @@ -998,8 +998,8 @@ describe('vm tests', () => { expect(response).toBe('haha') expect(queueMessageSpy).toHaveBeenCalledTimes(1) - expect(queueMessageSpy.mock.calls[0][0].topic).toEqual(KAFKA_EVENTS_PLUGIN_INGESTION) - const parsedMessage = JSON.parse(queueMessageSpy.mock.calls[0][0].messages[0].value!.toString()) + expect(queueMessageSpy.mock.calls[0][0].kafkaMessage.topic).toEqual(KAFKA_EVENTS_PLUGIN_INGESTION) + const parsedMessage = JSON.parse(queueMessageSpy.mock.calls[0][0].kafkaMessage.messages[0].value!.toString()) expect(JSON.parse(parsedMessage.data)).toMatchObject({ timestamp: '2020-02-23T02:15:00Z', // taken out of the properties distinct_id: 'plugin-id-60', @@ -1025,8 +1025,8 @@ describe('vm tests', () => { expect(response).toBe('haha') expect(response).toBe('haha') expect(queueMessageSpy).toHaveBeenCalledTimes(1) - expect(queueMessageSpy.mock.calls[0][0].topic).toEqual(KAFKA_EVENTS_PLUGIN_INGESTION) - const parsedMessage = JSON.parse(queueMessageSpy.mock.calls[0][0].messages[0].value!.toString()) + expect(queueMessageSpy.mock.calls[0][0].kafkaMessage.topic).toEqual(KAFKA_EVENTS_PLUGIN_INGESTION) + const parsedMessage = JSON.parse(queueMessageSpy.mock.calls[0][0].kafkaMessage.messages[0].value!.toString()) expect(JSON.parse(parsedMessage.data)).toMatchObject({ distinct_id: 'custom id', event: 'my-new-event', diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5f157ec8b039e..73481ed11db17 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -238,6 +238,9 @@ dependencies: monaco-editor: specifier: ^0.39.0 version: 0.39.0 + natural-orderby: + specifier: ^3.0.2 + version: 3.0.2 papaparse: specifier: ^5.4.1 version: 5.4.1 @@ -251,8 +254,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.116.3 - version: 1.116.3 + specifier: 1.116.6 + version: 1.116.6 posthog-js-lite: specifier: 2.5.0 version: 2.5.0 @@ -6793,7 +6796,7 @@ packages: '@storybook/csf': 0.1.3 '@storybook/global': 5.0.0 '@storybook/types': 7.6.17 - '@types/qs': 6.9.13 + '@types/qs': 6.9.14 dequal: 2.0.3 lodash: 4.17.21 memoizerific: 1.11.3 @@ -8197,8 +8200,8 @@ packages: resolution: {integrity: sha512-bZcOkJ6uWrL0Qb2NAWKa7TBU+mJHPzhx9jjLL1KHF+XpzEcR7EXHvjbHlGtR/IsP1vyPrehuS6XqkmaePy//mg==} dev: false - /@types/qs@6.9.13: - resolution: {integrity: sha512-iLR+1vTTJ3p0QaOUq6ACbY1mzKTODFDT/XedZI8BksOotFmL4ForwDfRQ/DZeuTHR7/2i4lI1D203gdfxuqTlA==} + /@types/qs@6.9.14: + resolution: {integrity: sha512-5khscbd3SwWMhFqylJBLQ0zIu7c1K6Vz0uBIt915BI3zV0q1nfjRQD3RqSBcPaO6PHEF4ov/t9y89fSiyThlPA==} dev: true /@types/query-selector-shadow-dom@1.0.0: @@ -13630,7 +13633,7 @@ packages: hogan.js: 3.0.2 htm: 3.1.1 instantsearch-ui-components: 0.3.0 - preact: 10.20.0 + preact: 10.20.1 qs: 6.9.7 search-insights: 2.13.0 dev: false @@ -15917,6 +15920,11 @@ packages: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} dev: true + /natural-orderby@3.0.2: + resolution: {integrity: sha512-x7ZdOwBxZCEm9MM7+eQCjkrNLrW3rkBKNHVr78zbtqnMGVNlnDi6C/eUEYgxHNrcbu0ymvjzcwIL/6H1iHri9g==} + engines: {node: '>=18'} + dev: false + /needle@3.3.1: resolution: {integrity: 
sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q==} engines: {node: '>= 4.4.x'} @@ -17446,19 +17454,19 @@ packages: resolution: {integrity: sha512-Urvlp0Vu9h3td0BVFWt0QXFJDoOZcaAD83XM9d91NKMKTVPZtfU0ysoxstIf5mw/ce9ZfuMgpWPaagrZI4rmSg==} dev: false - /posthog-js@1.116.3: - resolution: {integrity: sha512-KakGsQ8rS/K/U5Q/tiBrRrFRCgGrR0oI9VSYw9hwNCY00EClwAU3EuykUuQTFdQ1EuYMrZDIMWDD4NW6zgf7wQ==} + /posthog-js@1.116.6: + resolution: {integrity: sha512-rvt8HxzJD4c2B/xsUa4jle8ApdqljeBI2Qqjp4XJMohQf18DXRyM6b96H5/UMs8jxYuZG14Er0h/kEIWeU6Fmw==} dependencies: fflate: 0.4.8 - preact: 10.20.0 + preact: 10.20.1 dev: false /potpack@2.0.0: resolution: {integrity: sha512-Q+/tYsFU9r7xoOJ+y/ZTtdVQwTWfzjbiXBDMM/JKUux3+QPP02iUuIoeBQ+Ot6oEDlC+/PGjB/5A3K7KKb7hcw==} dev: false - /preact@10.20.0: - resolution: {integrity: sha512-wU7iZw2BjsaKDal3pDRDy/HpPB6cuFOnVUCcw9aIPKG98+ZrXx3F+szkos8BVME5bquyKDKvRlOJFG8kMkcAbg==} + /preact@10.20.1: + resolution: {integrity: sha512-JIFjgFg9B2qnOoGiYMVBtrcFxHqn+dNXbq76bVmcaHYJFYR4lW67AOcXgAYQQTDYXDOg/kTZrKPNCdRgJ2UJmw==} dev: false /prelude-ls@1.2.1: diff --git a/posthog/api/dashboards/dashboard.py b/posthog/api/dashboards/dashboard.py index 8524ab8618b4b..100e8745b8db1 100644 --- a/posthog/api/dashboards/dashboard.py +++ b/posthog/api/dashboards/dashboard.py @@ -7,7 +7,6 @@ from django.utils.timezone import now from rest_framework import exceptions, serializers, viewsets from rest_framework.decorators import action -from rest_framework.exceptions import PermissionDenied from rest_framework.permissions import SAFE_METHODS, BasePermission from rest_framework.request import Request from rest_framework.response import Response @@ -22,14 +21,12 @@ from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin -from posthog.constants import AvailableFeature from posthog.event_usage import report_user_action from posthog.helpers import create_dashboard_from_template from posthog.helpers.dashboard_templates import create_from_template from posthog.models import Dashboard, DashboardTile, Insight, Text from posthog.models.dashboard_templates import DashboardTemplate from posthog.models.tagged_item import TaggedItem -from posthog.models.team.team import check_is_feature_available_for_team from posthog.models.user import User from posthog.user_permissions import UserPermissionsSerializerMixin @@ -158,13 +155,6 @@ class Meta: ] read_only_fields = ["creation_mode", "effective_restriction_level", "is_shared"] - def validate_description(self, value: str) -> str: - if value and not check_is_feature_available_for_team( - self.context["team_id"], AvailableFeature.TEAM_COLLABORATION - ): - raise PermissionDenied("You must have paid for dashboard collaboration to set the dashboard description") - return value - def validate_filters(self, value) -> Dict: if not isinstance(value, dict): raise serializers.ValidationError("Filters must be a dictionary") diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 93234aadd6a1d..3a6e08bc7a7a0 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -49,9 +49,11 @@ def on_permitted_recording_domain(team: Team, request: HttpRequest) -> bool: ) or hostname_in_allowed_url_list(team.recording_domains, referer) # TODO this is a short term fix for beta testers # TODO we will match on the app identifier in the origin instead and allow users to auth those - 
is_authorized_android_client: bool = user_agent is not None and "posthog-android" in user_agent + is_authorized_mobile_client: bool = user_agent is not None and any( + keyword in user_agent for keyword in ["posthog-android", "posthog-ios"] + ) - return is_authorized_web_client or is_authorized_android_client + return is_authorized_web_client or is_authorized_mobile_client def hostname_in_allowed_url_list(allowed_url_list: Optional[List[str]], hostname: Optional[str]) -> bool: diff --git a/posthog/api/element.py b/posthog/api/element.py index 1ade5baa0977b..d7b721dee8195 100644 --- a/posthog/api/element.py +++ b/posthog/api/element.py @@ -130,9 +130,14 @@ def stats(self, request: request.Request, **kwargs) -> response.Response: def _events_filter(self, request) -> Tuple[Literal["$autocapture", "$rageclick"], ...]: event_to_filter: Tuple[Literal["$autocapture", "$rageclick"], ...] = () + # when multiple includes are sent expects them as separate parameters + # e.g. ?include=a&include=b events_to_include = request.query_params.getlist("include", []) + if not events_to_include: + # sensible default when not provided event_to_filter += ("$autocapture",) + event_to_filter += ("$rageclick",) else: if "$rageclick" in events_to_include: events_to_include.remove("$rageclick") diff --git a/posthog/api/query.py b/posthog/api/query.py index d8f45531253a0..e30853655c749 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -67,9 +67,10 @@ def create(self, request, *args, **kwargs) -> Response: if data.async_: query_status = enqueue_process_query_task( team_id=self.team.pk, + user_id=self.request.user.pk, query_json=request.data["query"], query_id=client_query_id, - refresh_requested=data.refresh, + refresh_requested=data.refresh or False, ) return Response(query_status.model_dump(), status=status.HTTP_202_ACCEPTED) diff --git a/posthog/api/signup.py b/posthog/api/signup.py index 13f171b906485..b8c3db86c3341 100644 --- a/posthog/api/signup.py +++ b/posthog/api/signup.py @@ -51,6 +51,7 @@ def get_redirect_url(uuid: str, is_email_verified: bool) -> str: class SignupSerializer(serializers.Serializer): first_name: serializers.Field = serializers.CharField(max_length=128) + last_name: serializers.Field = serializers.CharField(max_length=128, required=False, allow_blank=True) email: serializers.Field = serializers.EmailField() password: serializers.Field = serializers.CharField(allow_null=True, required=True) organization_name: serializers.Field = serializers.CharField(max_length=128, required=False, allow_blank=True) @@ -92,7 +93,7 @@ def create(self, validated_data, **kwargs): is_instance_first_user: bool = not User.objects.exists() - organization_name = validated_data.pop("organization_name", validated_data["first_name"]) + organization_name = validated_data.pop("organization_name", f"{validated_data['first_name']}'s Organization") role_at_organization = validated_data.pop("role_at_organization", "") referral_source = validated_data.pop("referral_source", "") diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index f38f19faf3f04..a70e9efd7a670 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -1739,7 +1739,7 @@ # --- # name: TestFeatureFlag.test_creating_static_cohort.14 ''' - /* user_id:200 celery:posthog.tasks.calculate_cohort.insert_cohort_from_feature_flag */ + /* user_id:201 celery:posthog.tasks.calculate_cohort.insert_cohort_from_feature_flag 
*/ SELECT count(DISTINCT person_id) FROM person_static_cohort WHERE team_id = 2 diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index 5bdf7b792790b..6495d532782b2 100644 --- a/posthog/api/test/__snapshots__/test_insight.ambr +++ b/posthog/api/test/__snapshots__/test_insight.ambr @@ -1616,6 +1616,24 @@ LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ ''' # --- +# name: TestInsight.test_listing_insights_does_not_nplus1.30 + ''' + SELECT "posthog_taggeditem"."id", + "posthog_taggeditem"."tag_id", + "posthog_taggeditem"."dashboard_id", + "posthog_taggeditem"."insight_id", + "posthog_taggeditem"."event_definition_id", + "posthog_taggeditem"."property_definition_id", + "posthog_taggeditem"."action_id", + "posthog_taggeditem"."feature_flag_id" + FROM "posthog_taggeditem" + WHERE "posthog_taggeditem"."insight_id" IN (1, + 2, + 3, + 4, + 5 /* ... */) /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/ + ''' +# --- # name: TestInsight.test_listing_insights_does_not_nplus1.4 ''' SELECT "posthog_team"."id", diff --git a/posthog/api/test/__snapshots__/test_query.ambr b/posthog/api/test/__snapshots__/test_query.ambr index 246efec9566f1..c9fdc628b2242 100644 --- a/posthog/api/test/__snapshots__/test_query.ambr +++ b/posthog/api/test/__snapshots__/test_query.ambr @@ -157,7 +157,7 @@ # --- # name: TestQuery.test_full_hogql_query_async ''' - /* user_id:467 celery:posthog.tasks.tasks.process_query_task */ + /* user_id:468 celery:posthog.tasks.tasks.process_query_task */ SELECT events.uuid AS uuid, events.event AS event, events.properties AS properties, diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index 642602f396f8d..9ae54e6e582eb 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -2762,6 +2762,24 @@ 5 /* ... */) /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ ''' # --- +# name: TestDashboard.test_listing_dashboards_is_not_nplus1.57 + ''' + SELECT "posthog_sharingconfiguration"."id", + "posthog_sharingconfiguration"."team_id", + "posthog_sharingconfiguration"."dashboard_id", + "posthog_sharingconfiguration"."insight_id", + "posthog_sharingconfiguration"."recording_id", + "posthog_sharingconfiguration"."created_at", + "posthog_sharingconfiguration"."enabled", + "posthog_sharingconfiguration"."access_token" + FROM "posthog_sharingconfiguration" + WHERE "posthog_sharingconfiguration"."dashboard_id" IN (1, + 2, + 3, + 4, + 5 /* ... */) /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ + ''' +# --- # name: TestDashboard.test_listing_dashboards_is_not_nplus1.6 ''' SELECT "posthog_team"."id", @@ -11959,6 +11977,24 @@ 5 /* ... 
*/) /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ ''' # --- +# name: TestDashboard.test_retrieve_dashboard_list.33 + ''' + SELECT "posthog_sharingconfiguration"."id", + "posthog_sharingconfiguration"."team_id", + "posthog_sharingconfiguration"."dashboard_id", + "posthog_sharingconfiguration"."insight_id", + "posthog_sharingconfiguration"."recording_id", + "posthog_sharingconfiguration"."created_at", + "posthog_sharingconfiguration"."enabled", + "posthog_sharingconfiguration"."access_token" + FROM "posthog_sharingconfiguration" + WHERE "posthog_sharingconfiguration"."dashboard_id" IN (1, + 2, + 3, + 4, + 5 /* ... */) /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ + ''' +# --- # name: TestDashboard.test_retrieve_dashboard_list.4 ''' SELECT "posthog_dashboardtile"."id" diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index e56d763ed869f..7cb58bbce2324 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -482,6 +482,20 @@ def test_user_session_recording_allowed_for_android(self, *args) -> None: "networkPayloadCapture": None, } + def test_user_session_recording_allowed_for_ios(self, *args) -> None: + self._update_team({"session_recording_opt_in": True, "recording_domains": ["https://my-website.io"]}) + + response = self._post_decide(origin="any.site.com", user_agent="posthog-ios/3.1.0").json() + assert response["sessionRecording"] == { + "endpoint": "/s/", + "recorderVersion": "v2", + "consoleLogRecordingEnabled": False, + "sampleRate": None, + "linkedFlag": None, + "minimumDurationMilliseconds": None, + "networkPayloadCapture": None, + } + def test_user_session_recording_allowed_when_permitted_domains_are_not_http_based(self, *args): self._update_team( { diff --git a/posthog/api/test/test_element.py b/posthog/api/test/test_element.py index b7cf8f944a7e9..72a97ea2b9b43 100644 --- a/posthog/api/test/test_element.py +++ b/posthog/api/test/test_element.py @@ -4,6 +4,7 @@ from django.test import override_settings from freezegun import freeze_time +from parameterized import parameterized from rest_framework import status from posthog.models import Element, ElementGroup, Organization @@ -16,9 +17,9 @@ snapshot_postgres_queries, ) -expected_all_data_response_results: List[Dict] = [ +expected_autocapture_data_response_results: List[Dict] = [ { - "count": 2, + "count": 3, "hash": None, "type": "$autocapture", "elements": [ @@ -158,7 +159,7 @@ def test_element_stats_can_filter_by_properties(self) -> None: self._setup_events() response = self.client.get("/api/element/stats/?paginate_response=true").json() - assert len(response["results"]) == 2 + assert len(response["results"]) == 3 properties_filter = json.dumps([{"key": "$current_url", "value": "http://example.com/another_page"}]) response = self.client.get(f"/api/element/stats/?paginate_response=true&properties={properties_filter}").json() @@ -183,7 +184,7 @@ def test_element_stats_without_pagination(self) -> None: response = self.client.get("/api/element/stats").json() # not nested into a results property - assert response == expected_all_data_response_results + assert response == expected_autocapture_data_response_results + expected_rage_click_data_response_results def test_element_stats_clamps_date_from_to_start_of_day(self) -> None: event_start = "2012-01-14T03:21:34.000Z" @@ -244,7 +245,7 @@ def 
test_element_stats_can_load_all_the_data(self) -> None: assert response_json["next"] is None # loaded all the data, so no next link results = response_json["results"] - assert results == expected_all_data_response_results + assert results == expected_autocapture_data_response_results + expected_rage_click_data_response_results def test_element_stats_can_load_only_rageclick_data(self) -> None: self._setup_events() @@ -258,38 +259,52 @@ def test_element_stats_can_load_only_rageclick_data(self) -> None: assert results == expected_rage_click_data_response_results - def test_element_stats_can_load_rageclick_and_autocapture_data(self) -> None: + # no include params is equivalent to autocapture and rageclick + @parameterized.expand(["&include=$rageclick&include=$autocapture", ""]) + def test_element_stats_can_load_rageclick_and_autocapture_data(self, include_params) -> None: self._setup_events() - response = self.client.get( - f"/api/element/stats/?paginate_response=true&include=$rageclick&include=$autocapture" - ) + response = self.client.get(f"/api/element/stats/?paginate_response=true{include_params}") self.assertEqual(response.status_code, status.HTTP_200_OK) response_json = response.json() assert response_json["next"] is None # loaded all the data, so no next link results = response_json["results"] - assert results == expected_all_data_response_results + expected_rage_click_data_response_results + assert results == expected_autocapture_data_response_results + expected_rage_click_data_response_results def test_element_stats_obeys_limit_parameter(self) -> None: self._setup_events() - response = self.client.get(f"/api/element/stats/?paginate_response=true&limit=1") - self.assertEqual(response.status_code, status.HTTP_200_OK) + page_one_response = self.client.get(f"/api/element/stats/?paginate_response=true&limit=1") + self.assertEqual(page_one_response.status_code, status.HTTP_200_OK) - response_json = response.json() - assert response_json["next"] == "http://testserver/api/element/stats/?paginate_response=true&limit=1&offset=1" - limit_to_one_results = response_json["results"] - assert limit_to_one_results == [expected_all_data_response_results[0]] + page_one_response_json = page_one_response.json() + assert ( + page_one_response_json["next"] + == "http://testserver/api/element/stats/?paginate_response=true&limit=1&offset=1" + ) + limit_to_one_results = page_one_response_json["results"] + assert limit_to_one_results == [expected_autocapture_data_response_results[0]] - response = self.client.get(f"/api/element/stats/?paginate_response=true&limit=1&offset=1") - self.assertEqual(response.status_code, status.HTTP_200_OK) + page_two_response = self.client.get(f"/api/element/stats/?paginate_response=true&limit=1&offset=1") + self.assertEqual(page_two_response.status_code, status.HTTP_200_OK) - response_json = response.json() - assert response_json["next"] is None - limit_to_one_results = response_json["results"] - assert limit_to_one_results == [expected_all_data_response_results[1]] + page_two_response_json = page_two_response.json() + assert ( + page_two_response_json["next"] + == "http://testserver/api/element/stats/?paginate_response=true&limit=1&offset=2" + ) + limit_to_one_results_page_two = page_two_response_json["results"] + assert limit_to_one_results_page_two == [expected_autocapture_data_response_results[1]] + + page_three_response = self.client.get(f"/api/element/stats/?paginate_response=true&limit=1&offset=2") + self.assertEqual(page_three_response.status_code, status.HTTP_200_OK) + + 
page_three_response_json = page_three_response.json() + assert page_three_response_json["next"] is None + limit_to_one_results_page_three = page_three_response_json["results"] + assert limit_to_one_results_page_three == [expected_rage_click_data_response_results[0]] def test_element_stats_does_not_allow_non_numeric_limit(self) -> None: response = self.client.get(f"/api/element/stats/?limit=not-a-number") @@ -351,6 +366,26 @@ def _setup_events(self): distinct_id="one", properties={"$current_url": "http://example.com/demo"}, ) + _create_event( + team=self.team, + elements=[ + Element( + tag_name="a", + href="https://posthog.com/event-1", + text="event 1", + order=0, + ), + Element( + tag_name="div", + href="https://posthog.com/event-1", + text="event 1", + order=1, + ), + ], + event="$autocapture", + distinct_id="one", + properties={"$current_url": "http://example.com/demo"}, + ) _create_event( team=self.team, elements=[ diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py index e62be1ffd4893..1587c0b365e9e 100644 --- a/posthog/api/test/test_signup.py +++ b/posthog/api/test/test_signup.py @@ -43,6 +43,7 @@ def test_api_sign_up(self, mock_capture): "/api/signup/", { "first_name": "John", + "last_name": "Doe", "email": "hedgehog@posthog.com", "password": "notsecure", "organization_name": "Hedgehogs United, LLC", @@ -62,8 +63,8 @@ def test_api_sign_up(self, mock_capture): "id": user.pk, "uuid": str(user.uuid), "distinct_id": user.distinct_id, - "last_name": "", "first_name": "John", + "last_name": "Doe", "email": "hedgehog@posthog.com", "redirect_url": "/", "is_email_verified": False, @@ -72,6 +73,7 @@ def test_api_sign_up(self, mock_capture): # Assert that the user was properly created self.assertEqual(user.first_name, "John") + self.assertEqual(user.last_name, "Doe") self.assertEqual(user.email, "hedgehog@posthog.com") self.assertFalse(user.email_opt_in) self.assertTrue(user.is_staff) # True because this is the first user in the instance @@ -223,7 +225,7 @@ def test_signup_minimum_attrs(self, mock_capture): self.assertEqual(user.first_name, "Jane") self.assertEqual(user.email, "hedgehog2@posthog.com") self.assertTrue(user.email_opt_in) # Defaults to True - self.assertEqual(organization.name, "Jane") + self.assertEqual(organization.name, f"{user.first_name}'s Organization") self.assertTrue(user.is_staff) # True because this is the first user in the instance # Assert that the sign up event & identify calls were sent to PostHog analytics diff --git a/posthog/batch_exports/models.py b/posthog/batch_exports/models.py index 70b85c4d35bde..db51865560a33 100644 --- a/posthog/batch_exports/models.py +++ b/posthog/batch_exports/models.py @@ -111,6 +111,9 @@ class Status(models.TextChoices): auto_now=True, help_text="The timestamp at which this BatchExportRun was last updated.", ) + records_total_count: models.IntegerField = models.IntegerField( + null=True, help_text="The total count of records that should be exported in this BatchExportRun." 
+ ) BATCH_EXPORT_INTERVALS = [ diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index c26be9a77ed1a..f98dea7a9ebf8 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -90,6 +90,7 @@ class S3BatchExportInputs: kms_key_id: str | None = None batch_export_schema: BatchExportSchema | None = None endpoint_url: str | None = None + file_format: str = "JSONLines" @dataclass @@ -416,6 +417,7 @@ def create_batch_export_run( data_interval_start: str, data_interval_end: str, status: str = BatchExportRun.Status.STARTING, + records_total_count: int | None = None, ) -> BatchExportRun: """Create a BatchExportRun after a Temporal Workflow execution. @@ -433,22 +435,29 @@ def create_batch_export_run( status=status, data_interval_start=dt.datetime.fromisoformat(data_interval_start), data_interval_end=dt.datetime.fromisoformat(data_interval_end), + records_total_count=records_total_count, ) run.save() return run -def update_batch_export_run_status( - run_id: UUID, status: str, latest_error: str | None, records_completed: int = 0 +def update_batch_export_run( + run_id: UUID, + **kwargs, ) -> BatchExportRun: - """Update the status of an BatchExportRun with given id. + """Update the BatchExportRun with given run_id and provided **kwargs. Arguments: - id: The id of the BatchExportRun to update. + run_id: The id of the BatchExportRun to update. """ model = BatchExportRun.objects.filter(id=run_id) - updated = model.update(status=status, latest_error=latest_error, records_completed=records_completed) + update_at = dt.datetime.now() + + updated = model.update( + **kwargs, + last_updated_at=update_at, + ) if not updated: raise ValueError(f"BatchExportRun with id {run_id} not found.") diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 06f7fc639f824..4671b0060299b 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -1,5 +1,6 @@ import datetime import json +from typing import Optional import uuid import structlog @@ -69,11 +70,12 @@ def delete_query_status(self): def execute_process_query( - team_id, - query_id, - query_json, - limit_context, - refresh_requested, + team_id: int, + user_id: int, + query_id: str, + query_json: dict, + limit_context: Optional[LimitContext], + refresh_requested: bool, ): manager = QueryStatusManager(query_id, team_id) @@ -91,7 +93,7 @@ def execute_process_query( QUERY_WAIT_TIME.observe(wait_duration) try: - tag_queries(client_query_id=query_id, team_id=team_id) + tag_queries(client_query_id=query_id, team_id=team_id, user_id=user_id) results = process_query( team=team, query_json=query_json, limit_context=limit_context, refresh_requested=refresh_requested ) @@ -113,12 +115,13 @@ def execute_process_query( def enqueue_process_query_task( - team_id, - query_json, - query_id=None, - refresh_requested=False, - bypass_celery=False, - force=False, + team_id: int, + user_id: int, + query_json: dict, + query_id: Optional[str] = None, + refresh_requested: bool = False, + force: bool = False, + _test_only_bypass_celery: bool = False, ) -> QueryStatus: if not query_id: query_id = uuid.uuid4().hex @@ -136,14 +139,23 @@ def enqueue_process_query_task( query_status = QueryStatus(id=query_id, team_id=team_id, start_time=datetime.datetime.now(datetime.timezone.utc)) manager.store_query_status(query_status) - if bypass_celery: - # Call directly ( for testing ) + if _test_only_bypass_celery: process_query_task( - team_id, query_id, 
query_json, limit_context=LimitContext.QUERY_ASYNC, refresh_requested=refresh_requested + team_id, + user_id, + query_id, + query_json, + limit_context=LimitContext.QUERY_ASYNC, + refresh_requested=refresh_requested, ) else: task = process_query_task.delay( - team_id, query_id, query_json, limit_context=LimitContext.QUERY_ASYNC, refresh_requested=refresh_requested + team_id, + user_id, + query_id, + query_json, + limit_context=LimitContext.QUERY_ASYNC, + refresh_requested=refresh_requested, ) query_status.task_id = task.id manager.store_query_status(query_status) diff --git a/posthog/clickhouse/client/test/test_execute_async.py b/posthog/clickhouse/client/test/test_execute_async.py index 0d7a7281e6a4b..085e7708b9232 100644 --- a/posthog/clickhouse/client/test/test_execute_async.py +++ b/posthog/clickhouse/client/test/test_execute_async.py @@ -24,6 +24,7 @@ def setUp(self): self.organization = Organization.objects.create(name="test") self.team = Team.objects.create(organization=self.organization) self.team_id = self.team.pk + self.user_id = 1337 self.query_id = "test_query_id" self.query_json = {} self.limit_context = None @@ -41,7 +42,9 @@ def test_execute_process_query(self, mock_process_query, mock_redis_client): mock_process_query.return_value = [float("inf"), float("-inf"), float("nan"), 1.0, "👍"] - execute_process_query(self.team_id, self.query_id, self.query_json, self.limit_context, self.refresh_requested) + execute_process_query( + self.team_id, self.user_id, self.query_id, self.query_json, self.limit_context, self.refresh_requested + ) mock_redis_client.assert_called_once() mock_process_query.assert_called_once() @@ -55,15 +58,16 @@ def test_execute_process_query(self, mock_process_query, mock_redis_client): class ClickhouseClientTestCase(TestCase, ClickhouseTestMixin): def setUp(self): - self.organization = Organization.objects.create(name="test") - self.team = Team.objects.create(organization=self.organization) - self.team_id = self.team.pk + self.organization: Organization = Organization.objects.create(name="test") + self.team: Team = Team.objects.create(organization=self.organization) + self.team_id: int = self.team.pk + self.user_id: int = 2137 @snapshot_clickhouse_queries def test_async_query_client(self): query = build_query("SELECT 1+1") team_id = self.team_id - query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True).id + query_id = client.enqueue_process_query_task(team_id, self.user_id, query, _test_only_bypass_celery=True).id result = client.get_query_status(team_id, query_id) self.assertFalse(result.error, result.error_message) self.assertTrue(result.complete) @@ -74,11 +78,13 @@ def test_async_query_client_errors(self): self.assertRaises( HogQLException, client.enqueue_process_query_task, - **{"team_id": (self.team_id), "query_json": query, "bypass_celery": True}, + **{"team_id": self.team_id, "user_id": self.user_id, "query_json": query, "_test_only_bypass_celery": True}, ) query_id = uuid.uuid4().hex try: - client.enqueue_process_query_task(self.team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + self.team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) except Exception: pass @@ -89,7 +95,7 @@ def test_async_query_client_errors(self): def test_async_query_client_uuid(self): query = build_query("SELECT toUUID('00000000-0000-0000-0000-000000000000')") team_id = self.team_id - query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True).id + query_id = 
client.enqueue_process_query_task(team_id, self.user_id, query, _test_only_bypass_celery=True).id result = client.get_query_status(team_id, query_id) self.assertFalse(result.error, result.error_message) self.assertTrue(result.complete) @@ -99,7 +105,7 @@ def test_async_query_client_does_not_leak(self): query = build_query("SELECT 1+1") team_id = self.team_id wrong_team = 5 - query_id = client.enqueue_process_query_task(team_id, query, bypass_celery=True).id + query_id = client.enqueue_process_query_task(team_id, self.user_id, query, _test_only_bypass_celery=True).id try: client.get_query_status(wrong_team, query_id) @@ -111,13 +117,19 @@ def test_async_query_client_is_lazy(self, execute_sync_mock): query = build_query("SELECT 4 + 4") query_id = uuid.uuid4().hex team_id = self.team_id - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) # Try the same query again - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) # Try the same query again (for good measure!) - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) # Assert that we only called clickhouse once execute_sync_mock.assert_called_once() @@ -127,13 +139,19 @@ def test_async_query_client_is_lazy_but_not_too_lazy(self, execute_sync_mock): query = build_query("SELECT 8 + 8") query_id = uuid.uuid4().hex team_id = self.team_id - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) # Try the same query again, but with force - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True, force=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True, force=True + ) # Try the same query again (for good measure!) - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) # Assert that we called clickhouse twice self.assertEqual(execute_sync_mock.call_count, 2) @@ -145,13 +163,19 @@ def test_async_query_client_manual_query_uuid(self, execute_sync_mock): query = build_query("SELECT 8 + 8") team_id = self.team_id query_id = "I'm so unique" - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) # Try the same query again, but with force - client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True, force=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True, force=True + ) # Try the same query again (for good measure!) 
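The hunks around this point all chase one signature change: enqueue_process_query_task now requires a user_id (threaded through tag_queries, which is why the /* user_id:N ... */ annotations in the snapshot files shifted) and renames bypass_celery to the deliberately awkward _test_only_bypass_celery. A minimal sketch of the new call shape, assuming execute_async is imported as client as in these tests, with illustrative ids and payload:

# Sketch only: the signature matches the diff above; ids and query payload are illustrative.
from posthog.clickhouse.client import execute_async as client

query_json = {"kind": "HogQLQuery", "query": "SELECT 1 + 1"}  # hypothetical payload
status = client.enqueue_process_query_task(
    team_id=1,
    user_id=1337,                   # new: forwarded into tag_queries(..., user_id=user_id)
    query_json=query_json,
    refresh_requested=False,        # the API layer now coerces a missing value to False
    _test_only_bypass_celery=True,  # renamed from bypass_celery; runs the task inline
)
print(status.id)  # a QueryStatus, persisted via QueryStatusManager
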
- client.enqueue_process_query_task(team_id, query, query_id=query_id, bypass_celery=True) + client.enqueue_process_query_task( + team_id, self.user_id, query, query_id=query_id, _test_only_bypass_celery=True + ) # Assert that we called clickhouse twice self.assertEqual(execute_sync_mock.call_count, 2) @@ -186,4 +210,4 @@ def test_client_strips_comments_from_request(self): # Make sure it still includes the "annotation" comment that includes # request routing information for debugging purposes - self.assertIn("/* request:1 */", first_query) + self.assertIn(f"/* user_id:{self.user_id} request:1 */", first_query) diff --git a/posthog/errors.py b/posthog/errors.py index 39f07be762d00..a6e3536042a7f 100644 --- a/posthog/errors.py +++ b/posthog/errors.py @@ -1,6 +1,6 @@ from dataclasses import dataclass import re -from typing import Dict +from typing import Dict, Optional from clickhouse_driver.errors import ServerException @@ -8,9 +8,10 @@ class InternalCHQueryError(ServerException): - code_name: str + code_name: Optional[str] + """Can be null if re-raised from a thread (see `failhard_threadhook_context`).""" - def __init__(self, message, *, code=None, nested=None, code_name): + def __init__(self, message, *, code=None, nested=None, code_name=None): self.code_name = code_name super().__init__(message, code, nested) @@ -151,7 +152,7 @@ def look_up_error_code_meta(error: ServerException) -> ErrorCodeMeta: 60: ErrorCodeMeta("UNKNOWN_TABLE"), 61: ErrorCodeMeta("ONLY_FILTER_COLUMN_IN_BLOCK"), 62: ErrorCodeMeta("SYNTAX_ERROR"), - 63: ErrorCodeMeta("UNKNOWN_AGGREGATE_FUNCTION"), + 63: ErrorCodeMeta("UNKNOWN_AGGREGATE_FUNCTION", user_safe=True), 64: ErrorCodeMeta("CANNOT_READ_AGGREGATE_FUNCTION_FROM_TEXT"), 65: ErrorCodeMeta("CANNOT_WRITE_AGGREGATE_FUNCTION_AS_TEXT"), 66: ErrorCodeMeta("NOT_A_COLUMN"), diff --git a/posthog/hogql/ast.py b/posthog/hogql/ast.py index a459514f2524f..806226b8f1b9e 100644 --- a/posthog/hogql/ast.py +++ b/posthog/hogql/ast.py @@ -46,8 +46,17 @@ def resolve_constant_type(self, context: HogQLContext): def resolve_database_field(self, context: HogQLContext): if isinstance(self.type, FieldType): return self.type.resolve_database_field(context) + if isinstance(self.type, PropertyType): + return self.type.field_type.resolve_database_field(context) raise NotImplementedException("FieldAliasType.resolve_database_field not implemented") + def resolve_table_type(self, context: HogQLContext): + if isinstance(self.type, FieldType): + return self.type.table_type + if isinstance(self.type, PropertyType): + return self.type.field_type.table_type + raise NotImplementedException("FieldAliasType.resolve_table_type not implemented") + @dataclass(kw_only=True) class BaseTableType(Type): @@ -339,6 +348,9 @@ def get_child(self, name: str | int, context: HogQLContext) -> Type: f'Can not access property "{name}" on field "{self.name}" of type: {type(database_field).__name__}' ) + def resolve_table_type(self, context: HogQLContext): + return self.table_type + @dataclass(kw_only=True) class PropertyType(Type): diff --git a/posthog/hogql/base.py b/posthog/hogql/base.py index fbdafffb2d08c..e8a74025b78be 100644 --- a/posthog/hogql/base.py +++ b/posthog/hogql/base.py @@ -32,7 +32,7 @@ def accept(self, visitor): return visit(self) if hasattr(visitor, "visit_unknown"): return visitor.visit_unknown(self) - raise NotImplementedException(f"Visitor has no method {method_name}") + raise NotImplementedException(f"{visitor.__class__.__name__} has no method {method_name}") @dataclass(kw_only=True) diff --git 
a/posthog/hogql/database/__init__.py b/posthog/hogql/database/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 6909211070e59..afeac3c26a143 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -31,6 +31,11 @@ from posthog.hogql.database.schema.events import EventsTable from posthog.hogql.database.schema.groups import GroupsTable, RawGroupsTable from posthog.hogql.database.schema.numbers import NumbersTable +from posthog.hogql.database.schema.person_distinct_id_overrides import ( + PersonDistinctIdOverridesTable, + RawPersonDistinctIdOverridesTable, + join_with_person_distinct_id_overrides_table, +) from posthog.hogql.database.schema.person_distinct_ids import ( PersonDistinctIdsTable, RawPersonDistinctIdsTable, @@ -53,7 +58,6 @@ from posthog.models.team.team import WeekStartDay from posthog.schema import HogQLQueryModifiers, PersonsOnEventsMode - if TYPE_CHECKING: from posthog.models import Team @@ -66,6 +70,7 @@ class Database(BaseModel): groups: GroupsTable = GroupsTable() persons: PersonsTable = PersonsTable() person_distinct_ids: PersonDistinctIdsTable = PersonDistinctIdsTable() + person_distinct_id_overrides: PersonDistinctIdOverridesTable = PersonDistinctIdOverridesTable() person_overrides: PersonOverridesTable = PersonOverridesTable() session_replay_events: SessionReplayEventsTable = SessionReplayEventsTable() @@ -81,6 +86,7 @@ class Database(BaseModel): raw_persons: RawPersonsTable = RawPersonsTable() raw_groups: RawGroupsTable = RawGroupsTable() raw_cohort_people: RawCohortPeople = RawCohortPeople() + raw_person_distinct_id_overrides: RawPersonDistinctIdOverridesTable = RawPersonDistinctIdOverridesTable() raw_person_overrides: RawPersonOverridesTable = RawPersonOverridesTable() raw_sessions: RawSessionsTable = RawSessionsTable() @@ -186,6 +192,24 @@ def create_hogql_database( database.events.fields["poe"].fields["id"] = database.events.fields["person_id"] database.events.fields["person"] = FieldTraverser(chain=["poe"]) + elif modifiers.personsOnEventsMode == PersonsOnEventsMode.v3_enabled: + database.events.fields["event_person_id"] = StringDatabaseField(name="person_id") + database.events.fields["override"] = LazyJoin( + from_field=["distinct_id"], # ??? 
+ join_table=PersonDistinctIdOverridesTable(), + join_function=join_with_person_distinct_id_overrides_table, + ) + database.events.fields["person_id"] = ExpressionField( + name="person_id", + expr=parse_expr( + # NOTE: assumes `join_use_nulls = 0` (the default), as ``override.distinct_id`` is not Nullable + "if(not(empty(override.distinct_id)), override.person_id, event_person_id)", + start=None, + ), + ) + database.events.fields["poe"].fields["id"] = database.events.fields["person_id"] + database.events.fields["person"] = FieldTraverser(chain=["poe"]) + database.persons.fields["$virt_initial_referring_domain_type"] = create_initial_domain_type( "$virt_initial_referring_domain_type" ) @@ -209,10 +233,22 @@ def create_hogql_database( ) if "timestamp" not in tables[warehouse_modifier.table_name].fields.keys(): - tables[warehouse_modifier.table_name].fields["timestamp"] = ExpressionField( - name="timestamp", - expr=ast.Call(name="toDateTime", args=[ast.Field(chain=[warehouse_modifier.timestamp_field])]), - ) + table_model = DataWarehouseTable.objects.filter( + team_id=team.pk, name=warehouse_modifier.table_name + ).latest("created_at") + timestamp_field_type = table_model.get_clickhouse_column_type(warehouse_modifier.timestamp_field) + + # If field type is none or datetime, we can use the field directly + if timestamp_field_type is None or timestamp_field_type.startswith("DateTime"): + tables[warehouse_modifier.table_name].fields["timestamp"] = ExpressionField( + name="timestamp", + expr=ast.Field(chain=[warehouse_modifier.timestamp_field]), + ) + else: + tables[warehouse_modifier.table_name].fields["timestamp"] = ExpressionField( + name="timestamp", + expr=ast.Call(name="toDateTime", args=[ast.Field(chain=[warehouse_modifier.timestamp_field])]), + ) # TODO: Need to decide how the distinct_id and person_id fields are going to be handled if "distinct_id" not in tables[warehouse_modifier.table_name].fields.keys(): diff --git a/posthog/hogql/database/models.py b/posthog/hogql/database/models.py index 95a00595c6472..d2da7868a7f9c 100644 --- a/posthog/hogql/database/models.py +++ b/posthog/hogql/database/models.py @@ -3,7 +3,6 @@ from posthog.hogql.base import Expr from posthog.hogql.errors import HogQLException, NotImplementedException -from posthog.schema import HogQLQueryModifiers if TYPE_CHECKING: from posthog.hogql.context import HogQLContext @@ -126,12 +125,14 @@ def resolve_table(self, context: "HogQLContext") -> Table: class LazyTable(Table): """ - A table that is replaced with a subquery returned from `lazy_select(requested_fields: Dict[name, chain], modifiers: HogQLQueryModifiers)` + A table that is replaced with a subquery returned from `lazy_select(requested_fields: Dict[name, chain], modifiers: HogQLQueryModifiers, node: SelectQuery)` """ model_config = ConfigDict(extra="forbid") - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers) -> Any: + def lazy_select( + self, requested_fields: Dict[str, List[str | int]], context: "HogQLContext", node: "SelectQuery" + ) -> Any: raise NotImplementedException("LazyTable.lazy_select not overridden") diff --git a/posthog/hogql/database/schema/__init__.py b/posthog/hogql/database/schema/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/posthog/hogql/database/schema/channel_type.py b/posthog/hogql/database/schema/channel_type.py index 5dee575fc59a3..1552a0e6aa6d4 100644 --- a/posthog/hogql/database/schema/channel_type.py +++ b/posthog/hogql/database/schema/channel_type.py 
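The models.py change just above is the hub of this refactor: every lazy_select override in the schema files switches from receiving bare modifiers to receiving the full HogQLContext plus the outer SelectQuery node, which is what later lets the sessions table inspect the query and push a where clause into its subselect. A minimal sketch of a table conforming to the new hook — the table and field names are hypothetical; modifiers stay reachable via context.modifiers, as the persons.py hunk shows:

# Hypothetical lazy table against the new hook; only the signature is taken from the diff.
from typing import Dict, List
from posthog.hogql import ast
from posthog.hogql.context import HogQLContext
from posthog.hogql.database.models import FieldOrTable, LazyTable, StringDatabaseField

class ExampleTable(LazyTable):
    fields: Dict[str, FieldOrTable] = {"id": StringDatabaseField(name="id")}

    def lazy_select(self, requested_fields: Dict[str, List[str | int]], context: HogQLContext, node: ast.SelectQuery):
        _modifiers = context.modifiers  # previously passed in directly as `modifiers`
        select: List[ast.Expr] = [ast.Field(chain=["example"] + chain) for name, chain in requested_fields.items()]
        return ast.SelectQuery(select=select, select_from=ast.JoinExpr(table=ast.Field(chain=["example"])))

    def to_printed_clickhouse(self, context):
        return "example"

    def to_printed_hogql(self):
        return "example"
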
@@ -62,6 +62,12 @@ def create_channel_type_expr( gclid: ast.Expr, gad_source: ast.Expr, ) -> ast.Expr: + def wrap_with_null_if_empty(expr: ast.Expr) -> ast.Expr: + return ast.Call( + name="nullIf", + args=[ast.Call(name="nullIf", args=[expr, ast.Constant(value="")]), ast.Constant(value="null")], + ) + return parse_expr( """ multiIf( @@ -95,8 +101,8 @@ def create_channel_type_expr( ( {referring_domain} = '$direct' - AND ({medium} IS NULL OR {medium} = '') - AND ({source} IS NULL OR {source} IN ('', '(direct)', 'direct')) + AND ({medium} IS NULL) + AND ({source} IS NULL OR {source} IN ('(direct)', 'direct')) ), 'Direct', @@ -122,11 +128,11 @@ def create_channel_type_expr( )""", start=None, placeholders={ - "campaign": campaign, - "medium": medium, - "source": source, + "campaign": wrap_with_null_if_empty(campaign), + "medium": wrap_with_null_if_empty(medium), + "source": wrap_with_null_if_empty(source), "referring_domain": referring_domain, - "gclid": gclid, - "gad_source": gad_source, + "gclid": wrap_with_null_if_empty(gclid), + "gad_source": wrap_with_null_if_empty(gad_source), }, ) diff --git a/posthog/hogql/database/schema/cohort_people.py b/posthog/hogql/database/schema/cohort_people.py index 72080419b7355..11723f0194619 100644 --- a/posthog/hogql/database/schema/cohort_people.py +++ b/posthog/hogql/database/schema/cohort_people.py @@ -9,7 +9,6 @@ FieldOrTable, ) from posthog.hogql.database.schema.persons import join_with_persons_table -from posthog.schema import HogQLQueryModifiers COHORT_PEOPLE_FIELDS = { "person_id": StringDatabaseField(name="person_id"), @@ -67,7 +66,7 @@ def to_printed_hogql(self): class CohortPeople(LazyTable): fields: Dict[str, FieldOrTable] = COHORT_PEOPLE_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): return select_from_cohort_people_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/groups.py b/posthog/hogql/database/schema/groups.py index bb237d68e8070..3b9de7f08befc 100644 --- a/posthog/hogql/database/schema/groups.py +++ b/posthog/hogql/database/schema/groups.py @@ -13,7 +13,6 @@ FieldOrTable, ) from posthog.hogql.errors import HogQLException -from posthog.schema import HogQLQueryModifiers GROUPS_TABLE_FIELDS = { "index": IntegerDatabaseField(name="group_type_index"), @@ -83,7 +82,7 @@ def to_printed_hogql(self): class GroupsTable(LazyTable): fields: Dict[str, FieldOrTable] = GROUPS_TABLE_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): return select_from_groups_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/log_entries.py b/posthog/hogql/database/schema/log_entries.py index c14e90e26da50..9f5dc816ac4b0 100644 --- a/posthog/hogql/database/schema/log_entries.py +++ b/posthog/hogql/database/schema/log_entries.py @@ -9,7 +9,6 @@ LazyTable, FieldOrTable, ) -from posthog.schema import HogQLQueryModifiers LOG_ENTRIES_FIELDS: Dict[str, FieldOrTable] = { "team_id": IntegerDatabaseField(name="team_id"), @@ -35,7 +34,7 @@ def to_printed_hogql(self): class ReplayConsoleLogsLogEntriesTable(LazyTable): fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def 
lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): fields: List[ast.Expr] = [ast.Field(chain=["log_entries"] + chain) for name, chain in requested_fields.items()] return ast.SelectQuery( @@ -58,7 +57,7 @@ def to_printed_hogql(self): class BatchExportLogEntriesTable(LazyTable): fields: Dict[str, FieldOrTable] = LOG_ENTRIES_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): fields: List[ast.Expr] = [ast.Field(chain=["log_entries"] + chain) for name, chain in requested_fields.items()] return ast.SelectQuery( diff --git a/posthog/hogql/database/schema/person_distinct_id_overrides.py b/posthog/hogql/database/schema/person_distinct_id_overrides.py new file mode 100644 index 0000000000000..34df59655c24d --- /dev/null +++ b/posthog/hogql/database/schema/person_distinct_id_overrides.py @@ -0,0 +1,91 @@ +from typing import Dict, List +from posthog.hogql.ast import SelectQuery +from posthog.hogql.context import HogQLContext + +from posthog.hogql.database.argmax import argmax_select +from posthog.hogql.database.models import ( + Table, + IntegerDatabaseField, + StringDatabaseField, + BooleanDatabaseField, + LazyJoin, + LazyTable, + FieldOrTable, +) +from posthog.hogql.database.schema.persons import join_with_persons_table +from posthog.hogql.errors import HogQLException + +PERSON_DISTINCT_ID_OVERRIDES_FIELDS = { + "team_id": IntegerDatabaseField(name="team_id"), + "distinct_id": StringDatabaseField(name="distinct_id"), + "person_id": StringDatabaseField(name="person_id"), + "person": LazyJoin( + from_field=["person_id"], + join_table="persons", + join_function=join_with_persons_table, + ), +} + + +def select_from_person_distinct_id_overrides_table(requested_fields: Dict[str, List[str | int]]): + # Always include "person_id", as it's the key we use to make further joins, and it'd be great if it's available + if "person_id" not in requested_fields: + requested_fields = {**requested_fields, "person_id": ["person_id"]} + return argmax_select( + table_name="raw_person_distinct_id_overrides", + select_fields=requested_fields, + group_fields=["distinct_id"], + argmax_field="version", + deleted_field="is_deleted", + ) + + +def join_with_person_distinct_id_overrides_table( + from_table: str, + to_table: str, + requested_fields: Dict[str, List[str]], + context: HogQLContext, + node: SelectQuery, +): + from posthog.hogql import ast + + if not requested_fields: + raise HogQLException("No fields requested from person_distinct_id_overrides") + join_expr = ast.JoinExpr(table=select_from_person_distinct_id_overrides_table(requested_fields)) + join_expr.join_type = "LEFT OUTER JOIN" + join_expr.alias = to_table + join_expr.constraint = ast.JoinConstraint( + expr=ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Field(chain=[from_table, "distinct_id"]), + right=ast.Field(chain=[to_table, "distinct_id"]), + ) + ) + return join_expr + + +class RawPersonDistinctIdOverridesTable(Table): + fields: Dict[str, FieldOrTable] = { + **PERSON_DISTINCT_ID_OVERRIDES_FIELDS, + "is_deleted": BooleanDatabaseField(name="is_deleted"), + "version": IntegerDatabaseField(name="version"), + } + + def to_printed_clickhouse(self, context): + return "person_distinct_id_overrides" + + def to_printed_hogql(self): + return "raw_person_distinct_id_overrides" + + +class PersonDistinctIdOverridesTable(LazyTable): + fields: Dict[str, FieldOrTable] = 
PERSON_DISTINCT_ID_OVERRIDES_FIELDS + + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context: HogQLContext, node: SelectQuery): + return select_from_person_distinct_id_overrides_table(requested_fields) + + def to_printed_clickhouse(self, context): + return "person_distinct_id_overrides" + + def to_printed_hogql(self): + return "person_distinct_id_overrides" diff --git a/posthog/hogql/database/schema/person_distinct_ids.py b/posthog/hogql/database/schema/person_distinct_ids.py index 02144b35fc3d8..3304eccda862e 100644 --- a/posthog/hogql/database/schema/person_distinct_ids.py +++ b/posthog/hogql/database/schema/person_distinct_ids.py @@ -14,7 +14,6 @@ ) from posthog.hogql.database.schema.persons import join_with_persons_table from posthog.hogql.errors import HogQLException -from posthog.schema import HogQLQueryModifiers PERSON_DISTINCT_IDS_FIELDS = { "team_id": IntegerDatabaseField(name="team_id"), @@ -82,7 +81,7 @@ def to_printed_hogql(self): class PersonDistinctIdsTable(LazyTable): fields: Dict[str, FieldOrTable] = PERSON_DISTINCT_IDS_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): return select_from_person_distinct_ids_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/persons.py b/posthog/hogql/database/schema/persons.py index a248da56b7307..c7abdd89e14c6 100644 --- a/posthog/hogql/database/schema/persons.py +++ b/posthog/hogql/database/schema/persons.py @@ -123,8 +123,8 @@ def to_printed_hogql(self): class PersonsTable(LazyTable): fields: Dict[str, FieldOrTable] = PERSONS_FIELDS - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): - return select_from_persons_table(requested_fields, modifiers) + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): + return select_from_persons_table(requested_fields, context.modifiers) def to_printed_clickhouse(self, context): return "person" diff --git a/posthog/hogql/database/schema/persons_pdi.py b/posthog/hogql/database/schema/persons_pdi.py index 9f476f407b4d2..195643b90c08c 100644 --- a/posthog/hogql/database/schema/persons_pdi.py +++ b/posthog/hogql/database/schema/persons_pdi.py @@ -10,7 +10,6 @@ FieldOrTable, ) from posthog.hogql.errors import HogQLException -from posthog.schema import HogQLQueryModifiers # :NOTE: We already have person_distinct_ids.py, which most tables link to. 
This persons_pdi.py is a hack to @@ -63,7 +62,7 @@ class PersonsPDITable(LazyTable): "person_id": StringDatabaseField(name="person_id"), } - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): return persons_pdi_select(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/session_replay_events.py b/posthog/hogql/database/schema/session_replay_events.py index c9d564c7d4588..baaecef89e049 100644 --- a/posthog/hogql/database/schema/session_replay_events.py +++ b/posthog/hogql/database/schema/session_replay_events.py @@ -15,7 +15,6 @@ PersonDistinctIdsTable, join_with_person_distinct_ids_table, ) -from posthog.schema import HogQLQueryModifiers RAW_ONLY_FIELDS = ["min_first_timestamp", "max_last_timestamp"] @@ -115,7 +114,7 @@ class SessionReplayEventsTable(LazyTable): "first_url": StringDatabaseField(name="first_url"), } - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node): return select_from_session_replay_events_table(requested_fields) def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/sessions.py b/posthog/hogql/database/schema/sessions.py index 2a4865798eeb8..770daceaa23c5 100644 --- a/posthog/hogql/database/schema/sessions.py +++ b/posthog/hogql/database/schema/sessions.py @@ -1,5 +1,7 @@ from typing import Dict, List, cast +from posthog.hogql import ast +from posthog.hogql.context import HogQLContext from posthog.hogql.database.models import ( StringDatabaseField, DateTimeDatabaseField, @@ -11,7 +13,7 @@ LazyTable, ) from posthog.hogql.database.schema.channel_type import create_channel_type_expr -from posthog.schema import HogQLQueryModifiers +from posthog.hogql.database.schema.util.session_where_clause_extractor import SessionMinTimestampWhereClauseExtractor SESSIONS_COMMON_FIELDS: Dict[str, FieldOrTable] = { @@ -62,7 +64,9 @@ def avoid_asterisk_fields(self) -> List[str]: ] -def select_from_sessions_table(requested_fields: Dict[str, List[str | int]]): +def select_from_sessions_table( + requested_fields: Dict[str, List[str | int]], node: ast.SelectQuery, context: HogQLContext +): from posthog.hogql import ast table_name = "raw_sessions" @@ -134,10 +138,13 @@ def select_from_sessions_table(requested_fields: Dict[str, List[str | int]]): ) group_by_fields.append(ast.Field(chain=cast(list[str | int], [table_name]) + chain)) + where = SessionMinTimestampWhereClauseExtractor(context).get_inner_where(node) + return ast.SelectQuery( select=select_fields, select_from=ast.JoinExpr(table=ast.Field(chain=[table_name])), group_by=group_by_fields, + where=where, ) @@ -148,8 +155,8 @@ class SessionsTable(LazyTable): "channel_type": StringDatabaseField(name="channel_type"), } - def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): - return select_from_sessions_table(requested_fields) + def lazy_select(self, requested_fields: Dict[str, List[str | int]], context, node: ast.SelectQuery): + return select_from_sessions_table(requested_fields, node, context) def to_printed_clickhouse(self, context): return "sessions" diff --git a/posthog/hogql/database/schema/test/test_channel_type.py b/posthog/hogql/database/schema/test/test_channel_type.py index 89e026ff3aed0..97dba3e13ba38 100644 --- 
a/posthog/hogql/database/schema/test/test_channel_type.py +++ b/posthog/hogql/database/schema/test/test_channel_type.py @@ -106,6 +106,36 @@ def test_direct(self): ), ) + def test_direct_empty_string(self): + self.assertEqual( + "Direct", + self._get_initial_channel_type( + { + "$initial_referring_domain": "$direct", + "$initial_utm_source": "", + "$initial_utm_medium": "", + "$initial_utm_campaign": "", + "$initial_gclid": "", + "$initial_gad_source": "", + } + ), + ) + + def test_direct_null_string(self): + self.assertEqual( + "Direct", + self._get_initial_channel_type( + { + "$initial_referring_domain": "$direct", + "$initial_utm_source": "null", + "$initial_utm_medium": "null", + "$initial_utm_campaign": "null", + "$initial_gclid": "null", + "$initial_gad_source": "null", + } + ), + ) + def test_cross_network(self): self.assertEqual( "Cross Network", diff --git a/posthog/hogql/database/schema/util/__init__.py b/posthog/hogql/database/schema/util/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/posthog/hogql/database/schema/util/session_where_clause_extractor.py b/posthog/hogql/database/schema/util/session_where_clause_extractor.py new file mode 100644 index 0000000000000..83933bdde8b85 --- /dev/null +++ b/posthog/hogql/database/schema/util/session_where_clause_extractor.py @@ -0,0 +1,398 @@ +from dataclasses import dataclass +from typing import Optional + +from posthog.hogql import ast +from posthog.hogql.ast import CompareOperationOp, ArithmeticOperationOp +from posthog.hogql.context import HogQLContext +from posthog.hogql.database.models import DatabaseField + +from posthog.hogql.visitor import clone_expr, CloningVisitor, Visitor + +SESSION_BUFFER_DAYS = 3 + + +@dataclass +class SessionMinTimestampWhereClauseExtractor(CloningVisitor): + """This class extracts the Where clause from the lazy sessions table, to the clickhouse sessions table. + + The sessions table in Clickhouse is an AggregatingMergeTree, and will have one row per session per day. This means that + when we want to query sessions, we need to pre-group these rows, so that we only have one row per session. + + We hide this detail using a lazy table, but to make querying the underlying Clickhouse table faster, we can inline the + min_timestamp where conditions from the select on the outer lazy table to the select on the inner real table. + + This class is called on the select query of the lazy table, and will return the where clause that should be applied to + the inner table. + + As a query can be unreasonably complex, we only handle simple cases, but this class is designed to fail-safe. If it + can't reason about a particular expression, it will just return a constant True, i.e. fetch more rows than necessary. + + This means that we can incrementally add support for more complex queries, without breaking existing queries, by + handling more cases. + + Some examples of failing-safe: + + `SELECT * FROM sessions where min_timestamp > '2022-01-01' AND f(session_id)` + only the` min_timestamp > '2022-01-01'` part is relevant, so we can ignore the `f(session_id)` part, and it is safe + to replace it with a constant True, which collapses the AND to just the `min_timestamp > '2022-01-01'` part. + + `SELECT * FROM sessions where min_timestamp > '2022-01-01' OR f(session_id)` + only the` min_timestamp > '2022-01-01'` part is relevant, and turning the `f(session_id)` part into a constant True + would collapse the OR to True. In this case we return None as no pre-filtering is possible. 
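+
+    A concrete example of a successful extraction (matching the unit tests added later in this diff; the 3-day
+    buffer is SESSION_BUFFER_DAYS, described below):
+
+    `SELECT * FROM sessions WHERE min_timestamp >= '2021-01-01'`
+    produces the inner where clause
+    `(raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-01'`
+    so the scan of the underlying AggregatingMergeTree is bounded, while every daily row of any session that
+    could pass the outer filter after grouping is still read.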
+ + All min_timestamp comparisons are given a buffer of SESSION_BUFFER_DAYS each side, to ensure that we collect all the + relevant rows for each session. + """ + + context: HogQLContext + clear_types: bool = False + clear_locations: bool = False + + def get_inner_where(self, parsed_query: ast.SelectQuery) -> Optional[ast.Expr]: + if not parsed_query.where: + return None + + # visit the where clause + where = self.visit(parsed_query.where) + + if isinstance(where, ast.Constant): + return None + + return clone_expr(where, clear_types=True, clear_locations=True) + + def visit_compare_operation(self, node: ast.CompareOperation) -> ast.Expr: + is_left_constant = is_time_or_interval_constant(node.left) + is_right_constant = is_time_or_interval_constant(node.right) + is_left_timestamp_field = is_simple_timestamp_field_expression(node.left, self.context) + is_right_timestamp_field = is_simple_timestamp_field_expression(node.right, self.context) + + if is_left_constant and is_right_constant: + # just ignore this comparison + return ast.Constant(value=True) + + # handle the left side being a min_timestamp expression and the right being constant + if is_left_timestamp_field and is_right_constant: + if node.op == CompareOperationOp.Eq: + return ast.And( + exprs=[ + ast.CompareOperation( + op=ast.CompareOperationOp.LtEq, + left=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Sub, + left=rewrite_timestamp_field(node.left, self.context), + right=ast.Call(name="toIntervalDay", args=[ast.Constant(value=SESSION_BUFFER_DAYS)]), + ), + right=node.right, + ), + ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Add, + left=rewrite_timestamp_field(node.left, self.context), + right=ast.Call(name="toIntervalDay", args=[ast.Constant(value=SESSION_BUFFER_DAYS)]), + ), + right=node.right, + ), + ] + ) + elif node.op == CompareOperationOp.Gt or node.op == CompareOperationOp.GtEq: + return ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Add, + left=rewrite_timestamp_field(node.left, self.context), + right=ast.Call(name="toIntervalDay", args=[ast.Constant(value=SESSION_BUFFER_DAYS)]), + ), + right=node.right, + ) + elif node.op == CompareOperationOp.Lt or node.op == CompareOperationOp.LtEq: + return ast.CompareOperation( + op=ast.CompareOperationOp.LtEq, + left=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Sub, + left=rewrite_timestamp_field(node.left, self.context), + right=ast.Call(name="toIntervalDay", args=[ast.Constant(value=SESSION_BUFFER_DAYS)]), + ), + right=node.right, + ) + elif is_right_timestamp_field and is_left_constant: + # let's not duplicate the logic above, instead just flip and it and recurse + if node.op in [ + CompareOperationOp.Eq, + CompareOperationOp.Lt, + CompareOperationOp.LtEq, + CompareOperationOp.Gt, + CompareOperationOp.GtEq, + ]: + return self.visit( + ast.CompareOperation( + op=CompareOperationOp.Eq + if node.op == CompareOperationOp.Eq + else CompareOperationOp.Lt + if node.op == CompareOperationOp.Gt + else CompareOperationOp.LtEq + if node.op == CompareOperationOp.GtEq + else CompareOperationOp.Gt + if node.op == CompareOperationOp.Lt + else CompareOperationOp.GtEq, + left=node.right, + right=node.left, + ) + ) + + return ast.Constant(value=True) + + def visit_arithmetic_operation(self, node: ast.ArithmeticOperation) -> ast.Expr: + # don't even try to handle complex logic + return ast.Constant(value=True) + + def visit_not(self, 
node: ast.Not) -> ast.Expr: + return ast.Constant(value=True) + + def visit_call(self, node: ast.Call) -> ast.Expr: + if node.name == "and": + return self.visit_and(ast.And(exprs=node.args)) + elif node.name == "or": + return self.visit_or(ast.Or(exprs=node.args)) + return ast.Constant(value=True) + + def visit_field(self, node: ast.Field) -> ast.Expr: + return ast.Constant(value=True) + + def visit_constant(self, node: ast.Constant) -> ast.Expr: + return ast.Constant(value=True) + + def visit_placeholder(self, node: ast.Placeholder) -> ast.Expr: + raise Exception() # this should never happen, as placeholders should be resolved before this runs + + def visit_and(self, node: ast.And) -> ast.Expr: + exprs = [self.visit(expr) for expr in node.exprs] + + flattened = [] + for expr in exprs: + if isinstance(expr, ast.And): + flattened.extend(expr.exprs) + else: + flattened.append(expr) + + if any(isinstance(expr, ast.Constant) and expr.value is False for expr in flattened): + return ast.Constant(value=False) + + filtered = [expr for expr in flattened if not isinstance(expr, ast.Constant) or expr.value is not True] + if len(filtered) == 0: + return ast.Constant(value=True) + elif len(filtered) == 1: + return filtered[0] + else: + return ast.And(exprs=filtered) + + def visit_or(self, node: ast.Or) -> ast.Expr: + exprs = [self.visit(expr) for expr in node.exprs] + + flattened = [] + for expr in exprs: + if isinstance(expr, ast.Or): + flattened.extend(expr.exprs) + else: + flattened.append(expr) + + if any(isinstance(expr, ast.Constant) and expr.value is True for expr in flattened): + return ast.Constant(value=True) + + filtered = [expr for expr in flattened if not isinstance(expr, ast.Constant) or expr.value is not False] + if len(filtered) == 0: + return ast.Constant(value=False) + elif len(filtered) == 1: + return filtered[0] + else: + return ast.Or(exprs=filtered) + + def visit_alias(self, node: ast.Alias) -> ast.Expr: + return self.visit(node.expr) + + +def is_time_or_interval_constant(expr: ast.Expr) -> bool: + return IsTimeOrIntervalConstantVisitor().visit(expr) + + +class IsTimeOrIntervalConstantVisitor(Visitor[bool]): + def visit_constant(self, node: ast.Constant) -> bool: + return True + + def visit_compare_operation(self, node: ast.CompareOperation) -> bool: + return self.visit(node.left) and self.visit(node.right) + + def visit_arithmetic_operation(self, node: ast.ArithmeticOperation) -> bool: + return self.visit(node.left) and self.visit(node.right) + + def visit_call(self, node: ast.Call) -> bool: + # some functions just return a constant + if node.name in ["today", "now"]: + return True + # some functions return a constant if the first argument is a constant + if node.name in [ + "parseDateTime64BestEffortOrNull", + "toDateTime", + "toTimeZone", + "assumeNotNull", + "toIntervalYear", + "toIntervalMonth", + "toIntervalWeek", + "toIntervalDay", + "toIntervalHour", + "toIntervalMinute", + "toIntervalSecond", + "toStartOfDay", + "toStartOfWeek", + "toStartOfMonth", + "toStartOfQuarter", + "toStartOfYear", + ]: + return self.visit(node.args[0]) + + if node.name in ["minus", "add"]: + return all(self.visit(arg) for arg in node.args) + + # otherwise we don't know, so return False + return False + + def visit_field(self, node: ast.Field) -> bool: + return False + + def visit_and(self, node: ast.And) -> bool: + return False + + def visit_or(self, node: ast.Or) -> bool: + return False + + def visit_not(self, node: ast.Not) -> bool: + return False + + def visit_placeholder(self, node: 
ast.Placeholder) -> bool: + raise Exception() + + def visit_alias(self, node: ast.Alias) -> bool: + return self.visit(node.expr) + + +def is_simple_timestamp_field_expression(expr: ast.Expr, context: HogQLContext) -> bool: + return IsSimpleTimestampFieldExpressionVisitor(context).visit(expr) + + +@dataclass +class IsSimpleTimestampFieldExpressionVisitor(Visitor[bool]): + context: HogQLContext + + def visit_constant(self, node: ast.Constant) -> bool: + return False + + def visit_field(self, node: ast.Field) -> bool: + if node.type and isinstance(node.type, ast.FieldType): + resolved_field = node.type.resolve_database_field(self.context) + if resolved_field and isinstance(resolved_field, DatabaseField) and resolved_field: + return resolved_field.name in ["min_timestamp", "timestamp"] + # no type information, so just use the name of the field + return node.chain[-1] in ["min_timestamp", "timestamp"] + + def visit_arithmetic_operation(self, node: ast.ArithmeticOperation) -> bool: + # only allow the min_timestamp field to be used on one side of the arithmetic operation + return ( + self.visit(node.left) + and is_time_or_interval_constant(node.right) + or (self.visit(node.right) and is_time_or_interval_constant(node.left)) + ) + + def visit_call(self, node: ast.Call) -> bool: + # some functions count as a timestamp field expression if their first argument is + if node.name in [ + "parseDateTime64BestEffortOrNull", + "toDateTime", + "toTimeZone", + "assumeNotNull", + "toStartOfDay", + "toStartOfWeek", + "toStartOfMonth", + "toStartOfQuarter", + "toStartOfYear", + ]: + return self.visit(node.args[0]) + + if node.name in ["minus", "add"]: + return self.visit_arithmetic_operation( + ast.ArithmeticOperation( + op=ArithmeticOperationOp.Sub if node.name == "minus" else ArithmeticOperationOp.Add, + left=node.args[0], + right=node.args[1], + ) + ) + + # otherwise we don't know, so return False + return False + + def visit_compare_operation(self, node: ast.CompareOperation) -> bool: + return False + + def visit_and(self, node: ast.And) -> bool: + return False + + def visit_or(self, node: ast.Or) -> bool: + return False + + def visit_not(self, node: ast.Not) -> bool: + return False + + def visit_placeholder(self, node: ast.Placeholder) -> bool: + raise Exception() + + def visit_alias(self, node: ast.Alias) -> bool: + from posthog.hogql.database.schema.events import EventsTable + from posthog.hogql.database.schema.sessions import SessionsTable + + if node.type and isinstance(node.type, ast.FieldAliasType): + resolved_field = node.type.resolve_database_field(self.context) + table_type = node.type.resolve_table_type(self.context) + if not table_type: + return False + return ( + isinstance(table_type, ast.TableType) + and isinstance(table_type.table, EventsTable) + and resolved_field.name == "timestamp" + ) or ( + isinstance(table_type, ast.LazyTableType) + and isinstance(table_type.table, SessionsTable) + and resolved_field.name == "min_timestamp" + ) + + return self.visit(node.expr) + + +def rewrite_timestamp_field(expr: ast.Expr, context: HogQLContext) -> ast.Expr: + return RewriteTimestampFieldVisitor(context).visit(expr) + + +class RewriteTimestampFieldVisitor(CloningVisitor): + context: HogQLContext + + def __init__(self, context: HogQLContext, *args, **kwargs): + super().__init__(*args, **kwargs) + self.context = context + + def visit_field(self, node: ast.Field) -> ast.Field: + from posthog.hogql.database.schema.events import EventsTable + from posthog.hogql.database.schema.sessions import 
SessionsTable + + if node.type and isinstance(node.type, ast.FieldType): + resolved_field = node.type.resolve_database_field(self.context) + table = node.type.resolve_table_type(self.context).table + if resolved_field and isinstance(resolved_field, DatabaseField): + if (isinstance(table, EventsTable) and resolved_field.name == "timestamp") or ( + isinstance(table, SessionsTable) and resolved_field.name == "min_timestamp" + ): + return ast.Field(chain=["raw_sessions", "min_timestamp"]) + # no type information, so just use the name of the field + if node.chain[-1] in ["min_timestamp", "timestamp"]: + return ast.Field(chain=["raw_sessions", "min_timestamp"]) + return node + + def visit_alias(self, node: ast.Alias) -> ast.Expr: + return self.visit(node.expr) diff --git a/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py b/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py new file mode 100644 index 0000000000000..bc5324e739ad9 --- /dev/null +++ b/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py @@ -0,0 +1,284 @@ +from typing import Union, Optional, Dict + +from posthog.hogql import ast +from posthog.hogql.context import HogQLContext +from posthog.hogql.database.schema.util.session_where_clause_extractor import SessionMinTimestampWhereClauseExtractor +from posthog.hogql.modifiers import create_default_modifiers_for_team +from posthog.hogql.parser import parse_select, parse_expr +from posthog.hogql.printer import prepare_ast_for_printing, print_prepared_ast +from posthog.hogql.visitor import clone_expr +from posthog.test.base import ClickhouseTestMixin, APIBaseTest + + +def f(s: Union[str, ast.Expr, None], placeholders: Optional[dict[str, ast.Expr]] = None) -> Union[ast.Expr, None]: + if s is None: + return None + if isinstance(s, str): + expr = parse_expr(s, placeholders=placeholders) + else: + expr = s + return clone_expr(expr, clear_types=True, clear_locations=True) + + +def parse( + s: str, + placeholders: Optional[Dict[str, ast.Expr]] = None, +) -> ast.SelectQuery: + parsed = parse_select(s, placeholders=placeholders) + assert isinstance(parsed, ast.SelectQuery) + return parsed + + +class TestSessionTimestampInliner(ClickhouseTestMixin, APIBaseTest): + @property + def inliner(self): + team = self.team + modifiers = create_default_modifiers_for_team(team) + context = HogQLContext( + team_id=team.pk, + team=team, + enable_select_queries=True, + modifiers=modifiers, + ) + return SessionMinTimestampWhereClauseExtractor(context) + + def test_handles_select_with_no_where_claus(self): + inner_where = self.inliner.get_inner_where(parse("SELECT * FROM sessions")) + assert inner_where is None + + def test_handles_select_with_eq(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE min_timestamp = '2021-01-01'"))) + expected = f( + "((raw_sessions.min_timestamp - toIntervalDay(3)) <= '2021-01-01') AND ((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-01')" + ) + assert expected == actual + + def test_handles_select_with_eq_flipped(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE '2021-01-01' = min_timestamp"))) + expected = f( + "((raw_sessions.min_timestamp - toIntervalDay(3)) <= '2021-01-01') AND ((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-01')" + ) + assert expected == actual + + def test_handles_select_with_simple_gt(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE min_timestamp > 
'2021-01-01'"))) + expected = f("((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-01')") + assert expected == actual + + def test_handles_select_with_simple_gte(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE min_timestamp >= '2021-01-01'"))) + expected = f("((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-01')") + assert expected == actual + + def test_handles_select_with_simple_lt(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE min_timestamp < '2021-01-01'"))) + expected = f("((raw_sessions.min_timestamp - toIntervalDay(3)) <= '2021-01-01')") + assert expected == actual + + def test_handles_select_with_simple_lte(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE min_timestamp <= '2021-01-01'"))) + expected = f("((raw_sessions.min_timestamp - toIntervalDay(3)) <= '2021-01-01')") + assert expected == actual + + def test_select_with_placeholder(self): + actual = f( + self.inliner.get_inner_where( + parse( + "SELECT * FROM sessions WHERE min_timestamp > {timestamp}", + placeholders={"timestamp": ast.Constant(value="2021-01-01")}, + ) + ) + ) + expected = f("((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-01')") + assert expected == actual + + def test_unrelated_equals(self): + actual = self.inliner.get_inner_where( + parse("SELECT * FROM sessions WHERE initial_utm_campaign = initial_utm_source") + ) + assert actual is None + + def test_timestamp_and(self): + actual = f( + self.inliner.get_inner_where( + parse("SELECT * FROM sessions WHERE and(min_timestamp >= '2021-01-01', min_timestamp <= '2021-01-03')") + ) + ) + expected = f( + "((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-01') AND ((raw_sessions.min_timestamp - toIntervalDay(3)) <= '2021-01-03')" + ) + assert expected == actual + + def test_timestamp_or(self): + actual = f( + self.inliner.get_inner_where( + parse("SELECT * FROM sessions WHERE and(min_timestamp <= '2021-01-01', min_timestamp >= '2021-01-03')") + ) + ) + expected = f( + "((raw_sessions.min_timestamp - toIntervalDay(3)) <= '2021-01-01') AND ((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-03')" + ) + assert expected == actual + + def test_unrelated_function(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE like('a', 'b')"))) + assert actual is None + + def test_timestamp_unrelated_function(self): + actual = f( + self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE like(toString(min_timestamp), 'b')")) + ) + assert actual is None + + def test_timestamp_unrelated_function_timestamp(self): + actual = f( + self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE like(toString(min_timestamp), 'b')")) + ) + assert actual is None + + def test_ambiguous_or(self): + actual = f( + self.inliner.get_inner_where( + parse( + "SELECT * FROM sessions WHERE or(min_timestamp > '2021-01-03', like(toString(min_timestamp), 'b'))" + ) + ) + ) + assert actual is None + + def test_ambiguous_and(self): + actual = f( + self.inliner.get_inner_where( + parse( + "SELECT * FROM sessions WHERE and(min_timestamp > '2021-01-03', like(toString(min_timestamp), 'b'))" + ) + ) + ) + assert actual == f("(raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-03'") + + def test_join(self): + actual = f( + self.inliner.get_inner_where( + parse( + "SELECT * FROM events JOIN sessions ON events.session_id = raw_sessions.session_id WHERE min_timestamp > '2021-01-03'" + ) + ) + ) + 
expected = f("((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-03')") + assert expected == actual + + def test_join_using_events_timestamp_filter(self): + actual = f( + self.inliner.get_inner_where( + parse( + "SELECT * FROM events JOIN sessions ON events.session_id = raw_sessions.session_id WHERE timestamp > '2021-01-03'" + ) + ) + ) + expected = f("((raw_sessions.min_timestamp + toIntervalDay(3)) >= '2021-01-03')") + assert expected == actual + + def test_minus(self): + actual = f(self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE min_timestamp >= today() - 2"))) + expected = f("((raw_sessions.min_timestamp + toIntervalDay(3)) >= (today() - 2))") + assert expected == actual + + def test_minus_function(self): + actual = f( + self.inliner.get_inner_where(parse("SELECT * FROM sessions WHERE min_timestamp >= minus(today() , 2)")) + ) + expected = f("((raw_sessions.min_timestamp + toIntervalDay(3)) >= minus(today(), 2))") + assert expected == actual + + def test_real_example(self): + actual = f( + self.inliner.get_inner_where( + parse( + "SELECT * FROM events JOIN sessions ON events.session_id = raw_sessions.session_id WHERE event = '$pageview' AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2024-03-12 00:00:00', 'US/Pacific') AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2024-03-19 23:59:59', 'US/Pacific')" + ) + ) + ) + expected = f( + "(toTimeZone(raw_sessions.min_timestamp, 'US/Pacific') + toIntervalDay(3)) >= toDateTime('2024-03-12 00:00:00', 'US/Pacific') AND (toTimeZone(raw_sessions.min_timestamp, 'US/Pacific') - toIntervalDay(3)) <= toDateTime('2024-03-19 23:59:59', 'US/Pacific') " + ) + assert expected == actual + + def test_collapse_and(self): + actual = f( + self.inliner.get_inner_where( + parse( + "SELECT * FROM sesions WHERE event = '$pageview' AND (TRUE AND (TRUE AND TRUE AND (timestamp >= '2024-03-12' AND TRUE)))" + ) + ) + ) + expected = f("(raw_sessions.min_timestamp + toIntervalDay(3)) >= '2024-03-12'") + assert expected == actual + + +class TestSessionsQueriesHogQLToClickhouse(ClickhouseTestMixin, APIBaseTest): + def print_query(self, query: str) -> str: + team = self.team + modifiers = create_default_modifiers_for_team(team) + context = HogQLContext( + team_id=team.pk, + team=team, + enable_select_queries=True, + modifiers=modifiers, + ) + prepared_ast = prepare_ast_for_printing(node=parse(query), context=context, dialect="clickhouse") + pretty = print_prepared_ast(prepared_ast, context=context, dialect="clickhouse", pretty=True) + return pretty + + def test_select_with_timestamp(self): + actual = self.print_query("SELECT session_id FROM sessions WHERE min_timestamp > '2021-01-01'") + expected = f"""SELECT + sessions.session_id AS session_id +FROM + (SELECT + sessions.session_id AS session_id, + min(sessions.min_timestamp) AS min_timestamp + FROM + sessions + WHERE + and(equals(sessions.team_id, {self.team.id}), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, %(hogql_val_0)s), toIntervalDay(3)), %(hogql_val_1)s), 0)) + GROUP BY + sessions.session_id, + sessions.session_id) AS sessions +WHERE + ifNull(greater(toTimeZone(sessions.min_timestamp, %(hogql_val_2)s), %(hogql_val_3)s), 0) +LIMIT 10000""" + assert expected == actual + + def test_join_with_events(self): + actual = self.print_query( + """ +SELECT + sessions.session_id, + uniq(uuid) +FROM events +JOIN sessions +ON events.$session_id = sessions.session_id +WHERE events.timestamp > '2021-01-01' +GROUP BY sessions.session_id +""" + ) + expected = f"""SELECT + 
sessions.session_id AS session_id, + uniq(events.uuid) +FROM + events + JOIN (SELECT + sessions.session_id AS session_id + FROM + sessions + WHERE + and(equals(sessions.team_id, {self.team.id}), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, %(hogql_val_0)s), toIntervalDay(3)), %(hogql_val_1)s), 0)) + GROUP BY + sessions.session_id, + sessions.session_id) AS sessions ON equals(events.`$session_id`, sessions.session_id) +WHERE + and(equals(events.team_id, {self.team.id}), greater(toTimeZone(events.timestamp, %(hogql_val_2)s), %(hogql_val_3)s)) +GROUP BY + sessions.session_id +LIMIT 10000""" + assert expected == actual diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index db4dfc8f6df9f..63c2d16ce87aa 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -304,6 +304,31 @@ ] } ], + "person_distinct_id_overrides": [ + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "person_id", + "type": "string" + }, + { + "key": "person", + "type": "lazy_table", + "table": "persons", + "fields": [ + "id", + "created_at", + "team_id", + "properties", + "is_identified", + "pdi", + "$virt_initial_referring_domain_type", + "$virt_initial_channel_type" + ] + } + ], "person_overrides": [ { "key": "old_person_id", @@ -790,6 +815,39 @@ "type": "integer" } ], + "raw_person_distinct_id_overrides": [ + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "person_id", + "type": "string" + }, + { + "key": "person", + "type": "lazy_table", + "table": "persons", + "fields": [ + "id", + "created_at", + "team_id", + "properties", + "is_identified", + "pdi", + "$virt_initial_referring_domain_type", + "$virt_initial_channel_type" + ] + }, + { + "key": "is_deleted", + "type": "boolean" + }, + { + "key": "version", + "type": "integer" + } + ], "raw_person_overrides": [ { "key": "old_person_id", @@ -1155,6 +1213,31 @@ ] } ], + "person_distinct_id_overrides": [ + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "person_id", + "type": "string" + }, + { + "key": "person", + "type": "lazy_table", + "table": "persons", + "fields": [ + "id", + "created_at", + "team_id", + "properties", + "is_identified", + "pdi", + "$virt_initial_referring_domain_type", + "$virt_initial_channel_type" + ] + } + ], "person_overrides": [ { "key": "old_person_id", @@ -1641,6 +1724,39 @@ "type": "integer" } ], + "raw_person_distinct_id_overrides": [ + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "person_id", + "type": "string" + }, + { + "key": "person", + "type": "lazy_table", + "table": "persons", + "fields": [ + "id", + "created_at", + "team_id", + "properties", + "is_identified", + "pdi", + "$virt_initial_referring_domain_type", + "$virt_initial_channel_type" + ] + }, + { + "key": "is_deleted", + "type": "boolean" + }, + { + "key": "version", + "type": "integer" + } + ], "raw_person_overrides": [ { "key": "old_person_id", diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index 5edf1a68a826a..cd908d725341d 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -161,22 +161,22 @@ class HogQLFunctionMeta: "toDate": HogQLFunctionMeta( "toDateOrNull", 1, - 1, + 2, overloads=[((ast.DateTimeType, ast.DateType), "toDate")], tz_aware=True, ), "toDateTime": HogQLFunctionMeta( "parseDateTime64BestEffortOrNull", 1, - 1, + 2, 
         overloads=[((ast.DateTimeType, ast.DateType, ast.IntegerType), "toDateTime")],
         tz_aware=True,
     ),
     "toUUID": HogQLFunctionMeta("toUUIDOrNull", 1, 1),
     "toString": HogQLFunctionMeta("toString", 1, 1),
     "toJSONString": HogQLFunctionMeta("toJSONString", 1, 1),
-    "parseDateTime": HogQLFunctionMeta("parseDateTimeOrNull", 2, 2, tz_aware=True),
-    "parseDateTimeBestEffort": HogQLFunctionMeta("parseDateTime64BestEffortOrNull", 1, 1, tz_aware=True),
+    "parseDateTime": HogQLFunctionMeta("parseDateTimeOrNull", 2, 3, tz_aware=True),
+    "parseDateTimeBestEffort": HogQLFunctionMeta("parseDateTime64BestEffortOrNull", 1, 2, tz_aware=True),
     # dates and times
     "toTimeZone": HogQLFunctionMeta("toTimeZone", 2, 2),
     "timeZoneOf": HogQLFunctionMeta("timeZoneOf", 1, 1),
@@ -219,8 +219,8 @@ class HogQLFunctionMeta:
     "dateSub": HogQLFunctionMeta("dateSub", 3, 3),
     "timeStampAdd": HogQLFunctionMeta("timeStampAdd", 2, 2),
     "timeStampSub": HogQLFunctionMeta("timeStampSub", 2, 2),
-    "now": HogQLFunctionMeta("now64", tz_aware=True),
-    "NOW": HogQLFunctionMeta("now64", tz_aware=True),
+    "now": HogQLFunctionMeta("now64", 0, 1, tz_aware=True),
+    "NOW": HogQLFunctionMeta("now64", 0, 1, tz_aware=True),
     "nowInBlock": HogQLFunctionMeta("nowInBlock", 1, 1),
     "today": HogQLFunctionMeta("today"),
     "yesterday": HogQLFunctionMeta("yesterday"),
diff --git a/posthog/hogql/grammar/HogQLParser.g4 b/posthog/hogql/grammar/HogQLParser.g4
index a0f22ae3cdb0a..0729a05215653 100644
--- a/posthog/hogql/grammar/HogQLParser.g4
+++ b/posthog/hogql/grammar/HogQLParser.g4
@@ -103,7 +103,7 @@ columnExpr
     : CASE caseExpr=columnExpr? (WHEN whenExpr=columnExpr THEN thenExpr=columnExpr)+ (ELSE elseExpr=columnExpr)? END # ColumnExprCase
     | CAST LPAREN columnExpr AS columnTypeExpr RPAREN # ColumnExprCast
     | DATE STRING_LITERAL # ColumnExprDate
-    | EXTRACT LPAREN interval FROM columnExpr RPAREN # ColumnExprExtract
+//    | EXTRACT LPAREN interval FROM columnExpr RPAREN # ColumnExprExtract // Interferes with a function call
     | INTERVAL columnExpr interval # ColumnExprInterval
     | SUBSTRING LPAREN columnExpr FROM columnExpr (FOR columnExpr)? RPAREN # ColumnExprSubstring
     | TIMESTAMP STRING_LITERAL # ColumnExprTimestamp
diff --git a/posthog/hogql/grammar/HogQLParser.interp b/posthog/hogql/grammar/HogQLParser.interp
index 2b24b72a2819d..b159bc05eb424 100644
--- a/posthog/hogql/grammar/HogQLParser.interp
+++ b/posthog/hogql/grammar/HogQLParser.interp
@@ -554,4 +554,4 @@ placeholder

 atn:
[regenerated ANTLR ATN serialization omitted — machine-generated parser state tables (old and new), truncated in the source]
0, 0, 0, 669, 651, 1, 0, 0, 0, 669, 655, 1, 0, 0, 0, 669, 659, 1, 0, 0, 0, 669, 663, 1, 0, 0, 0, 669, 668, 1, 0, 0, 0, 670, 764, 1, 0, 0, 0, 671, 675, 10, 17, 0, 0, 672, 676, 5, 202, 0, 0, 673, 676, 5, 238, 0, 0, 674, 676, 5, 227, 0, 0, 675, 672, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 674, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 763, 3, 74, 37, 18, 678, 682, 10, 16, 0, 0, 679, 683, 5, 228, 0, 0, 680, 683, 5, 208, 0, 0, 681, 683, 5, 207, 0, 0, 682, 679, 1, 0, 0, 0, 682, 680, 1, 0, 0, 0, 682, 681, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 763, 3, 74, 37, 17, 685, 710, 10, 15, 0, 0, 686, 711, 5, 211, 0, 0, 687, 711, 5, 212, 0, 0, 688, 711, 5, 223, 0, 0, 689, 711, 5, 221, 0, 0, 690, 711, 5, 222, 0, 0, 691, 711, 5, 213, 0, 0, 692, 711, 5, 214, 0, 0, 693, 695, 5, 115, 0, 0, 694, 693, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 698, 5, 80, 0, 0, 697, 699, 5, 25, 0, 0, 698, 697, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 711, 1, 0, 0, 0, 700, 702, 5, 115, 0, 0, 701, 700, 1, 0, 0, 0, 701, 702, 1, 0, 0, 0, 702, 703, 1, 0, 0, 0, 703, 711, 7, 10, 0, 0, 704, 711, 5, 232, 0, 0, 705, 711, 5, 233, 0, 0, 706, 711, 5, 225, 0, 0, 707, 711, 5, 216, 0, 0, 708, 711, 5, 217, 0, 0, 709, 711, 5, 224, 0, 0, 710, 686, 1, 0, 0, 0, 710, 687, 1, 0, 0, 0, 710, 688, 1, 0, 0, 0, 710, 689, 1, 0, 0, 0, 710, 690, 1, 0, 0, 0, 710, 691, 1, 0, 0, 0, 710, 692, 1, 0, 0, 0, 710, 694, 1, 0, 0, 0, 710, 701, 1, 0, 0, 0, 710, 704, 1, 0, 0, 0, 710, 705, 1, 0, 0, 0, 710, 706, 1, 0, 0, 0, 710, 707, 1, 0, 0, 0, 710, 708, 1, 0, 0, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 763, 3, 74, 37, 16, 713, 714, 10, 13, 0, 0, 714, 715, 5, 226, 0, 0, 715, 763, 3, 74, 37, 14, 716, 717, 10, 11, 0, 0, 717, 718, 5, 6, 0, 0, 718, 763, 3, 74, 37, 12, 719, 720, 10, 10, 0, 0, 720, 721, 5, 121, 0, 0, 721, 763, 3, 74, 37, 11, 722, 724, 10, 9, 0, 0, 723, 725, 5, 115, 0, 0, 724, 723, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 727, 5, 16, 0, 0, 727, 728, 3, 74, 37, 0, 728, 729, 5, 6, 0, 0, 729, 730, 3, 74, 37, 10, 730, 763, 1, 0, 0, 0, 731, 732, 10, 8, 0, 0, 732, 733, 5, 229, 0, 0, 733, 734, 3, 74, 37, 0, 734, 735, 5, 205, 0, 0, 735, 736, 3, 74, 37, 8, 736, 763, 1, 0, 0, 0, 737, 738, 10, 21, 0, 0, 738, 739, 5, 219, 0, 0, 739, 740, 3, 74, 37, 0, 740, 741, 5, 235, 0, 0, 741, 763, 1, 0, 0, 0, 742, 743, 10, 20, 0, 0, 743, 744, 5, 210, 0, 0, 744, 763, 5, 198, 0, 0, 745, 746, 10, 19, 0, 0, 746, 747, 5, 210, 0, 0, 747, 763, 3, 118, 59, 0, 748, 749, 10, 14, 0, 0, 749, 751, 5, 88, 0, 0, 750, 752, 5, 115, 0, 0, 751, 750, 1, 0, 0, 0, 751, 752, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 753, 763, 5, 116, 0, 0, 754, 760, 10, 7, 0, 0, 755, 761, 3, 116, 58, 0, 756, 757, 5, 10, 0, 0, 757, 761, 3, 118, 59, 0, 758, 759, 5, 10, 0, 0, 759, 761, 5, 200, 0, 0, 760, 755, 1, 0, 0, 0, 760, 756, 1, 0, 0, 0, 760, 758, 1, 0, 0, 0, 761, 763, 1, 0, 0, 0, 762, 671, 1, 0, 0, 0, 762, 678, 1, 0, 0, 0, 762, 685, 1, 0, 0, 0, 762, 713, 1, 0, 0, 0, 762, 716, 1, 0, 0, 0, 762, 719, 1, 0, 0, 0, 762, 722, 1, 0, 0, 0, 762, 731, 1, 0, 0, 0, 762, 737, 1, 0, 0, 0, 762, 742, 1, 0, 0, 0, 762, 745, 1, 0, 0, 0, 762, 748, 1, 0, 0, 0, 762, 754, 1, 0, 0, 0, 763, 766, 1, 0, 0, 0, 764, 762, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 75, 1, 0, 0, 0, 766, 764, 1, 0, 0, 0, 767, 772, 3, 78, 39, 0, 768, 769, 5, 206, 0, 0, 769, 771, 3, 78, 39, 0, 770, 768, 1, 0, 0, 0, 771, 774, 1, 0, 0, 0, 772, 770, 1, 0, 0, 0, 772, 773, 1, 0, 0, 0, 773, 77, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 775, 778, 3, 80, 40, 0, 776, 778, 3, 74, 37, 0, 777, 775, 1, 0, 0, 0, 777, 776, 1, 0, 0, 0, 778, 79, 1, 0, 0, 0, 779, 780, 5, 
220, 0, 0, 780, 785, 3, 118, 59, 0, 781, 782, 5, 206, 0, 0, 782, 784, 3, 118, 59, 0, 783, 781, 1, 0, 0, 0, 784, 787, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 788, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 788, 789, 5, 236, 0, 0, 789, 799, 1, 0, 0, 0, 790, 795, 3, 118, 59, 0, 791, 792, 5, 206, 0, 0, 792, 794, 3, 118, 59, 0, 793, 791, 1, 0, 0, 0, 794, 797, 1, 0, 0, 0, 795, 793, 1, 0, 0, 0, 795, 796, 1, 0, 0, 0, 796, 799, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 798, 779, 1, 0, 0, 0, 798, 790, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 5, 201, 0, 0, 801, 802, 3, 74, 37, 0, 802, 81, 1, 0, 0, 0, 803, 804, 5, 222, 0, 0, 804, 808, 3, 118, 59, 0, 805, 807, 3, 84, 42, 0, 806, 805, 1, 0, 0, 0, 807, 810, 1, 0, 0, 0, 808, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 811, 1, 0, 0, 0, 810, 808, 1, 0, 0, 0, 811, 812, 5, 238, 0, 0, 812, 813, 5, 214, 0, 0, 813, 832, 1, 0, 0, 0, 814, 815, 5, 222, 0, 0, 815, 819, 3, 118, 59, 0, 816, 818, 3, 84, 42, 0, 817, 816, 1, 0, 0, 0, 818, 821, 1, 0, 0, 0, 819, 817, 1, 0, 0, 0, 819, 820, 1, 0, 0, 0, 820, 822, 1, 0, 0, 0, 821, 819, 1, 0, 0, 0, 822, 824, 5, 214, 0, 0, 823, 825, 3, 82, 41, 0, 824, 823, 1, 0, 0, 0, 824, 825, 1, 0, 0, 0, 825, 826, 1, 0, 0, 0, 826, 827, 5, 222, 0, 0, 827, 828, 5, 238, 0, 0, 828, 829, 3, 118, 59, 0, 829, 830, 5, 214, 0, 0, 830, 832, 1, 0, 0, 0, 831, 803, 1, 0, 0, 0, 831, 814, 1, 0, 0, 0, 832, 83, 1, 0, 0, 0, 833, 834, 3, 118, 59, 0, 834, 835, 5, 212, 0, 0, 835, 836, 5, 200, 0, 0, 836, 845, 1, 0, 0, 0, 837, 838, 3, 118, 59, 0, 838, 839, 5, 212, 0, 0, 839, 840, 5, 218, 0, 0, 840, 841, 3, 74, 37, 0, 841, 842, 5, 234, 0, 0, 842, 845, 1, 0, 0, 0, 843, 845, 3, 118, 59, 0, 844, 833, 1, 0, 0, 0, 844, 837, 1, 0, 0, 0, 844, 843, 1, 0, 0, 0, 845, 85, 1, 0, 0, 0, 846, 851, 3, 88, 44, 0, 847, 848, 5, 206, 0, 0, 848, 850, 3, 88, 44, 0, 849, 847, 1, 0, 0, 0, 850, 853, 1, 0, 0, 0, 851, 849, 1, 0, 0, 0, 851, 852, 1, 0, 0, 0, 852, 87, 1, 0, 0, 0, 853, 851, 1, 0, 0, 0, 854, 855, 3, 118, 59, 0, 855, 856, 5, 10, 0, 0, 856, 857, 5, 220, 0, 0, 857, 858, 3, 2, 1, 0, 858, 859, 5, 236, 0, 0, 859, 865, 1, 0, 0, 0, 860, 861, 3, 74, 37, 0, 861, 862, 5, 10, 0, 0, 862, 863, 3, 118, 59, 0, 863, 865, 1, 0, 0, 0, 864, 854, 1, 0, 0, 0, 864, 860, 1, 0, 0, 0, 865, 89, 1, 0, 0, 0, 866, 874, 3, 122, 61, 0, 867, 868, 3, 98, 49, 0, 868, 869, 5, 210, 0, 0, 869, 871, 1, 0, 0, 0, 870, 867, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 872, 1, 0, 0, 0, 872, 874, 3, 92, 46, 0, 873, 866, 1, 0, 0, 0, 873, 870, 1, 0, 0, 0, 874, 91, 1, 0, 0, 0, 875, 880, 3, 118, 59, 0, 876, 877, 5, 210, 0, 0, 877, 879, 3, 118, 59, 0, 878, 876, 1, 0, 0, 0, 879, 882, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 93, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 883, 884, 6, 47, -1, 0, 884, 893, 3, 98, 49, 0, 885, 893, 3, 96, 48, 0, 886, 887, 5, 220, 0, 0, 887, 888, 3, 2, 1, 0, 888, 889, 5, 236, 0, 0, 889, 893, 1, 0, 0, 0, 890, 893, 3, 82, 41, 0, 891, 893, 3, 122, 61, 0, 892, 883, 1, 0, 0, 0, 892, 885, 1, 0, 0, 0, 892, 886, 1, 0, 0, 0, 892, 890, 1, 0, 0, 0, 892, 891, 1, 0, 0, 0, 893, 902, 1, 0, 0, 0, 894, 898, 10, 3, 0, 0, 895, 899, 3, 116, 58, 0, 896, 897, 5, 10, 0, 0, 897, 899, 3, 118, 59, 0, 898, 895, 1, 0, 0, 0, 898, 896, 1, 0, 0, 0, 899, 901, 1, 0, 0, 0, 900, 894, 1, 0, 0, 0, 901, 904, 1, 0, 0, 0, 902, 900, 1, 0, 0, 0, 902, 903, 1, 0, 0, 0, 903, 95, 1, 0, 0, 0, 904, 902, 1, 0, 0, 0, 905, 906, 3, 118, 59, 0, 906, 908, 5, 220, 0, 0, 907, 909, 3, 100, 50, 0, 908, 907, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 911, 5, 236, 0, 0, 911, 97, 1, 0, 0, 0, 912, 913, 3, 102, 51, 0, 913, 914, 5, 210, 
0, 0, 914, 916, 1, 0, 0, 0, 915, 912, 1, 0, 0, 0, 915, 916, 1, 0, 0, 0, 916, 917, 1, 0, 0, 0, 917, 918, 3, 118, 59, 0, 918, 99, 1, 0, 0, 0, 919, 924, 3, 74, 37, 0, 920, 921, 5, 206, 0, 0, 921, 923, 3, 74, 37, 0, 922, 920, 1, 0, 0, 0, 923, 926, 1, 0, 0, 0, 924, 922, 1, 0, 0, 0, 924, 925, 1, 0, 0, 0, 925, 101, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 927, 928, 3, 118, 59, 0, 928, 103, 1, 0, 0, 0, 929, 938, 5, 196, 0, 0, 930, 931, 5, 210, 0, 0, 931, 938, 7, 11, 0, 0, 932, 933, 5, 198, 0, 0, 933, 935, 5, 210, 0, 0, 934, 936, 7, 11, 0, 0, 935, 934, 1, 0, 0, 0, 935, 936, 1, 0, 0, 0, 936, 938, 1, 0, 0, 0, 937, 929, 1, 0, 0, 0, 937, 930, 1, 0, 0, 0, 937, 932, 1, 0, 0, 0, 938, 105, 1, 0, 0, 0, 939, 941, 7, 12, 0, 0, 940, 939, 1, 0, 0, 0, 940, 941, 1, 0, 0, 0, 941, 948, 1, 0, 0, 0, 942, 949, 3, 104, 52, 0, 943, 949, 5, 197, 0, 0, 944, 949, 5, 198, 0, 0, 945, 949, 5, 199, 0, 0, 946, 949, 5, 82, 0, 0, 947, 949, 5, 113, 0, 0, 948, 942, 1, 0, 0, 0, 948, 943, 1, 0, 0, 0, 948, 944, 1, 0, 0, 0, 948, 945, 1, 0, 0, 0, 948, 946, 1, 0, 0, 0, 948, 947, 1, 0, 0, 0, 949, 107, 1, 0, 0, 0, 950, 954, 3, 106, 53, 0, 951, 954, 5, 200, 0, 0, 952, 954, 5, 116, 0, 0, 953, 950, 1, 0, 0, 0, 953, 951, 1, 0, 0, 0, 953, 952, 1, 0, 0, 0, 954, 109, 1, 0, 0, 0, 955, 956, 7, 13, 0, 0, 956, 111, 1, 0, 0, 0, 957, 958, 7, 14, 0, 0, 958, 113, 1, 0, 0, 0, 959, 960, 7, 15, 0, 0, 960, 115, 1, 0, 0, 0, 961, 964, 5, 195, 0, 0, 962, 964, 3, 114, 57, 0, 963, 961, 1, 0, 0, 0, 963, 962, 1, 0, 0, 0, 964, 117, 1, 0, 0, 0, 965, 969, 5, 195, 0, 0, 966, 969, 3, 110, 55, 0, 967, 969, 3, 112, 56, 0, 968, 965, 1, 0, 0, 0, 968, 966, 1, 0, 0, 0, 968, 967, 1, 0, 0, 0, 969, 119, 1, 0, 0, 0, 970, 971, 5, 200, 0, 0, 971, 972, 5, 212, 0, 0, 972, 973, 3, 106, 53, 0, 973, 121, 1, 0, 0, 0, 974, 975, 5, 218, 0, 0, 975, 976, 3, 118, 59, 0, 976, 977, 5, 234, 0, 0, 977, 123, 1, 0, 0, 0, 120, 127, 137, 146, 149, 153, 156, 160, 163, 166, 169, 172, 176, 180, 183, 186, 189, 193, 196, 205, 211, 232, 249, 266, 272, 278, 289, 291, 302, 305, 311, 319, 325, 327, 331, 336, 339, 342, 346, 350, 353, 355, 358, 362, 366, 369, 371, 373, 378, 389, 395, 402, 407, 411, 415, 421, 423, 430, 438, 441, 444, 463, 477, 493, 505, 517, 525, 529, 536, 542, 551, 555, 586, 603, 615, 625, 628, 632, 635, 648, 665, 669, 675, 682, 694, 698, 701, 710, 724, 751, 760, 762, 764, 772, 777, 785, 795, 798, 808, 819, 824, 831, 844, 851, 864, 870, 873, 880, 892, 898, 902, 908, 915, 924, 935, 937, 940, 948, 953, 963, 968] \ No newline at end of file +[4, 1, 242, 972, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 3, 0, 128, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 136, 8, 1, 10, 1, 12, 1, 139, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 147, 8, 2, 1, 3, 3, 3, 150, 8, 3, 1, 3, 1, 3, 3, 3, 154, 8, 3, 
1, 3, 3, 3, 157, 8, 3, 1, 3, 1, 3, 3, 3, 161, 8, 3, 1, 3, 3, 3, 164, 8, 3, 1, 3, 3, 3, 167, 8, 3, 1, 3, 3, 3, 170, 8, 3, 1, 3, 3, 3, 173, 8, 3, 1, 3, 1, 3, 3, 3, 177, 8, 3, 1, 3, 1, 3, 3, 3, 181, 8, 3, 1, 3, 3, 3, 184, 8, 3, 1, 3, 3, 3, 187, 8, 3, 1, 3, 3, 3, 190, 8, 3, 1, 3, 1, 3, 3, 3, 194, 8, 3, 1, 3, 3, 3, 197, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 206, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 3, 7, 212, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 231, 8, 8, 10, 8, 12, 8, 234, 9, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 250, 8, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 267, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 273, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 279, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 290, 8, 15, 3, 15, 292, 8, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 3, 18, 303, 8, 18, 1, 18, 3, 18, 306, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 312, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 320, 8, 18, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 326, 8, 18, 10, 18, 12, 18, 329, 9, 18, 1, 19, 3, 19, 332, 8, 19, 1, 19, 1, 19, 1, 19, 3, 19, 337, 8, 19, 1, 19, 3, 19, 340, 8, 19, 1, 19, 3, 19, 343, 8, 19, 1, 19, 1, 19, 3, 19, 347, 8, 19, 1, 19, 1, 19, 3, 19, 351, 8, 19, 1, 19, 3, 19, 354, 8, 19, 3, 19, 356, 8, 19, 1, 19, 3, 19, 359, 8, 19, 1, 19, 1, 19, 3, 19, 363, 8, 19, 1, 19, 1, 19, 3, 19, 367, 8, 19, 1, 19, 3, 19, 370, 8, 19, 3, 19, 372, 8, 19, 3, 19, 374, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 379, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 390, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 3, 22, 396, 8, 22, 1, 23, 1, 23, 1, 23, 5, 23, 401, 8, 23, 10, 23, 12, 23, 404, 9, 23, 1, 24, 1, 24, 3, 24, 408, 8, 24, 1, 24, 1, 24, 3, 24, 412, 8, 24, 1, 24, 1, 24, 3, 24, 416, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 422, 8, 25, 3, 25, 424, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 429, 8, 26, 10, 26, 12, 26, 432, 9, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 439, 8, 28, 1, 28, 3, 28, 442, 8, 28, 1, 28, 3, 28, 445, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 464, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 478, 8, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 492, 8, 35, 10, 35, 12, 35, 495, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 504, 8, 35, 10, 35, 12, 35, 507, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 516, 8, 35, 10, 35, 12, 35, 519, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 526, 8, 35, 1, 35, 1, 35, 3, 35, 530, 8, 35, 1, 36, 1, 36, 1, 36, 5, 36, 535, 8, 36, 10, 36, 12, 36, 538, 9, 36, 1, 37, 1, 37, 1, 37, 3, 37, 543, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 550, 8, 37, 11, 37, 12, 37, 551, 1, 37, 1, 37, 3, 37, 556, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 580, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 597, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 609, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 
1, 37, 3, 37, 619, 8, 37, 1, 37, 3, 37, 622, 8, 37, 1, 37, 1, 37, 3, 37, 626, 8, 37, 1, 37, 3, 37, 629, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 642, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 659, 8, 37, 1, 37, 1, 37, 3, 37, 663, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 669, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 676, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 688, 8, 37, 1, 37, 1, 37, 3, 37, 692, 8, 37, 1, 37, 3, 37, 695, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 704, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 718, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 745, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 754, 8, 37, 5, 37, 756, 8, 37, 10, 37, 12, 37, 759, 9, 37, 1, 38, 1, 38, 1, 38, 5, 38, 764, 8, 38, 10, 38, 12, 38, 767, 9, 38, 1, 39, 1, 39, 3, 39, 771, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 777, 8, 40, 10, 40, 12, 40, 780, 9, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 787, 8, 40, 10, 40, 12, 40, 790, 9, 40, 3, 40, 792, 8, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 5, 41, 800, 8, 41, 10, 41, 12, 41, 803, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 5, 41, 811, 8, 41, 10, 41, 12, 41, 814, 9, 41, 1, 41, 1, 41, 3, 41, 818, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 825, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 838, 8, 42, 1, 43, 1, 43, 1, 43, 5, 43, 843, 8, 43, 10, 43, 12, 43, 846, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 858, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 864, 8, 45, 1, 45, 3, 45, 867, 8, 45, 1, 46, 1, 46, 1, 46, 5, 46, 872, 8, 46, 10, 46, 12, 46, 875, 9, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 886, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 892, 8, 47, 5, 47, 894, 8, 47, 10, 47, 12, 47, 897, 9, 47, 1, 48, 1, 48, 1, 48, 3, 48, 902, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 3, 49, 909, 8, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 5, 50, 916, 8, 50, 10, 50, 12, 50, 919, 9, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 929, 8, 52, 3, 52, 931, 8, 52, 1, 53, 3, 53, 934, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 942, 8, 53, 1, 54, 1, 54, 1, 54, 3, 54, 947, 8, 54, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 3, 58, 957, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 962, 8, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 0, 3, 36, 74, 94, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 16, 2, 0, 32, 32, 141, 141, 2, 0, 84, 84, 96, 96, 3, 0, 4, 4, 8, 8, 12, 12, 4, 0, 4, 4, 7, 8, 12, 12, 147, 147, 2, 0, 96, 96, 140, 140, 2, 0, 4, 4, 8, 8, 2, 0, 11, 11, 42, 43, 2, 0, 62, 62, 93, 93, 2, 0, 133, 133, 143, 143, 3, 0, 17, 17, 95, 95, 170, 170, 2, 0, 79, 79, 98, 98, 1, 0, 197, 198, 2, 0, 208, 208, 228, 228, 8, 0, 37, 37, 76, 76, 108, 108, 110, 110, 132, 132, 145, 145, 185, 185, 190, 190, 13, 0, 2, 24, 26, 36, 38, 75, 77, 81, 83, 107, 109, 109, 111, 112, 114, 115, 117, 130, 133, 
144, 146, 184, 186, 189, 191, 192, 4, 0, 36, 36, 62, 62, 77, 77, 91, 91, 1099, 0, 127, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 4, 146, 1, 0, 0, 0, 6, 149, 1, 0, 0, 0, 8, 198, 1, 0, 0, 0, 10, 201, 1, 0, 0, 0, 12, 207, 1, 0, 0, 0, 14, 211, 1, 0, 0, 0, 16, 217, 1, 0, 0, 0, 18, 235, 1, 0, 0, 0, 20, 238, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 251, 1, 0, 0, 0, 26, 254, 1, 0, 0, 0, 28, 258, 1, 0, 0, 0, 30, 291, 1, 0, 0, 0, 32, 293, 1, 0, 0, 0, 34, 296, 1, 0, 0, 0, 36, 311, 1, 0, 0, 0, 38, 373, 1, 0, 0, 0, 40, 378, 1, 0, 0, 0, 42, 389, 1, 0, 0, 0, 44, 391, 1, 0, 0, 0, 46, 397, 1, 0, 0, 0, 48, 405, 1, 0, 0, 0, 50, 423, 1, 0, 0, 0, 52, 425, 1, 0, 0, 0, 54, 433, 1, 0, 0, 0, 56, 438, 1, 0, 0, 0, 58, 446, 1, 0, 0, 0, 60, 450, 1, 0, 0, 0, 62, 454, 1, 0, 0, 0, 64, 463, 1, 0, 0, 0, 66, 477, 1, 0, 0, 0, 68, 479, 1, 0, 0, 0, 70, 529, 1, 0, 0, 0, 72, 531, 1, 0, 0, 0, 74, 662, 1, 0, 0, 0, 76, 760, 1, 0, 0, 0, 78, 770, 1, 0, 0, 0, 80, 791, 1, 0, 0, 0, 82, 824, 1, 0, 0, 0, 84, 837, 1, 0, 0, 0, 86, 839, 1, 0, 0, 0, 88, 857, 1, 0, 0, 0, 90, 866, 1, 0, 0, 0, 92, 868, 1, 0, 0, 0, 94, 885, 1, 0, 0, 0, 96, 898, 1, 0, 0, 0, 98, 908, 1, 0, 0, 0, 100, 912, 1, 0, 0, 0, 102, 920, 1, 0, 0, 0, 104, 930, 1, 0, 0, 0, 106, 933, 1, 0, 0, 0, 108, 946, 1, 0, 0, 0, 110, 948, 1, 0, 0, 0, 112, 950, 1, 0, 0, 0, 114, 952, 1, 0, 0, 0, 116, 956, 1, 0, 0, 0, 118, 961, 1, 0, 0, 0, 120, 963, 1, 0, 0, 0, 122, 967, 1, 0, 0, 0, 124, 128, 3, 2, 1, 0, 125, 128, 3, 6, 3, 0, 126, 128, 3, 82, 41, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 129, 1, 0, 0, 0, 129, 130, 5, 0, 0, 1, 130, 1, 1, 0, 0, 0, 131, 137, 3, 4, 2, 0, 132, 133, 5, 176, 0, 0, 133, 134, 5, 4, 0, 0, 134, 136, 3, 4, 2, 0, 135, 132, 1, 0, 0, 0, 136, 139, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 3, 1, 0, 0, 0, 139, 137, 1, 0, 0, 0, 140, 147, 3, 6, 3, 0, 141, 142, 5, 220, 0, 0, 142, 143, 3, 2, 1, 0, 143, 144, 5, 236, 0, 0, 144, 147, 1, 0, 0, 0, 145, 147, 3, 122, 61, 0, 146, 140, 1, 0, 0, 0, 146, 141, 1, 0, 0, 0, 146, 145, 1, 0, 0, 0, 147, 5, 1, 0, 0, 0, 148, 150, 3, 8, 4, 0, 149, 148, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 151, 1, 0, 0, 0, 151, 153, 5, 146, 0, 0, 152, 154, 5, 49, 0, 0, 153, 152, 1, 0, 0, 0, 153, 154, 1, 0, 0, 0, 154, 156, 1, 0, 0, 0, 155, 157, 3, 10, 5, 0, 156, 155, 1, 0, 0, 0, 156, 157, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 160, 3, 72, 36, 0, 159, 161, 3, 12, 6, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 163, 1, 0, 0, 0, 162, 164, 3, 14, 7, 0, 163, 162, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 167, 3, 18, 9, 0, 166, 165, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 169, 1, 0, 0, 0, 168, 170, 3, 20, 10, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 172, 1, 0, 0, 0, 171, 173, 3, 22, 11, 0, 172, 171, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 175, 5, 189, 0, 0, 175, 177, 7, 0, 0, 0, 176, 174, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 180, 1, 0, 0, 0, 178, 179, 5, 189, 0, 0, 179, 181, 5, 169, 0, 0, 180, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 183, 1, 0, 0, 0, 182, 184, 3, 24, 12, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 187, 3, 16, 8, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 189, 1, 0, 0, 0, 188, 190, 3, 26, 13, 0, 189, 188, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 193, 1, 0, 0, 0, 191, 194, 3, 30, 15, 0, 192, 194, 3, 32, 16, 0, 193, 191, 1, 0, 0, 0, 193, 192, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 197, 3, 34, 17, 0, 196, 195, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 7, 1, 0, 0, 0, 198, 199, 5, 189, 0, 0, 199, 200, 3, 86, 
43, 0, 200, 9, 1, 0, 0, 0, 201, 202, 5, 168, 0, 0, 202, 205, 5, 198, 0, 0, 203, 204, 5, 189, 0, 0, 204, 206, 5, 164, 0, 0, 205, 203, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 11, 1, 0, 0, 0, 207, 208, 5, 68, 0, 0, 208, 209, 3, 36, 18, 0, 209, 13, 1, 0, 0, 0, 210, 212, 7, 1, 0, 0, 211, 210, 1, 0, 0, 0, 211, 212, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 5, 9, 0, 0, 214, 215, 5, 90, 0, 0, 215, 216, 3, 72, 36, 0, 216, 15, 1, 0, 0, 0, 217, 218, 5, 188, 0, 0, 218, 219, 3, 118, 59, 0, 219, 220, 5, 10, 0, 0, 220, 221, 5, 220, 0, 0, 221, 222, 3, 56, 28, 0, 222, 232, 5, 236, 0, 0, 223, 224, 5, 206, 0, 0, 224, 225, 3, 118, 59, 0, 225, 226, 5, 10, 0, 0, 226, 227, 5, 220, 0, 0, 227, 228, 3, 56, 28, 0, 228, 229, 5, 236, 0, 0, 229, 231, 1, 0, 0, 0, 230, 223, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 17, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 235, 236, 5, 129, 0, 0, 236, 237, 3, 74, 37, 0, 237, 19, 1, 0, 0, 0, 238, 239, 5, 187, 0, 0, 239, 240, 3, 74, 37, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 73, 0, 0, 242, 249, 5, 18, 0, 0, 243, 244, 7, 0, 0, 0, 244, 245, 5, 220, 0, 0, 245, 246, 3, 72, 36, 0, 246, 247, 5, 236, 0, 0, 247, 250, 1, 0, 0, 0, 248, 250, 3, 72, 36, 0, 249, 243, 1, 0, 0, 0, 249, 248, 1, 0, 0, 0, 250, 23, 1, 0, 0, 0, 251, 252, 5, 74, 0, 0, 252, 253, 3, 74, 37, 0, 253, 25, 1, 0, 0, 0, 254, 255, 5, 122, 0, 0, 255, 256, 5, 18, 0, 0, 256, 257, 3, 46, 23, 0, 257, 27, 1, 0, 0, 0, 258, 259, 5, 122, 0, 0, 259, 260, 5, 18, 0, 0, 260, 261, 3, 72, 36, 0, 261, 29, 1, 0, 0, 0, 262, 263, 5, 99, 0, 0, 263, 266, 3, 74, 37, 0, 264, 265, 5, 206, 0, 0, 265, 267, 3, 74, 37, 0, 266, 264, 1, 0, 0, 0, 266, 267, 1, 0, 0, 0, 267, 272, 1, 0, 0, 0, 268, 269, 5, 189, 0, 0, 269, 273, 5, 164, 0, 0, 270, 271, 5, 18, 0, 0, 271, 273, 3, 72, 36, 0, 272, 268, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 292, 1, 0, 0, 0, 274, 275, 5, 99, 0, 0, 275, 278, 3, 74, 37, 0, 276, 277, 5, 189, 0, 0, 277, 279, 5, 164, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 5, 118, 0, 0, 281, 282, 3, 74, 37, 0, 282, 292, 1, 0, 0, 0, 283, 284, 5, 99, 0, 0, 284, 285, 3, 74, 37, 0, 285, 286, 5, 118, 0, 0, 286, 289, 3, 74, 37, 0, 287, 288, 5, 18, 0, 0, 288, 290, 3, 72, 36, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 292, 1, 0, 0, 0, 291, 262, 1, 0, 0, 0, 291, 274, 1, 0, 0, 0, 291, 283, 1, 0, 0, 0, 292, 31, 1, 0, 0, 0, 293, 294, 5, 118, 0, 0, 294, 295, 3, 74, 37, 0, 295, 33, 1, 0, 0, 0, 296, 297, 5, 150, 0, 0, 297, 298, 3, 52, 26, 0, 298, 35, 1, 0, 0, 0, 299, 300, 6, 18, -1, 0, 300, 302, 3, 94, 47, 0, 301, 303, 5, 61, 0, 0, 302, 301, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 305, 1, 0, 0, 0, 304, 306, 3, 44, 22, 0, 305, 304, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 312, 1, 0, 0, 0, 307, 308, 5, 220, 0, 0, 308, 309, 3, 36, 18, 0, 309, 310, 5, 236, 0, 0, 310, 312, 1, 0, 0, 0, 311, 299, 1, 0, 0, 0, 311, 307, 1, 0, 0, 0, 312, 327, 1, 0, 0, 0, 313, 314, 10, 3, 0, 0, 314, 315, 3, 40, 20, 0, 315, 316, 3, 36, 18, 4, 316, 326, 1, 0, 0, 0, 317, 319, 10, 4, 0, 0, 318, 320, 3, 38, 19, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 322, 5, 90, 0, 0, 322, 323, 3, 36, 18, 0, 323, 324, 3, 42, 21, 0, 324, 326, 1, 0, 0, 0, 325, 313, 1, 0, 0, 0, 325, 317, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 37, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 330, 332, 7, 2, 0, 0, 331, 330, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 340, 5, 84, 0, 0, 334, 336, 5, 84, 0, 0, 335, 337, 7, 2, 0, 0, 336, 335, 1, 0, 0, 0, 336, 337, 1, 
0, 0, 0, 337, 340, 1, 0, 0, 0, 338, 340, 7, 2, 0, 0, 339, 331, 1, 0, 0, 0, 339, 334, 1, 0, 0, 0, 339, 338, 1, 0, 0, 0, 340, 374, 1, 0, 0, 0, 341, 343, 7, 3, 0, 0, 342, 341, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 346, 7, 4, 0, 0, 345, 347, 5, 123, 0, 0, 346, 345, 1, 0, 0, 0, 346, 347, 1, 0, 0, 0, 347, 356, 1, 0, 0, 0, 348, 350, 7, 4, 0, 0, 349, 351, 5, 123, 0, 0, 350, 349, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 353, 1, 0, 0, 0, 352, 354, 7, 3, 0, 0, 353, 352, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 356, 1, 0, 0, 0, 355, 342, 1, 0, 0, 0, 355, 348, 1, 0, 0, 0, 356, 374, 1, 0, 0, 0, 357, 359, 7, 5, 0, 0, 358, 357, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 362, 5, 69, 0, 0, 361, 363, 5, 123, 0, 0, 362, 361, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 372, 1, 0, 0, 0, 364, 366, 5, 69, 0, 0, 365, 367, 5, 123, 0, 0, 366, 365, 1, 0, 0, 0, 366, 367, 1, 0, 0, 0, 367, 369, 1, 0, 0, 0, 368, 370, 7, 5, 0, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 372, 1, 0, 0, 0, 371, 358, 1, 0, 0, 0, 371, 364, 1, 0, 0, 0, 372, 374, 1, 0, 0, 0, 373, 339, 1, 0, 0, 0, 373, 355, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 39, 1, 0, 0, 0, 375, 376, 5, 31, 0, 0, 376, 379, 5, 90, 0, 0, 377, 379, 5, 206, 0, 0, 378, 375, 1, 0, 0, 0, 378, 377, 1, 0, 0, 0, 379, 41, 1, 0, 0, 0, 380, 381, 5, 119, 0, 0, 381, 390, 3, 72, 36, 0, 382, 383, 5, 179, 0, 0, 383, 384, 5, 220, 0, 0, 384, 385, 3, 72, 36, 0, 385, 386, 5, 236, 0, 0, 386, 390, 1, 0, 0, 0, 387, 388, 5, 179, 0, 0, 388, 390, 3, 72, 36, 0, 389, 380, 1, 0, 0, 0, 389, 382, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 43, 1, 0, 0, 0, 391, 392, 5, 144, 0, 0, 392, 395, 3, 50, 25, 0, 393, 394, 5, 118, 0, 0, 394, 396, 3, 50, 25, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 45, 1, 0, 0, 0, 397, 402, 3, 48, 24, 0, 398, 399, 5, 206, 0, 0, 399, 401, 3, 48, 24, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 47, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 407, 3, 74, 37, 0, 406, 408, 7, 6, 0, 0, 407, 406, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 410, 5, 117, 0, 0, 410, 412, 7, 7, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 414, 5, 26, 0, 0, 414, 416, 5, 200, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 49, 1, 0, 0, 0, 417, 424, 3, 122, 61, 0, 418, 421, 3, 106, 53, 0, 419, 420, 5, 238, 0, 0, 420, 422, 3, 106, 53, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 417, 1, 0, 0, 0, 423, 418, 1, 0, 0, 0, 424, 51, 1, 0, 0, 0, 425, 430, 3, 54, 27, 0, 426, 427, 5, 206, 0, 0, 427, 429, 3, 54, 27, 0, 428, 426, 1, 0, 0, 0, 429, 432, 1, 0, 0, 0, 430, 428, 1, 0, 0, 0, 430, 431, 1, 0, 0, 0, 431, 53, 1, 0, 0, 0, 432, 430, 1, 0, 0, 0, 433, 434, 3, 118, 59, 0, 434, 435, 5, 212, 0, 0, 435, 436, 3, 108, 54, 0, 436, 55, 1, 0, 0, 0, 437, 439, 3, 58, 29, 0, 438, 437, 1, 0, 0, 0, 438, 439, 1, 0, 0, 0, 439, 441, 1, 0, 0, 0, 440, 442, 3, 60, 30, 0, 441, 440, 1, 0, 0, 0, 441, 442, 1, 0, 0, 0, 442, 444, 1, 0, 0, 0, 443, 445, 3, 62, 31, 0, 444, 443, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 57, 1, 0, 0, 0, 446, 447, 5, 126, 0, 0, 447, 448, 5, 18, 0, 0, 448, 449, 3, 72, 36, 0, 449, 59, 1, 0, 0, 0, 450, 451, 5, 122, 0, 0, 451, 452, 5, 18, 0, 0, 452, 453, 3, 46, 23, 0, 453, 61, 1, 0, 0, 0, 454, 455, 7, 8, 0, 0, 455, 456, 3, 64, 32, 0, 456, 63, 1, 0, 0, 0, 457, 464, 3, 66, 33, 0, 458, 459, 5, 16, 0, 0, 459, 460, 3, 66, 33, 0, 460, 461, 5, 6, 0, 0, 461, 462, 3, 66, 33, 0, 462, 464, 1, 0, 0, 0, 463, 457, 1, 0, 0, 0, 463, 458, 1, 0, 0, 0, 464, 65, 1, 0, 0, 0, 465, 
466, 5, 33, 0, 0, 466, 478, 5, 142, 0, 0, 467, 468, 5, 175, 0, 0, 468, 478, 5, 128, 0, 0, 469, 470, 5, 175, 0, 0, 470, 478, 5, 64, 0, 0, 471, 472, 3, 106, 53, 0, 472, 473, 5, 128, 0, 0, 473, 478, 1, 0, 0, 0, 474, 475, 3, 106, 53, 0, 475, 476, 5, 64, 0, 0, 476, 478, 1, 0, 0, 0, 477, 465, 1, 0, 0, 0, 477, 467, 1, 0, 0, 0, 477, 469, 1, 0, 0, 0, 477, 471, 1, 0, 0, 0, 477, 474, 1, 0, 0, 0, 478, 67, 1, 0, 0, 0, 479, 480, 3, 74, 37, 0, 480, 481, 5, 0, 0, 1, 481, 69, 1, 0, 0, 0, 482, 530, 3, 118, 59, 0, 483, 484, 3, 118, 59, 0, 484, 485, 5, 220, 0, 0, 485, 486, 3, 118, 59, 0, 486, 493, 3, 70, 35, 0, 487, 488, 5, 206, 0, 0, 488, 489, 3, 118, 59, 0, 489, 490, 3, 70, 35, 0, 490, 492, 1, 0, 0, 0, 491, 487, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 497, 5, 236, 0, 0, 497, 530, 1, 0, 0, 0, 498, 499, 3, 118, 59, 0, 499, 500, 5, 220, 0, 0, 500, 505, 3, 120, 60, 0, 501, 502, 5, 206, 0, 0, 502, 504, 3, 120, 60, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 508, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 5, 236, 0, 0, 509, 530, 1, 0, 0, 0, 510, 511, 3, 118, 59, 0, 511, 512, 5, 220, 0, 0, 512, 517, 3, 70, 35, 0, 513, 514, 5, 206, 0, 0, 514, 516, 3, 70, 35, 0, 515, 513, 1, 0, 0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 520, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 521, 5, 236, 0, 0, 521, 530, 1, 0, 0, 0, 522, 523, 3, 118, 59, 0, 523, 525, 5, 220, 0, 0, 524, 526, 3, 72, 36, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 5, 236, 0, 0, 528, 530, 1, 0, 0, 0, 529, 482, 1, 0, 0, 0, 529, 483, 1, 0, 0, 0, 529, 498, 1, 0, 0, 0, 529, 510, 1, 0, 0, 0, 529, 522, 1, 0, 0, 0, 530, 71, 1, 0, 0, 0, 531, 536, 3, 74, 37, 0, 532, 533, 5, 206, 0, 0, 533, 535, 3, 74, 37, 0, 534, 532, 1, 0, 0, 0, 535, 538, 1, 0, 0, 0, 536, 534, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 73, 1, 0, 0, 0, 538, 536, 1, 0, 0, 0, 539, 540, 6, 37, -1, 0, 540, 542, 5, 19, 0, 0, 541, 543, 3, 74, 37, 0, 542, 541, 1, 0, 0, 0, 542, 543, 1, 0, 0, 0, 543, 549, 1, 0, 0, 0, 544, 545, 5, 186, 0, 0, 545, 546, 3, 74, 37, 0, 546, 547, 5, 163, 0, 0, 547, 548, 3, 74, 37, 0, 548, 550, 1, 0, 0, 0, 549, 544, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 554, 5, 52, 0, 0, 554, 556, 3, 74, 37, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 5, 53, 0, 0, 558, 663, 1, 0, 0, 0, 559, 560, 5, 20, 0, 0, 560, 561, 5, 220, 0, 0, 561, 562, 3, 74, 37, 0, 562, 563, 5, 10, 0, 0, 563, 564, 3, 70, 35, 0, 564, 565, 5, 236, 0, 0, 565, 663, 1, 0, 0, 0, 566, 567, 5, 36, 0, 0, 567, 663, 5, 200, 0, 0, 568, 569, 5, 86, 0, 0, 569, 570, 3, 74, 37, 0, 570, 571, 3, 110, 55, 0, 571, 663, 1, 0, 0, 0, 572, 573, 5, 155, 0, 0, 573, 574, 5, 220, 0, 0, 574, 575, 3, 74, 37, 0, 575, 576, 5, 68, 0, 0, 576, 579, 3, 74, 37, 0, 577, 578, 5, 65, 0, 0, 578, 580, 3, 74, 37, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 5, 236, 0, 0, 582, 663, 1, 0, 0, 0, 583, 584, 5, 166, 0, 0, 584, 663, 5, 200, 0, 0, 585, 586, 5, 171, 0, 0, 586, 587, 5, 220, 0, 0, 587, 588, 7, 9, 0, 0, 588, 589, 5, 200, 0, 0, 589, 590, 5, 68, 0, 0, 590, 591, 3, 74, 37, 0, 591, 592, 5, 236, 0, 0, 592, 663, 1, 0, 0, 0, 593, 594, 3, 118, 59, 0, 594, 596, 5, 220, 0, 0, 595, 597, 3, 72, 36, 0, 596, 595, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 5, 236, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 5, 125, 0, 0, 601, 602, 5, 
220, 0, 0, 602, 603, 3, 56, 28, 0, 603, 604, 5, 236, 0, 0, 604, 663, 1, 0, 0, 0, 605, 606, 3, 118, 59, 0, 606, 608, 5, 220, 0, 0, 607, 609, 3, 72, 36, 0, 608, 607, 1, 0, 0, 0, 608, 609, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 611, 5, 236, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 5, 125, 0, 0, 613, 614, 3, 118, 59, 0, 614, 663, 1, 0, 0, 0, 615, 621, 3, 118, 59, 0, 616, 618, 5, 220, 0, 0, 617, 619, 3, 72, 36, 0, 618, 617, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 622, 5, 236, 0, 0, 621, 616, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 625, 5, 220, 0, 0, 624, 626, 5, 49, 0, 0, 625, 624, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 628, 1, 0, 0, 0, 627, 629, 3, 76, 38, 0, 628, 627, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 5, 236, 0, 0, 631, 663, 1, 0, 0, 0, 632, 663, 3, 82, 41, 0, 633, 663, 3, 108, 54, 0, 634, 635, 5, 208, 0, 0, 635, 663, 3, 74, 37, 18, 636, 637, 5, 115, 0, 0, 637, 663, 3, 74, 37, 12, 638, 639, 3, 98, 49, 0, 639, 640, 5, 210, 0, 0, 640, 642, 1, 0, 0, 0, 641, 638, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 663, 5, 202, 0, 0, 644, 645, 5, 220, 0, 0, 645, 646, 3, 2, 1, 0, 646, 647, 5, 236, 0, 0, 647, 663, 1, 0, 0, 0, 648, 649, 5, 220, 0, 0, 649, 650, 3, 74, 37, 0, 650, 651, 5, 236, 0, 0, 651, 663, 1, 0, 0, 0, 652, 653, 5, 220, 0, 0, 653, 654, 3, 72, 36, 0, 654, 655, 5, 236, 0, 0, 655, 663, 1, 0, 0, 0, 656, 658, 5, 219, 0, 0, 657, 659, 3, 72, 36, 0, 658, 657, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 663, 5, 235, 0, 0, 661, 663, 3, 90, 45, 0, 662, 539, 1, 0, 0, 0, 662, 559, 1, 0, 0, 0, 662, 566, 1, 0, 0, 0, 662, 568, 1, 0, 0, 0, 662, 572, 1, 0, 0, 0, 662, 583, 1, 0, 0, 0, 662, 585, 1, 0, 0, 0, 662, 593, 1, 0, 0, 0, 662, 605, 1, 0, 0, 0, 662, 615, 1, 0, 0, 0, 662, 632, 1, 0, 0, 0, 662, 633, 1, 0, 0, 0, 662, 634, 1, 0, 0, 0, 662, 636, 1, 0, 0, 0, 662, 641, 1, 0, 0, 0, 662, 644, 1, 0, 0, 0, 662, 648, 1, 0, 0, 0, 662, 652, 1, 0, 0, 0, 662, 656, 1, 0, 0, 0, 662, 661, 1, 0, 0, 0, 663, 757, 1, 0, 0, 0, 664, 668, 10, 17, 0, 0, 665, 669, 5, 202, 0, 0, 666, 669, 5, 238, 0, 0, 667, 669, 5, 227, 0, 0, 668, 665, 1, 0, 0, 0, 668, 666, 1, 0, 0, 0, 668, 667, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 756, 3, 74, 37, 18, 671, 675, 10, 16, 0, 0, 672, 676, 5, 228, 0, 0, 673, 676, 5, 208, 0, 0, 674, 676, 5, 207, 0, 0, 675, 672, 1, 0, 0, 0, 675, 673, 1, 0, 0, 0, 675, 674, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 756, 3, 74, 37, 17, 678, 703, 10, 15, 0, 0, 679, 704, 5, 211, 0, 0, 680, 704, 5, 212, 0, 0, 681, 704, 5, 223, 0, 0, 682, 704, 5, 221, 0, 0, 683, 704, 5, 222, 0, 0, 684, 704, 5, 213, 0, 0, 685, 704, 5, 214, 0, 0, 686, 688, 5, 115, 0, 0, 687, 686, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 689, 1, 0, 0, 0, 689, 691, 5, 80, 0, 0, 690, 692, 5, 25, 0, 0, 691, 690, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 704, 1, 0, 0, 0, 693, 695, 5, 115, 0, 0, 694, 693, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 704, 7, 10, 0, 0, 697, 704, 5, 232, 0, 0, 698, 704, 5, 233, 0, 0, 699, 704, 5, 225, 0, 0, 700, 704, 5, 216, 0, 0, 701, 704, 5, 217, 0, 0, 702, 704, 5, 224, 0, 0, 703, 679, 1, 0, 0, 0, 703, 680, 1, 0, 0, 0, 703, 681, 1, 0, 0, 0, 703, 682, 1, 0, 0, 0, 703, 683, 1, 0, 0, 0, 703, 684, 1, 0, 0, 0, 703, 685, 1, 0, 0, 0, 703, 687, 1, 0, 0, 0, 703, 694, 1, 0, 0, 0, 703, 697, 1, 0, 0, 0, 703, 698, 1, 0, 0, 0, 703, 699, 1, 0, 0, 0, 703, 700, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 756, 3, 74, 37, 16, 706, 707, 10, 13, 0, 0, 707, 708, 5, 226, 0, 0, 708, 756, 3, 74, 37, 14, 709, 710, 
10, 11, 0, 0, 710, 711, 5, 6, 0, 0, 711, 756, 3, 74, 37, 12, 712, 713, 10, 10, 0, 0, 713, 714, 5, 121, 0, 0, 714, 756, 3, 74, 37, 11, 715, 717, 10, 9, 0, 0, 716, 718, 5, 115, 0, 0, 717, 716, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 720, 5, 16, 0, 0, 720, 721, 3, 74, 37, 0, 721, 722, 5, 6, 0, 0, 722, 723, 3, 74, 37, 10, 723, 756, 1, 0, 0, 0, 724, 725, 10, 8, 0, 0, 725, 726, 5, 229, 0, 0, 726, 727, 3, 74, 37, 0, 727, 728, 5, 205, 0, 0, 728, 729, 3, 74, 37, 8, 729, 756, 1, 0, 0, 0, 730, 731, 10, 21, 0, 0, 731, 732, 5, 219, 0, 0, 732, 733, 3, 74, 37, 0, 733, 734, 5, 235, 0, 0, 734, 756, 1, 0, 0, 0, 735, 736, 10, 20, 0, 0, 736, 737, 5, 210, 0, 0, 737, 756, 5, 198, 0, 0, 738, 739, 10, 19, 0, 0, 739, 740, 5, 210, 0, 0, 740, 756, 3, 118, 59, 0, 741, 742, 10, 14, 0, 0, 742, 744, 5, 88, 0, 0, 743, 745, 5, 115, 0, 0, 744, 743, 1, 0, 0, 0, 744, 745, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 756, 5, 116, 0, 0, 747, 753, 10, 7, 0, 0, 748, 754, 3, 116, 58, 0, 749, 750, 5, 10, 0, 0, 750, 754, 3, 118, 59, 0, 751, 752, 5, 10, 0, 0, 752, 754, 5, 200, 0, 0, 753, 748, 1, 0, 0, 0, 753, 749, 1, 0, 0, 0, 753, 751, 1, 0, 0, 0, 754, 756, 1, 0, 0, 0, 755, 664, 1, 0, 0, 0, 755, 671, 1, 0, 0, 0, 755, 678, 1, 0, 0, 0, 755, 706, 1, 0, 0, 0, 755, 709, 1, 0, 0, 0, 755, 712, 1, 0, 0, 0, 755, 715, 1, 0, 0, 0, 755, 724, 1, 0, 0, 0, 755, 730, 1, 0, 0, 0, 755, 735, 1, 0, 0, 0, 755, 738, 1, 0, 0, 0, 755, 741, 1, 0, 0, 0, 755, 747, 1, 0, 0, 0, 756, 759, 1, 0, 0, 0, 757, 755, 1, 0, 0, 0, 757, 758, 1, 0, 0, 0, 758, 75, 1, 0, 0, 0, 759, 757, 1, 0, 0, 0, 760, 765, 3, 78, 39, 0, 761, 762, 5, 206, 0, 0, 762, 764, 3, 78, 39, 0, 763, 761, 1, 0, 0, 0, 764, 767, 1, 0, 0, 0, 765, 763, 1, 0, 0, 0, 765, 766, 1, 0, 0, 0, 766, 77, 1, 0, 0, 0, 767, 765, 1, 0, 0, 0, 768, 771, 3, 80, 40, 0, 769, 771, 3, 74, 37, 0, 770, 768, 1, 0, 0, 0, 770, 769, 1, 0, 0, 0, 771, 79, 1, 0, 0, 0, 772, 773, 5, 220, 0, 0, 773, 778, 3, 118, 59, 0, 774, 775, 5, 206, 0, 0, 775, 777, 3, 118, 59, 0, 776, 774, 1, 0, 0, 0, 777, 780, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 781, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 781, 782, 5, 236, 0, 0, 782, 792, 1, 0, 0, 0, 783, 788, 3, 118, 59, 0, 784, 785, 5, 206, 0, 0, 785, 787, 3, 118, 59, 0, 786, 784, 1, 0, 0, 0, 787, 790, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 792, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 791, 772, 1, 0, 0, 0, 791, 783, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 794, 5, 201, 0, 0, 794, 795, 3, 74, 37, 0, 795, 81, 1, 0, 0, 0, 796, 797, 5, 222, 0, 0, 797, 801, 3, 118, 59, 0, 798, 800, 3, 84, 42, 0, 799, 798, 1, 0, 0, 0, 800, 803, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 801, 802, 1, 0, 0, 0, 802, 804, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 804, 805, 5, 238, 0, 0, 805, 806, 5, 214, 0, 0, 806, 825, 1, 0, 0, 0, 807, 808, 5, 222, 0, 0, 808, 812, 3, 118, 59, 0, 809, 811, 3, 84, 42, 0, 810, 809, 1, 0, 0, 0, 811, 814, 1, 0, 0, 0, 812, 810, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 815, 1, 0, 0, 0, 814, 812, 1, 0, 0, 0, 815, 817, 5, 214, 0, 0, 816, 818, 3, 82, 41, 0, 817, 816, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 5, 222, 0, 0, 820, 821, 5, 238, 0, 0, 821, 822, 3, 118, 59, 0, 822, 823, 5, 214, 0, 0, 823, 825, 1, 0, 0, 0, 824, 796, 1, 0, 0, 0, 824, 807, 1, 0, 0, 0, 825, 83, 1, 0, 0, 0, 826, 827, 3, 118, 59, 0, 827, 828, 5, 212, 0, 0, 828, 829, 5, 200, 0, 0, 829, 838, 1, 0, 0, 0, 830, 831, 3, 118, 59, 0, 831, 832, 5, 212, 0, 0, 832, 833, 5, 218, 0, 0, 833, 834, 3, 74, 37, 0, 834, 835, 5, 234, 0, 0, 835, 838, 1, 0, 0, 0, 836, 838, 3, 118, 59, 0, 837, 826, 1, 0, 0, 0, 837, 
830, 1, 0, 0, 0, 837, 836, 1, 0, 0, 0, 838, 85, 1, 0, 0, 0, 839, 844, 3, 88, 44, 0, 840, 841, 5, 206, 0, 0, 841, 843, 3, 88, 44, 0, 842, 840, 1, 0, 0, 0, 843, 846, 1, 0, 0, 0, 844, 842, 1, 0, 0, 0, 844, 845, 1, 0, 0, 0, 845, 87, 1, 0, 0, 0, 846, 844, 1, 0, 0, 0, 847, 848, 3, 118, 59, 0, 848, 849, 5, 10, 0, 0, 849, 850, 5, 220, 0, 0, 850, 851, 3, 2, 1, 0, 851, 852, 5, 236, 0, 0, 852, 858, 1, 0, 0, 0, 853, 854, 3, 74, 37, 0, 854, 855, 5, 10, 0, 0, 855, 856, 3, 118, 59, 0, 856, 858, 1, 0, 0, 0, 857, 847, 1, 0, 0, 0, 857, 853, 1, 0, 0, 0, 858, 89, 1, 0, 0, 0, 859, 867, 3, 122, 61, 0, 860, 861, 3, 98, 49, 0, 861, 862, 5, 210, 0, 0, 862, 864, 1, 0, 0, 0, 863, 860, 1, 0, 0, 0, 863, 864, 1, 0, 0, 0, 864, 865, 1, 0, 0, 0, 865, 867, 3, 92, 46, 0, 866, 859, 1, 0, 0, 0, 866, 863, 1, 0, 0, 0, 867, 91, 1, 0, 0, 0, 868, 873, 3, 118, 59, 0, 869, 870, 5, 210, 0, 0, 870, 872, 3, 118, 59, 0, 871, 869, 1, 0, 0, 0, 872, 875, 1, 0, 0, 0, 873, 871, 1, 0, 0, 0, 873, 874, 1, 0, 0, 0, 874, 93, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 876, 877, 6, 47, -1, 0, 877, 886, 3, 98, 49, 0, 878, 886, 3, 96, 48, 0, 879, 880, 5, 220, 0, 0, 880, 881, 3, 2, 1, 0, 881, 882, 5, 236, 0, 0, 882, 886, 1, 0, 0, 0, 883, 886, 3, 82, 41, 0, 884, 886, 3, 122, 61, 0, 885, 876, 1, 0, 0, 0, 885, 878, 1, 0, 0, 0, 885, 879, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 884, 1, 0, 0, 0, 886, 895, 1, 0, 0, 0, 887, 891, 10, 3, 0, 0, 888, 892, 3, 116, 58, 0, 889, 890, 5, 10, 0, 0, 890, 892, 3, 118, 59, 0, 891, 888, 1, 0, 0, 0, 891, 889, 1, 0, 0, 0, 892, 894, 1, 0, 0, 0, 893, 887, 1, 0, 0, 0, 894, 897, 1, 0, 0, 0, 895, 893, 1, 0, 0, 0, 895, 896, 1, 0, 0, 0, 896, 95, 1, 0, 0, 0, 897, 895, 1, 0, 0, 0, 898, 899, 3, 118, 59, 0, 899, 901, 5, 220, 0, 0, 900, 902, 3, 100, 50, 0, 901, 900, 1, 0, 0, 0, 901, 902, 1, 0, 0, 0, 902, 903, 1, 0, 0, 0, 903, 904, 5, 236, 0, 0, 904, 97, 1, 0, 0, 0, 905, 906, 3, 102, 51, 0, 906, 907, 5, 210, 0, 0, 907, 909, 1, 0, 0, 0, 908, 905, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 911, 3, 118, 59, 0, 911, 99, 1, 0, 0, 0, 912, 917, 3, 74, 37, 0, 913, 914, 5, 206, 0, 0, 914, 916, 3, 74, 37, 0, 915, 913, 1, 0, 0, 0, 916, 919, 1, 0, 0, 0, 917, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 101, 1, 0, 0, 0, 919, 917, 1, 0, 0, 0, 920, 921, 3, 118, 59, 0, 921, 103, 1, 0, 0, 0, 922, 931, 5, 196, 0, 0, 923, 924, 5, 210, 0, 0, 924, 931, 7, 11, 0, 0, 925, 926, 5, 198, 0, 0, 926, 928, 5, 210, 0, 0, 927, 929, 7, 11, 0, 0, 928, 927, 1, 0, 0, 0, 928, 929, 1, 0, 0, 0, 929, 931, 1, 0, 0, 0, 930, 922, 1, 0, 0, 0, 930, 923, 1, 0, 0, 0, 930, 925, 1, 0, 0, 0, 931, 105, 1, 0, 0, 0, 932, 934, 7, 12, 0, 0, 933, 932, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 941, 1, 0, 0, 0, 935, 942, 3, 104, 52, 0, 936, 942, 5, 197, 0, 0, 937, 942, 5, 198, 0, 0, 938, 942, 5, 199, 0, 0, 939, 942, 5, 82, 0, 0, 940, 942, 5, 113, 0, 0, 941, 935, 1, 0, 0, 0, 941, 936, 1, 0, 0, 0, 941, 937, 1, 0, 0, 0, 941, 938, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 940, 1, 0, 0, 0, 942, 107, 1, 0, 0, 0, 943, 947, 3, 106, 53, 0, 944, 947, 5, 200, 0, 0, 945, 947, 5, 116, 0, 0, 946, 943, 1, 0, 0, 0, 946, 944, 1, 0, 0, 0, 946, 945, 1, 0, 0, 0, 947, 109, 1, 0, 0, 0, 948, 949, 7, 13, 0, 0, 949, 111, 1, 0, 0, 0, 950, 951, 7, 14, 0, 0, 951, 113, 1, 0, 0, 0, 952, 953, 7, 15, 0, 0, 953, 115, 1, 0, 0, 0, 954, 957, 5, 195, 0, 0, 955, 957, 3, 114, 57, 0, 956, 954, 1, 0, 0, 0, 956, 955, 1, 0, 0, 0, 957, 117, 1, 0, 0, 0, 958, 962, 5, 195, 0, 0, 959, 962, 3, 110, 55, 0, 960, 962, 3, 112, 56, 0, 961, 958, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 960, 1, 0, 0, 0, 962, 119, 1, 0, 0, 0, 963, 964, 5, 200, 0, 0, 
diff --git a/posthog/hogql/grammar/HogQLParser.py b/posthog/hogql/grammar/HogQLParser.py
index 6bf1053a93059..dff73f2d50927 100644
--- a/posthog/hogql/grammar/HogQLParser.py
+++ b/posthog/hogql/grammar/HogQLParser.py
@@ -10,7 +10,7 @@ def serializedATN():
     return [
-        4,1,242,979,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,
+        4,1,242,972,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,
         7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,
         13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,
         20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,
@@ -57,344 +57,342 @@ def serializedATN():
[… 344 lines of the previous serializedATN table replaced by 342 regenerated lines; numeric payload elided …]
1,0,0,0,799,800,1,0,0,0,800,801,5,201,0,0,801,802,3,74,37,0,802, - 81,1,0,0,0,803,804,5,222,0,0,804,808,3,118,59,0,805,807,3,84,42, - 0,806,805,1,0,0,0,807,810,1,0,0,0,808,806,1,0,0,0,808,809,1,0,0, - 0,809,811,1,0,0,0,810,808,1,0,0,0,811,812,5,238,0,0,812,813,5,214, - 0,0,813,832,1,0,0,0,814,815,5,222,0,0,815,819,3,118,59,0,816,818, - 3,84,42,0,817,816,1,0,0,0,818,821,1,0,0,0,819,817,1,0,0,0,819,820, - 1,0,0,0,820,822,1,0,0,0,821,819,1,0,0,0,822,824,5,214,0,0,823,825, - 3,82,41,0,824,823,1,0,0,0,824,825,1,0,0,0,825,826,1,0,0,0,826,827, - 5,222,0,0,827,828,5,238,0,0,828,829,3,118,59,0,829,830,5,214,0,0, - 830,832,1,0,0,0,831,803,1,0,0,0,831,814,1,0,0,0,832,83,1,0,0,0,833, - 834,3,118,59,0,834,835,5,212,0,0,835,836,5,200,0,0,836,845,1,0,0, - 0,837,838,3,118,59,0,838,839,5,212,0,0,839,840,5,218,0,0,840,841, - 3,74,37,0,841,842,5,234,0,0,842,845,1,0,0,0,843,845,3,118,59,0,844, - 833,1,0,0,0,844,837,1,0,0,0,844,843,1,0,0,0,845,85,1,0,0,0,846,851, - 3,88,44,0,847,848,5,206,0,0,848,850,3,88,44,0,849,847,1,0,0,0,850, - 853,1,0,0,0,851,849,1,0,0,0,851,852,1,0,0,0,852,87,1,0,0,0,853,851, - 1,0,0,0,854,855,3,118,59,0,855,856,5,10,0,0,856,857,5,220,0,0,857, - 858,3,2,1,0,858,859,5,236,0,0,859,865,1,0,0,0,860,861,3,74,37,0, - 861,862,5,10,0,0,862,863,3,118,59,0,863,865,1,0,0,0,864,854,1,0, - 0,0,864,860,1,0,0,0,865,89,1,0,0,0,866,874,3,122,61,0,867,868,3, - 98,49,0,868,869,5,210,0,0,869,871,1,0,0,0,870,867,1,0,0,0,870,871, - 1,0,0,0,871,872,1,0,0,0,872,874,3,92,46,0,873,866,1,0,0,0,873,870, - 1,0,0,0,874,91,1,0,0,0,875,880,3,118,59,0,876,877,5,210,0,0,877, - 879,3,118,59,0,878,876,1,0,0,0,879,882,1,0,0,0,880,878,1,0,0,0,880, - 881,1,0,0,0,881,93,1,0,0,0,882,880,1,0,0,0,883,884,6,47,-1,0,884, - 893,3,98,49,0,885,893,3,96,48,0,886,887,5,220,0,0,887,888,3,2,1, - 0,888,889,5,236,0,0,889,893,1,0,0,0,890,893,3,82,41,0,891,893,3, - 122,61,0,892,883,1,0,0,0,892,885,1,0,0,0,892,886,1,0,0,0,892,890, - 1,0,0,0,892,891,1,0,0,0,893,902,1,0,0,0,894,898,10,3,0,0,895,899, - 3,116,58,0,896,897,5,10,0,0,897,899,3,118,59,0,898,895,1,0,0,0,898, - 896,1,0,0,0,899,901,1,0,0,0,900,894,1,0,0,0,901,904,1,0,0,0,902, - 900,1,0,0,0,902,903,1,0,0,0,903,95,1,0,0,0,904,902,1,0,0,0,905,906, - 3,118,59,0,906,908,5,220,0,0,907,909,3,100,50,0,908,907,1,0,0,0, - 908,909,1,0,0,0,909,910,1,0,0,0,910,911,5,236,0,0,911,97,1,0,0,0, - 912,913,3,102,51,0,913,914,5,210,0,0,914,916,1,0,0,0,915,912,1,0, - 0,0,915,916,1,0,0,0,916,917,1,0,0,0,917,918,3,118,59,0,918,99,1, - 0,0,0,919,924,3,74,37,0,920,921,5,206,0,0,921,923,3,74,37,0,922, - 920,1,0,0,0,923,926,1,0,0,0,924,922,1,0,0,0,924,925,1,0,0,0,925, - 101,1,0,0,0,926,924,1,0,0,0,927,928,3,118,59,0,928,103,1,0,0,0,929, - 938,5,196,0,0,930,931,5,210,0,0,931,938,7,11,0,0,932,933,5,198,0, - 0,933,935,5,210,0,0,934,936,7,11,0,0,935,934,1,0,0,0,935,936,1,0, - 0,0,936,938,1,0,0,0,937,929,1,0,0,0,937,930,1,0,0,0,937,932,1,0, - 0,0,938,105,1,0,0,0,939,941,7,12,0,0,940,939,1,0,0,0,940,941,1,0, - 0,0,941,948,1,0,0,0,942,949,3,104,52,0,943,949,5,197,0,0,944,949, - 5,198,0,0,945,949,5,199,0,0,946,949,5,82,0,0,947,949,5,113,0,0,948, - 942,1,0,0,0,948,943,1,0,0,0,948,944,1,0,0,0,948,945,1,0,0,0,948, - 946,1,0,0,0,948,947,1,0,0,0,949,107,1,0,0,0,950,954,3,106,53,0,951, - 954,5,200,0,0,952,954,5,116,0,0,953,950,1,0,0,0,953,951,1,0,0,0, - 953,952,1,0,0,0,954,109,1,0,0,0,955,956,7,13,0,0,956,111,1,0,0,0, - 957,958,7,14,0,0,958,113,1,0,0,0,959,960,7,15,0,0,960,115,1,0,0, - 0,961,964,5,195,0,0,962,964,3,114,57,0,963,961,1,0,0,0,963,962,1, - 0,0,0,964,117,1,0,0,0,965,969,5,195,0,0,966,969,3,110,55,0,967,969, - 
3,112,56,0,968,965,1,0,0,0,968,966,1,0,0,0,968,967,1,0,0,0,969,119, - 1,0,0,0,970,971,5,200,0,0,971,972,5,212,0,0,972,973,3,106,53,0,973, - 121,1,0,0,0,974,975,5,218,0,0,975,976,3,118,59,0,976,977,5,234,0, - 0,977,123,1,0,0,0,120,127,137,146,149,153,156,160,163,166,169,172, - 176,180,183,186,189,193,196,205,211,232,249,266,272,278,289,291, - 302,305,311,319,325,327,331,336,339,342,346,350,353,355,358,362, - 366,369,371,373,378,389,395,402,407,411,415,421,423,430,438,441, - 444,463,477,493,505,517,525,529,536,542,551,555,586,603,615,625, - 628,632,635,648,665,669,675,682,694,698,701,710,724,751,760,762, - 764,772,777,785,795,798,808,819,824,831,844,851,864,870,873,880, - 892,898,902,908,915,924,935,937,940,948,953,963,968 + 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,745, + 8,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,3,37,754,8,37,5,37,756,8, + 37,10,37,12,37,759,9,37,1,38,1,38,1,38,5,38,764,8,38,10,38,12,38, + 767,9,38,1,39,1,39,3,39,771,8,39,1,40,1,40,1,40,1,40,5,40,777,8, + 40,10,40,12,40,780,9,40,1,40,1,40,1,40,1,40,1,40,5,40,787,8,40,10, + 40,12,40,790,9,40,3,40,792,8,40,1,40,1,40,1,40,1,41,1,41,1,41,5, + 41,800,8,41,10,41,12,41,803,9,41,1,41,1,41,1,41,1,41,1,41,1,41,5, + 41,811,8,41,10,41,12,41,814,9,41,1,41,1,41,3,41,818,8,41,1,41,1, + 41,1,41,1,41,1,41,3,41,825,8,41,1,42,1,42,1,42,1,42,1,42,1,42,1, + 42,1,42,1,42,1,42,1,42,3,42,838,8,42,1,43,1,43,1,43,5,43,843,8,43, + 10,43,12,43,846,9,43,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44, + 1,44,3,44,858,8,44,1,45,1,45,1,45,1,45,3,45,864,8,45,1,45,3,45,867, + 8,45,1,46,1,46,1,46,5,46,872,8,46,10,46,12,46,875,9,46,1,47,1,47, + 1,47,1,47,1,47,1,47,1,47,1,47,1,47,3,47,886,8,47,1,47,1,47,1,47, + 1,47,3,47,892,8,47,5,47,894,8,47,10,47,12,47,897,9,47,1,48,1,48, + 1,48,3,48,902,8,48,1,48,1,48,1,49,1,49,1,49,3,49,909,8,49,1,49,1, + 49,1,50,1,50,1,50,5,50,916,8,50,10,50,12,50,919,9,50,1,51,1,51,1, + 52,1,52,1,52,1,52,1,52,1,52,3,52,929,8,52,3,52,931,8,52,1,53,3,53, + 934,8,53,1,53,1,53,1,53,1,53,1,53,1,53,3,53,942,8,53,1,54,1,54,1, + 54,3,54,947,8,54,1,55,1,55,1,56,1,56,1,57,1,57,1,58,1,58,3,58,957, + 8,58,1,59,1,59,1,59,3,59,962,8,59,1,60,1,60,1,60,1,60,1,61,1,61, + 1,61,1,61,1,61,0,3,36,74,94,62,0,2,4,6,8,10,12,14,16,18,20,22,24, + 26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68, + 70,72,74,76,78,80,82,84,86,88,90,92,94,96,98,100,102,104,106,108, + 110,112,114,116,118,120,122,0,16,2,0,32,32,141,141,2,0,84,84,96, + 96,3,0,4,4,8,8,12,12,4,0,4,4,7,8,12,12,147,147,2,0,96,96,140,140, + 2,0,4,4,8,8,2,0,11,11,42,43,2,0,62,62,93,93,2,0,133,133,143,143, + 3,0,17,17,95,95,170,170,2,0,79,79,98,98,1,0,197,198,2,0,208,208, + 228,228,8,0,37,37,76,76,108,108,110,110,132,132,145,145,185,185, + 190,190,13,0,2,24,26,36,38,75,77,81,83,107,109,109,111,112,114,115, + 117,130,133,144,146,184,186,189,191,192,4,0,36,36,62,62,77,77,91, + 91,1099,0,127,1,0,0,0,2,131,1,0,0,0,4,146,1,0,0,0,6,149,1,0,0,0, + 8,198,1,0,0,0,10,201,1,0,0,0,12,207,1,0,0,0,14,211,1,0,0,0,16,217, + 1,0,0,0,18,235,1,0,0,0,20,238,1,0,0,0,22,241,1,0,0,0,24,251,1,0, + 0,0,26,254,1,0,0,0,28,258,1,0,0,0,30,291,1,0,0,0,32,293,1,0,0,0, + 34,296,1,0,0,0,36,311,1,0,0,0,38,373,1,0,0,0,40,378,1,0,0,0,42,389, + 1,0,0,0,44,391,1,0,0,0,46,397,1,0,0,0,48,405,1,0,0,0,50,423,1,0, + 0,0,52,425,1,0,0,0,54,433,1,0,0,0,56,438,1,0,0,0,58,446,1,0,0,0, + 60,450,1,0,0,0,62,454,1,0,0,0,64,463,1,0,0,0,66,477,1,0,0,0,68,479, + 1,0,0,0,70,529,1,0,0,0,72,531,1,0,0,0,74,662,1,0,0,0,76,760,1,0, + 0,0,78,770,1,0,0,0,80,791,1,0,0,0,82,824,1,0,0,0,84,837,1,0,0,0, + 
86,839,1,0,0,0,88,857,1,0,0,0,90,866,1,0,0,0,92,868,1,0,0,0,94,885, + 1,0,0,0,96,898,1,0,0,0,98,908,1,0,0,0,100,912,1,0,0,0,102,920,1, + 0,0,0,104,930,1,0,0,0,106,933,1,0,0,0,108,946,1,0,0,0,110,948,1, + 0,0,0,112,950,1,0,0,0,114,952,1,0,0,0,116,956,1,0,0,0,118,961,1, + 0,0,0,120,963,1,0,0,0,122,967,1,0,0,0,124,128,3,2,1,0,125,128,3, + 6,3,0,126,128,3,82,41,0,127,124,1,0,0,0,127,125,1,0,0,0,127,126, + 1,0,0,0,128,129,1,0,0,0,129,130,5,0,0,1,130,1,1,0,0,0,131,137,3, + 4,2,0,132,133,5,176,0,0,133,134,5,4,0,0,134,136,3,4,2,0,135,132, + 1,0,0,0,136,139,1,0,0,0,137,135,1,0,0,0,137,138,1,0,0,0,138,3,1, + 0,0,0,139,137,1,0,0,0,140,147,3,6,3,0,141,142,5,220,0,0,142,143, + 3,2,1,0,143,144,5,236,0,0,144,147,1,0,0,0,145,147,3,122,61,0,146, + 140,1,0,0,0,146,141,1,0,0,0,146,145,1,0,0,0,147,5,1,0,0,0,148,150, + 3,8,4,0,149,148,1,0,0,0,149,150,1,0,0,0,150,151,1,0,0,0,151,153, + 5,146,0,0,152,154,5,49,0,0,153,152,1,0,0,0,153,154,1,0,0,0,154,156, + 1,0,0,0,155,157,3,10,5,0,156,155,1,0,0,0,156,157,1,0,0,0,157,158, + 1,0,0,0,158,160,3,72,36,0,159,161,3,12,6,0,160,159,1,0,0,0,160,161, + 1,0,0,0,161,163,1,0,0,0,162,164,3,14,7,0,163,162,1,0,0,0,163,164, + 1,0,0,0,164,166,1,0,0,0,165,167,3,18,9,0,166,165,1,0,0,0,166,167, + 1,0,0,0,167,169,1,0,0,0,168,170,3,20,10,0,169,168,1,0,0,0,169,170, + 1,0,0,0,170,172,1,0,0,0,171,173,3,22,11,0,172,171,1,0,0,0,172,173, + 1,0,0,0,173,176,1,0,0,0,174,175,5,189,0,0,175,177,7,0,0,0,176,174, + 1,0,0,0,176,177,1,0,0,0,177,180,1,0,0,0,178,179,5,189,0,0,179,181, + 5,169,0,0,180,178,1,0,0,0,180,181,1,0,0,0,181,183,1,0,0,0,182,184, + 3,24,12,0,183,182,1,0,0,0,183,184,1,0,0,0,184,186,1,0,0,0,185,187, + 3,16,8,0,186,185,1,0,0,0,186,187,1,0,0,0,187,189,1,0,0,0,188,190, + 3,26,13,0,189,188,1,0,0,0,189,190,1,0,0,0,190,193,1,0,0,0,191,194, + 3,30,15,0,192,194,3,32,16,0,193,191,1,0,0,0,193,192,1,0,0,0,193, + 194,1,0,0,0,194,196,1,0,0,0,195,197,3,34,17,0,196,195,1,0,0,0,196, + 197,1,0,0,0,197,7,1,0,0,0,198,199,5,189,0,0,199,200,3,86,43,0,200, + 9,1,0,0,0,201,202,5,168,0,0,202,205,5,198,0,0,203,204,5,189,0,0, + 204,206,5,164,0,0,205,203,1,0,0,0,205,206,1,0,0,0,206,11,1,0,0,0, + 207,208,5,68,0,0,208,209,3,36,18,0,209,13,1,0,0,0,210,212,7,1,0, + 0,211,210,1,0,0,0,211,212,1,0,0,0,212,213,1,0,0,0,213,214,5,9,0, + 0,214,215,5,90,0,0,215,216,3,72,36,0,216,15,1,0,0,0,217,218,5,188, + 0,0,218,219,3,118,59,0,219,220,5,10,0,0,220,221,5,220,0,0,221,222, + 3,56,28,0,222,232,5,236,0,0,223,224,5,206,0,0,224,225,3,118,59,0, + 225,226,5,10,0,0,226,227,5,220,0,0,227,228,3,56,28,0,228,229,5,236, + 0,0,229,231,1,0,0,0,230,223,1,0,0,0,231,234,1,0,0,0,232,230,1,0, + 0,0,232,233,1,0,0,0,233,17,1,0,0,0,234,232,1,0,0,0,235,236,5,129, + 0,0,236,237,3,74,37,0,237,19,1,0,0,0,238,239,5,187,0,0,239,240,3, + 74,37,0,240,21,1,0,0,0,241,242,5,73,0,0,242,249,5,18,0,0,243,244, + 7,0,0,0,244,245,5,220,0,0,245,246,3,72,36,0,246,247,5,236,0,0,247, + 250,1,0,0,0,248,250,3,72,36,0,249,243,1,0,0,0,249,248,1,0,0,0,250, + 23,1,0,0,0,251,252,5,74,0,0,252,253,3,74,37,0,253,25,1,0,0,0,254, + 255,5,122,0,0,255,256,5,18,0,0,256,257,3,46,23,0,257,27,1,0,0,0, + 258,259,5,122,0,0,259,260,5,18,0,0,260,261,3,72,36,0,261,29,1,0, + 0,0,262,263,5,99,0,0,263,266,3,74,37,0,264,265,5,206,0,0,265,267, + 3,74,37,0,266,264,1,0,0,0,266,267,1,0,0,0,267,272,1,0,0,0,268,269, + 5,189,0,0,269,273,5,164,0,0,270,271,5,18,0,0,271,273,3,72,36,0,272, + 268,1,0,0,0,272,270,1,0,0,0,272,273,1,0,0,0,273,292,1,0,0,0,274, + 275,5,99,0,0,275,278,3,74,37,0,276,277,5,189,0,0,277,279,5,164,0, + 0,278,276,1,0,0,0,278,279,1,0,0,0,279,280,1,0,0,0,280,281,5,118, + 
0,0,281,282,3,74,37,0,282,292,1,0,0,0,283,284,5,99,0,0,284,285,3, + 74,37,0,285,286,5,118,0,0,286,289,3,74,37,0,287,288,5,18,0,0,288, + 290,3,72,36,0,289,287,1,0,0,0,289,290,1,0,0,0,290,292,1,0,0,0,291, + 262,1,0,0,0,291,274,1,0,0,0,291,283,1,0,0,0,292,31,1,0,0,0,293,294, + 5,118,0,0,294,295,3,74,37,0,295,33,1,0,0,0,296,297,5,150,0,0,297, + 298,3,52,26,0,298,35,1,0,0,0,299,300,6,18,-1,0,300,302,3,94,47,0, + 301,303,5,61,0,0,302,301,1,0,0,0,302,303,1,0,0,0,303,305,1,0,0,0, + 304,306,3,44,22,0,305,304,1,0,0,0,305,306,1,0,0,0,306,312,1,0,0, + 0,307,308,5,220,0,0,308,309,3,36,18,0,309,310,5,236,0,0,310,312, + 1,0,0,0,311,299,1,0,0,0,311,307,1,0,0,0,312,327,1,0,0,0,313,314, + 10,3,0,0,314,315,3,40,20,0,315,316,3,36,18,4,316,326,1,0,0,0,317, + 319,10,4,0,0,318,320,3,38,19,0,319,318,1,0,0,0,319,320,1,0,0,0,320, + 321,1,0,0,0,321,322,5,90,0,0,322,323,3,36,18,0,323,324,3,42,21,0, + 324,326,1,0,0,0,325,313,1,0,0,0,325,317,1,0,0,0,326,329,1,0,0,0, + 327,325,1,0,0,0,327,328,1,0,0,0,328,37,1,0,0,0,329,327,1,0,0,0,330, + 332,7,2,0,0,331,330,1,0,0,0,331,332,1,0,0,0,332,333,1,0,0,0,333, + 340,5,84,0,0,334,336,5,84,0,0,335,337,7,2,0,0,336,335,1,0,0,0,336, + 337,1,0,0,0,337,340,1,0,0,0,338,340,7,2,0,0,339,331,1,0,0,0,339, + 334,1,0,0,0,339,338,1,0,0,0,340,374,1,0,0,0,341,343,7,3,0,0,342, + 341,1,0,0,0,342,343,1,0,0,0,343,344,1,0,0,0,344,346,7,4,0,0,345, + 347,5,123,0,0,346,345,1,0,0,0,346,347,1,0,0,0,347,356,1,0,0,0,348, + 350,7,4,0,0,349,351,5,123,0,0,350,349,1,0,0,0,350,351,1,0,0,0,351, + 353,1,0,0,0,352,354,7,3,0,0,353,352,1,0,0,0,353,354,1,0,0,0,354, + 356,1,0,0,0,355,342,1,0,0,0,355,348,1,0,0,0,356,374,1,0,0,0,357, + 359,7,5,0,0,358,357,1,0,0,0,358,359,1,0,0,0,359,360,1,0,0,0,360, + 362,5,69,0,0,361,363,5,123,0,0,362,361,1,0,0,0,362,363,1,0,0,0,363, + 372,1,0,0,0,364,366,5,69,0,0,365,367,5,123,0,0,366,365,1,0,0,0,366, + 367,1,0,0,0,367,369,1,0,0,0,368,370,7,5,0,0,369,368,1,0,0,0,369, + 370,1,0,0,0,370,372,1,0,0,0,371,358,1,0,0,0,371,364,1,0,0,0,372, + 374,1,0,0,0,373,339,1,0,0,0,373,355,1,0,0,0,373,371,1,0,0,0,374, + 39,1,0,0,0,375,376,5,31,0,0,376,379,5,90,0,0,377,379,5,206,0,0,378, + 375,1,0,0,0,378,377,1,0,0,0,379,41,1,0,0,0,380,381,5,119,0,0,381, + 390,3,72,36,0,382,383,5,179,0,0,383,384,5,220,0,0,384,385,3,72,36, + 0,385,386,5,236,0,0,386,390,1,0,0,0,387,388,5,179,0,0,388,390,3, + 72,36,0,389,380,1,0,0,0,389,382,1,0,0,0,389,387,1,0,0,0,390,43,1, + 0,0,0,391,392,5,144,0,0,392,395,3,50,25,0,393,394,5,118,0,0,394, + 396,3,50,25,0,395,393,1,0,0,0,395,396,1,0,0,0,396,45,1,0,0,0,397, + 402,3,48,24,0,398,399,5,206,0,0,399,401,3,48,24,0,400,398,1,0,0, + 0,401,404,1,0,0,0,402,400,1,0,0,0,402,403,1,0,0,0,403,47,1,0,0,0, + 404,402,1,0,0,0,405,407,3,74,37,0,406,408,7,6,0,0,407,406,1,0,0, + 0,407,408,1,0,0,0,408,411,1,0,0,0,409,410,5,117,0,0,410,412,7,7, + 0,0,411,409,1,0,0,0,411,412,1,0,0,0,412,415,1,0,0,0,413,414,5,26, + 0,0,414,416,5,200,0,0,415,413,1,0,0,0,415,416,1,0,0,0,416,49,1,0, + 0,0,417,424,3,122,61,0,418,421,3,106,53,0,419,420,5,238,0,0,420, + 422,3,106,53,0,421,419,1,0,0,0,421,422,1,0,0,0,422,424,1,0,0,0,423, + 417,1,0,0,0,423,418,1,0,0,0,424,51,1,0,0,0,425,430,3,54,27,0,426, + 427,5,206,0,0,427,429,3,54,27,0,428,426,1,0,0,0,429,432,1,0,0,0, + 430,428,1,0,0,0,430,431,1,0,0,0,431,53,1,0,0,0,432,430,1,0,0,0,433, + 434,3,118,59,0,434,435,5,212,0,0,435,436,3,108,54,0,436,55,1,0,0, + 0,437,439,3,58,29,0,438,437,1,0,0,0,438,439,1,0,0,0,439,441,1,0, + 0,0,440,442,3,60,30,0,441,440,1,0,0,0,441,442,1,0,0,0,442,444,1, + 0,0,0,443,445,3,62,31,0,444,443,1,0,0,0,444,445,1,0,0,0,445,57,1, + 
0,0,0,446,447,5,126,0,0,447,448,5,18,0,0,448,449,3,72,36,0,449,59, + 1,0,0,0,450,451,5,122,0,0,451,452,5,18,0,0,452,453,3,46,23,0,453, + 61,1,0,0,0,454,455,7,8,0,0,455,456,3,64,32,0,456,63,1,0,0,0,457, + 464,3,66,33,0,458,459,5,16,0,0,459,460,3,66,33,0,460,461,5,6,0,0, + 461,462,3,66,33,0,462,464,1,0,0,0,463,457,1,0,0,0,463,458,1,0,0, + 0,464,65,1,0,0,0,465,466,5,33,0,0,466,478,5,142,0,0,467,468,5,175, + 0,0,468,478,5,128,0,0,469,470,5,175,0,0,470,478,5,64,0,0,471,472, + 3,106,53,0,472,473,5,128,0,0,473,478,1,0,0,0,474,475,3,106,53,0, + 475,476,5,64,0,0,476,478,1,0,0,0,477,465,1,0,0,0,477,467,1,0,0,0, + 477,469,1,0,0,0,477,471,1,0,0,0,477,474,1,0,0,0,478,67,1,0,0,0,479, + 480,3,74,37,0,480,481,5,0,0,1,481,69,1,0,0,0,482,530,3,118,59,0, + 483,484,3,118,59,0,484,485,5,220,0,0,485,486,3,118,59,0,486,493, + 3,70,35,0,487,488,5,206,0,0,488,489,3,118,59,0,489,490,3,70,35,0, + 490,492,1,0,0,0,491,487,1,0,0,0,492,495,1,0,0,0,493,491,1,0,0,0, + 493,494,1,0,0,0,494,496,1,0,0,0,495,493,1,0,0,0,496,497,5,236,0, + 0,497,530,1,0,0,0,498,499,3,118,59,0,499,500,5,220,0,0,500,505,3, + 120,60,0,501,502,5,206,0,0,502,504,3,120,60,0,503,501,1,0,0,0,504, + 507,1,0,0,0,505,503,1,0,0,0,505,506,1,0,0,0,506,508,1,0,0,0,507, + 505,1,0,0,0,508,509,5,236,0,0,509,530,1,0,0,0,510,511,3,118,59,0, + 511,512,5,220,0,0,512,517,3,70,35,0,513,514,5,206,0,0,514,516,3, + 70,35,0,515,513,1,0,0,0,516,519,1,0,0,0,517,515,1,0,0,0,517,518, + 1,0,0,0,518,520,1,0,0,0,519,517,1,0,0,0,520,521,5,236,0,0,521,530, + 1,0,0,0,522,523,3,118,59,0,523,525,5,220,0,0,524,526,3,72,36,0,525, + 524,1,0,0,0,525,526,1,0,0,0,526,527,1,0,0,0,527,528,5,236,0,0,528, + 530,1,0,0,0,529,482,1,0,0,0,529,483,1,0,0,0,529,498,1,0,0,0,529, + 510,1,0,0,0,529,522,1,0,0,0,530,71,1,0,0,0,531,536,3,74,37,0,532, + 533,5,206,0,0,533,535,3,74,37,0,534,532,1,0,0,0,535,538,1,0,0,0, + 536,534,1,0,0,0,536,537,1,0,0,0,537,73,1,0,0,0,538,536,1,0,0,0,539, + 540,6,37,-1,0,540,542,5,19,0,0,541,543,3,74,37,0,542,541,1,0,0,0, + 542,543,1,0,0,0,543,549,1,0,0,0,544,545,5,186,0,0,545,546,3,74,37, + 0,546,547,5,163,0,0,547,548,3,74,37,0,548,550,1,0,0,0,549,544,1, + 0,0,0,550,551,1,0,0,0,551,549,1,0,0,0,551,552,1,0,0,0,552,555,1, + 0,0,0,553,554,5,52,0,0,554,556,3,74,37,0,555,553,1,0,0,0,555,556, + 1,0,0,0,556,557,1,0,0,0,557,558,5,53,0,0,558,663,1,0,0,0,559,560, + 5,20,0,0,560,561,5,220,0,0,561,562,3,74,37,0,562,563,5,10,0,0,563, + 564,3,70,35,0,564,565,5,236,0,0,565,663,1,0,0,0,566,567,5,36,0,0, + 567,663,5,200,0,0,568,569,5,86,0,0,569,570,3,74,37,0,570,571,3,110, + 55,0,571,663,1,0,0,0,572,573,5,155,0,0,573,574,5,220,0,0,574,575, + 3,74,37,0,575,576,5,68,0,0,576,579,3,74,37,0,577,578,5,65,0,0,578, + 580,3,74,37,0,579,577,1,0,0,0,579,580,1,0,0,0,580,581,1,0,0,0,581, + 582,5,236,0,0,582,663,1,0,0,0,583,584,5,166,0,0,584,663,5,200,0, + 0,585,586,5,171,0,0,586,587,5,220,0,0,587,588,7,9,0,0,588,589,5, + 200,0,0,589,590,5,68,0,0,590,591,3,74,37,0,591,592,5,236,0,0,592, + 663,1,0,0,0,593,594,3,118,59,0,594,596,5,220,0,0,595,597,3,72,36, + 0,596,595,1,0,0,0,596,597,1,0,0,0,597,598,1,0,0,0,598,599,5,236, + 0,0,599,600,1,0,0,0,600,601,5,125,0,0,601,602,5,220,0,0,602,603, + 3,56,28,0,603,604,5,236,0,0,604,663,1,0,0,0,605,606,3,118,59,0,606, + 608,5,220,0,0,607,609,3,72,36,0,608,607,1,0,0,0,608,609,1,0,0,0, + 609,610,1,0,0,0,610,611,5,236,0,0,611,612,1,0,0,0,612,613,5,125, + 0,0,613,614,3,118,59,0,614,663,1,0,0,0,615,621,3,118,59,0,616,618, + 5,220,0,0,617,619,3,72,36,0,618,617,1,0,0,0,618,619,1,0,0,0,619, + 620,1,0,0,0,620,622,5,236,0,0,621,616,1,0,0,0,621,622,1,0,0,0,622, + 
623,1,0,0,0,623,625,5,220,0,0,624,626,5,49,0,0,625,624,1,0,0,0,625, + 626,1,0,0,0,626,628,1,0,0,0,627,629,3,76,38,0,628,627,1,0,0,0,628, + 629,1,0,0,0,629,630,1,0,0,0,630,631,5,236,0,0,631,663,1,0,0,0,632, + 663,3,82,41,0,633,663,3,108,54,0,634,635,5,208,0,0,635,663,3,74, + 37,18,636,637,5,115,0,0,637,663,3,74,37,12,638,639,3,98,49,0,639, + 640,5,210,0,0,640,642,1,0,0,0,641,638,1,0,0,0,641,642,1,0,0,0,642, + 643,1,0,0,0,643,663,5,202,0,0,644,645,5,220,0,0,645,646,3,2,1,0, + 646,647,5,236,0,0,647,663,1,0,0,0,648,649,5,220,0,0,649,650,3,74, + 37,0,650,651,5,236,0,0,651,663,1,0,0,0,652,653,5,220,0,0,653,654, + 3,72,36,0,654,655,5,236,0,0,655,663,1,0,0,0,656,658,5,219,0,0,657, + 659,3,72,36,0,658,657,1,0,0,0,658,659,1,0,0,0,659,660,1,0,0,0,660, + 663,5,235,0,0,661,663,3,90,45,0,662,539,1,0,0,0,662,559,1,0,0,0, + 662,566,1,0,0,0,662,568,1,0,0,0,662,572,1,0,0,0,662,583,1,0,0,0, + 662,585,1,0,0,0,662,593,1,0,0,0,662,605,1,0,0,0,662,615,1,0,0,0, + 662,632,1,0,0,0,662,633,1,0,0,0,662,634,1,0,0,0,662,636,1,0,0,0, + 662,641,1,0,0,0,662,644,1,0,0,0,662,648,1,0,0,0,662,652,1,0,0,0, + 662,656,1,0,0,0,662,661,1,0,0,0,663,757,1,0,0,0,664,668,10,17,0, + 0,665,669,5,202,0,0,666,669,5,238,0,0,667,669,5,227,0,0,668,665, + 1,0,0,0,668,666,1,0,0,0,668,667,1,0,0,0,669,670,1,0,0,0,670,756, + 3,74,37,18,671,675,10,16,0,0,672,676,5,228,0,0,673,676,5,208,0,0, + 674,676,5,207,0,0,675,672,1,0,0,0,675,673,1,0,0,0,675,674,1,0,0, + 0,676,677,1,0,0,0,677,756,3,74,37,17,678,703,10,15,0,0,679,704,5, + 211,0,0,680,704,5,212,0,0,681,704,5,223,0,0,682,704,5,221,0,0,683, + 704,5,222,0,0,684,704,5,213,0,0,685,704,5,214,0,0,686,688,5,115, + 0,0,687,686,1,0,0,0,687,688,1,0,0,0,688,689,1,0,0,0,689,691,5,80, + 0,0,690,692,5,25,0,0,691,690,1,0,0,0,691,692,1,0,0,0,692,704,1,0, + 0,0,693,695,5,115,0,0,694,693,1,0,0,0,694,695,1,0,0,0,695,696,1, + 0,0,0,696,704,7,10,0,0,697,704,5,232,0,0,698,704,5,233,0,0,699,704, + 5,225,0,0,700,704,5,216,0,0,701,704,5,217,0,0,702,704,5,224,0,0, + 703,679,1,0,0,0,703,680,1,0,0,0,703,681,1,0,0,0,703,682,1,0,0,0, + 703,683,1,0,0,0,703,684,1,0,0,0,703,685,1,0,0,0,703,687,1,0,0,0, + 703,694,1,0,0,0,703,697,1,0,0,0,703,698,1,0,0,0,703,699,1,0,0,0, + 703,700,1,0,0,0,703,701,1,0,0,0,703,702,1,0,0,0,704,705,1,0,0,0, + 705,756,3,74,37,16,706,707,10,13,0,0,707,708,5,226,0,0,708,756,3, + 74,37,14,709,710,10,11,0,0,710,711,5,6,0,0,711,756,3,74,37,12,712, + 713,10,10,0,0,713,714,5,121,0,0,714,756,3,74,37,11,715,717,10,9, + 0,0,716,718,5,115,0,0,717,716,1,0,0,0,717,718,1,0,0,0,718,719,1, + 0,0,0,719,720,5,16,0,0,720,721,3,74,37,0,721,722,5,6,0,0,722,723, + 3,74,37,10,723,756,1,0,0,0,724,725,10,8,0,0,725,726,5,229,0,0,726, + 727,3,74,37,0,727,728,5,205,0,0,728,729,3,74,37,8,729,756,1,0,0, + 0,730,731,10,21,0,0,731,732,5,219,0,0,732,733,3,74,37,0,733,734, + 5,235,0,0,734,756,1,0,0,0,735,736,10,20,0,0,736,737,5,210,0,0,737, + 756,5,198,0,0,738,739,10,19,0,0,739,740,5,210,0,0,740,756,3,118, + 59,0,741,742,10,14,0,0,742,744,5,88,0,0,743,745,5,115,0,0,744,743, + 1,0,0,0,744,745,1,0,0,0,745,746,1,0,0,0,746,756,5,116,0,0,747,753, + 10,7,0,0,748,754,3,116,58,0,749,750,5,10,0,0,750,754,3,118,59,0, + 751,752,5,10,0,0,752,754,5,200,0,0,753,748,1,0,0,0,753,749,1,0,0, + 0,753,751,1,0,0,0,754,756,1,0,0,0,755,664,1,0,0,0,755,671,1,0,0, + 0,755,678,1,0,0,0,755,706,1,0,0,0,755,709,1,0,0,0,755,712,1,0,0, + 0,755,715,1,0,0,0,755,724,1,0,0,0,755,730,1,0,0,0,755,735,1,0,0, + 0,755,738,1,0,0,0,755,741,1,0,0,0,755,747,1,0,0,0,756,759,1,0,0, + 0,757,755,1,0,0,0,757,758,1,0,0,0,758,75,1,0,0,0,759,757,1,0,0,0, + 
760,765,3,78,39,0,761,762,5,206,0,0,762,764,3,78,39,0,763,761,1, + 0,0,0,764,767,1,0,0,0,765,763,1,0,0,0,765,766,1,0,0,0,766,77,1,0, + 0,0,767,765,1,0,0,0,768,771,3,80,40,0,769,771,3,74,37,0,770,768, + 1,0,0,0,770,769,1,0,0,0,771,79,1,0,0,0,772,773,5,220,0,0,773,778, + 3,118,59,0,774,775,5,206,0,0,775,777,3,118,59,0,776,774,1,0,0,0, + 777,780,1,0,0,0,778,776,1,0,0,0,778,779,1,0,0,0,779,781,1,0,0,0, + 780,778,1,0,0,0,781,782,5,236,0,0,782,792,1,0,0,0,783,788,3,118, + 59,0,784,785,5,206,0,0,785,787,3,118,59,0,786,784,1,0,0,0,787,790, + 1,0,0,0,788,786,1,0,0,0,788,789,1,0,0,0,789,792,1,0,0,0,790,788, + 1,0,0,0,791,772,1,0,0,0,791,783,1,0,0,0,792,793,1,0,0,0,793,794, + 5,201,0,0,794,795,3,74,37,0,795,81,1,0,0,0,796,797,5,222,0,0,797, + 801,3,118,59,0,798,800,3,84,42,0,799,798,1,0,0,0,800,803,1,0,0,0, + 801,799,1,0,0,0,801,802,1,0,0,0,802,804,1,0,0,0,803,801,1,0,0,0, + 804,805,5,238,0,0,805,806,5,214,0,0,806,825,1,0,0,0,807,808,5,222, + 0,0,808,812,3,118,59,0,809,811,3,84,42,0,810,809,1,0,0,0,811,814, + 1,0,0,0,812,810,1,0,0,0,812,813,1,0,0,0,813,815,1,0,0,0,814,812, + 1,0,0,0,815,817,5,214,0,0,816,818,3,82,41,0,817,816,1,0,0,0,817, + 818,1,0,0,0,818,819,1,0,0,0,819,820,5,222,0,0,820,821,5,238,0,0, + 821,822,3,118,59,0,822,823,5,214,0,0,823,825,1,0,0,0,824,796,1,0, + 0,0,824,807,1,0,0,0,825,83,1,0,0,0,826,827,3,118,59,0,827,828,5, + 212,0,0,828,829,5,200,0,0,829,838,1,0,0,0,830,831,3,118,59,0,831, + 832,5,212,0,0,832,833,5,218,0,0,833,834,3,74,37,0,834,835,5,234, + 0,0,835,838,1,0,0,0,836,838,3,118,59,0,837,826,1,0,0,0,837,830,1, + 0,0,0,837,836,1,0,0,0,838,85,1,0,0,0,839,844,3,88,44,0,840,841,5, + 206,0,0,841,843,3,88,44,0,842,840,1,0,0,0,843,846,1,0,0,0,844,842, + 1,0,0,0,844,845,1,0,0,0,845,87,1,0,0,0,846,844,1,0,0,0,847,848,3, + 118,59,0,848,849,5,10,0,0,849,850,5,220,0,0,850,851,3,2,1,0,851, + 852,5,236,0,0,852,858,1,0,0,0,853,854,3,74,37,0,854,855,5,10,0,0, + 855,856,3,118,59,0,856,858,1,0,0,0,857,847,1,0,0,0,857,853,1,0,0, + 0,858,89,1,0,0,0,859,867,3,122,61,0,860,861,3,98,49,0,861,862,5, + 210,0,0,862,864,1,0,0,0,863,860,1,0,0,0,863,864,1,0,0,0,864,865, + 1,0,0,0,865,867,3,92,46,0,866,859,1,0,0,0,866,863,1,0,0,0,867,91, + 1,0,0,0,868,873,3,118,59,0,869,870,5,210,0,0,870,872,3,118,59,0, + 871,869,1,0,0,0,872,875,1,0,0,0,873,871,1,0,0,0,873,874,1,0,0,0, + 874,93,1,0,0,0,875,873,1,0,0,0,876,877,6,47,-1,0,877,886,3,98,49, + 0,878,886,3,96,48,0,879,880,5,220,0,0,880,881,3,2,1,0,881,882,5, + 236,0,0,882,886,1,0,0,0,883,886,3,82,41,0,884,886,3,122,61,0,885, + 876,1,0,0,0,885,878,1,0,0,0,885,879,1,0,0,0,885,883,1,0,0,0,885, + 884,1,0,0,0,886,895,1,0,0,0,887,891,10,3,0,0,888,892,3,116,58,0, + 889,890,5,10,0,0,890,892,3,118,59,0,891,888,1,0,0,0,891,889,1,0, + 0,0,892,894,1,0,0,0,893,887,1,0,0,0,894,897,1,0,0,0,895,893,1,0, + 0,0,895,896,1,0,0,0,896,95,1,0,0,0,897,895,1,0,0,0,898,899,3,118, + 59,0,899,901,5,220,0,0,900,902,3,100,50,0,901,900,1,0,0,0,901,902, + 1,0,0,0,902,903,1,0,0,0,903,904,5,236,0,0,904,97,1,0,0,0,905,906, + 3,102,51,0,906,907,5,210,0,0,907,909,1,0,0,0,908,905,1,0,0,0,908, + 909,1,0,0,0,909,910,1,0,0,0,910,911,3,118,59,0,911,99,1,0,0,0,912, + 917,3,74,37,0,913,914,5,206,0,0,914,916,3,74,37,0,915,913,1,0,0, + 0,916,919,1,0,0,0,917,915,1,0,0,0,917,918,1,0,0,0,918,101,1,0,0, + 0,919,917,1,0,0,0,920,921,3,118,59,0,921,103,1,0,0,0,922,931,5,196, + 0,0,923,924,5,210,0,0,924,931,7,11,0,0,925,926,5,198,0,0,926,928, + 5,210,0,0,927,929,7,11,0,0,928,927,1,0,0,0,928,929,1,0,0,0,929,931, + 1,0,0,0,930,922,1,0,0,0,930,923,1,0,0,0,930,925,1,0,0,0,931,105, + 
+        1,0,0,0,932,934,7,12,0,0,933,932,1,0,0,0,933,934,1,0,0,0,934,941,
+        1,0,0,0,935,942,3,104,52,0,936,942,5,197,0,0,937,942,5,198,0,0,938,
+        942,5,199,0,0,939,942,5,82,0,0,940,942,5,113,0,0,941,935,1,0,0,0,
+        941,936,1,0,0,0,941,937,1,0,0,0,941,938,1,0,0,0,941,939,1,0,0,0,
+        941,940,1,0,0,0,942,107,1,0,0,0,943,947,3,106,53,0,944,947,5,200,
+        0,0,945,947,5,116,0,0,946,943,1,0,0,0,946,944,1,0,0,0,946,945,1,
+        0,0,0,947,109,1,0,0,0,948,949,7,13,0,0,949,111,1,0,0,0,950,951,7,
+        14,0,0,951,113,1,0,0,0,952,953,7,15,0,0,953,115,1,0,0,0,954,957,
+        5,195,0,0,955,957,3,114,57,0,956,954,1,0,0,0,956,955,1,0,0,0,957,
+        117,1,0,0,0,958,962,5,195,0,0,959,962,3,110,55,0,960,962,3,112,56,
+        0,961,958,1,0,0,0,961,959,1,0,0,0,961,960,1,0,0,0,962,119,1,0,0,
+        0,963,964,5,200,0,0,964,965,5,212,0,0,965,966,3,106,53,0,966,121,
+        1,0,0,0,967,968,5,218,0,0,968,969,3,118,59,0,969,970,5,234,0,0,970,
+        123,1,0,0,0,120,127,137,146,149,153,156,160,163,166,169,172,176,
+        180,183,186,189,193,196,205,211,232,249,266,272,278,289,291,302,
+        305,311,319,325,327,331,336,339,342,346,350,353,355,358,362,366,
+        369,371,373,378,389,395,402,407,411,415,421,423,430,438,441,444,
+        463,477,493,505,517,525,529,536,542,551,555,579,596,608,618,621,
+        625,628,641,658,662,668,675,687,691,694,703,717,744,753,755,757,
+        765,770,778,788,791,801,812,817,824,837,844,857,863,866,873,885,
+        891,895,901,908,917,928,930,933,941,946,956,961
     ]
 
 class HogQLParser ( Parser ):
@@ -4150,34 +4148,6 @@ def accept(self, visitor:ParseTreeVisitor):
             return visitor.visitChildren(self)
 
 
-    class ColumnExprExtractContext(ColumnExprContext):
-
-        def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext
-            super().__init__(parser)
-            self.copyFrom(ctx)
-
-        def EXTRACT(self):
-            return self.getToken(HogQLParser.EXTRACT, 0)
-        def LPAREN(self):
-            return self.getToken(HogQLParser.LPAREN, 0)
-        def interval(self):
-            return self.getTypedRuleContext(HogQLParser.IntervalContext,0)
-
-        def FROM(self):
-            return self.getToken(HogQLParser.FROM, 0)
-        def columnExpr(self):
-            return self.getTypedRuleContext(HogQLParser.ColumnExprContext,0)
-
-        def RPAREN(self):
-            return self.getToken(HogQLParser.RPAREN, 0)
-
-        def accept(self, visitor:ParseTreeVisitor):
-            if hasattr( visitor, "visitColumnExprExtract" ):
-                return visitor.visitColumnExprExtract(self)
-            else:
-                return visitor.visitChildren(self)
-
-
     class ColumnExprNegateContext(ColumnExprContext):
 
         def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext
@@ -4970,7 +4940,7 @@ def columnExpr(self, _p:int=0):
         self._la = 0 # Token type
         try:
             self.enterOuterAlt(localctx, 1)
-            self.state = 669
+            self.state = 662
             self._errHandler.sync(self)
             la_ = self._interp.adaptivePredict(self._input,80,self._ctx)
             if la_ == 1:
@@ -5049,317 +5019,299 @@ def columnExpr(self, _p:int=0):
                 pass
 
             elif la_ == 4:
-                localctx = HogQLParser.ColumnExprExtractContext(self, localctx)
+                localctx = HogQLParser.ColumnExprIntervalContext(self, localctx)
                 self._ctx = localctx
                 _prevctx = localctx
                 self.state = 568
-                self.match(HogQLParser.EXTRACT)
+                self.match(HogQLParser.INTERVAL)
                 self.state = 569
-                self.match(HogQLParser.LPAREN)
+                self.columnExpr(0)
                 self.state = 570
                 self.interval()
-                self.state = 571
-                self.match(HogQLParser.FROM)
-                self.state = 572
-                self.columnExpr(0)
-                self.state = 573
-                self.match(HogQLParser.RPAREN)
                 pass
 
             elif la_ == 5:
-                localctx = HogQLParser.ColumnExprIntervalContext(self, localctx)
-                self._ctx = localctx
-                _prevctx = localctx
-                self.state = 575
-                self.match(HogQLParser.INTERVAL)
-
                self.state = 576
-                self.columnExpr(0)
-                self.state = 577
-                self.interval()
-                pass
-
-            elif la_ == 6:
                 localctx = HogQLParser.ColumnExprSubstringContext(self, localctx)
                 self._ctx = localctx
                 _prevctx = localctx
-                self.state = 579
+                self.state = 572
                 self.match(HogQLParser.SUBSTRING)
-                self.state = 580
+                self.state = 573
                 self.match(HogQLParser.LPAREN)
-                self.state = 581
+                self.state = 574
                 self.columnExpr(0)
-                self.state = 582
+                self.state = 575
                 self.match(HogQLParser.FROM)
-                self.state = 583
+                self.state = 576
                 self.columnExpr(0)
-                self.state = 586
+                self.state = 579
                 self._errHandler.sync(self)
                 _la = self._input.LA(1)
                 if _la==65:
-                    self.state = 584
+                    self.state = 577
                     self.match(HogQLParser.FOR)
-                    self.state = 585
+                    self.state = 578
                     self.columnExpr(0)
 
-                self.state = 588
+                self.state = 581
                 self.match(HogQLParser.RPAREN)
                 pass
 
-            elif la_ == 7:
+            elif la_ == 6:
                 localctx = HogQLParser.ColumnExprTimestampContext(self, localctx)
                 self._ctx = localctx
                 _prevctx = localctx
-                self.state = 590
+                self.state = 583
                 self.match(HogQLParser.TIMESTAMP)
-                self.state = 591
+                self.state = 584
                 self.match(HogQLParser.STRING_LITERAL)
                 pass
 
-            elif la_ == 8:
+            elif la_ == 7:
                 localctx = HogQLParser.ColumnExprTrimContext(self, localctx)
                 self._ctx = localctx
                 _prevctx = localctx
-                self.state = 592
+                self.state = 585
                 self.match(HogQLParser.TRIM)
-                self.state = 593
+                self.state = 586
                 self.match(HogQLParser.LPAREN)
-                self.state = 594
+                self.state = 587
                 _la = self._input.LA(1)
                 if not(_la==17 or _la==95 or _la==170):
                     self._errHandler.recoverInline(self)
                 else:
                     self._errHandler.reportMatch(self)
                     self.consume()
-                self.state = 595
+                self.state = 588
                 self.match(HogQLParser.STRING_LITERAL)
-                self.state = 596
+                self.state = 589
                 self.match(HogQLParser.FROM)
-                self.state = 597
+                self.state = 590
                 self.columnExpr(0)
-                self.state = 598
+                self.state = 591
                 self.match(HogQLParser.RPAREN)
                 pass
 
-            elif la_ == 9:
+            elif la_ == 8:
                 localctx = HogQLParser.ColumnExprWinFunctionContext(self, localctx)
                 self._ctx = localctx
                 _prevctx = localctx
-                self.state = 600
+                self.state = 593
                 self.identifier()
-                self.state = 601
+                self.state = 594
                 self.match(HogQLParser.LPAREN)
-                self.state = 603
+                self.state = 596
                 self._errHandler.sync(self)
                 _la = self._input.LA(1)
                 if (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -1) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or ((((_la - 192)) & ~0x3f) == 0 and ((1 << (_la - 192)) & 70263309817) != 0):
-                    self.state = 602
+                    self.state = 595
                     self.columnExprList()
 
-                self.state = 605
+                self.state = 598
                 self.match(HogQLParser.RPAREN)
-                self.state = 607
+                self.state = 600
                 self.match(HogQLParser.OVER)
-                self.state = 608
+                self.state = 601
                 self.match(HogQLParser.LPAREN)
-                self.state = 609
+                self.state = 602
                 self.windowExpr()
-                self.state = 610
+                self.state = 603
                 self.match(HogQLParser.RPAREN)
                 pass
 
-            elif la_ == 10:
+            elif la_ == 9:
                 localctx = HogQLParser.ColumnExprWinFunctionTargetContext(self, localctx)
                 self._ctx = localctx
                 _prevctx = localctx
-                self.state = 612
+                self.state = 605
                 self.identifier()
-                self.state = 613
+                self.state = 606
                 self.match(HogQLParser.LPAREN)
-                self.state = 615
+                self.state = 608
                 self._errHandler.sync(self)
                 _la = self._input.LA(1)
                 if (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -1) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or ((((_la - 192)) & ~0x3f) == 0 and ((1 << (_la - 192)) & 70263309817) != 0):
-                    self.state = 614
+
self.state = 607 self.columnExprList() - self.state = 617 + self.state = 610 self.match(HogQLParser.RPAREN) - self.state = 619 + self.state = 612 self.match(HogQLParser.OVER) - self.state = 620 + self.state = 613 self.identifier() pass - elif la_ == 11: + elif la_ == 10: localctx = HogQLParser.ColumnExprFunctionContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 622 + self.state = 615 self.identifier() - self.state = 628 + self.state = 621 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,75,self._ctx) if la_ == 1: - self.state = 623 + self.state = 616 self.match(HogQLParser.LPAREN) - self.state = 625 + self.state = 618 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -1) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or ((((_la - 192)) & ~0x3f) == 0 and ((1 << (_la - 192)) & 70263309817) != 0): - self.state = 624 + self.state = 617 self.columnExprList() - self.state = 627 + self.state = 620 self.match(HogQLParser.RPAREN) - self.state = 630 + self.state = 623 self.match(HogQLParser.LPAREN) - self.state = 632 + self.state = 625 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,76,self._ctx) if la_ == 1: - self.state = 631 + self.state = 624 self.match(HogQLParser.DISTINCT) - self.state = 635 + self.state = 628 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -1) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or ((((_la - 192)) & ~0x3f) == 0 and ((1 << (_la - 192)) & 70263309817) != 0): - self.state = 634 + self.state = 627 self.columnArgList() - self.state = 637 + self.state = 630 self.match(HogQLParser.RPAREN) pass - elif la_ == 12: + elif la_ == 11: localctx = HogQLParser.ColumnExprTagElementContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 639 + self.state = 632 self.hogqlxTagElement() pass - elif la_ == 13: + elif la_ == 12: localctx = HogQLParser.ColumnExprLiteralContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 640 + self.state = 633 self.literal() pass - elif la_ == 14: + elif la_ == 13: localctx = HogQLParser.ColumnExprNegateContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 641 + self.state = 634 self.match(HogQLParser.DASH) - self.state = 642 + self.state = 635 self.columnExpr(18) pass - elif la_ == 15: + elif la_ == 14: localctx = HogQLParser.ColumnExprNotContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 643 + self.state = 636 self.match(HogQLParser.NOT) - self.state = 644 + self.state = 637 self.columnExpr(12) pass - elif la_ == 16: + elif la_ == 15: localctx = HogQLParser.ColumnExprAsteriskContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 648 + self.state = 641 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -5066549581053953) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or _la==192 or _la==195: - self.state = 645 + self.state = 638 self.tableIdentifier() - self.state = 646 + self.state = 639 self.match(HogQLParser.DOT) - self.state = 650 + self.state = 643 self.match(HogQLParser.ASTERISK) pass - elif la_ == 17: + 
elif la_ == 16: localctx = HogQLParser.ColumnExprSubqueryContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 651 + self.state = 644 self.match(HogQLParser.LPAREN) - self.state = 652 + self.state = 645 self.selectUnionStmt() - self.state = 653 + self.state = 646 self.match(HogQLParser.RPAREN) pass - elif la_ == 18: + elif la_ == 17: localctx = HogQLParser.ColumnExprParensContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 655 + self.state = 648 self.match(HogQLParser.LPAREN) - self.state = 656 + self.state = 649 self.columnExpr(0) - self.state = 657 + self.state = 650 self.match(HogQLParser.RPAREN) pass - elif la_ == 19: + elif la_ == 18: localctx = HogQLParser.ColumnExprTupleContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 659 + self.state = 652 self.match(HogQLParser.LPAREN) - self.state = 660 + self.state = 653 self.columnExprList() - self.state = 661 + self.state = 654 self.match(HogQLParser.RPAREN) pass - elif la_ == 20: + elif la_ == 19: localctx = HogQLParser.ColumnExprArrayContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 663 + self.state = 656 self.match(HogQLParser.LBRACKET) - self.state = 665 + self.state = 658 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -1) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or ((((_la - 192)) & ~0x3f) == 0 and ((1 << (_la - 192)) & 70263309817) != 0): - self.state = 664 + self.state = 657 self.columnExprList() - self.state = 667 + self.state = 660 self.match(HogQLParser.RBRACKET) pass - elif la_ == 21: + elif la_ == 20: localctx = HogQLParser.ColumnExprIdentifierContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 668 + self.state = 661 self.columnIdentifier() pass self._ctx.stop = self._input.LT(-1) - self.state = 764 + self.state = 757 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,91,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -5367,36 +5319,36 @@ def columnExpr(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 762 + self.state = 755 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,90,self._ctx) if la_ == 1: localctx = HogQLParser.ColumnExprPrecedence1Context(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 671 + self.state = 664 if not self.precpred(self._ctx, 17): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 17)") - self.state = 675 + self.state = 668 self._errHandler.sync(self) token = self._input.LA(1) if token in [202]: - self.state = 672 + self.state = 665 localctx.operator = self.match(HogQLParser.ASTERISK) pass elif token in [238]: - self.state = 673 + self.state = 666 localctx.operator = self.match(HogQLParser.SLASH) pass elif token in [227]: - self.state = 674 + self.state = 667 localctx.operator = self.match(HogQLParser.PERCENT) pass else: raise NoViableAltException(self) - self.state = 677 + self.state = 670 localctx.right = self.columnExpr(18) pass @@ -5404,29 +5356,29 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprPrecedence2Context(self, HogQLParser.ColumnExprContext(self, 
_parentctx, _parentState)) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 678 + self.state = 671 if not self.precpred(self._ctx, 16): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 16)") - self.state = 682 + self.state = 675 self._errHandler.sync(self) token = self._input.LA(1) if token in [228]: - self.state = 679 + self.state = 672 localctx.operator = self.match(HogQLParser.PLUS) pass elif token in [208]: - self.state = 680 + self.state = 673 localctx.operator = self.match(HogQLParser.DASH) pass elif token in [207]: - self.state = 681 + self.state = 674 localctx.operator = self.match(HogQLParser.CONCAT) pass else: raise NoViableAltException(self) - self.state = 684 + self.state = 677 localctx.right = self.columnExpr(17) pass @@ -5434,79 +5386,79 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprPrecedence3Context(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 685 + self.state = 678 if not self.precpred(self._ctx, 15): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 15)") - self.state = 710 + self.state = 703 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,86,self._ctx) if la_ == 1: - self.state = 686 + self.state = 679 localctx.operator = self.match(HogQLParser.EQ_DOUBLE) pass elif la_ == 2: - self.state = 687 + self.state = 680 localctx.operator = self.match(HogQLParser.EQ_SINGLE) pass elif la_ == 3: - self.state = 688 + self.state = 681 localctx.operator = self.match(HogQLParser.NOT_EQ) pass elif la_ == 4: - self.state = 689 + self.state = 682 localctx.operator = self.match(HogQLParser.LT_EQ) pass elif la_ == 5: - self.state = 690 + self.state = 683 localctx.operator = self.match(HogQLParser.LT) pass elif la_ == 6: - self.state = 691 + self.state = 684 localctx.operator = self.match(HogQLParser.GT_EQ) pass elif la_ == 7: - self.state = 692 + self.state = 685 localctx.operator = self.match(HogQLParser.GT) pass elif la_ == 8: - self.state = 694 + self.state = 687 self._errHandler.sync(self) _la = self._input.LA(1) if _la==115: - self.state = 693 + self.state = 686 localctx.operator = self.match(HogQLParser.NOT) - self.state = 696 + self.state = 689 self.match(HogQLParser.IN) - self.state = 698 + self.state = 691 self._errHandler.sync(self) _la = self._input.LA(1) if _la==25: - self.state = 697 + self.state = 690 self.match(HogQLParser.COHORT) pass elif la_ == 9: - self.state = 701 + self.state = 694 self._errHandler.sync(self) _la = self._input.LA(1) if _la==115: - self.state = 700 + self.state = 693 localctx.operator = self.match(HogQLParser.NOT) - self.state = 703 + self.state = 696 _la = self._input.LA(1) if not(_la==79 or _la==98): self._errHandler.recoverInline(self) @@ -5516,209 +5468,209 @@ def columnExpr(self, _p:int=0): pass elif la_ == 10: - self.state = 704 + self.state = 697 localctx.operator = self.match(HogQLParser.REGEX_SINGLE) pass elif la_ == 11: - self.state = 705 + self.state = 698 localctx.operator = self.match(HogQLParser.REGEX_DOUBLE) pass elif la_ == 12: - self.state = 706 + self.state = 699 localctx.operator = self.match(HogQLParser.NOT_REGEX) pass elif la_ == 13: - self.state = 707 + self.state = 700 localctx.operator = self.match(HogQLParser.IREGEX_SINGLE) pass elif la_ == 14: 
- self.state = 708 + self.state = 701 localctx.operator = self.match(HogQLParser.IREGEX_DOUBLE) pass elif la_ == 15: - self.state = 709 + self.state = 702 localctx.operator = self.match(HogQLParser.NOT_IREGEX) pass - self.state = 712 + self.state = 705 localctx.right = self.columnExpr(16) pass elif la_ == 4: localctx = HogQLParser.ColumnExprNullishContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 713 + self.state = 706 if not self.precpred(self._ctx, 13): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 13)") - self.state = 714 + self.state = 707 self.match(HogQLParser.NULLISH) - self.state = 715 + self.state = 708 self.columnExpr(14) pass elif la_ == 5: localctx = HogQLParser.ColumnExprAndContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 716 + self.state = 709 if not self.precpred(self._ctx, 11): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 11)") - self.state = 717 + self.state = 710 self.match(HogQLParser.AND) - self.state = 718 + self.state = 711 self.columnExpr(12) pass elif la_ == 6: localctx = HogQLParser.ColumnExprOrContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 719 + self.state = 712 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 720 + self.state = 713 self.match(HogQLParser.OR) - self.state = 721 + self.state = 714 self.columnExpr(11) pass elif la_ == 7: localctx = HogQLParser.ColumnExprBetweenContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 722 + self.state = 715 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 724 + self.state = 717 self._errHandler.sync(self) _la = self._input.LA(1) if _la==115: - self.state = 723 + self.state = 716 self.match(HogQLParser.NOT) - self.state = 726 + self.state = 719 self.match(HogQLParser.BETWEEN) - self.state = 727 + self.state = 720 self.columnExpr(0) - self.state = 728 + self.state = 721 self.match(HogQLParser.AND) - self.state = 729 + self.state = 722 self.columnExpr(10) pass elif la_ == 8: localctx = HogQLParser.ColumnExprTernaryOpContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 731 + self.state = 724 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 732 + self.state = 725 self.match(HogQLParser.QUERY) - self.state = 733 + self.state = 726 self.columnExpr(0) - self.state = 734 + self.state = 727 self.match(HogQLParser.COLON) - self.state = 735 + self.state = 728 self.columnExpr(8) pass elif la_ == 9: localctx = HogQLParser.ColumnExprArrayAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) 
self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 737 + self.state = 730 if not self.precpred(self._ctx, 21): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 21)") - self.state = 738 + self.state = 731 self.match(HogQLParser.LBRACKET) - self.state = 739 + self.state = 732 self.columnExpr(0) - self.state = 740 + self.state = 733 self.match(HogQLParser.RBRACKET) pass elif la_ == 10: localctx = HogQLParser.ColumnExprTupleAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 742 + self.state = 735 if not self.precpred(self._ctx, 20): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 20)") - self.state = 743 + self.state = 736 self.match(HogQLParser.DOT) - self.state = 744 + self.state = 737 self.match(HogQLParser.DECIMAL_LITERAL) pass elif la_ == 11: localctx = HogQLParser.ColumnExprPropertyAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 745 + self.state = 738 if not self.precpred(self._ctx, 19): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 19)") - self.state = 746 + self.state = 739 self.match(HogQLParser.DOT) - self.state = 747 + self.state = 740 self.identifier() pass elif la_ == 12: localctx = HogQLParser.ColumnExprIsNullContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 748 + self.state = 741 if not self.precpred(self._ctx, 14): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 14)") - self.state = 749 + self.state = 742 self.match(HogQLParser.IS) - self.state = 751 + self.state = 744 self._errHandler.sync(self) _la = self._input.LA(1) if _la==115: - self.state = 750 + self.state = 743 self.match(HogQLParser.NOT) - self.state = 753 + self.state = 746 self.match(HogQLParser.NULL_SQL) pass elif la_ == 13: localctx = HogQLParser.ColumnExprAliasContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 754 + self.state = 747 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 760 + self.state = 753 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,89,self._ctx) if la_ == 1: - self.state = 755 + self.state = 748 self.alias() pass elif la_ == 2: - self.state = 756 + self.state = 749 self.match(HogQLParser.AS) - self.state = 757 + self.state = 750 self.identifier() pass elif la_ == 3: - self.state = 758 + self.state = 751 self.match(HogQLParser.AS) - self.state = 759 + self.state = 752 self.match(HogQLParser.STRING_LITERAL) pass @@ -5726,7 +5678,7 @@ def columnExpr(self, _p:int=0): pass - self.state = 766 + self.state = 759 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,91,self._ctx) @@ -5778,17 +5730,17 @@ def columnArgList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 767 + self.state = 760 
self.columnArgExpr() - self.state = 772 + self.state = 765 self._errHandler.sync(self) _la = self._input.LA(1) while _la==206: - self.state = 768 + self.state = 761 self.match(HogQLParser.COMMA) - self.state = 769 + self.state = 762 self.columnArgExpr() - self.state = 774 + self.state = 767 self._errHandler.sync(self) _la = self._input.LA(1) @@ -5833,18 +5785,18 @@ def columnArgExpr(self): localctx = HogQLParser.ColumnArgExprContext(self, self._ctx, self.state) self.enterRule(localctx, 78, self.RULE_columnArgExpr) try: - self.state = 777 + self.state = 770 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,93,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 775 + self.state = 768 self.columnLambdaExpr() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 776 + self.state = 769 self.columnExpr(0) pass @@ -5910,41 +5862,41 @@ def columnLambdaExpr(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 798 + self.state = 791 self._errHandler.sync(self) token = self._input.LA(1) if token in [220]: - self.state = 779 + self.state = 772 self.match(HogQLParser.LPAREN) - self.state = 780 + self.state = 773 self.identifier() - self.state = 785 + self.state = 778 self._errHandler.sync(self) _la = self._input.LA(1) while _la==206: - self.state = 781 + self.state = 774 self.match(HogQLParser.COMMA) - self.state = 782 + self.state = 775 self.identifier() - self.state = 787 + self.state = 780 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 788 + self.state = 781 self.match(HogQLParser.RPAREN) pass elif token in [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 195]: - self.state = 790 + self.state = 783 self.identifier() - self.state = 795 + self.state = 788 self._errHandler.sync(self) _la = self._input.LA(1) while _la==206: - self.state = 791 + self.state = 784 self.match(HogQLParser.COMMA) - self.state = 792 + self.state = 785 self.identifier() - self.state = 797 + self.state = 790 self._errHandler.sync(self) _la = self._input.LA(1) @@ -5952,9 +5904,9 @@ def columnLambdaExpr(self): else: raise NoViableAltException(self) - self.state = 800 + self.state = 793 self.match(HogQLParser.ARROW) - self.state = 801 + self.state = 794 self.columnExpr(0) except RecognitionException as re: localctx.exception = re @@ -6059,66 +6011,66 @@ def hogqlxTagElement(self): self.enterRule(localctx, 82, self.RULE_hogqlxTagElement) self._la = 0 # Token type try: - self.state = 831 + self.state = 824 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,100,self._ctx) if la_ == 1: localctx = HogQLParser.HogqlxTagElementClosedContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 803 + self.state = 796 
self.match(HogQLParser.LT) - self.state = 804 + self.state = 797 self.identifier() - self.state = 808 + self.state = 801 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -5066549581053953) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or _la==192 or _la==195: - self.state = 805 + self.state = 798 self.hogqlxTagAttribute() - self.state = 810 + self.state = 803 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 811 + self.state = 804 self.match(HogQLParser.SLASH) - self.state = 812 + self.state = 805 self.match(HogQLParser.GT) pass elif la_ == 2: localctx = HogQLParser.HogqlxTagElementNestedContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 814 + self.state = 807 self.match(HogQLParser.LT) - self.state = 815 + self.state = 808 self.identifier() - self.state = 819 + self.state = 812 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -5066549581053953) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or _la==192 or _la==195: - self.state = 816 + self.state = 809 self.hogqlxTagAttribute() - self.state = 821 + self.state = 814 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 822 + self.state = 815 self.match(HogQLParser.GT) - self.state = 824 + self.state = 817 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,99,self._ctx) if la_ == 1: - self.state = 823 + self.state = 816 self.hogqlxTagElement() - self.state = 826 + self.state = 819 self.match(HogQLParser.LT) - self.state = 827 + self.state = 820 self.match(HogQLParser.SLASH) - self.state = 828 + self.state = 821 self.identifier() - self.state = 829 + self.state = 822 self.match(HogQLParser.GT) pass @@ -6176,36 +6128,36 @@ def hogqlxTagAttribute(self): localctx = HogQLParser.HogqlxTagAttributeContext(self, self._ctx, self.state) self.enterRule(localctx, 84, self.RULE_hogqlxTagAttribute) try: - self.state = 844 + self.state = 837 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,101,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 833 + self.state = 826 self.identifier() - self.state = 834 + self.state = 827 self.match(HogQLParser.EQ_SINGLE) - self.state = 835 + self.state = 828 self.match(HogQLParser.STRING_LITERAL) pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 837 + self.state = 830 self.identifier() - self.state = 838 + self.state = 831 self.match(HogQLParser.EQ_SINGLE) - self.state = 839 + self.state = 832 self.match(HogQLParser.LBRACE) - self.state = 840 + self.state = 833 self.columnExpr(0) - self.state = 841 + self.state = 834 self.match(HogQLParser.RBRACE) pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 843 + self.state = 836 self.identifier() pass @@ -6258,17 +6210,17 @@ def withExprList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 846 + self.state = 839 self.withExpr() - self.state = 851 + self.state = 844 self._errHandler.sync(self) _la = self._input.LA(1) while _la==206: - self.state = 847 + self.state = 840 self.match(HogQLParser.COMMA) - self.state = 848 + self.state = 841 self.withExpr() - self.state = 853 + self.state = 846 self._errHandler.sync(self) _la = self._input.LA(1) @@ -6352,32 +6304,32 @@ def withExpr(self): localctx = 
HogQLParser.WithExprContext(self, self._ctx, self.state) self.enterRule(localctx, 88, self.RULE_withExpr) try: - self.state = 864 + self.state = 857 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,103,self._ctx) if la_ == 1: localctx = HogQLParser.WithExprSubqueryContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 854 + self.state = 847 self.identifier() - self.state = 855 + self.state = 848 self.match(HogQLParser.AS) - self.state = 856 + self.state = 849 self.match(HogQLParser.LPAREN) - self.state = 857 + self.state = 850 self.selectUnionStmt() - self.state = 858 + self.state = 851 self.match(HogQLParser.RPAREN) pass elif la_ == 2: localctx = HogQLParser.WithExprColumnContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 860 + self.state = 853 self.columnExpr(0) - self.state = 861 + self.state = 854 self.match(HogQLParser.AS) - self.state = 862 + self.state = 855 self.identifier() pass @@ -6430,27 +6382,27 @@ def columnIdentifier(self): localctx = HogQLParser.ColumnIdentifierContext(self, self._ctx, self.state) self.enterRule(localctx, 90, self.RULE_columnIdentifier) try: - self.state = 873 + self.state = 866 self._errHandler.sync(self) token = self._input.LA(1) if token in [218]: self.enterOuterAlt(localctx, 1) - self.state = 866 + self.state = 859 self.placeholder() pass elif token in [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 195]: self.enterOuterAlt(localctx, 2) - self.state = 870 + self.state = 863 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,104,self._ctx) if la_ == 1: - self.state = 867 + self.state = 860 self.tableIdentifier() - self.state = 868 + self.state = 861 self.match(HogQLParser.DOT) - self.state = 872 + self.state = 865 self.nestedIdentifier() pass else: @@ -6503,18 +6455,18 @@ def nestedIdentifier(self): self.enterRule(localctx, 92, self.RULE_nestedIdentifier) try: self.enterOuterAlt(localctx, 1) - self.state = 875 + self.state = 868 self.identifier() - self.state = 880 + self.state = 873 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,106,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 876 + self.state = 869 self.match(HogQLParser.DOT) - self.state = 877 + self.state = 870 self.identifier() - self.state = 882 + self.state = 875 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,106,self._ctx) @@ -6667,7 +6619,7 @@ def tableExpr(self, _p:int=0): self.enterRecursionRule(localctx, 94, self.RULE_tableExpr, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 892 + self.state = 885 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,107,self._ctx) if la_ == 1: @@ -6675,7 +6627,7 @@ def 
tableExpr(self, _p:int=0): self._ctx = localctx _prevctx = localctx - self.state = 884 + self.state = 877 self.tableIdentifier() pass @@ -6683,7 +6635,7 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprFunctionContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 885 + self.state = 878 self.tableFunctionExpr() pass @@ -6691,11 +6643,11 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprSubqueryContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 886 + self.state = 879 self.match(HogQLParser.LPAREN) - self.state = 887 + self.state = 880 self.selectUnionStmt() - self.state = 888 + self.state = 881 self.match(HogQLParser.RPAREN) pass @@ -6703,7 +6655,7 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprTagContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 890 + self.state = 883 self.hogqlxTagElement() pass @@ -6711,13 +6663,13 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprPlaceholderContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 891 + self.state = 884 self.placeholder() pass self._ctx.stop = self._input.LT(-1) - self.state = 902 + self.state = 895 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,109,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -6727,27 +6679,27 @@ def tableExpr(self, _p:int=0): _prevctx = localctx localctx = HogQLParser.TableExprAliasContext(self, HogQLParser.TableExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_tableExpr) - self.state = 894 + self.state = 887 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 898 + self.state = 891 self._errHandler.sync(self) token = self._input.LA(1) if token in [36, 62, 77, 91, 195]: - self.state = 895 + self.state = 888 self.alias() pass elif token in [10]: - self.state = 896 + self.state = 889 self.match(HogQLParser.AS) - self.state = 897 + self.state = 890 self.identifier() pass else: raise NoViableAltException(self) - self.state = 904 + self.state = 897 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,109,self._ctx) @@ -6800,19 +6752,19 @@ def tableFunctionExpr(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 905 + self.state = 898 self.identifier() - self.state = 906 + self.state = 899 self.match(HogQLParser.LPAREN) - self.state = 908 + self.state = 901 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -33554436) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & -1) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & -9) != 0) or ((((_la - 192)) & ~0x3f) == 0 and ((1 << (_la - 192)) & 70263309817) != 0): - self.state = 907 + self.state = 900 self.tableArgList() - self.state = 910 + self.state = 903 self.match(HogQLParser.RPAREN) except RecognitionException as re: localctx.exception = re @@ -6859,17 +6811,17 @@ def tableIdentifier(self): self.enterRule(localctx, 98, self.RULE_tableIdentifier) try: self.enterOuterAlt(localctx, 1) - self.state = 915 + self.state = 908 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,111,self._ctx) if la_ == 1: - self.state = 912 + self.state = 905 self.databaseIdentifier() - self.state = 913 + self.state = 906 self.match(HogQLParser.DOT) - 
self.state = 917 + self.state = 910 self.identifier() except RecognitionException as re: localctx.exception = re @@ -6919,17 +6871,17 @@ def tableArgList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 919 + self.state = 912 self.columnExpr(0) - self.state = 924 + self.state = 917 self._errHandler.sync(self) _la = self._input.LA(1) while _la==206: - self.state = 920 + self.state = 913 self.match(HogQLParser.COMMA) - self.state = 921 + self.state = 914 self.columnExpr(0) - self.state = 926 + self.state = 919 self._errHandler.sync(self) _la = self._input.LA(1) @@ -6971,7 +6923,7 @@ def databaseIdentifier(self): self.enterRule(localctx, 102, self.RULE_databaseIdentifier) try: self.enterOuterAlt(localctx, 1) - self.state = 927 + self.state = 920 self.identifier() except RecognitionException as re: localctx.exception = re @@ -7022,19 +6974,19 @@ def floatingLiteral(self): self.enterRule(localctx, 104, self.RULE_floatingLiteral) self._la = 0 # Token type try: - self.state = 937 + self.state = 930 self._errHandler.sync(self) token = self._input.LA(1) if token in [196]: self.enterOuterAlt(localctx, 1) - self.state = 929 + self.state = 922 self.match(HogQLParser.FLOATING_LITERAL) pass elif token in [210]: self.enterOuterAlt(localctx, 2) - self.state = 930 + self.state = 923 self.match(HogQLParser.DOT) - self.state = 931 + self.state = 924 _la = self._input.LA(1) if not(_la==197 or _la==198): self._errHandler.recoverInline(self) @@ -7044,15 +6996,15 @@ def floatingLiteral(self): pass elif token in [198]: self.enterOuterAlt(localctx, 3) - self.state = 932 + self.state = 925 self.match(HogQLParser.DECIMAL_LITERAL) - self.state = 933 + self.state = 926 self.match(HogQLParser.DOT) - self.state = 935 + self.state = 928 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,113,self._ctx) if la_ == 1: - self.state = 934 + self.state = 927 _la = self._input.LA(1) if not(_la==197 or _la==198): self._errHandler.recoverInline(self) @@ -7125,11 +7077,11 @@ def numberLiteral(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 940 + self.state = 933 self._errHandler.sync(self) _la = self._input.LA(1) if _la==208 or _la==228: - self.state = 939 + self.state = 932 _la = self._input.LA(1) if not(_la==208 or _la==228): self._errHandler.recoverInline(self) @@ -7138,36 +7090,36 @@ def numberLiteral(self): self.consume() - self.state = 948 + self.state = 941 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,116,self._ctx) if la_ == 1: - self.state = 942 + self.state = 935 self.floatingLiteral() pass elif la_ == 2: - self.state = 943 + self.state = 936 self.match(HogQLParser.OCTAL_LITERAL) pass elif la_ == 3: - self.state = 944 + self.state = 937 self.match(HogQLParser.DECIMAL_LITERAL) pass elif la_ == 4: - self.state = 945 + self.state = 938 self.match(HogQLParser.HEXADECIMAL_LITERAL) pass elif la_ == 5: - self.state = 946 + self.state = 939 self.match(HogQLParser.INF) pass elif la_ == 6: - self.state = 947 + self.state = 940 self.match(HogQLParser.NAN_SQL) pass @@ -7215,22 +7167,22 @@ def literal(self): localctx = HogQLParser.LiteralContext(self, self._ctx, self.state) self.enterRule(localctx, 108, self.RULE_literal) try: - self.state = 953 + self.state = 946 self._errHandler.sync(self) token = self._input.LA(1) if token in [82, 113, 196, 197, 198, 199, 208, 210, 228]: self.enterOuterAlt(localctx, 1) - self.state = 950 + self.state = 943 self.numberLiteral() pass elif token in [200]: 
self.enterOuterAlt(localctx, 2) - self.state = 951 + self.state = 944 self.match(HogQLParser.STRING_LITERAL) pass elif token in [116]: self.enterOuterAlt(localctx, 3) - self.state = 952 + self.state = 945 self.match(HogQLParser.NULL_SQL) pass else: @@ -7295,7 +7247,7 @@ def interval(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 955 + self.state = 948 _la = self._input.LA(1) if not(_la==37 or ((((_la - 76)) & ~0x3f) == 0 and ((1 << (_la - 76)) & 72057615512764417) != 0) or ((((_la - 145)) & ~0x3f) == 0 and ((1 << (_la - 145)) & 36283883716609) != 0)): self._errHandler.recoverInline(self) @@ -7871,7 +7823,7 @@ def keyword(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 957 + self.state = 950 _la = self._input.LA(1) if not(((((_la - 2)) & ~0x3f) == 0 and ((1 << (_la - 2)) & -34368126977) != 0) or ((((_la - 66)) & ~0x3f) == 0 and ((1 << (_la - 66)) & -1288627627820033) != 0) or ((((_la - 130)) & ~0x3f) == 0 and ((1 << (_la - 130)) & 8034421735228932089) != 0)): self._errHandler.recoverInline(self) @@ -7925,7 +7877,7 @@ def keywordForAlias(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 959 + self.state = 952 _la = self._input.LA(1) if not(((((_la - 36)) & ~0x3f) == 0 and ((1 << (_la - 36)) & 36030996109328385) != 0)): self._errHandler.recoverInline(self) @@ -7972,17 +7924,17 @@ def alias(self): localctx = HogQLParser.AliasContext(self, self._ctx, self.state) self.enterRule(localctx, 116, self.RULE_alias) try: - self.state = 963 + self.state = 956 self._errHandler.sync(self) token = self._input.LA(1) if token in [195]: self.enterOuterAlt(localctx, 1) - self.state = 961 + self.state = 954 self.match(HogQLParser.IDENTIFIER) pass elif token in [36, 62, 77, 91]: self.enterOuterAlt(localctx, 2) - self.state = 962 + self.state = 955 self.keywordForAlias() pass else: @@ -8032,22 +7984,22 @@ def identifier(self): localctx = HogQLParser.IdentifierContext(self, self._ctx, self.state) self.enterRule(localctx, 118, self.RULE_identifier) try: - self.state = 968 + self.state = 961 self._errHandler.sync(self) token = self._input.LA(1) if token in [195]: self.enterOuterAlt(localctx, 1) - self.state = 965 + self.state = 958 self.match(HogQLParser.IDENTIFIER) pass elif token in [37, 76, 108, 110, 132, 145, 185, 190]: self.enterOuterAlt(localctx, 2) - self.state = 966 + self.state = 959 self.interval() pass elif token in [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 111, 112, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 186, 187, 188, 189, 191, 192]: self.enterOuterAlt(localctx, 3) - self.state = 967 + self.state = 960 self.keyword() pass else: @@ -8097,11 +8049,11 @@ def enumValue(self): self.enterRule(localctx, 120, self.RULE_enumValue) try: self.enterOuterAlt(localctx, 1) - self.state = 970 + self.state = 963 self.match(HogQLParser.STRING_LITERAL) - 
self.state = 971 + self.state = 964 self.match(HogQLParser.EQ_SINGLE) - self.state = 972 + self.state = 965 self.numberLiteral() except RecognitionException as re: localctx.exception = re @@ -8147,11 +8099,11 @@ def placeholder(self): self.enterRule(localctx, 122, self.RULE_placeholder) try: self.enterOuterAlt(localctx, 1) - self.state = 974 + self.state = 967 self.match(HogQLParser.LBRACE) - self.state = 975 + self.state = 968 self.identifier() - self.state = 976 + self.state = 969 self.match(HogQLParser.RBRACE) except RecognitionException as re: localctx.exception = re diff --git a/posthog/hogql/grammar/HogQLParserVisitor.py b/posthog/hogql/grammar/HogQLParserVisitor.py index 389da7e99385c..66a36b5bdb4e9 100644 --- a/posthog/hogql/grammar/HogQLParserVisitor.py +++ b/posthog/hogql/grammar/HogQLParserVisitor.py @@ -254,11 +254,6 @@ def visitColumnExprAlias(self, ctx:HogQLParser.ColumnExprAliasContext): return self.visitChildren(ctx) - # Visit a parse tree produced by HogQLParser#ColumnExprExtract. - def visitColumnExprExtract(self, ctx:HogQLParser.ColumnExprExtractContext): - return self.visitChildren(ctx) - - # Visit a parse tree produced by HogQLParser#ColumnExprNegate. def visitColumnExprNegate(self, ctx:HogQLParser.ColumnExprNegateContext): return self.visitChildren(ctx) diff --git a/posthog/hogql/modifiers.py b/posthog/hogql/modifiers.py index 3c6fa11a9fc3b..8452016dc1411 100644 --- a/posthog/hogql/modifiers.py +++ b/posthog/hogql/modifiers.py @@ -1,7 +1,12 @@ from typing import Optional, TYPE_CHECKING -from posthog.schema import HogQLQueryModifiers, InCohortVia, MaterializationMode, PersonsArgMaxVersion -from posthog.utils import PersonOnEventsMode +from posthog.schema import ( + HogQLQueryModifiers, + InCohortVia, + MaterializationMode, + PersonsArgMaxVersion, + PersonsOnEventsMode, +) if TYPE_CHECKING: from posthog.models import Team @@ -16,7 +21,10 @@ def create_default_modifiers_for_team( modifiers = modifiers.model_copy() if modifiers.personsOnEventsMode is None: - modifiers.personsOnEventsMode = team.person_on_events_mode or PersonOnEventsMode.DISABLED + if team.person_on_events_v3_querying_enabled: + modifiers.personsOnEventsMode = PersonsOnEventsMode.v3_enabled + else: + modifiers.personsOnEventsMode = team.person_on_events_mode if modifiers.personsArgMaxVersion is None: modifiers.personsArgMaxVersion = PersonsArgMaxVersion.auto diff --git a/posthog/hogql/parser.py b/posthog/hogql/parser.py index 399f6953698cd..07fa55228d671 100644 --- a/posthog/hogql/parser.py +++ b/posthog/hogql/parser.py @@ -499,9 +499,6 @@ def visitColumnExprAlias(self, ctx: HogQLParser.ColumnExprAliasContext): return ast.Alias(expr=expr, alias=alias) - def visitColumnExprExtract(self, ctx: HogQLParser.ColumnExprExtractContext): - raise NotImplementedException(f"Unsupported node: ColumnExprExtract") - def visitColumnExprNegate(self, ctx: HogQLParser.ColumnExprNegateContext): return ast.ArithmeticOperation( op=ast.ArithmeticOperationOp.Sub, diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index db6523cfa4078..98d3bdc4bc8a5 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -822,11 +822,21 @@ def visit_call(self, node: ast.Call): break # Found an overload matching the first function org if func_meta.tz_aware: - if (relevant_clickhouse_name == "now64" and len(node.args) == 0) or ( - relevant_clickhouse_name == "parseDateTime64BestEffortOrNull" and len(node.args) == 1 + has_tz_override = len(node.args) == func_meta.max_args + + if not has_tz_override: + 
args.append(self.visit(ast.Constant(value=self._get_timezone()))) + + if ( + relevant_clickhouse_name == "now64" + and (len(node.args) == 0 or (has_tz_override and len(node.args) == 1)) + ) or ( + relevant_clickhouse_name == "parseDateTime64BestEffortOrNull" + and (len(node.args) == 1 or (has_tz_override and len(node.args) == 2)) ): - args.append("6") # These two CH functions require the precision argument before timezone - args.append(self.visit(ast.Constant(value=self._get_timezone()))) + # These two CH functions require a precision argument before timezone + args = args[:-1] + ["6"] + args[-1:] + if node.name == "toStartOfWeek" and len(node.args) == 1: # If week mode hasn't been specified, use the project's default. # For Monday-based weeks mode 3 is used (which is ISO 8601), for Sunday-based mode 0 (CH default) diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index ba9f92443b4e8..98019cdaa54b7 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -147,10 +147,7 @@ def property_to_expr( value = property.value if property.type == "person" and scope != "person": - if property.table: - chain = ["person", property.table] - else: - chain = ["person", "properties"] + chain = ["person", "properties"] elif property.type == "group": chain = [f"group_{property.group_type_index}", "properties"] elif property.type == "data_warehouse": diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py index e7bc3f7984205..f47c14c5cef86 100644 --- a/posthog/hogql/query.py +++ b/posthog/hogql/query.py @@ -28,6 +28,7 @@ def execute_hogql_query( query: Union[str, ast.SelectQuery, ast.SelectUnionQuery], team: Team, + *, query_type: str = "hogql_query", filters: Optional[HogQLFilters] = None, placeholders: Optional[Dict[str, ast.Expr]] = None, @@ -147,6 +148,7 @@ def execute_hogql_query( has_joins="JOIN" in clickhouse_sql, has_json_operations="JSONExtract" in clickhouse_sql or "JSONHas" in clickhouse_sql, timings=timings_dict, + modifiers={k: v for k, v in modifiers.model_dump().items() if v is not None} if modifiers else {}, ) error = None diff --git a/posthog/hogql/test/_test_parser.py b/posthog/hogql/test/_test_parser.py index 5a4c45b533647..61c90d031ffc3 100644 --- a/posthog/hogql/test/_test_parser.py +++ b/posthog/hogql/test/_test_parser.py @@ -1594,4 +1594,17 @@ def test_visit_hogqlx_tag_source(self): ], ) + def test_select_extract_as_function(self): + node = self._select("select extract('string', 'other string') from events") + + assert node == ast.SelectQuery( + select=[ + ast.Call( + name="extract", + args=[ast.Constant(value="string"), ast.Constant(value="other string")], + ) + ], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), + ) + return TestParser diff --git a/posthog/hogql/test/test_bytecode.py b/posthog/hogql/test/test_bytecode.py index cf0b8113b574d..f7d810700e74a 100644 --- a/posthog/hogql/test/test_bytecode.py +++ b/posthog/hogql/test/test_bytecode.py @@ -130,7 +130,7 @@ def test_bytecode_create(self): def test_bytecode_create_error(self): with self.assertRaises(NotImplementedException) as e: to_bytecode("(select 1)") - self.assertEqual(str(e.exception), "Visitor has no method visit_select_query") + self.assertEqual(str(e.exception), "BytecodeBuilder has no method visit_select_query") with self.assertRaises(NotImplementedException) as e: to_bytecode("1 in cohort 2") diff --git a/posthog/hogql/test/test_modifiers.py b/posthog/hogql/test/test_modifiers.py index eba1f5195ab3d..a33d57575e2f4 100644 --- a/posthog/hogql/test/test_modifiers.py +++ 
b/posthog/hogql/test/test_modifiers.py @@ -7,7 +7,11 @@ class TestModifiers(BaseTest): - @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) + @override_settings( + PERSON_ON_EVENTS_OVERRIDE=False, + PERSON_ON_EVENTS_V2_OVERRIDE=False, + PERSON_ON_EVENTS_V3_OVERRIDE=False, + ) def test_create_default_modifiers_for_team_init(self): assert self.team.person_on_events_mode == "disabled" modifiers = create_default_modifiers_for_team(self.team) @@ -23,6 +27,9 @@ def test_create_default_modifiers_for_team_init(self): ) assert modifiers.personsOnEventsMode == PersonsOnEventsMode.v2_enabled + with override_settings(PERSON_ON_EVENTS_V3_OVERRIDE=True): + assert create_default_modifiers_for_team(self.team).personsOnEventsMode == PersonsOnEventsMode.v3_enabled + def test_modifiers_persons_on_events_mode_v1_enabled(self): query = "SELECT event, person_id FROM events" @@ -74,6 +81,13 @@ def test_modifiers_persons_on_events_mode_mapping(self): "events.person_properties AS properties", "toTimeZone(events.person_created_at, %(hogql_val_1)s) AS created_at", ), + ( + PersonsOnEventsMode.v3_enabled, + "events.event AS event", + "if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id) AS id", + "events.person_properties AS properties", + "toTimeZone(events.person_created_at, %(hogql_val_0)s) AS created_at", + ), ] for mode, *expected in test_cases: diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py index 9bbf4c40aff98..1f3b2db78e7ed 100644 --- a/posthog/hogql/test/test_printer.py +++ b/posthog/hogql/test/test_printer.py @@ -1524,3 +1524,37 @@ def test_lookup_organic_medium_type(self): ), printed, ) + + def test_override_timezone(self): + context = HogQLContext( + team_id=self.team.pk, + enable_select_queries=True, + database=Database(None, WeekStartDay.SUNDAY), + ) + context.database.events.fields["test_date"] = DateDatabaseField(name="test_date") # type: ignore + + self.assertEqual( + self._select( + """ + SELECT + toDateTime(timestamp), + toDateTime(timestamp, 'US/Pacific'), + now(), + now('US/Pacific') + FROM events + """, + context, + ), + f"SELECT toDateTime(toTimeZone(events.timestamp, %(hogql_val_0)s), %(hogql_val_1)s), toDateTime(toTimeZone(events.timestamp, %(hogql_val_2)s), %(hogql_val_3)s), now64(6, %(hogql_val_4)s), now64(6, %(hogql_val_5)s) FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000", + ) + self.assertEqual( + context.values, + { + "hogql_val_0": "UTC", + "hogql_val_1": "UTC", + "hogql_val_2": "UTC", + "hogql_val_3": "US/Pacific", + "hogql_val_4": "UTC", + "hogql_val_5": "US/Pacific", + }, + ) diff --git a/posthog/hogql/test/test_visitor.py b/posthog/hogql/test/test_visitor.py index 8aa6689328fbf..a01193f788d5f 100644 --- a/posthog/hogql/test/test_visitor.py +++ b/posthog/hogql/test/test_visitor.py @@ -125,7 +125,7 @@ def visit_arithmetic_operation(self, node: ast.ArithmeticOperation): with self.assertRaises(HogQLException) as e: UnknownNotDefinedVisitor().visit(parse_expr("1 + 3 / 'asd2'")) - self.assertEqual(str(e.exception), "Visitor has no method visit_constant") + self.assertEqual(str(e.exception), "UnknownNotDefinedVisitor has no method visit_constant") def test_hogql_exception_start_end(self): class EternalVisitor(TraversingVisitor): diff --git a/posthog/hogql/transforms/lazy_tables.py b/posthog/hogql/transforms/lazy_tables.py index bdbb322d54397..df8ce6962259c 100644 --- a/posthog/hogql/transforms/lazy_tables.py +++ b/posthog/hogql/transforms/lazy_tables.py @@ 
-309,7 +309,7 @@ def create_override(table_name: str, field_chain: List[str | int]) -> None: # For all the collected tables, create the subqueries, and add them to the table. for table_name, table_to_add in tables_to_add.items(): - subquery = table_to_add.lazy_table.lazy_select(table_to_add.fields_accessed, self.context.modifiers) + subquery = table_to_add.lazy_table.lazy_select(table_to_add.fields_accessed, self.context, node=node) subquery = cast(ast.SelectQuery, clone_expr(subquery, clear_locations=True)) subquery = cast(ast.SelectQuery, resolve_types(subquery, self.context, self.dialect, [node.type])) old_table_type = select_type.tables[table_name] diff --git a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr index 9ff7f8ee0ab49..e0f5ea847110d 100644 --- a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr +++ b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr @@ -31,7 +31,7 @@ FROM events LEFT JOIN ( SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id FROM person_static_cohort - WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [12]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) + WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [11]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0)) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 @@ -42,7 +42,7 @@ FROM events LEFT JOIN ( SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id FROM static_cohort_people - WHERE in(cohort_id, [12])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) + WHERE in(cohort_id, [11])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) WHERE and(1, equals(__in_cohort.matched, 1)) LIMIT 100 ''' @@ -55,7 +55,7 @@ FROM events LEFT JOIN ( SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id FROM person_static_cohort - WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [13]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) + WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [12]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0)) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1 @@ -66,7 +66,7 @@ FROM events LEFT JOIN ( SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id FROM static_cohort_people - WHERE in(cohort_id, [13])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) + WHERE in(cohort_id, [12])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) WHERE and(1, equals(__in_cohort.matched, 1)) LIMIT 100 ''' diff --git a/posthog/hogql/visitor.py b/posthog/hogql/visitor.py index c11856169297f..2bf968abf2ab0 100644 --- a/posthog/hogql/visitor.py +++ b/posthog/hogql/visitor.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Optional, TypeVar, Generic, Any from posthog.hogql import ast from posthog.hogql.base import AST, Expr @@ -14,8 +14,11 @@ def clear_locations(expr: Expr) -> Expr: return 
CloningVisitor(clear_locations=True).visit(expr) -class Visitor(object): - def visit(self, node: AST): +T = TypeVar("T") + + +class Visitor(Generic[T]): + def visit(self, node: AST) -> T: if node is None: return node @@ -28,7 +31,7 @@ def visit(self, node: AST): raise e -class TraversingVisitor(Visitor): +class TraversingVisitor(Visitor[None]): """Visitor that traverses the AST tree without returning anything""" def visit_expr(self, node: Expr): @@ -258,7 +261,7 @@ def visit_hogqlx_attribute(self, node: ast.HogQLXAttribute): self.visit(node.value) -class CloningVisitor(Visitor): +class CloningVisitor(Visitor[Any]): """Visitor that traverses and clones the AST tree. Clears types.""" def __init__( diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py index acc404ac9519a..7f30ae435803c 100644 --- a/posthog/hogql_queries/hogql_query_runner.py +++ b/posthog/hogql_queries/hogql_query_runner.py @@ -45,7 +45,7 @@ def to_actors_query(self) -> ast.SelectQuery: def calculate(self) -> HogQLQueryResponse: query = self.to_query() paginator = None - if not query.limit: + if isinstance(query, ast.SelectQuery) and not query.limit: paginator = HogQLHasMorePaginator.from_limit_context(limit_context=self.limit_context) func = cast( Callable[..., HogQLQueryResponse], diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index ef8782fade54a..4e97d79b94534 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -284,6 +284,7 @@ def _get_breakdown_expr(self) -> ast.Expr: properties_column = f"group_{breakdownFilter.breakdown_group_type_index}.properties" return get_breakdown_expr(breakdown, properties_column) elif breakdownType == "hogql": + assert isinstance(breakdown, list) return ast.Alias( alias="value", expr=ast.Array(exprs=[parse_expr(str(value)) for value in breakdown]), @@ -530,6 +531,7 @@ def _add_breakdown_attribution_subquery(self, inner_query: ast.SelectQuery) -> a # so just select that. Except for the empty case, where we select the default. 
if self._query_has_array_breakdown(): + assert isinstance(breakdown, list) default_breakdown_value = f"""[{','.join(["''" for _ in range(len(breakdown or []))])}]""" # default is [''] when dealing with a single breakdown array, otherwise ['', '', ...., ''] breakdown_selector = parse_expr( @@ -613,7 +615,7 @@ def _build_step_query( event_expr = ast.Constant(value=True) else: # event - event_expr = parse_expr(f"event = '{entity.event}'") + event_expr = parse_expr("event = {event}", {"event": ast.Constant(value=entity.event)}) if entity.properties is not None and entity.properties != []: # add property filters @@ -657,11 +659,15 @@ def _get_funnel_person_step_condition(self) -> ast.Expr: raise ValueError("Missing both funnelStep and funnelCustomSteps") if funnelStepBreakdown is not None: - breakdown_prop_value = funnelStepBreakdown - if isinstance(breakdown_prop_value, int) and breakdownType != "cohort": - breakdown_prop_value = str(breakdown_prop_value) + if isinstance(funnelStepBreakdown, int) and breakdownType != "cohort": + funnelStepBreakdown = str(funnelStepBreakdown) - conditions.append(parse_expr(f"arrayFlatten(array(prop)) = arrayFlatten(array({breakdown_prop_value}))")) + conditions.append( + parse_expr( + "arrayFlatten(array(prop)) = arrayFlatten(array({funnelStepBreakdown}))", + {"funnelStepBreakdown": ast.Constant(value=funnelStepBreakdown)}, + ) + ) return ast.And(exprs=conditions) @@ -898,7 +904,12 @@ def _get_breakdown_prop_expr(self, group_remaining=False) -> List[ast.Expr]: BreakdownType.group, ]: breakdown_values = self._get_breakdown_conditions() - return [parse_expr(f"if(has({breakdown_values}, prop), prop, {other_aggregation}) as prop")] + return [ + parse_expr( + f"if(has({{breakdown_values}}, prop), prop, {other_aggregation}) as prop", + {"breakdown_values": ast.Constant(value=breakdown_values)}, + ) + ] else: # Cohorts don't have "Other" aggregation return [ast.Field(chain=["prop"])] diff --git a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py index 2fef78a372324..bcb362ff3d4f9 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py @@ -236,6 +236,7 @@ def _calculate(self) -> tuple[List[EventOddsRatio], bool, str, HogQLQueryRespons team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) assert response.results diff --git a/posthog/hogql_queries/insights/funnels/funnel_query_context.py b/posthog/hogql_queries/insights/funnels/funnel_query_context.py index 66a0d28ad3d7f..3b777e3ff8026 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_query_context.py +++ b/posthog/hogql_queries/insights/funnels/funnel_query_context.py @@ -25,7 +25,7 @@ class FunnelQueryContext(QueryContext): interval: IntervalType - breakdown: List[Union[str, int]] | None + breakdown: List[Union[str, int]] | str | int | None breakdownType: BreakdownType breakdownAttributionType: BreakdownAttributionType diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends.py b/posthog/hogql_queries/insights/funnels/funnel_trends.py index 5c370512a20e8..9d486f1b06196 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends.py @@ -203,7 +203,16 @@ def get_query(self) -> ast.SelectQuery: [ ast.Alias( alias="breakdown_value", - expr=ast.Array(exprs=[parse_expr(str(value)) for 
value in self.breakdown_values]), + expr=ast.Array( + exprs=[ + ( + ast.Array(exprs=[ast.Constant(value=sub_value) for sub_value in value]) + if isinstance(value, list) + else ast.Constant(value=value) + ) + for value in self.breakdown_values + ] + ), hidden=False, ) ] diff --git a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py index 38e04603f2725..b1ca8dfd6dd54 100644 --- a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py @@ -92,6 +92,7 @@ def calculate(self): team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) results = self.funnel_class._format_results(response.results) diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel.py b/posthog/hogql_queries/insights/funnels/test/test_funnel.py index 98f4d060fb905..89382bebfb994 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel.py @@ -18,7 +18,14 @@ from posthog.models.group_type_mapping import GroupTypeMapping from posthog.models.property_definition import PropertyDefinition from posthog.queries.funnels import ClickhouseFunnelActors -from posthog.schema import ActorsQuery, EventsNode, FunnelsActorsQuery, FunnelsQuery +from posthog.schema import ( + ActorsQuery, + BreakdownFilter, + DateRange, + EventsNode, + FunnelsActorsQuery, + FunnelsQuery, +) from posthog.test.base import ( APIBaseTest, BaseTest, @@ -3576,6 +3583,72 @@ def test_funnel_window_ignores_dst_transition(self): self.assertEqual(results[1]["average_conversion_time"], 1_207_020) self.assertEqual(results[1]["median_conversion_time"], 1_207_020) + def test_parses_breakdowns_correctly(self): + _create_person( + distinct_ids=[f"user_1"], + team=self.team, + ) + + events_by_person = { + "user_1": [ + { + "event": "$pageview", + "timestamp": datetime(2024, 3, 22, 13, 46), + "properties": {"utm_medium": "test''123"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2024, 3, 22, 13, 47), + "properties": {"utm_medium": "test''123"}, + }, + ], + } + journeys_for(events_by_person, self.team) + + query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="$pageview")], + dateRange=DateRange( + date_from="2024-03-22", + date_to="2024-03-22", + ), + breakdownFilter=BreakdownFilter(breakdown="utm_medium"), + ) + results = FunnelsQueryRunner(query=query, team=self.team).calculate().results + + self.assertEqual(results[0][1]["breakdown_value"], ["test'123"]) + self.assertEqual(results[0][1]["count"], 1) + + def test_funnel_parses_event_names_correctly(self): + _create_person( + distinct_ids=[f"user_1"], + team=self.team, + ) + + events_by_person = { + "user_1": [ + { + "event": "test''1", + "timestamp": datetime(2024, 3, 22, 13, 46), + }, + { + "event": "test''2", + "timestamp": datetime(2024, 3, 22, 13, 47), + }, + ], + } + journeys_for(events_by_person, self.team) + + query = FunnelsQuery( + series=[EventsNode(event="test'1"), EventsNode()], + dateRange=DateRange( + date_from="2024-03-22", + date_to="2024-03-22", + ), + ) + results = FunnelsQueryRunner(query=query, team=self.team).calculate().results + + self.assertEqual(results[0]["count"], 1) + return TestGetFunnel diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py index 4c342d2f2926c..dec7bdd933b3e 100644 --- 
a/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_persons.py @@ -626,3 +626,45 @@ def test_funnel_person_recordings(self): } ], ) + + def test_parses_step_breakdown_correctly(self): + person1 = _create_person( + distinct_ids=["person1"], + team_id=self.team.pk, + properties={"$country": "PL"}, + ) + journeys_for( + { + "person1": [ + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "test''123"}, + }, + { + "event": "play movie", + "timestamp": datetime(2020, 1, 1, 13), + "properties": {"$browser": "test''123"}, + }, + ], + }, + self.team, + create_people=False, + ) + + filters = { + "insight": INSIGHT_FUNNELS, + "date_from": "2020-01-01", + "date_to": "2020-01-08", + "interval": "day", + "funnel_window_days": 7, + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + ], + "breakdown_type": "event", + "breakdown": "$browser", + } + + results = get_actors(filters, self.team, funnelStep=1, funnelStepBreakdown=["test'123"]) + self.assertCountEqual([results[0][0]], [person1.uuid]) diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py index 6ca333b036f14..f9c7b107074de 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py @@ -1387,3 +1387,43 @@ def test_trend_for_hour_based_conversion_window(self): results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results conversion_rates = [row["conversion_rate"] for row in results] self.assertEqual(conversion_rates, [50.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + + def test_parses_breakdown_correctly(self): + journeys_for( + { + "user_one": [ + { + "event": "step one", + "timestamp": datetime(2021, 5, 1), + "properties": {"$browser": "test''123"}, + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 3), + "properties": {"$browser": "test''123"}, + }, + ], + }, + self.team, + ) + + filters = { + "insight": INSIGHT_FUNNELS, + "funnel_viz_type": "trends", + "display": TRENDS_LINEAR, + "interval": "day", + "date_from": "2021-05-01 00:00:00", + "date_to": "2021-05-13 23:59:59", + "funnel_window_days": 7, + "events": [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + ], + "breakdown_type": "event", + "breakdown": "$browser", + } + + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team).calculate().results + + self.assertEqual(len(results), 1) diff --git a/posthog/hogql_queries/insights/funnels/utils.py b/posthog/hogql_queries/insights/funnels/utils.py index 47c1487e5fbcc..cdccce0251a33 100644 --- a/posthog/hogql_queries/insights/funnels/utils.py +++ b/posthog/hogql_queries/insights/funnels/utils.py @@ -61,23 +61,26 @@ def funnel_window_interval_unit_to_sql( def get_breakdown_expr( - breakdown: List[str | int] | None, properties_column: str, normalize_url: bool | None = False + breakdowns: List[str | int] | str | int, properties_column: str, normalize_url: bool | None = False ) -> ast.Expr: - if isinstance(breakdown, str) or isinstance(breakdown, int) or breakdown is None: - return parse_expr(f"ifNull({properties_column}.\"{breakdown}\", '')") + if isinstance(breakdowns, str) or isinstance(breakdowns, int) or breakdowns is None: + return ast.Call( + name="ifNull", 
args=[ast.Field(chain=[*properties_column.split("."), breakdowns]), ast.Constant(value="")] + ) else: exprs = [] - for b in breakdown: - expr = parse_expr(normalize_url_breakdown(f"ifNull({properties_column}.\"{b}\", '')", normalize_url)) + for breakdown in breakdowns: + expr: ast.Expr = ast.Call( + name="ifNull", + args=[ast.Field(chain=[*properties_column.split("."), breakdown]), ast.Constant(value="")], + ) + if normalize_url: + regex = "[\\\\/?#]*$" + expr = parse_expr( + f"if( empty( replaceRegexpOne({{breakdown_value}}, '{regex}', '') ), '/', replaceRegexpOne({{breakdown_value}}, '{regex}', ''))", + {"breakdown_value": expr}, + ) exprs.append(expr) expression = ast.Array(exprs=exprs) return expression - - -def normalize_url_breakdown(breakdown_value, breakdown_normalize_url: bool | None): - if breakdown_normalize_url: - regex = "[\\\\/?#]*$" - return f"if( empty( replaceRegexpOne({breakdown_value}, '{regex}', '') ), '/', replaceRegexpOne({breakdown_value}, '{regex}', ''))" - - return breakdown_value diff --git a/posthog/hogql_queries/insights/insight_actors_query_runner.py b/posthog/hogql_queries/insights/insight_actors_query_runner.py index 782dd5b054a0e..d58f36cb6f7ee 100644 --- a/posthog/hogql_queries/insights/insight_actors_query_runner.py +++ b/posthog/hogql_queries/insights/insight_actors_query_runner.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import cast +from typing import cast, Optional from posthog.hogql import ast from posthog.hogql.query import execute_hogql_query @@ -37,7 +37,7 @@ def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: trends_runner = cast(TrendsQueryRunner, self.source_runner) query = cast(InsightActorsQuery, self.query) return trends_runner.to_actors_query( - time_frame=query.day, + time_frame=cast(Optional[str], query.day), # Other runner accept day as int, but not this one series_index=query.series or 0, breakdown_value=query.breakdown, compare=query.compare, @@ -102,6 +102,7 @@ def calculate(self) -> HogQLQueryResponse: team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) def _is_stale(self, cached_result_package): diff --git a/posthog/hogql_queries/insights/lifecycle_query_runner.py b/posthog/hogql_queries/insights/lifecycle_query_runner.py index 24bbe36f1c6bf..ea883eec542bc 100644 --- a/posthog/hogql_queries/insights/lifecycle_query_runner.py +++ b/posthog/hogql_queries/insights/lifecycle_query_runner.py @@ -126,7 +126,7 @@ def to_actors_query( def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse: return InsightActorsQueryOptionsResponse( - day=[{"label": day, "value": day} for day in self.query_date_range.all_values()], + day=[{"label": format_label_date(value), "value": value} for value in self.query_date_range.all_values()], status=[ { "label": "Dormant", @@ -157,6 +157,7 @@ def calculate(self) -> LifecycleQueryResponse: team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) # TODO: can we move the data conversion part into the query as well? 
It would make it easier to swap diff --git a/posthog/hogql_queries/insights/paginators.py b/posthog/hogql_queries/insights/paginators.py index 6dbdb1543b929..0dfda79ced617 100644 --- a/posthog/hogql_queries/insights/paginators.py +++ b/posthog/hogql_queries/insights/paginators.py @@ -54,8 +54,9 @@ def trim_results(self) -> list[Any]: def execute_hogql_query( self, - query_type: str, query: ast.SelectQuery, + *, + query_type: str, **kwargs, ) -> HogQLQueryResponse: self.response = cast( diff --git a/posthog/hogql_queries/insights/paths_query_runner.py b/posthog/hogql_queries/insights/paths_query_runner.py index c10a5a2320207..c454feb8e56ac 100644 --- a/posthog/hogql_queries/insights/paths_query_runner.py +++ b/posthog/hogql_queries/insights/paths_query_runner.py @@ -725,6 +725,7 @@ def calculate(self) -> PathsQueryResponse: team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) response.results = self.validate_results(response.results) diff --git a/posthog/hogql_queries/insights/retention_query_runner.py b/posthog/hogql_queries/insights/retention_query_runner.py index 221cb976757d2..3ac2c5b4b5462 100644 --- a/posthog/hogql_queries/insights/retention_query_runner.py +++ b/posthog/hogql_queries/insights/retention_query_runner.py @@ -313,6 +313,7 @@ def calculate(self) -> RetentionQueryResponse: team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) result_dict = { diff --git a/posthog/hogql_queries/insights/stickiness_query_runner.py b/posthog/hogql_queries/insights/stickiness_query_runner.py index d0b4b65c67f9b..184e3c0af02df 100644 --- a/posthog/hogql_queries/insights/stickiness_query_runner.py +++ b/posthog/hogql_queries/insights/stickiness_query_runner.py @@ -212,6 +212,7 @@ def calculate(self): team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) if response.timings is not None: diff --git a/posthog/hogql_queries/insights/test/test_paginators.py b/posthog/hogql_queries/insights/test/test_paginators.py index ac83efb45b353..6698115c46535 100644 --- a/posthog/hogql_queries/insights/test/test_paginators.py +++ b/posthog/hogql_queries/insights/test/test_paginators.py @@ -1,3 +1,5 @@ +from typing import cast +from posthog.hogql.ast import SelectQuery from posthog.hogql.constants import ( LimitContext, get_default_limit_for_context, @@ -136,8 +138,8 @@ def test_response_params_consistency(self): """Test consistency of response_params method.""" paginator = HogQLHasMorePaginator(limit=5, offset=10) paginator.response = paginator.execute_hogql_query( - "test_query", - parse_select("SELECT * FROM persons"), + cast(SelectQuery, parse_select("SELECT * FROM persons")), + query_type="test_query", team=self.team, ) params = paginator.response_params() diff --git a/posthog/hogql_queries/insights/test/test_retention_query_runner.py b/posthog/hogql_queries/insights/test/test_retention_query_runner.py index 30edb32102f76..04c108dd779f1 100644 --- a/posthog/hogql_queries/insights/test/test_retention_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_retention_query_runner.py @@ -1,3 +1,5 @@ +from typing import Optional +from unittest.mock import MagicMock, patch import uuid from datetime import datetime @@ -6,11 +8,14 @@ from django.test import override_settings from rest_framework import status +from posthog.clickhouse.client.execute import sync_execute from posthog.constants import ( RETENTION_FIRST_TIME, TREND_FILTER_TYPE_ACTIONS, 
TREND_FILTER_TYPE_EVENTS, ) +from posthog.hogql.constants import LimitContext +from posthog.hogql.query import INCREASED_MAX_EXECUTION_TIME from posthog.hogql_queries.insights.retention_query_runner import RetentionQueryRunner from posthog.hogql_queries.actors_query_runner import ActorsQueryRunner from posthog.models import Action, ActionStep @@ -1685,10 +1690,10 @@ def test_day_interval_sampled(self): class TestClickhouseRetentionGroupAggregation(ClickhouseTestMixin, APIBaseTest): - def run_query(self, query): + def run_query(self, query, *, limit_context: Optional[LimitContext] = None): if not query.get("retentionFilter"): query["retentionFilter"] = {} - runner = RetentionQueryRunner(team=self.team, query=query) + runner = RetentionQueryRunner(team=self.team, query=query, limit_context=limit_context) return runner.calculate().model_dump()["results"] def run_actors_query(self, interval, query, select=None, actor="person"): @@ -1920,3 +1925,10 @@ def test_groups_aggregating_person_on_events(self): [1], ], ) + + @patch("posthog.hogql.query.sync_execute", wraps=sync_execute) + def test_limit_is_context_aware(self, mock_sync_execute: MagicMock): + self.run_query(query={}, limit_context=LimitContext.QUERY_ASYNC) + + mock_sync_execute.assert_called_once() + self.assertIn(f" max_execution_time={INCREASED_MAX_EXECUTION_TIME},", mock_sync_execute.call_args[0][0]) diff --git a/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py b/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py index 3de1fb6ce865e..6e25827e6ecba 100644 --- a/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py @@ -1,8 +1,12 @@ from dataclasses import dataclass from typing import Dict, List, Optional, Union +from unittest.mock import MagicMock, patch from django.test import override_settings from freezegun import freeze_time +from posthog.clickhouse.client.execute import sync_execute +from posthog.hogql.constants import LimitContext +from posthog.hogql.query import INCREASED_MAX_EXECUTION_TIME from posthog.hogql_queries.insights.stickiness_query_runner import StickinessQueryRunner from posthog.models.action.action import Action from posthog.models.action_step import ActionStep @@ -197,6 +201,7 @@ def _run_query( properties: Optional[StickinessProperties] = None, filters: Optional[StickinessFilter] = None, filter_test_accounts: Optional[bool] = False, + limit_context: Optional[LimitContext] = None, ): query_series: List[EventsNode | ActionsNode] = [EventsNode(event="$pageview")] if series is None else series query_date_from = date_from or self.default_date_from @@ -211,7 +216,7 @@ def _run_query( stickinessFilter=filters, filterTestAccounts=filter_test_accounts, ) - return StickinessQueryRunner(team=self.team, query=query).calculate() + return StickinessQueryRunner(team=self.team, query=query, limit_context=limit_context).calculate() def test_stickiness_runs(self): self._create_test_events() @@ -580,3 +585,10 @@ def test_hogql_aggregations(self): 1, 0, ] + + @patch("posthog.hogql.query.sync_execute", wraps=sync_execute) + def test_limit_is_context_aware(self, mock_sync_execute: MagicMock): + self._run_query(limit_context=LimitContext.QUERY_ASYNC) + + mock_sync_execute.assert_called_once() + self.assertIn(f" max_execution_time={INCREASED_MAX_EXECUTION_TIME},", mock_sync_execute.call_args[0][0]) diff --git a/posthog/hogql_queries/insights/trends/breakdown.py b/posthog/hogql_queries/insights/trends/breakdown.py 
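The pattern shared by the query-runner hunks above: each runner now forwards its limit_context into execute_hogql_query, so contexts such as async queries get the raised max_execution_time that the new test_limit_is_context_aware tests assert on. A minimal sketch, assuming the HogQL imports those runners already use; SketchQueryRunner is an illustrative stand-in, not a real runner:

from posthog.hogql.constants import LimitContext
from posthog.hogql.parser import parse_select
from posthog.hogql.query import execute_hogql_query

class SketchQueryRunner:
    def __init__(self, team, timings, modifiers, limit_context=LimitContext.QUERY):
        self.team = team
        self.timings = timings
        self.modifiers = modifiers
        self.limit_context = limit_context

    def calculate(self):
        query = parse_select("SELECT event FROM events")
        return execute_hogql_query(
            query,
            team=self.team,
            timings=self.timings,
            modifiers=self.modifiers,
            # e.g. LimitContext.QUERY_ASYNC lifts max_execution_time to
            # INCREASED_MAX_EXECUTION_TIME, as the tests above check
            limit_context=self.limit_context,
        )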
index 45a3a8421e8d8..1cae30854e5d1 100644 --- a/posthog/hogql_queries/insights/trends/breakdown.py +++ b/posthog/hogql_queries/insights/trends/breakdown.py @@ -3,9 +3,7 @@ from posthog.hogql.parser import parse_expr from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.trends.breakdown_values import ( - BREAKDOWN_NULL_NUMERIC_LABEL, BREAKDOWN_NULL_STRING_LABEL, - BREAKDOWN_OTHER_NUMERIC_LABEL, BREAKDOWN_OTHER_STRING_LABEL, BreakdownValues, ) @@ -19,6 +17,10 @@ from posthog.schema import ActionsNode, EventsNode, DataWarehouseNode, HogQLQueryModifiers, InCohortVia, TrendsQuery +def hogql_to_string(expr: ast.Expr) -> ast.Call: + return ast.Call(name="toString", args=[expr]) + + class Breakdown: query: TrendsQuery team: Team @@ -27,7 +29,7 @@ class Breakdown: timings: HogQLTimings modifiers: HogQLQueryModifiers events_filter: ast.Expr - breakdown_values_override: Optional[List[str | int | float]] + breakdown_values_override: Optional[List[str]] def __init__( self, @@ -38,7 +40,7 @@ def __init__( timings: HogQLTimings, modifiers: HogQLQueryModifiers, events_filter: ast.Expr, - breakdown_values_override: Optional[List[str | int | float]] = None, + breakdown_values_override: Optional[List[str]] = None, ): self.team = team self.query = query @@ -70,19 +72,15 @@ def placeholders(self) -> Dict[str, ast.Expr]: return {"cross_join_breakdown_values": ast.Alias(alias="breakdown_value", expr=values)} - def column_expr(self) -> ast.Expr: + def column_expr(self) -> ast.Alias: if self.is_histogram_breakdown: return ast.Alias(alias="breakdown_value", expr=self._get_breakdown_histogram_multi_if()) - elif self.query.breakdownFilter.breakdown_type == "hogql": - return ast.Alias( - alias="breakdown_value", - expr=parse_expr(self.query.breakdownFilter.breakdown), - ) - elif self.query.breakdownFilter.breakdown_type == "cohort": + + if self.query.breakdownFilter.breakdown_type == "cohort": if self.modifiers.inCohortVia == InCohortVia.leftjoin_conjoined: return ast.Alias( alias="breakdown_value", - expr=ast.Field(chain=["__in_cohort", "cohort_id"]), + expr=hogql_to_string(ast.Field(chain=["__in_cohort", "cohort_id"])), ) cohort_breakdown = ( @@ -90,19 +88,9 @@ def column_expr(self) -> ast.Expr: ) return ast.Alias( alias="breakdown_value", - expr=ast.Constant(value=cohort_breakdown), - ) - - if self.query.breakdownFilter.breakdown_type == "hogql": - return ast.Alias( - alias="breakdown_value", - expr=parse_expr(self.query.breakdownFilter.breakdown), + expr=hogql_to_string(ast.Constant(value=cohort_breakdown)), ) - # If there's no breakdown values - if len(self._breakdown_values) == 1 and self._breakdown_values[0] is None: - return ast.Alias(alias="breakdown_value", expr=ast.Field(chain=self._properties_chain)) - return ast.Alias(alias="breakdown_value", expr=self._get_breakdown_transform_func) def events_where_filter(self) -> ast.Expr | None: @@ -148,29 +136,28 @@ def events_where_filter(self) -> ast.Expr | None: else: left = ast.Field(chain=self._properties_chain) + if not self.is_histogram_breakdown: + left = hogql_to_string(left) + compare_ops = [] for _value in self._breakdown_values: - value: Optional[str | int | float] = _value + value: Optional[str] = str(_value) # non-cohorts are always strings # If the value is one of the "other" values, then use the `transform()` func - if ( - value == BREAKDOWN_OTHER_STRING_LABEL - or value == BREAKDOWN_OTHER_NUMERIC_LABEL - or value == float(BREAKDOWN_OTHER_NUMERIC_LABEL) - ): + if value == BREAKDOWN_OTHER_STRING_LABEL: transform_func = 
@@ -148,29 +136,28 @@ def events_where_filter(self) -> ast.Expr | None:
         else:
             left = ast.Field(chain=self._properties_chain)
 
+        if not self.is_histogram_breakdown:
+            left = hogql_to_string(left)
+
         compare_ops = []
         for _value in self._breakdown_values:
-            value: Optional[str | int | float] = _value
+            value: Optional[str] = str(_value)  # non-cohorts are always strings
 
             # If the value is one of the "other" values, then use the `transform()` func
-            if (
-                value == BREAKDOWN_OTHER_STRING_LABEL
-                or value == BREAKDOWN_OTHER_NUMERIC_LABEL
-                or value == float(BREAKDOWN_OTHER_NUMERIC_LABEL)
-            ):
+            if value == BREAKDOWN_OTHER_STRING_LABEL:
                 transform_func = self._get_breakdown_transform_func
                 compare_ops.append(
                     ast.CompareOperation(
                         left=transform_func, op=ast.CompareOperationOp.Eq, right=ast.Constant(value=value)
                     )
                 )
+            elif value == BREAKDOWN_NULL_STRING_LABEL:
+                compare_ops.append(
+                    ast.CompareOperation(left=left, op=ast.CompareOperationOp.Eq, right=ast.Constant(value=None))
+                )
+                compare_ops.append(
+                    ast.CompareOperation(left=left, op=ast.CompareOperationOp.Eq, right=ast.Constant(value=""))
+                )
             else:
-                if (
-                    value == BREAKDOWN_NULL_STRING_LABEL
-                    or value == BREAKDOWN_NULL_NUMERIC_LABEL
-                    or value == float(BREAKDOWN_NULL_NUMERIC_LABEL)
-                ):
-                    value = None
-
                 compare_ops.append(
                     ast.CompareOperation(left=left, op=ast.CompareOperationOp.Eq, right=ast.Constant(value=value))
                 )
@@ -184,31 +171,22 @@
 
     @cached_property
     def _get_breakdown_transform_func(self) -> ast.Call:
-        values = self._breakdown_values
-        all_values_are_ints_or_none = all(isinstance(value, int) or value is None for value in values)
-        all_values_are_floats_or_none = all(isinstance(value, float) or value is None for value in values)
-
-        if all_values_are_ints_or_none:
-            breakdown_other_value = BREAKDOWN_OTHER_NUMERIC_LABEL
-            breakdown_null_value = BREAKDOWN_NULL_NUMERIC_LABEL
-        elif all_values_are_floats_or_none:
-            breakdown_other_value = float(BREAKDOWN_OTHER_NUMERIC_LABEL)
-            breakdown_null_value = float(BREAKDOWN_NULL_NUMERIC_LABEL)
-        else:
-            breakdown_other_value = BREAKDOWN_OTHER_STRING_LABEL
-            breakdown_null_value = BREAKDOWN_NULL_STRING_LABEL
-
-        return ast.Call(
-            name="transform",
-            args=[
-                ast.Call(
-                    name="ifNull",
-                    args=[ast.Field(chain=self._properties_chain), ast.Constant(value=breakdown_null_value)],
-                ),
-                self._breakdown_values_ast,
-                self._breakdown_values_ast,
-                ast.Constant(value=breakdown_other_value),
-            ],
+        if self.query.breakdownFilter.breakdown_type == "hogql":
+            return self._get_breakdown_values_transform(parse_expr(self.query.breakdownFilter.breakdown))
+        return self._get_breakdown_values_transform(ast.Field(chain=self._properties_chain))
+
+    def _get_breakdown_values_transform(self, node: ast.Expr) -> ast.Call:
+        return cast(
+            ast.Call,
+            parse_expr(
+                "transform(ifNull(nullIf(toString({node}), ''), {nil}), {values}, {values}, {other})",
+                placeholders={
+                    "node": node,
+                    "values": self._breakdown_values_ast,
+                    "nil": ast.Constant(value=BREAKDOWN_NULL_STRING_LABEL),
+                    "other": ast.Constant(value=BREAKDOWN_OTHER_STRING_LABEL),
+                },
+            ),
        )
 
     @cached_property
@@ -220,15 +198,21 @@ def _breakdown_buckets_ast(self) -> ast.Array:
 
         return ast.Array(exprs=list(map(lambda v: ast.Constant(value=v), values)))
 
-    @cached_property
+    @property
     def _breakdown_values_ast(self) -> ast.Array:
-        return ast.Array(exprs=[ast.Constant(value=v) for v in self._breakdown_values])
+        exprs: list[ast.Expr] = []
+        for value in self._breakdown_values:
+            if isinstance(value, str):
+                exprs.append(ast.Constant(value=value))
+            else:
+                exprs.append(hogql_to_string(ast.Constant(value=value)))
+        return ast.Array(exprs=exprs)
 
     @cached_property
-    def _all_breakdown_values(self) -> List[str | int | float | None]:
+    def _all_breakdown_values(self) -> List[str | int | None]:
         # Used in the actors query
         if self.breakdown_values_override is not None:
-            return cast(List[str | int | float | None], self.breakdown_values_override)
+            return cast(List[str | int | None], self.breakdown_values_override)
 
         if self.query.breakdownFilter is None:
             return []
@@ -243,25 +227,12 @@
             query_date_range=self.query_date_range,
             modifiers=self.modifiers,
         )
-        return cast(List[str | int | float | None], breakdown.get_breakdown_values())
+        return cast(List[str | int | None], breakdown.get_breakdown_values())
 
     @cached_property
-    def _breakdown_values(self) -> List[str | int | float]:
-        values = self._all_breakdown_values
-        if len(values) == 0 or all(value is None for value in values):
-            return []
-
-        if None in values:
-            all_values_are_ints_or_none = all(isinstance(value, int) or value is None for value in values)
-            all_values_are_floats_or_none = all(isinstance(value, float) or value is None for value in values)
-
-            if all_values_are_ints_or_none:
-                values = [v if v is not None else BREAKDOWN_NULL_NUMERIC_LABEL for v in values]
-            elif all_values_are_floats_or_none:
-                values = [v if v is not None else float(BREAKDOWN_NULL_NUMERIC_LABEL) for v in values]
-            else:
-                values = [v if v is not None else BREAKDOWN_NULL_STRING_LABEL for v in values]
-        return cast(List[str | int | float], values)
+    def _breakdown_values(self) -> List[str | int]:
+        values = [BREAKDOWN_NULL_STRING_LABEL if v is None else v for v in self._all_breakdown_values]
+        return cast(List[str | int], values)
 
     @cached_property
     def has_breakdown_values(self) -> bool:
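The single transform(ifNull(nullIf(toString(...), ''), ...)) template replaces the three type-specialized code paths deleted above. A rough Python model of the per-row semantics, assuming the two $$_posthog_breakdown_*_$$ labels imported at the top of the file (illustrative helper, not part of the patch):

    BREAKDOWN_NULL_STRING_LABEL = "$$_posthog_breakdown_null_$$"
    BREAKDOWN_OTHER_STRING_LABEL = "$$_posthog_breakdown_other_$$"

    def breakdown_bucket(raw, known_values: list) -> str:
        # toString + nullIf(..., '') + ifNull: NULL and '' collapse into the null label
        s = str(raw) if raw is not None and str(raw) != "" else BREAKDOWN_NULL_STRING_LABEL
        # transform(x, values, values, other): identity for known values, else "other"
        return s if s in known_values else BREAKDOWN_OTHER_STRING_LABEL

    assert breakdown_bucket(None, ["finance"]) == BREAKDOWN_NULL_STRING_LABEL
    assert breakdown_bucket("", ["finance"]) == BREAKDOWN_NULL_STRING_LABEL
    assert breakdown_bucket(42, ["42"]) == "42"  # numeric breakdowns now compare as strings
    assert breakdown_bucket("tech", ["finance"]) == BREAKDOWN_OTHER_STRING_LABEL

This is also why events_where_filter() above can compare everything via str(_value): the column side is wrapped in toString() as well, so both sides of each equality are strings.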
diff --git a/posthog/hogql_queries/insights/trends/breakdown_values.py b/posthog/hogql_queries/insights/trends/breakdown_values.py
index 7b1522d5f25c5..d9ab11891f210 100644
--- a/posthog/hogql_queries/insights/trends/breakdown_values.py
+++ b/posthog/hogql_queries/insights/trends/breakdown_values.py
@@ -97,6 +97,9 @@ def get_breakdown_values(self) -> List[str | int]:
                 ),
             )
 
+        if not self.histogram_bin_count:
+            select_field.expr = ast.Call(name="toString", args=[select_field.expr])
+
         if self.chart_display_type == ChartDisplayType.WorldMap:
             breakdown_limit = BREAKDOWN_VALUES_LIMIT_FOR_COUNTRIES
         else:
@@ -211,23 +214,9 @@ def get_breakdown_values(self) -> List[str | int]:
 
         # Add "other" value if "other" is not hidden and we're not bucketing numeric values
         if self.hide_other_aggregation is not True and self.histogram_bin_count is None:
-            all_values_are_ints_or_none = all(isinstance(value, int) or value is None for value in values)
-            all_values_are_floats_or_none = all(isinstance(value, float) or value is None for value in values)
-            all_values_are_string_or_none = all(isinstance(value, str) or value is None for value in values)
-
-            if all_values_are_string_or_none:
-                values = [BREAKDOWN_NULL_STRING_LABEL if value in (None, "") else value for value in values]
-                if needs_other:
-                    values.insert(0, BREAKDOWN_OTHER_STRING_LABEL)
-            elif all_values_are_ints_or_none or all_values_are_floats_or_none:
-                if all_values_are_ints_or_none:
-                    values = [BREAKDOWN_NULL_NUMERIC_LABEL if value is None else value for value in values]
-                    if needs_other:
-                        values.insert(0, BREAKDOWN_OTHER_NUMERIC_LABEL)
-                else:
-                    values = [float(BREAKDOWN_NULL_NUMERIC_LABEL) if value is None else value for value in values]
-                    if needs_other:
-                        values.insert(0, float(BREAKDOWN_OTHER_NUMERIC_LABEL))
+            values = [BREAKDOWN_NULL_STRING_LABEL if value in (None, "") else value for value in values]
+            if needs_other:
+                values = [BREAKDOWN_OTHER_STRING_LABEL] + values
 
         if len(values) == 0:
             values.insert(0, None)
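get_breakdown_values() feeds the arrays used by the transform() above, so it applies the same stringification at discovery time: toString() on the selected column, NULL/'' folded into one label, and the "other" bucket prepended. A compact restatement of the new tail logic, for illustration only:

    def finalize_values(values: list, needs_other: bool) -> list:
        # NULL and empty-string breakdowns collapse into the shared null label...
        values = ["$$_posthog_breakdown_null_$$" if v in (None, "") else v for v in values]
        # ...and "other" is prepended when the ranked value list was truncated.
        if needs_other:
            values = ["$$_posthog_breakdown_other_$$"] + values
        return values

    assert finalize_values(["finance", None], needs_other=True) == [
        "$$_posthog_breakdown_other_$$",
        "finance",
        "$$_posthog_breakdown_null_$$",
    ]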
diff --git a/posthog/hogql_queries/insights/trends/display.py b/posthog/hogql_queries/insights/trends/display.py
index c5823a22a5877..8747544f7241c 100644
--- a/posthog/hogql_queries/insights/trends/display.py
+++ b/posthog/hogql_queries/insights/trends/display.py
@@ -86,4 +86,5 @@ def _get_cumulative_query(self, inner_query: ast.SelectQuery, breakdown_enabled:
                 ),
             ],
             select_from=ast.JoinExpr(table=inner_query),
+            order_by=[ast.OrderExpr(expr=ast.Field(chain=["day_start"]), order="ASC")],
         )
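A note on this one-liner: the outer trends query assembles its results with groupArray(day_start) and groupArray(count), and groupArray reflects the order in which rows arrive, so the cumulative wrapper needs to emit days in ascending order for the date and total arrays to stay aligned. The added node, in the same ast API as the hunk above:

    from posthog.hogql import ast

    # Deterministic day ordering for the cumulative wrapper, so running
    # totals accumulate day by day rather than in arbitrary scan order.
    order_by = [ast.OrderExpr(expr=ast.Field(chain=["day_start"]), order="ASC")]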
diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
index f6eb3748afb2b..3d37d683cc26c 100644
--- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
+++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
@@ -187,7 +187,7 @@
 # ---
 # name: TestTrends.test_breakdown_by_group_props_person_on_events
   '''
-  SELECT e__group_0.properties___industry AS value,
+  SELECT toString(e__group_0.properties___industry) AS value,
          count(e.uuid) AS count
   FROM events AS e
   LEFT JOIN
@@ -210,7 +210,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -231,7 +231,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(e.uuid) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(e__group_0.properties___industry, '$$_posthog_breakdown_null_$$'), ['finance', 'technology'], ['finance', 'technology'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(e__group_0.properties___industry), ''), '$$_posthog_breakdown_null_$$'), ['finance', 'technology'], ['finance', 'technology'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1
      LEFT JOIN
        (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
@@ -241,7 +241,7 @@
      WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
      GROUP BY groups.group_type_index,
               groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(e__group_0.properties___industry, 'finance'), 0), ifNull(equals(e__group_0.properties___industry, 'technology'), 0)))
+     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(e__group_0.properties___industry), 'finance'), 0), ifNull(equals(toString(e__group_0.properties___industry), 'technology'), 0)))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -287,7 +287,7 @@
 # ---
 # name: TestTrends.test_breakdown_by_group_props_with_person_filter_person_on_events
   '''
-  SELECT e__group_0.properties___industry AS value,
+  SELECT toString(e__group_0.properties___industry) AS value,
          count(e.uuid) AS count
   FROM events AS e
   LEFT JOIN
@@ -310,7 +310,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -331,7 +331,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(e.uuid) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(e__group_0.properties___industry, '$$_posthog_breakdown_null_$$'), ['finance'], ['finance'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(e__group_0.properties___industry), ''), '$$_posthog_breakdown_null_$$'), ['finance'], ['finance'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1
      LEFT JOIN
        (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
@@ -341,7 +341,7 @@
      WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
      GROUP BY groups.group_type_index,
               groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, 'key'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(e__group_0.properties___industry, 'finance'), 0))
+     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, 'key'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(toString(e__group_0.properties___industry), 'finance'), 0))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -356,7 +356,7 @@
 # ---
 # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Windows'), 0)), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0)))
@@ -371,7 +371,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -392,9 +392,9 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(e.uuid) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['second url'], ['second url'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['second url'], ['second url'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Windows'), 0)), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'second url'), 0))
+     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Windows'), 0)), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')), 'second url'), 0))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -409,7 +409,7 @@
 # ---
 # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.2
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Windows'), 0)), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0)))
@@ -423,24 +423,38 @@
 # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.3
   '''
   SELECT groupArray(day_start) AS date,
-         groupArray(count) AS total
+         groupArray(count) AS total,
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
-            day_start AS day_start
+            day_start AS day_start,
+            breakdown_value AS breakdown_value
      FROM
        (SELECT 0 AS total,
-               minus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), toIntervalDay(numbers.number)) AS day_start
-        FROM numbers(coalesce(dateDiff('day', assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), 0)) AS numbers
-        UNION ALL SELECT 0 AS total,
-                         toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))) AS day_start
+               ticks.day_start AS day_start,
+               sec.breakdown_value AS breakdown_value
+        FROM
+          (SELECT minus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), toIntervalDay(numbers.number)) AS day_start
+           FROM numbers(coalesce(dateDiff('day', assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), 0)) AS numbers
+           UNION ALL SELECT toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))) AS day_start) AS ticks
+        CROSS JOIN
+          (SELECT breakdown_value
+           FROM
+             (SELECT ['$$_posthog_breakdown_null_$$'] AS breakdown_value) ARRAY
+           JOIN breakdown_value AS breakdown_value) AS sec
+        ORDER BY sec.breakdown_value ASC, day_start ASC
         UNION ALL SELECT count(e.uuid) AS total,
-                         toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start
+                         toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
+                         transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$'], ['$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value
         FROM events AS e SAMPLE 1
-        WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Windows'), 0)), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0))
-        GROUP BY day_start)
-     GROUP BY day_start
-     ORDER BY day_start ASC)
-  ORDER BY sum(count) DESC
+        WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Windows'), 0)), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0), or(isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')), ''), 0)))
+        GROUP BY day_start,
+                 breakdown_value)
+     GROUP BY day_start,
+              breakdown_value
+     ORDER BY day_start ASC, breakdown_value ASC)
+  GROUP BY breakdown_value
+  ORDER BY sum(count) DESC, breakdown_value ASC
   LIMIT 10000 SETTINGS readonly=2,
                        max_execution_time=60,
                        allow_experimental_object_type=1
@@ -448,7 +462,7 @@
 # ---
 # name: TestTrends.test_breakdown_weekly_active_users_aggregated
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')) AS value,
          count(DISTINCT e__pdi.person_id) AS count
   FROM events AS e
   INNER JOIN
@@ -480,7 +494,7 @@
      CROSS JOIN
        (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
                e__pdi.person_id AS actor_id,
-               transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['val', 'bor'], ['val', 'bor'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+               transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['val', 'bor'], ['val', 'bor'], '$$_posthog_breakdown_other_$$') AS breakdown_value
         FROM events AS e SAMPLE 1
         INNER JOIN
           (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -489,7 +503,7 @@
         WHERE equals(person_distinct_id2.team_id, 2)
         GROUP BY person_distinct_id2.distinct_id
         HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
-        WHERE and(equals(e.team_id, 2), and(equals(e.event, '$pageview'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'bor'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')), toIntervalDay(7))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 23:59:59', 6, 'UTC'))), 0))
+        WHERE and(equals(e.team_id, 2), and(equals(e.event, '$pageview'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), 'val'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), 'bor'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')), toIntervalDay(7))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 23:59:59', 6, 'UTC'))), 0))
        GROUP BY timestamp, actor_id,
                 breakdown_value) AS e
      WHERE and(ifNull(lessOrEquals(e.timestamp, plus(d.timestamp, toIntervalDay(1))), 0), ifNull(greater(e.timestamp, minus(d.timestamp, toIntervalDay(6))), 0))
@@ -506,7 +520,7 @@
 # ---
 # name: TestTrends.test_breakdown_weekly_active_users_aggregated_materialized
   '''
-  SELECT nullIf(nullIf(e.mat_key, ''), 'null') AS value,
+  SELECT toString(nullIf(nullIf(e.mat_key, ''), 'null')) AS value,
          count(DISTINCT e__pdi.person_id) AS count
   FROM events AS e
   INNER JOIN
@@ -538,7 +552,7 @@
      CROSS JOIN
        (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
                e__pdi.person_id AS actor_id,
-               transform(ifNull(nullIf(nullIf(e.mat_key, ''), 'null'), '$$_posthog_breakdown_null_$$'), ['val', 'bor'], ['val', 'bor'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+               transform(ifNull(nullIf(toString(nullIf(nullIf(e.mat_key, ''), 'null')), ''), '$$_posthog_breakdown_null_$$'), ['val', 'bor'], ['val', 'bor'], '$$_posthog_breakdown_other_$$') AS breakdown_value
        FROM events AS e SAMPLE 1
        INNER JOIN
          (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -547,7 +561,7 @@
        WHERE equals(person_distinct_id2.team_id, 2)
        GROUP BY person_distinct_id2.distinct_id
        HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
-       WHERE and(equals(e.team_id, 2), and(equals(e.event, '$pageview'), or(ifNull(equals(nullIf(nullIf(e.mat_key, ''), 'null'), 'val'), 0), ifNull(equals(nullIf(nullIf(e.mat_key, ''), 'null'), 'bor'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')), toIntervalDay(7))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 23:59:59', 6, 'UTC'))), 0))
+       WHERE and(equals(e.team_id, 2), and(equals(e.event, '$pageview'), or(ifNull(equals(toString(nullIf(nullIf(e.mat_key, ''), 'null')), 'val'), 0), ifNull(equals(toString(nullIf(nullIf(e.mat_key, ''), 'null')), 'bor'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')), toIntervalDay(7))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 23:59:59', 6, 'UTC'))), 0))
        GROUP BY timestamp, actor_id,
                 breakdown_value) AS e
      WHERE and(ifNull(lessOrEquals(e.timestamp, plus(d.timestamp, toIntervalDay(1))), 0), ifNull(greater(e.timestamp, minus(d.timestamp, toIntervalDay(6))), 0))
@@ -584,7 +598,7 @@
 # ---
 # name: TestTrends.test_breakdown_weekly_active_users_daily_based_on_action.2
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')) AS value,
          count(DISTINCT e__pdi.person_id) AS count
   FROM events AS e
   INNER JOIN
@@ -622,7 +636,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -654,7 +668,7 @@
      CROSS JOIN
        (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
                e__pdi.person_id AS actor_id,
-               transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['val'], ['val'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+               transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['val'], ['val'], '$$_posthog_breakdown_other_$$') AS breakdown_value
         FROM events AS e SAMPLE 1
         INNER JOIN
           (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -679,7 +693,7 @@
                FROM cohortpeople
                WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2))
                GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
-               HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0)), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')), toIntervalDay(7))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), 0))
+               HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), 'val'), 0)), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')), toIntervalDay(7))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), 0))
        GROUP BY timestamp, actor_id,
                 breakdown_value) AS e
      WHERE and(ifNull(lessOrEquals(e.timestamp, plus(d.timestamp, toIntervalDay(1))), 0), ifNull(greater(e.timestamp, minus(d.timestamp, toIntervalDay(6))), 0))
@@ -699,7 +713,7 @@
 # ---
 # name: TestTrends.test_breakdown_with_filter_groups_person_on_events
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   LEFT JOIN
@@ -722,7 +736,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -743,7 +757,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(e.uuid) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['uh', 'oh'], ['uh', 'oh'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['uh', 'oh'], ['uh', 'oh'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1
      LEFT JOIN
        (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
@@ -753,7 +767,7 @@
      WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
      GROUP BY groups.group_type_index,
               groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(e__group_0.properties___industry, 'finance'), 0), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'uh'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'oh'), 0)))
+     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(e__group_0.properties___industry, 'finance'), 0), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), 'uh'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), 'oh'), 0)))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -782,7 +796,7 @@
 # ---
 # name: TestTrends.test_breakdown_with_filter_groups_person_on_events_v2.1
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   LEFT JOIN
@@ -805,7 +819,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -826,7 +840,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(DISTINCT ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['uh', 'oh'], ['uh', 'oh'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['uh', 'oh'], ['uh', 'oh'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1
      LEFT OUTER JOIN
        (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
@@ -842,7 +856,7 @@
      WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
      GROUP BY groups.group_type_index,
               groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(e__group_0.properties___industry, 'finance'), 0), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'uh'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'oh'), 0)))
+     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(e__group_0.properties___industry, 'finance'), 0), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), 'uh'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', '')), 'oh'), 0)))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -857,7 +871,7 @@
 # ---
 # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')))
@@ -872,7 +886,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -893,7 +907,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(DISTINCT e__pdi.person_id) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['other_value', '$$_posthog_breakdown_null_$$', 'value'], ['other_value', '$$_posthog_breakdown_null_$$', 'value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['other_value', '$$_posthog_breakdown_null_$$', 'value'], ['other_value', '$$_posthog_breakdown_null_$$', 'value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1.0
      INNER JOIN
        (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -902,7 +916,7 @@
      WHERE equals(person_distinct_id2.team_id, 2)
      GROUP BY person_distinct_id2.distinct_id
      HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0), isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0)))
+     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'other_value'), 0), isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value'), 0)))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -917,7 +931,7 @@
 # ---
 # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')))
@@ -932,7 +946,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -953,7 +967,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(DISTINCT e__pdi.person_id) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['other_value', '$$_posthog_breakdown_null_$$', 'value'], ['other_value', '$$_posthog_breakdown_null_$$', 'value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['other_value', '$$_posthog_breakdown_null_$$', 'value'], ['other_value', '$$_posthog_breakdown_null_$$', 'value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1.0
      INNER JOIN
        (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -962,7 +976,7 @@
      WHERE equals(person_distinct_id2.team_id, 2)
      GROUP BY person_distinct_id2.distinct_id
      HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0), isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0)))
+     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'other_value'), 0), isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value'), 0)))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -1242,7 +1256,7 @@
 # ---
 # name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter
   '''
-  SELECT e__pdi__person.`properties___$some_prop` AS value,
+  SELECT toString(e__pdi__person.`properties___$some_prop`) AS value,
          count(DISTINCT e__pdi.person_id) AS count
   FROM events AS e
   INNER JOIN
@@ -1276,7 +1290,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -1308,7 +1322,7 @@
      CROSS JOIN
        (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
                e__pdi.person_id AS actor_id,
-               transform(ifNull(e__pdi__person.`properties___$some_prop`, '$$_posthog_breakdown_null_$$'), ['some_val2', 'some_val'], ['some_val2', 'some_val'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+               transform(ifNull(nullIf(toString(e__pdi__person.`properties___$some_prop`), ''), '$$_posthog_breakdown_null_$$'), ['some_val2', 'some_val'], ['some_val2', 'some_val'], '$$_posthog_breakdown_other_$$') AS breakdown_value
        FROM events AS e SAMPLE 1
        INNER JOIN
          (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -1329,7 +1343,7 @@
               WHERE equals(person.team_id, 2)
               GROUP BY person.id
               HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id)
-       WHERE and(equals(e.team_id, 2), and(equals(e.event, 'sign up'), ifNull(equals(e__pdi__person.properties___filter_prop, 'filter_val'), 0), or(ifNull(equals(e__pdi__person.`properties___$some_prop`, 'some_val2'), 0), ifNull(equals(e__pdi__person.`properties___$some_prop`, 'some_val'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), toIntervalDay(30))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
+       WHERE and(equals(e.team_id, 2), and(equals(e.event, 'sign up'), ifNull(equals(e__pdi__person.properties___filter_prop, 'filter_val'), 0), or(ifNull(equals(toString(e__pdi__person.`properties___$some_prop`), 'some_val2'), 0), ifNull(equals(toString(e__pdi__person.`properties___$some_prop`), 'some_val'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), toIntervalDay(30))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
        GROUP BY timestamp, actor_id,
                 breakdown_value) AS e
      WHERE and(ifNull(lessOrEquals(e.timestamp, plus(d.timestamp, toIntervalDay(1))), 0), ifNull(greater(e.timestamp, minus(d.timestamp, toIntervalDay(29))), 0))
@@ -1349,7 +1363,7 @@
 # ---
 # name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter_poe_v2
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', '')) AS value,
          count(DISTINCT ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)) AS count
   FROM events AS e
   LEFT OUTER JOIN
@@ -1370,7 +1384,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -1402,7 +1416,7 @@
      CROSS JOIN
        (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
                ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id) AS actor_id,
-               transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['some_val2', 'some_val'], ['some_val2', 'some_val'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+               transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['some_val2', 'some_val'], ['some_val2', 'some_val'], '$$_posthog_breakdown_other_$$') AS breakdown_value
        FROM events AS e SAMPLE 1
        LEFT OUTER JOIN
          (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
@@ -1410,7 +1424,7 @@
           FROM person_overrides
           WHERE equals(person_overrides.team_id, 2)
           GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id)
-       WHERE and(equals(e.team_id, 2), and(equals(e.event, 'sign up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, 'filter_prop'), ''), 'null'), '^"|"$', ''), 'filter_val'), 0), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', ''), 'some_val2'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', ''), 'some_val'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), toIntervalDay(30))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
+       WHERE and(equals(e.team_id, 2), and(equals(e.event, 'sign up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, 'filter_prop'), ''), 'null'), '^"|"$', ''), 'filter_val'), 0), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', '')), 'some_val2'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$some_prop'), ''), 'null'), '^"|"$', '')), 'some_val'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), toIntervalDay(30))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
        GROUP BY timestamp, actor_id,
                 breakdown_value) AS e
      WHERE and(ifNull(lessOrEquals(e.timestamp, plus(d.timestamp, toIntervalDay(1))), 0), ifNull(greater(e.timestamp, minus(d.timestamp, toIntervalDay(29))), 0))
@@ -1476,7 +1490,7 @@
 # ---
 # name: TestTrends.test_person_filtering_in_cohort_in_action.2
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   INNER JOIN
@@ -1503,7 +1517,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -1524,7 +1538,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(e.uuid) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
     FROM events AS e SAMPLE 1
      INNER JOIN
        (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -1538,7 +1552,7 @@
           FROM cohortpeople
           WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2))
           GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
-          HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)), or(isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0)))
+          HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)), or(isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'other_value'), 0)))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -1573,7 +1587,7 @@
 # ---
 # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.2
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   LEFT OUTER JOIN
@@ -1599,7 +1613,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -1620,7 +1634,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(e.uuid) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1
      LEFT OUTER JOIN
        (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
@@ -1633,7 +1647,7 @@
           FROM cohortpeople
           WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2))
           GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
-          HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)), or(isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0)))
+          HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)), or(isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'other_value'), 0)))
      GROUP BY day_start,
               breakdown_value)
   GROUP BY day_start,
@@ -2217,7 +2231,7 @@
 # ---
 # name: TestTrends.test_timezones_daily.4
   '''
-  SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '') AS value,
+  SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')) AS value,
          count(e.uuid) AS count
   FROM events AS e
   WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')))
@@ -2232,7 +2246,7 @@
   '''
   SELECT groupArray(day_start) AS date,
          groupArray(count) AS total,
-         ifNull(toString(breakdown_value), '') AS breakdown_value
+         ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value
   FROM
     (SELECT sum(total) AS count,
             day_start AS day_start,
@@ -2253,7 +2267,7 @@
      ORDER BY sec.breakdown_value ASC, day_start ASC
      UNION ALL SELECT count(DISTINCT e__pdi.person_id) AS total,
                       toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start,
-                      transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['Mac'], ['Mac'], '$$_posthog_breakdown_other_$$') AS breakdown_value
+                      transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['Mac'], ['Mac'], '$$_posthog_breakdown_other_$$') AS breakdown_value
      FROM events AS e SAMPLE 1
      INNER JOIN
        (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
@@ -2262,7 +2276,7 @@
      WHERE equals(person_distinct_id2.team_id, 2)
      GROUP BY person_distinct_id2.distinct_id
      HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
-     WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0))
toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0)) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')), 'Mac'), 0)) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -2408,7 +2422,7 @@ # --- # name: TestTrends.test_timezones_daily_minus_utc.4 ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')) AS value, count(e.uuid) AS count FROM events AS e WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'America/Phoenix'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix')))), lessOrEquals(toTimeZone(e.timestamp, 'America/Phoenix'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'America/Phoenix'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix')))), lessOrEquals(toTimeZone(e.timestamp, 'America/Phoenix'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))), equals(e.event, 'sign up'))) @@ -2423,7 +2437,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -2444,7 +2458,7 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(DISTINCT e__pdi.person_id) AS total, toStartOfDay(toTimeZone(e.timestamp, 'America/Phoenix')) AS day_start, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['Mac'], ['Mac'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['Mac'], ['Mac'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, @@ -2453,7 +2467,7 @@ WHERE equals(person_distinct_id2.team_id, 2) GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'America/Phoenix'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix')))), lessOrEquals(toTimeZone(e.timestamp, 
'America/Phoenix'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))), equals(e.event, 'sign up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0)) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'America/Phoenix'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix')))), lessOrEquals(toTimeZone(e.timestamp, 'America/Phoenix'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))), equals(e.event, 'sign up'), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')), 'Mac'), 0)) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -2599,7 +2613,7 @@ # --- # name: TestTrends.test_timezones_daily_plus_utc.4 ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')) AS value, count(e.uuid) AS count FROM events AS e WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo')))), lessOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo')))), lessOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))), equals(e.event, 'sign up'))) @@ -2614,7 +2628,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -2635,7 +2649,7 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(DISTINCT e__pdi.person_id) AS total, toStartOfDay(toTimeZone(e.timestamp, 'Asia/Tokyo')) AS day_start, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['Mac'], ['Mac'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['Mac'], ['Mac'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, @@ -2644,7 +2658,7 @@ WHERE equals(person_distinct_id2.team_id, 2) GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo')))), lessOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))), equals(e.event, 'sign 
up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', ''), 'Mac'), 0)) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo')))), lessOrEquals(toTimeZone(e.timestamp, 'Asia/Tokyo'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))), equals(e.event, 'sign up'), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$os'), ''), 'null'), '^"|"$', '')), 'Mac'), 0)) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -2992,7 +3006,7 @@ # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns ''' - SELECT e__pdi__person.properties___email AS value, + SELECT toString(e__pdi__person.properties___email) AS value, count(e.uuid) AS count FROM events AS e INNER JOIN @@ -3027,7 +3041,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -3048,7 +3062,7 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(e.uuid) AS total, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, - transform(ifNull(e__pdi__person.properties___email, '$$_posthog_breakdown_null_$$'), ['test2@posthog.com', 'test@gmail.com', 'test5@posthog.com', 'test4@posthog.com', 'test3@posthog.com'], ['test2@posthog.com', 'test@gmail.com', 'test5@posthog.com', 'test4@posthog.com', 'test3@posthog.com'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(e__pdi__person.properties___email), ''), '$$_posthog_breakdown_null_$$'), ['test2@posthog.com', 'test@gmail.com', 'test5@posthog.com', 'test4@posthog.com', 'test3@posthog.com'], ['test2@posthog.com', 'test@gmail.com', 'test5@posthog.com', 'test4@posthog.com', 'test3@posthog.com'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, @@ -3070,7 +3084,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-07-01 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), and(or(ifNull(notILike(e__pdi__person.properties___email, '%@posthog.com%'), 1), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0)), or(ifNull(equals(e__pdi__person.`properties___$os`, 'android'), 0), ifNull(equals(e__pdi__person.`properties___$browser`, 'safari'), 0))), or(ifNull(equals(e__pdi__person.properties___email, 'test2@posthog.com'), 0), ifNull(equals(e__pdi__person.properties___email, 'test@gmail.com'), 0), ifNull(equals(e__pdi__person.properties___email, 'test5@posthog.com'), 0), ifNull(equals(e__pdi__person.properties___email, 'test4@posthog.com'), 0), 
ifNull(equals(e__pdi__person.properties___email, 'test3@posthog.com'), 0))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-07-01 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), and(or(ifNull(notILike(e__pdi__person.properties___email, '%@posthog.com%'), 1), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0)), or(ifNull(equals(e__pdi__person.`properties___$os`, 'android'), 0), ifNull(equals(e__pdi__person.`properties___$browser`, 'safari'), 0))), or(ifNull(equals(toString(e__pdi__person.properties___email), 'test2@posthog.com'), 0), ifNull(equals(toString(e__pdi__person.properties___email), 'test@gmail.com'), 0), ifNull(equals(toString(e__pdi__person.properties___email), 'test5@posthog.com'), 0), ifNull(equals(toString(e__pdi__person.properties___email), 'test4@posthog.com'), 0), ifNull(equals(toString(e__pdi__person.properties___email), 'test3@posthog.com'), 0))) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -3085,7 +3099,7 @@ # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns.2 ''' - SELECT e__pdi__person.properties___email AS value, + SELECT toString(e__pdi__person.properties___email) AS value, count(e.uuid) AS count FROM events AS e INNER JOIN @@ -3120,7 +3134,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -3141,7 +3155,7 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(e.uuid) AS total, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, - transform(ifNull(e__pdi__person.properties___email, '$$_posthog_breakdown_null_$$'), ['test2@posthog.com'], ['test2@posthog.com'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(e__pdi__person.properties___email), ''), '$$_posthog_breakdown_null_$$'), ['test2@posthog.com'], ['test2@posthog.com'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, @@ -3163,7 +3177,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-07-01 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), and(ifNull(equals(e__pdi__person.`properties___$os`, 'android'), 0), ifNull(equals(e__pdi__person.`properties___$browser`, 'chrome'), 0)), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0), ifNull(ilike(e__pdi__person.properties___email, '%@posthog.com%'), 0)), ifNull(equals(e__pdi__person.properties___email, 'test2@posthog.com'), 0)) + WHERE 
and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-07-01 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), and(ifNull(equals(e__pdi__person.`properties___$os`, 'android'), 0), ifNull(equals(e__pdi__person.`properties___$browser`, 'chrome'), 0)), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0), ifNull(ilike(e__pdi__person.properties___email, '%@posthog.com%'), 0)), ifNull(equals(toString(e__pdi__person.properties___email), 'test2@posthog.com'), 0)) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -3248,7 +3262,7 @@ # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.2 ''' - SELECT e__pdi__person.`properties___$some_prop` AS value, + SELECT toString(e__pdi__person.`properties___$some_prop`) AS value, count(e.uuid) AS count FROM events AS e INNER JOIN @@ -3281,7 +3295,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -3302,7 +3316,7 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(DISTINCT e.distinct_id) AS total, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, - transform(ifNull(e__pdi__person.`properties___$some_prop`, '$$_posthog_breakdown_null_$$'), ['some_val', '$$_posthog_breakdown_null_$$'], ['some_val', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(e__pdi__person.`properties___$some_prop`), ''), '$$_posthog_breakdown_null_$$'), ['some_val', '$$_posthog_breakdown_null_$$'], ['some_val', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, @@ -3322,7 +3336,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(e__pdi__person.`properties___$some_prop`, 'some_val'), 0), isNull(e__pdi__person.`properties___$some_prop`))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(e__pdi__person.`properties___$some_prop`), 'some_val'), 0), isNull(toString(e__pdi__person.`properties___$some_prop`)), ifNull(equals(toString(e__pdi__person.`properties___$some_prop`), ''), 0))) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -3415,7 +3429,7 @@ # 
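Every hunk in this stretch of the snapshot file applies the same two-part change: breakdown expressions are wrapped in toString(), and the empty string is folded into the $$_posthog_breakdown_null_$$ bucket through the ifNull(nullIf(toString(expr), ''), '$$_posthog_breakdown_null_$$') chain feeding transform(). A minimal pure-Python model of that normalization, assuming only what the SQL itself shows (normalize_breakdown is an illustrative name, not PostHog code):

from typing import List, Optional

BREAKDOWN_NULL = "$$_posthog_breakdown_null_$$"
BREAKDOWN_OTHER = "$$_posthog_breakdown_other_$$"

def normalize_breakdown(value: Optional[object], buckets: List[str]) -> str:
    # toString(expr): coerce the raw property value to a string
    text = None if value is None else str(value)
    # nullIf(..., ''): treat the empty string as missing
    if text == "":
        text = None
    # ifNull(..., '$$_posthog_breakdown_null_$$'): missing values get the null bucket
    if text is None:
        return BREAKDOWN_NULL
    # transform(..., buckets, buckets, '$$_posthog_breakdown_other_$$')
    return text if text in buckets else BREAKDOWN_OTHER

assert normalize_breakdown(None, ["value"]) == BREAKDOWN_NULL
assert normalize_breakdown("", ["value"]) == BREAKDOWN_NULL  # '' now joins the null series
assert normalize_breakdown("value", ["value"]) == "value"
assert normalize_breakdown("stray", ["value"]) == BREAKDOWN_OTHER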
--- # name: TestTrends.test_trends_aggregate_by_distinct_id.6 ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_prop'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_prop'), ''), 'null'), '^"|"$', '')) AS value, count(e.uuid) AS count FROM events AS e WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))) @@ -3430,7 +3444,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -3451,9 +3465,9 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(DISTINCT e.distinct_id) AS total, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_prop'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$'], ['$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_prop'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$'], ['$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_prop'), ''), 'null'), '^"|"$', ''))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_prop'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_prop'), ''), 'null'), '^"|"$', '')), ''), 0))) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -3520,7 +3534,7 @@ # --- # name: TestTrends.test_trends_breakdown_cumulative ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value, count(e.uuid) AS count FROM events AS e WHERE 
and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))) @@ -3535,7 +3549,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT day_start AS day_start, sum(count) OVER (PARTITION BY breakdown_value @@ -3561,7 +3575,7 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(DISTINCT e__pdi.person_id) AS total, min(toStartOfDay(toTimeZone(e.timestamp, 'UTC'))) AS day_start, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, @@ -3570,12 +3584,13 @@ WHERE equals(person_distinct_id2.team_id, 2) GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0), 
ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'other_value'), 0))) GROUP BY e__pdi.person_id, breakdown_value) GROUP BY day_start, breakdown_value - ORDER BY day_start ASC, breakdown_value ASC)) + ORDER BY day_start ASC, breakdown_value ASC) + ORDER BY day_start ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, @@ -3585,7 +3600,7 @@ # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2 ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value, count(e.uuid) AS count FROM events AS e WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))) @@ -3600,7 +3615,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT day_start AS day_start, sum(count) OVER (PARTITION BY breakdown_value @@ -3626,7 +3641,7 @@ ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(DISTINCT ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)) AS total, min(toStartOfDay(toTimeZone(e.timestamp, 'UTC'))) AS day_start, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], ['$$_posthog_breakdown_null_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 LEFT OUTER JOIN (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id, @@ -3634,12 +3649,13 @@ FROM person_overrides WHERE equals(person_overrides.team_id, 2) GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), 
or(isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'other_value'), 0))) GROUP BY ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id), breakdown_value) GROUP BY day_start, breakdown_value - ORDER BY day_start ASC, breakdown_value ASC)) + ORDER BY day_start ASC, breakdown_value ASC) + ORDER BY day_start ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC LIMIT 10000 SETTINGS readonly=2, @@ -3649,7 +3665,7 @@ # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value, max(e__session.duration) AS count FROM events AS e INNER JOIN @@ -3672,7 +3688,7 @@ breakdown_value AS breakdown_value FROM (SELECT any(e__session.duration) AS session_duration, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1', '$$_posthog_breakdown_null_$$'], ['value2', 'value1', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1', '$$_posthog_breakdown_null_$$'], ['value2', 'value1', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT events.`$session_id` AS id, @@ -3680,7 +3696,7 @@ FROM events WHERE and(equals(events.team_id, 2), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), ifNull(notEquals(id, ''), 1)) GROUP BY id) AS e__session ON equals(e.`$session_id`, e__session.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), 
toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value2'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value1'), 0), isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value2'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value1'), 0), isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0))) GROUP BY e__session.id, breakdown_value) GROUP BY breakdown_value @@ -3691,7 +3707,7 @@ # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown.2 ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value, max(e__session.duration) AS count FROM events AS e INNER JOIN @@ -3714,7 +3730,7 @@ breakdown_value AS breakdown_value FROM (SELECT any(e__session.duration) AS session_duration, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1', '$$_posthog_breakdown_null_$$'], ['value2', 'value1', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1', '$$_posthog_breakdown_null_$$'], ['value2', 'value1', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT events.`$session_id` AS id, @@ -3722,7 +3738,7 @@ FROM events WHERE and(equals(events.team_id, 2), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), ifNull(notEquals(id, ''), 1)) GROUP BY id) AS e__session ON equals(e.`$session_id`, e__session.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 
lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value2'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value1'), 0), isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value2'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value1'), 0), isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), 0))) GROUP BY e__session.id, breakdown_value) GROUP BY breakdown_value @@ -3854,7 +3870,7 @@ # --- # name: TestTrends.test_trends_count_per_user_average_aggregated_with_event_property_breakdown_with_sampling ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', '')) AS value, count(e.uuid) AS count FROM events AS e WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC')))), equals(e.event, 'viewed video')) @@ -3874,7 +3890,7 @@ breakdown_value AS breakdown_value FROM (SELECT count(e.uuid) AS total, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['red', 'blue', '$$_posthog_breakdown_null_$$'], ['red', 'blue', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['red', 'blue', '$$_posthog_breakdown_null_$$'], ['red', 'blue', '$$_posthog_breakdown_null_$$'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1.0 INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, @@ -3883,7 +3899,7 @@ WHERE equals(person_distinct_id2.team_id, 2) GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(equals(e.event, 'viewed video'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), 
'^"|"$', ''), 'red'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', ''), 'blue'), 0), isNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', '')))), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')), toIntervalDay(0))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC')))) + WHERE and(equals(e.team_id, 2), and(equals(e.event, 'viewed video'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', '')), 'red'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', '')), 'blue'), 0), isNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', ''))), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'color'), ''), 'null'), '^"|"$', '')), ''), 0))), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')), toIntervalDay(0))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC')))) GROUP BY e__pdi.person_id, breakdown_value) GROUP BY breakdown_value) @@ -4022,7 +4038,8 @@ WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-30 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-06 23:59:59', 6, 'UTC'))), equals(e.event, 'viewed video'), ifNull(notEquals(nullIf(nullIf(e.`$group_0`, ''), 'null'), ''), 1), notEquals(e.`$group_0`, '')) GROUP BY e.`$group_0`) GROUP BY day_start - ORDER BY day_start ASC)) + ORDER BY day_start ASC) + ORDER BY day_start ASC) ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, @@ -4052,7 +4069,8 @@ WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start) GROUP BY day_start - ORDER BY day_start ASC)) + ORDER BY day_start ASC) + ORDER BY day_start ASC) ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, @@ -4089,7 +4107,8 @@ WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e__pdi.person_id) GROUP BY day_start - ORDER BY day_start ASC)) + ORDER BY day_start ASC) + ORDER BY day_start ASC) ORDER BY sum(count) DESC LIMIT 10000 SETTINGS readonly=2, max_execution_time=60, @@ -4098,7 +4117,7 @@ # --- # name: TestTrends.test_trends_person_breakdown_with_session_property_single_aggregate_math_and_breakdown ''' - SELECT e__pdi__person.`properties___$some_prop` AS value, + SELECT toString(e__pdi__person.`properties___$some_prop`) AS value, max(e__session.duration) AS count 
FROM events AS e INNER JOIN @@ -4139,7 +4158,7 @@ breakdown_value AS breakdown_value FROM (SELECT any(e__session.duration) AS session_duration, - transform(ifNull(e__pdi__person.`properties___$some_prop`, '$$_posthog_breakdown_null_$$'), ['some_val', 'another_val'], ['some_val', 'another_val'], '$$_posthog_breakdown_other_$$') AS breakdown_value + transform(ifNull(nullIf(toString(e__pdi__person.`properties___$some_prop`), ''), '$$_posthog_breakdown_null_$$'), ['some_val', 'another_val'], ['some_val', 'another_val'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM events AS e SAMPLE 1 INNER JOIN (SELECT events.`$session_id` AS id, @@ -4165,7 +4184,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(e__pdi__person.`properties___$some_prop`, 'some_val'), 0), ifNull(equals(e__pdi__person.`properties___$some_prop`, 'another_val'), 0))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(e__pdi__person.`properties___$some_prop`), 'some_val'), 0), ifNull(equals(toString(e__pdi__person.`properties___$some_prop`), 'another_val'), 0))) GROUP BY e__session.id, breakdown_value) GROUP BY breakdown_value @@ -4316,7 +4335,7 @@ # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value, max(e__session.duration) AS count FROM events AS e INNER JOIN @@ -4337,7 +4356,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -4361,7 +4380,7 @@ breakdown_value AS breakdown_value FROM (SELECT any(e__session.duration) AS session_duration, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1'], ['value2', 'value1'], '$$_posthog_breakdown_other_$$') AS breakdown_value, + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1'], ['value2', 'value1'], '$$_posthog_breakdown_other_$$') AS breakdown_value, toStartOfWeek(toTimeZone(e.timestamp, 'UTC'), 0) AS day_start FROM events AS e SAMPLE 1 INNER JOIN @@ -4370,7 +4389,7 @@ FROM events WHERE and(equals(events.team_id, 2), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), ifNull(notEquals(id, ''), 1)) GROUP BY id) AS e__session ON equals(e.`$session_id`, e__session.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value2'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value1'), 0))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value2'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value1'), 0))) GROUP BY day_start, e__session.id, breakdown_value, @@ -4389,7 +4408,7 @@ # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.2 ''' - SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '') AS value, + SELECT toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')) AS value, max(e__session.duration) AS count FROM events AS e INNER JOIN @@ -4410,7 +4429,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -4434,7 +4453,7 @@ breakdown_value AS breakdown_value FROM (SELECT any(e__session.duration) AS session_duration, - transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1'], ['value2', 'value1'], '$$_posthog_breakdown_other_$$') AS breakdown_value, + transform(ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$'), ['value2', 'value1'], ['value2', 'value1'], '$$_posthog_breakdown_other_$$') AS breakdown_value, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start FROM events AS e SAMPLE 1 INNER JOIN @@ -4443,7 +4462,7 @@ FROM events WHERE and(equals(events.team_id, 2), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), ifNull(notEquals(id, ''), 1)) GROUP 
BY id) AS e__session ON equals(e.`$session_id`, e__session.id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value2'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value1'), 0))) + WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), or(ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value2'), 0), ifNull(equals(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), 'value1'), 0))) GROUP BY day_start, e__session.id, breakdown_value, diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr index db9e8e1d45000..68ff0e1542b02 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr @@ -1,10 +1,10 @@ # serializer version: 1 # name: TestTrendsDataWarehouseQuery.test_trends_breakdown ''' - SELECT e.prop_1 AS value, + SELECT toString(e.prop_1) AS value, count(e.id) AS count FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.hogql.datawarehouse.trendquery/*.parquet', 'object_storage_root_user', 'object_storage_root_password', 'Parquet', 'id String, prop_1 String, prop_2 String, created DateTime64(3, \'UTC\')') AS e - WHERE and(and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0)), and(ifNull(greaterOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0))) + WHERE and(and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0)), and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0))) GROUP BY value ORDER BY count DESC, value DESC 
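The data warehouse snapshots pick up two further adjustments: the redundant toDateTime(toTimeZone(e.created, 'UTC'), 'UTC') wrapper collapses to toTimeZone(e.created, 'UTC'), and bare equals(e.prop_1, 'd') comparisons become ifNull(equals(toString(e.prop_1), 'd'), 0). The ifNull wrapper matters because ClickHouse comparisons return NULL, not 0, when an operand is NULL; a toy three-valued-logic model of the wrapper:

from typing import Optional

def equals_3vl(a: Optional[str], b: Optional[str]) -> Optional[bool]:
    # ClickHouse-style equals(): NULL when either operand is NULL
    if a is None or b is None:
        return None
    return a == b

def if_null(value: Optional[bool], default: bool) -> bool:
    # ifNull(x, default): substitute default when x is NULL
    return default if value is None else value

assert if_null(equals_3vl(None, "a"), False) is False  # NULL no longer poisons the AND chain
assert if_null(equals_3vl("a", "a"), False) is True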
LIMIT 26 SETTINGS readonly=2, @@ -16,7 +16,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -36,10 +36,10 @@ JOIN breakdown_value AS breakdown_value) AS sec ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(e.id) AS total, - toStartOfDay(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC')) AS day_start, - transform(ifNull(e.prop_1, '$$_posthog_breakdown_null_$$'), ['d', 'c', 'b', 'a'], ['d', 'c', 'b', 'a'], '$$_posthog_breakdown_other_$$') AS breakdown_value + toStartOfDay(toTimeZone(e.created, 'UTC')) AS day_start, + transform(ifNull(nullIf(toString(e.prop_1), ''), '$$_posthog_breakdown_null_$$'), ['d', 'c', 'b', 'a'], ['d', 'c', 'b', 'a'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.hogql.datawarehouse.trendquery/*.parquet', 'object_storage_root_user', 'object_storage_root_password', 'Parquet', 'id String, prop_1 String, prop_2 String, created DateTime64(3, \'UTC\')') AS e - WHERE and(ifNull(greaterOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), or(equals(e.prop_1, 'd'), equals(e.prop_1, 'c'), equals(e.prop_1, 'b'), equals(e.prop_1, 'a'))) + WHERE and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), or(ifNull(equals(toString(e.prop_1), 'd'), 0), ifNull(equals(toString(e.prop_1), 'c'), 0), ifNull(equals(toString(e.prop_1), 'b'), 0), ifNull(equals(toString(e.prop_1), 'a'), 0))) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -54,10 +54,10 @@ # --- # name: TestTrendsDataWarehouseQuery.test_trends_breakdown_with_property ''' - SELECT e.prop_1 AS value, + SELECT toString(e.prop_1) AS value, count(e.id) AS count FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.hogql.datawarehouse.trendquery/*.parquet', 'object_storage_root_user', 'object_storage_root_password', 'Parquet', 'id String, prop_1 String, prop_2 String, created DateTime64(3, \'UTC\')') AS e - WHERE and(and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0)), and(ifNull(greaterOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a'))) + WHERE and(and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), 
assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0)), and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a'))) GROUP BY value ORDER BY count DESC, value DESC LIMIT 26 SETTINGS readonly=2, @@ -69,7 +69,7 @@ ''' SELECT groupArray(day_start) AS date, groupArray(count) AS total, - ifNull(toString(breakdown_value), '') AS breakdown_value + ifNull(toString(breakdown_value), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM (SELECT sum(total) AS count, day_start AS day_start, @@ -89,10 +89,10 @@ JOIN breakdown_value AS breakdown_value) AS sec ORDER BY sec.breakdown_value ASC, day_start ASC UNION ALL SELECT count(e.id) AS total, - toStartOfDay(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC')) AS day_start, - transform(ifNull(e.prop_1, '$$_posthog_breakdown_null_$$'), ['a'], ['a'], '$$_posthog_breakdown_other_$$') AS breakdown_value + toStartOfDay(toTimeZone(e.created, 'UTC')) AS day_start, + transform(ifNull(nullIf(toString(e.prop_1), ''), '$$_posthog_breakdown_null_$$'), ['a'], ['a'], '$$_posthog_breakdown_other_$$') AS breakdown_value FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.hogql.datawarehouse.trendquery/*.parquet', 'object_storage_root_user', 'object_storage_root_password', 'Parquet', 'id String, prop_1 String, prop_2 String, created DateTime64(3, \'UTC\')') AS e - WHERE and(ifNull(greaterOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a'), equals(e.prop_1, 'a')) + WHERE and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a'), ifNull(equals(toString(e.prop_1), 'a'), 0)) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -119,9 +119,9 @@ UNION ALL SELECT 0 AS total, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))) AS day_start UNION ALL SELECT count(e.id) AS total, - toStartOfDay(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC')) AS day_start + toStartOfDay(toTimeZone(e.created, 'UTC')) AS day_start FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.hogql.datawarehouse.trendquery/*.parquet', 'object_storage_root_user', 'object_storage_root_password', 'Parquet', 'id String, prop_1 String, prop_2 String, created DateTime64(3, \'UTC\')') AS e - WHERE and(ifNull(greaterOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0)) + WHERE and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), 
ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0)) GROUP BY day_start) GROUP BY day_start ORDER BY day_start ASC) @@ -145,9 +145,9 @@ UNION ALL SELECT 0 AS total, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))) AS day_start UNION ALL SELECT count(e.id) AS total, - toStartOfDay(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC')) AS day_start + toStartOfDay(toTimeZone(e.created, 'UTC')) AS day_start FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.hogql.datawarehouse.trendquery/*.parquet', 'object_storage_root_user', 'object_storage_root_password', 'Parquet', 'id String, prop_1 String, prop_2 String, created DateTime64(3, \'UTC\')') AS e - WHERE and(ifNull(greaterOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a')) + WHERE and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a')) GROUP BY day_start) GROUP BY day_start ORDER BY day_start ASC) @@ -171,9 +171,9 @@ UNION ALL SELECT 0 AS total, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))) AS day_start UNION ALL SELECT count(e.id) AS total, - toStartOfDay(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC')) AS day_start + toStartOfDay(toTimeZone(e.created, 'UTC')) AS day_start FROM s3('http://host.docker.internal:19000/posthog/test_storage_bucket-posthog.hogql.datawarehouse.trendquery/*.parquet', 'object_storage_root_user', 'object_storage_root_password', 'Parquet', 'id String, prop_1 String, prop_2 String, created DateTime64(3, \'UTC\')') AS e - WHERE and(ifNull(greaterOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toDateTime(toTimeZone(e.created, 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a')) + WHERE and(ifNull(greaterOrEquals(toTimeZone(e.created, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(toTimeZone(e.created, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))), 0), equals(e.prop_1, 'a')) GROUP BY day_start) GROUP BY day_start ORDER BY day_start ASC) diff --git a/posthog/hogql_queries/insights/trends/test/test_trends.py b/posthog/hogql_queries/insights/trends/test/test_trends.py index 1ac54e16de629..1ef5c15c59b4a 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends.py @@ -22,7 +22,7 @@ from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner from posthog.hogql_queries.legacy_compatibility.filter_to_query import ( clean_entity_properties, - clean_properties, + clean_global_properties, ) from posthog.models import ( Action, @@ -118,7 +118,7 @@ def _props(dict: Dict): "values": 
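# The snapshot changes in this region do two things: they replace the outer
# ifNull(toString(breakdown_value), '') fallback with the explicit
# '$$_posthog_breakdown_null_$$' sentinel, and they normalise values with
# ifNull(nullIf(toString(...), ''), ...) before transform(). A rough Python
# analogue of that bucketing, as an illustrative sketch only (the real work
# happens inside ClickHouse, where the dropped toDateTime(...) wrapper was a
# redundant re-conversion of the value toTimeZone() already returns):
BREAKDOWN_NULL_STRING_LABEL = "$$_posthog_breakdown_null_$$"
BREAKDOWN_OTHER_STRING_LABEL = "$$_posthog_breakdown_other_$$"

def normalize_breakdown_value(prop):
    # toString(...) -> nullIf(..., '') -> ifNull(..., null label)
    s = "" if prop is None else str(prop)
    return s if s != "" else BREAKDOWN_NULL_STRING_LABEL

def ch_transform(value, from_values, to_values, default):
    # Mirrors ClickHouse transform(): listed values map across, anything else -> default
    return dict(zip(from_values, to_values)).get(value, default)

assert normalize_breakdown_value(None) == BREAKDOWN_NULL_STRING_LABEL
assert normalize_breakdown_value("") == BREAKDOWN_NULL_STRING_LABEL
assert ch_transform("d", ["d", "c", "b", "a"], ["d", "c", "b", "a"], BREAKDOWN_OTHER_STRING_LABEL) == "d"
assert ch_transform("x", ["d", "c", "b", "a"], ["d", "c", "b", "a"], BREAKDOWN_OTHER_STRING_LABEL) == BREAKDOWN_OTHER_STRING_LABEL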
[{"type": "AND", "values": [props]}], } - return PropertyGroupFilter(**clean_properties(raw_properties)) + return PropertyGroupFilter(**clean_global_properties(raw_properties)) def convert_filter_to_trends_query(filter: Filter) -> TrendsQuery: @@ -5180,7 +5180,9 @@ def test_breakdown_filtering_with_properties_in_new_format(self): ) response = sorted(response, key=lambda x: x["label"]) - self.assertEqual(len(response), 0) + self.assertEqual(len(response), 1) + self.assertEqual(response[0]["label"], "$$_posthog_breakdown_null_$$") + self.assertEqual(response[0]["count"], 0) @also_test_with_person_on_events_v2 @snapshot_clickhouse_queries diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py index 104e232a01406..6bb41b19c79cf 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py @@ -1,11 +1,15 @@ +import zoneinfo from dataclasses import dataclass +from datetime import datetime from typing import Dict, List, Optional -from unittest.mock import patch +from unittest.mock import MagicMock, patch from django.test import override_settings from freezegun import freeze_time +from posthog.clickhouse.client.execute import sync_execute from posthog.hogql import ast -from posthog.hogql.constants import MAX_SELECT_RETURNED_ROWS +from posthog.hogql.constants import MAX_SELECT_RETURNED_ROWS, LimitContext from posthog.hogql.modifiers import create_default_modifiers_for_team +from posthog.hogql.query import INCREASED_MAX_EXECUTION_TIME from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner from posthog.models.cohort.cohort import Cohort from posthog.models.property_definition import PropertyDefinition @@ -175,6 +179,7 @@ def _create_query_runner( breakdown: Optional[BreakdownFilter] = None, filter_test_accounts: Optional[bool] = None, hogql_modifiers: Optional[HogQLQueryModifiers] = None, + limit_context: Optional[LimitContext] = None, ) -> TrendsQueryRunner: query_series: List[EventsNode | ActionsNode] = [EventsNode(event="$pageview")] if series is None else series query = TrendsQuery( @@ -185,7 +190,7 @@ def _create_query_runner( breakdownFilter=breakdown, filterTestAccounts=filter_test_accounts, ) - return TrendsQueryRunner(team=self.team, query=query, modifiers=hogql_modifiers) + return TrendsQueryRunner(team=self.team, query=query, modifiers=hogql_modifiers, limit_context=limit_context) def _run_trends_query( self, @@ -195,8 +200,10 @@ def _run_trends_query( series: Optional[List[EventsNode | ActionsNode]], trends_filters: Optional[TrendsFilter] = None, breakdown: Optional[BreakdownFilter] = None, + *, filter_test_accounts: Optional[bool] = None, hogql_modifiers: Optional[HogQLQueryModifiers] = None, + limit_context: Optional[LimitContext] = None, ): return self._create_query_runner( date_from=date_from, @@ -207,6 +214,7 @@ def _run_trends_query( breakdown=breakdown, filter_test_accounts=filter_test_accounts, hogql_modifiers=hogql_modifiers, + limit_context=limit_context, ).calculate() def test_trends_query_label(self): @@ -364,6 +372,19 @@ def test_trends_query_formula(self): self.assertEqual("Formula (A+B)", response.results[0]["label"]) self.assertEqual([1, 0, 2, 4, 4, 0, 2, 1, 1, 0, 1], response.results[0]["data"]) + def test_trends_query_formula_breakdown_no_data(self): + self._create_test_events() + + response = self._run_trends_query( + 
self.default_date_from, + self.default_date_to, + IntervalType.day, + [EventsNode(event="$pageviewxxx"), EventsNode(event="$pageleavexxx")], + TrendsFilter(formula="A+B"), + BreakdownFilter(breakdown_type=BreakdownType.person, breakdown="$browser"), + ) + self.assertEqual([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], response.results[0]["data"]) + def test_trends_query_formula_aggregate(self): self._create_test_events() @@ -695,16 +716,7 @@ def test_trends_breakdowns_multiple_hogql(self): breakdown_labels = [result["breakdown_value"] for result in response.results] assert len(response.results) == 8 - assert breakdown_labels == [ - "Chrome", - "Firefox", - "Edge", - "Safari", - "Chrome", - "Edge", - "Firefox", - "Safari", - ] + assert breakdown_labels == ["Chrome", "Firefox", "Edge", "Safari", "Chrome", "Edge", "Firefox", "Safari"] assert response.results[0]["label"] == f"$pageview - Chrome" assert response.results[1]["label"] == f"$pageview - Firefox" assert response.results[2]["label"] == f"$pageview - Edge" @@ -804,6 +816,7 @@ def test_trends_breakdown_and_aggregation_query_orchestration(self): 10, 0, ] + assert response.results[1]["data"] == [ 20, 0, @@ -1114,6 +1127,38 @@ def test_breakdown_values_limit(self): ) self.assertEqual(len(response.results), 11) + def test_breakdown_values_unknown_property(self): + # same as above test, just without creating the property definition + for value in list(range(30)): + _create_event( + team=self.team, + event="$pageview", + distinct_id=f"person_{value}", + timestamp="2020-01-11T12:00:00Z", + properties={"breakdown_value": f"{value}"}, + ) + + response = self._run_trends_query( + "2020-01-09", + "2020-01-20", + IntervalType.day, + [EventsNode(event="$pageview")], + TrendsFilter(display=ChartDisplayType.ActionsLineGraph), + BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.event), + ) + + self.assertEqual(len(response.results), 26) + + response = self._run_trends_query( + "2020-01-09", + "2020-01-20", + IntervalType.day, + [EventsNode(event="$pageview")], + TrendsFilter(display=ChartDisplayType.ActionsLineGraph), + BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.event, breakdown_limit=10), + ) + self.assertEqual(len(response.results), 11) + def test_breakdown_values_world_map_limit(self): PropertyDefinition.objects.create(team=self.team, name="breakdown_value", property_type="String") @@ -1435,18 +1480,18 @@ def test_to_actors_query_options(self): response = runner.to_actors_query_options() assert response.day == [ - DayItem(label="2020-01-09", value="2020-01-09"), - DayItem(label="2020-01-10", value="2020-01-10"), - DayItem(label="2020-01-11", value="2020-01-11"), - DayItem(label="2020-01-12", value="2020-01-12"), - DayItem(label="2020-01-13", value="2020-01-13"), - DayItem(label="2020-01-14", value="2020-01-14"), - DayItem(label="2020-01-15", value="2020-01-15"), - DayItem(label="2020-01-16", value="2020-01-16"), - DayItem(label="2020-01-17", value="2020-01-17"), - DayItem(label="2020-01-18", value="2020-01-18"), - DayItem(label="2020-01-19", value="2020-01-19"), - DayItem(label="2020-01-20", value="2020-01-20"), + DayItem(label="9-Jan-2020", value=datetime(2020, 1, 9, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="10-Jan-2020", value=datetime(2020, 1, 10, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="11-Jan-2020", value=datetime(2020, 1, 11, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="12-Jan-2020", value=datetime(2020, 1, 12, 0, 0, 
tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="13-Jan-2020", value=datetime(2020, 1, 13, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="14-Jan-2020", value=datetime(2020, 1, 14, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="15-Jan-2020", value=datetime(2020, 1, 15, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="16-Jan-2020", value=datetime(2020, 1, 16, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="17-Jan-2020", value=datetime(2020, 1, 17, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="18-Jan-2020", value=datetime(2020, 1, 18, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="19-Jan-2020", value=datetime(2020, 1, 19, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="20-Jan-2020", value=datetime(2020, 1, 20, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), ] assert response.breakdown is None @@ -1470,18 +1515,18 @@ def test_to_actors_query_options_compare(self): response = runner.to_actors_query_options() assert response.day == [ - DayItem(label="2020-01-09", value="2020-01-09"), - DayItem(label="2020-01-10", value="2020-01-10"), - DayItem(label="2020-01-11", value="2020-01-11"), - DayItem(label="2020-01-12", value="2020-01-12"), - DayItem(label="2020-01-13", value="2020-01-13"), - DayItem(label="2020-01-14", value="2020-01-14"), - DayItem(label="2020-01-15", value="2020-01-15"), - DayItem(label="2020-01-16", value="2020-01-16"), - DayItem(label="2020-01-17", value="2020-01-17"), - DayItem(label="2020-01-18", value="2020-01-18"), - DayItem(label="2020-01-19", value="2020-01-19"), - DayItem(label="2020-01-20", value="2020-01-20"), + DayItem(label="9-Jan-2020", value=datetime(2020, 1, 9, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="10-Jan-2020", value=datetime(2020, 1, 10, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="11-Jan-2020", value=datetime(2020, 1, 11, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="12-Jan-2020", value=datetime(2020, 1, 12, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="13-Jan-2020", value=datetime(2020, 1, 13, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="14-Jan-2020", value=datetime(2020, 1, 14, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="15-Jan-2020", value=datetime(2020, 1, 15, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="16-Jan-2020", value=datetime(2020, 1, 16, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="17-Jan-2020", value=datetime(2020, 1, 17, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="18-Jan-2020", value=datetime(2020, 1, 18, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="19-Jan-2020", value=datetime(2020, 1, 19, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), + DayItem(label="20-Jan-2020", value=datetime(2020, 1, 20, 0, 0, tzinfo=zoneinfo.ZoneInfo(key="UTC"))), ] assert response.breakdown is None @@ -1555,9 +1600,8 @@ def test_to_actors_query_options_breakdowns_boolean(self): assert response.series == [InsightActorsQuerySeries(label="$pageview", value=0)] assert response.breakdown == [ - # BreakdownItem(label="Other", value="$$_posthog_breakdown_other_$$"), # TODO: Add when "Other" works - BreakdownItem(label="true", value=1), - BreakdownItem(label="false", value=0), + BreakdownItem(label="true", value="true"), + BreakdownItem(label="false", value="false"), ] def test_to_actors_query_options_breakdowns_histogram(self): @@ -1649,3 +1693,16 @@ def test_to_actors_query_options_breakdowns_hogql(self): 
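# The expectations above switch DayItem.value from "YYYY-MM-DD" strings to
# timezone-aware datetimes labelled like "9-Jan-2020". A hedged sketch of how
# such a label can be derived (format_label_date, used in the runner below,
# lives elsewhere in posthog and its exact formatting may differ):
import zoneinfo
from datetime import datetime

def day_label(value: datetime) -> str:
    return f"{value.day}-{value.strftime('%b-%Y')}"  # day without zero-padding

assert day_label(datetime(2020, 1, 9, tzinfo=zoneinfo.ZoneInfo("UTC"))) == "9-Jan-2020"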
BreakdownItem(label="Safari", value="Safari"), BreakdownItem(label="Edge", value="Edge"), ] + + @patch("posthog.hogql.query.sync_execute", wraps=sync_execute) + def test_limit_is_context_aware(self, mock_sync_execute: MagicMock): + self._run_trends_query( + "2020-01-09", + "2020-01-20", + IntervalType.day, + [EventsNode(event="$pageview")], + limit_context=LimitContext.QUERY_ASYNC, + ) + + mock_sync_execute.assert_called_once() + self.assertIn(f" max_execution_time={INCREASED_MAX_EXECUTION_TIME},", mock_sync_execute.call_args[0][0]) diff --git a/posthog/hogql_queries/insights/trends/trends_query_builder.py b/posthog/hogql_queries/insights/trends/trends_query_builder.py index ed5d867b48b75..a911e4bf8302a 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_builder.py +++ b/posthog/hogql_queries/insights/trends/trends_query_builder.py @@ -14,6 +14,7 @@ from posthog.models.action.action import Action from posthog.models.filters.mixins.utils import cached_property from posthog.models.team.team import Team +from posthog.queries.trends.breakdown import BREAKDOWN_NULL_STRING_LABEL from posthog.schema import ( ActionsNode, DataWarehouseNode, @@ -68,14 +69,18 @@ def build_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: return full_query def build_actors_query( - self, time_frame: Optional[str | int] = None, breakdown_filter: Optional[str | int] = None + self, time_frame: Optional[str] = None, breakdown_filter: Optional[str] = None ) -> ast.SelectQuery | ast.SelectUnionQuery: breakdown = self._breakdown(is_actors_query=True, breakdown_values_override=breakdown_filter) return parse_select( """ - SELECT DISTINCT actor_id + SELECT + actor_id, + count() as event_count, + groupUniqArray(100)((timestamp, uuid, $session_id, $window_id)) as matching_events FROM {subquery} + GROUP BY actor_id """, placeholders={ "subquery": self._get_events_subquery( @@ -165,7 +170,7 @@ def _get_events_subquery( is_actors_query: bool, breakdown: Breakdown, breakdown_values_override: Optional[str | int] = None, - actors_query_time_frame: Optional[str | int] = None, + actors_query_time_frame: Optional[str] = None, ) -> ast.SelectQuery: day_start = ast.Alias( alias="day_start", @@ -182,31 +187,16 @@ def _get_events_subquery( actors_query_time_frame=actors_query_time_frame, ) - default_query = cast( - ast.SelectQuery, - parse_select( - """ - SELECT - {aggregation_operation} AS total - FROM {table} AS e - WHERE {events_filter} - """ - if isinstance(self.series, DataWarehouseNode) - else """ - SELECT - {aggregation_operation} AS total - FROM {table} AS e - SAMPLE {sample} - WHERE {events_filter} - """, - placeholders={ - "table": self._table_expr, - "events_filter": events_filter, - "aggregation_operation": self._aggregation_operation.select_aggregation(), - "sample": self._sample_value(), - }, - ), + default_query = ast.SelectQuery( + select=[ast.Alias(alias="total", expr=self._aggregation_operation.select_aggregation())], + select_from=ast.JoinExpr(table=self._table_expr, alias="e"), + where=events_filter, ) + if not isinstance(self.series, DataWarehouseNode): + assert default_query.select_from is not None + default_query.select_from.sample = ast.SampleExpr( + sample_value=self._sample_value(), + ) default_query.group_by = [] @@ -225,8 +215,13 @@ def _get_events_subquery( # TODO: Move this logic into the below branches when working on adding breakdown support for the person modal if is_actors_query: - default_query.select = [ast.Alias(alias="actor_id", expr=self._aggregation_operation.actor_id())] - 
default_query.distinct = True + default_query.select = [ + ast.Alias(alias="actor_id", expr=self._aggregation_operation.actor_id()), + ast.Field(chain=["e", "timestamp"]), + ast.Field(chain=["e", "uuid"]), + ast.Field(chain=["e", "$session_id"]), + ast.Field(chain=["e", "$window_id"]), + ] default_query.group_by = [] # No breakdowns and no complex series aggregation @@ -298,7 +293,8 @@ def _get_events_subquery( # Just breakdowns elif breakdown.enabled: if not is_actors_query: - default_query.select.append(breakdown.column_expr()) + breakdown_expr = breakdown.column_expr() + default_query.select.append(breakdown_expr) default_query.group_by.append(ast.Field(chain=["breakdown_value"])) # Just session duration math property elif self._aggregation_operation.aggregating_on_session_duration(): @@ -375,7 +371,7 @@ def _outer_select_query(self, breakdown: Breakdown, inner_query: ast.SelectQuery name="ifNull", args=[ ast.Call(name="toString", args=[ast.Field(chain=["breakdown_value"])]), - ast.Constant(value=""), + ast.Constant(value=BREAKDOWN_NULL_STRING_LABEL), ], ), ) @@ -454,20 +450,27 @@ def _events_filter( breakdown: Breakdown | None, ignore_breakdowns: bool = False, breakdown_values_override: Optional[str | int] = None, - actors_query_time_frame: Optional[str | int] = None, + actors_query_time_frame: Optional[str] = None, ) -> ast.Expr: series = self.series filters: List[ast.Expr] = [] # Dates if is_actors_query and actors_query_time_frame is not None: - to_start_of_time_frame = f"toStartOf{self.query_date_range.interval_name.capitalize()}" - filters.append( - ast.CompareOperation( - left=ast.Call(name=to_start_of_time_frame, args=[ast.Field(chain=["timestamp"])]), - op=ast.CompareOperationOp.Eq, - right=ast.Call(name="toDateTime", args=[ast.Constant(value=actors_query_time_frame)]), - ) + actors_from, actors_to = self.query_date_range.interval_bounds_from_str(actors_query_time_frame) + filters.extend( + [ + ast.CompareOperation( + left=ast.Field(chain=["timestamp"]), + op=ast.CompareOperationOp.GtEq, + right=ast.Constant(value=actors_from), + ), + ast.CompareOperation( + left=ast.Field(chain=["timestamp"]), + op=ast.CompareOperationOp.Lt, + right=ast.Constant(value=actors_to), + ), + ] ) elif not self._aggregation_operation.requires_query_orchestration(): filters.extend( @@ -564,7 +567,7 @@ def session_duration_math_property_wrapper(self, default_query: ast.SelectQuery) query.group_by = [] return query - def _breakdown(self, is_actors_query: bool, breakdown_values_override: Optional[str | int] = None): + def _breakdown(self, is_actors_query: bool, breakdown_values_override: Optional[str] = None): return Breakdown( team=self.team, query=self.query, diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index 29d29b55e8b0f..67e160084e68e 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -23,9 +23,7 @@ from posthog.hogql.query import execute_hogql_query from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.trends.breakdown_values import ( - BREAKDOWN_NULL_NUMERIC_LABEL, BREAKDOWN_NULL_STRING_LABEL, - BREAKDOWN_OTHER_NUMERIC_LABEL, BREAKDOWN_OTHER_STRING_LABEL, ) from posthog.hogql_queries.insights.trends.display import TrendsDisplay @@ -142,7 +140,7 @@ def to_queries(self) -> List[ast.SelectQuery | ast.SelectUnionQuery]: def to_actors_query( self, - time_frame: Optional[str | int], + 
time_frame: Optional[str], series_index: int, breakdown_value: Optional[str | int] = None, compare: Optional[Compare] = None, @@ -175,7 +173,7 @@ def to_actors_query( modifiers=self.modifiers, ) - query = query_builder.build_actors_query(time_frame=time_frame, breakdown_filter=breakdown_value) + query = query_builder.build_actors_query(time_frame=time_frame, breakdown_filter=str(breakdown_value)) return query @@ -185,7 +183,13 @@ def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse: res_compare: List[CompareItem] | None = None # Days - res_days: List[DayItem] = [DayItem(label=day, value=day) for day in self.query_date_range.all_values()] + res_days: list[DayItem] = [ + DayItem( + label=format_label_date(value, self.query_date_range.interval_name), + value=value, + ) + for value in self.query_date_range.all_values() + ] # Series for index, series in enumerate(self.query.series): @@ -240,14 +244,10 @@ def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse: cohort_name = "all users" if str(value) == "0" else Cohort.objects.get(pk=value).name label = cohort_name value = value - elif value == BREAKDOWN_OTHER_STRING_LABEL or value == BREAKDOWN_OTHER_NUMERIC_LABEL: - # label = "Other" - # value = BREAKDOWN_OTHER_STRING_LABEL - continue # TODO: Add support for "other" breakdowns - elif value == BREAKDOWN_NULL_STRING_LABEL or value == BREAKDOWN_NULL_NUMERIC_LABEL: - # label = "Null" - # value = BREAKDOWN_NULL_STRING_LABEL - continue # TODO: Add support for "null" breakdowns + elif value == BREAKDOWN_OTHER_STRING_LABEL: + label = "Other (Groups all remaining values)" + elif value == BREAKDOWN_NULL_STRING_LABEL: + label = "None (No value)" elif is_boolean_breakdown: label = self._convert_boolean(value) else: @@ -292,6 +292,7 @@ def run(index: int, query: ast.SelectQuery | ast.SelectUnionQuery, is_parallel: team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) timings_matrix[index] = response.timings @@ -500,18 +501,6 @@ def get_value(name: str, val: Any): series_object["breakdown_value"] = remapped_label - # If the breakdown value is the numeric "other", then set it to the string version - if ( - remapped_label == BREAKDOWN_OTHER_NUMERIC_LABEL - or remapped_label == str(BREAKDOWN_OTHER_NUMERIC_LABEL) - or remapped_label == float(BREAKDOWN_OTHER_NUMERIC_LABEL) - ): - series_object["breakdown_value"] = BREAKDOWN_OTHER_STRING_LABEL - if real_series_count > 1 or self._is_breakdown_field_boolean(): - series_object["label"] = "{} - {}".format(series_label or "All events", "Other") - else: - series_object["label"] = "Other" - res.append(series_object) return res @@ -666,25 +655,27 @@ def apply_formula(self, formula: str, results: List[Dict[str, Any]]) -> List[Dic res.append(new_result) return res - if self._trends_display.should_aggregate_values(): - series_data = list(map(lambda s: [s["aggregated_value"]], results)) - new_series_data = FormulaAST(series_data).call(formula) + if len(results) > 0: + if self._trends_display.should_aggregate_values(): + series_data = list(map(lambda s: [s["aggregated_value"]], results)) + new_series_data = FormulaAST(series_data).call(formula) - new_result = results[0] - new_result["aggregated_value"] = float(sum(new_series_data)) - new_result["data"] = None - new_result["count"] = 0 - new_result["label"] = f"Formula ({formula})" - else: - series_data = list(map(lambda s: s["data"], results)) - new_series_data = FormulaAST(series_data).call(formula) + new_result = results[0] + 
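# to_actors_query_options above now keeps the sentinel breakdown buckets it
# used to skip with `continue`, giving them readable labels. Minimal sketch of
# the new mapping:
BREAKDOWN_OTHER_STRING_LABEL = "$$_posthog_breakdown_other_$$"
BREAKDOWN_NULL_STRING_LABEL = "$$_posthog_breakdown_null_$$"

def breakdown_option_label(value) -> str:
    if value == BREAKDOWN_OTHER_STRING_LABEL:
        return "Other (Groups all remaining values)"
    if value == BREAKDOWN_NULL_STRING_LABEL:
        return "None (No value)"
    return str(value)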
new_result["aggregated_value"] = float(sum(new_series_data)) + new_result["data"] = None + new_result["count"] = 0 + new_result["label"] = f"Formula ({formula})" + else: + series_data = list(map(lambda s: s["data"], results)) + new_series_data = FormulaAST(series_data).call(formula) - new_result = results[0] - new_result["data"] = new_series_data - new_result["count"] = float(sum(new_series_data)) - new_result["label"] = f"Formula ({formula})" + new_result = results[0] + new_result["data"] = new_series_data + new_result["count"] = float(sum(new_series_data)) + new_result["label"] = f"Formula ({formula})" - return [new_result] + return [new_result] + return [] def _is_breakdown_field_boolean(self): if not self.query.breakdownFilter or not self.query.breakdownFilter.breakdown_type: @@ -741,13 +732,19 @@ def _event_property( field: str, field_type: PropertyDefinition.Type, group_type_index: Optional[int], - ): - return PropertyDefinition.objects.get( - name=field, - team=self.team, - type=field_type, - group_type_index=group_type_index if field_type == PropertyDefinition.Type.GROUP else None, - ).property_type + ) -> str: + try: + return ( + PropertyDefinition.objects.get( + name=field, + team=self.team, + type=field_type, + group_type_index=group_type_index if field_type == PropertyDefinition.Type.GROUP else None, + ).property_type + or "String" + ) + except PropertyDefinition.DoesNotExist: + return "String" # TODO: Move this to posthog/hogql_queries/legacy_compatibility/query_to_filter.py def _query_to_filter(self) -> Dict[str, Any]: diff --git a/posthog/hogql_queries/legacy_compatibility/clean_properties.py b/posthog/hogql_queries/legacy_compatibility/clean_properties.py new file mode 100644 index 0000000000000..27e400d8c99e3 --- /dev/null +++ b/posthog/hogql_queries/legacy_compatibility/clean_properties.py @@ -0,0 +1,133 @@ +def clean_global_properties(properties: dict | list[dict] | None): + if properties is None or len(properties) == 0: + # empty properties + return None + elif is_old_style_properties(properties): + # old style properties + properties = transform_old_style_properties(properties) + properties = { + "type": "AND", + "values": [{"type": "AND", "values": properties}], + } + return clean_property_group_filter(properties) + elif isinstance(properties, list): + # list of property filters + properties = { + "type": "AND", + "values": [{"type": "AND", "values": properties}], + } + return clean_property_group_filter(properties) + elif ( + isinstance(properties, dict) + and properties.get("type") in ["AND", "OR"] + and not any(property.get("type") in ["AND", "OR"] for property in properties["values"]) + ): + # property group filter value + properties = { + "type": "AND", + "values": [properties], + } + return clean_property_group_filter(properties) + else: + # property group filter + return clean_property_group_filter(properties) + + +def clean_entity_properties(properties: list[dict] | dict | None): + if properties is None or len(properties) == 0: + # empty properties + return None + elif is_old_style_properties(properties): + # old style properties + return transform_old_style_properties(properties) + elif isinstance(properties, list): + # list of property filters + return list(map(clean_property, properties)) + elif ( + isinstance(properties, dict) + and properties.get("type") in ["AND", "OR"] + and not any(property.get("type") in ["AND", "OR"] for property in properties["values"]) + ): + # property group filter value + return list(map(clean_property, properties["values"])) + 
else: + raise ValueError("Unexpected format of entity properties.") + + +def clean_property_group_filter(properties: dict): + properties["values"] = clean_property_group_filter_values(properties["values"]) + return properties + + +def clean_property_group_filter_values(properties: list[dict]): + return [clean_property_group_filter_value(property) for property in properties] + + +def clean_property_group_filter_value(property: dict): + if property.get("type") in ["AND", "OR"]: + # property group filter value + property["values"] = clean_property_group_filter_values(property["values"]) + return property + else: + # property filter + return clean_property(property) + + +def clean_property(property: dict): + cleaned_property = {**property} + + # fix type typo + if cleaned_property.get("type") == "events": + cleaned_property["type"] = "event" + + # fix value key typo + if cleaned_property.get("values") is not None and cleaned_property.get("value") is None: + cleaned_property["value"] = cleaned_property.pop("values") + + # convert precalculated and static cohorts to cohorts + if cleaned_property.get("type") in ("precalculated-cohort", "static-cohort"): + cleaned_property["type"] = "cohort" + + # fix invalid property key for cohorts + if cleaned_property.get("type") == "cohort" and cleaned_property.get("key") != "id": + cleaned_property["key"] = "id" + + # set a default operator for properties that support it, but don't have an operator set + if is_property_with_operator(cleaned_property) and cleaned_property.get("operator") is None: + cleaned_property["operator"] = "exact" + + # remove the operator for properties that don't support it, but have it set + if not is_property_with_operator(cleaned_property) and cleaned_property.get("operator") is not None: + del cleaned_property["operator"] + + # remove none from values + if isinstance(cleaned_property.get("value"), list): + cleaned_property["value"] = list(filter(lambda x: x is not None, cleaned_property.get("value"))) # type: ignore + + # remove keys without concrete value + cleaned_property = {key: value for key, value in cleaned_property.items() if value is not None} + + return cleaned_property + + +def is_property_with_operator(property: dict): + return property.get("type") not in ("cohort", "hogql") + + +# old style dict properties e.g. 
{"utm_medium__icontains": "email"} +def is_old_style_properties(properties): + return isinstance(properties, dict) and len(properties) == 1 and properties.get("type") not in ("AND", "OR") + + +def transform_old_style_properties(properties): + key = list(properties.keys())[0] + value = list(properties.values())[0] + key_split = key.split("__") + return [ + { + "key": key_split[0], + "value": value, + "operator": key_split[1] if len(key_split) > 1 else "exact", + "type": "event", + } + ] diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index 2b8f59f88a421..199294a3a5969 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -2,6 +2,7 @@ from enum import Enum import json from typing import List, Dict, Literal +from posthog.hogql_queries.legacy_compatibility.clean_properties import clean_entity_properties, clean_global_properties from posthog.models.entity.entity import Entity as LegacyEntity from posthog.schema import ( ActionsNode, @@ -18,7 +19,6 @@ LifecycleQuery, PathsFilter, PathsQuery, - PropertyGroupFilter, RetentionFilter, RetentionQuery, StickinessFilter, @@ -45,88 +45,6 @@ class MathAvailability(str, Enum): ] -def is_property_with_operator(property: Dict): - return property.get("type") not in ("cohort", "hogql") - - -def clean_property(property: Dict): - cleaned_property = {**property} - - # fix type typo - if cleaned_property.get("type") == "events": - cleaned_property["type"] = "event" - - # fix value key typo - if cleaned_property.get("values") is not None and cleaned_property.get("value") is None: - cleaned_property["value"] = cleaned_property.pop("values") - - # convert precalculated and static cohorts to cohorts - if cleaned_property.get("type") in ("precalculated-cohort", "static-cohort"): - cleaned_property["type"] = "cohort" - - # fix invalid property key for cohorts - if cleaned_property.get("type") == "cohort" and cleaned_property.get("key") != "id": - cleaned_property["key"] = "id" - - # set a default operator for properties that support it, but don't have an operator set - if is_property_with_operator(cleaned_property) and cleaned_property.get("operator") is None: - cleaned_property["operator"] = "exact" - - # remove the operator for properties that don't support it, but have it set - if not is_property_with_operator(cleaned_property) and cleaned_property.get("operator") is not None: - del cleaned_property["operator"] - - # remove none from values - if isinstance(cleaned_property.get("value"), List): - cleaned_property["value"] = list(filter(lambda x: x is not None, cleaned_property.get("value"))) - - # remove keys without concrete value - cleaned_property = {key: value for key, value in cleaned_property.items() if value is not None} - - return cleaned_property - - -# old style dict properties -def is_old_style_properties(properties): - return isinstance(properties, Dict) and len(properties) == 1 and properties.get("type") not in ("AND", "OR") - - -def transform_old_style_properties(properties): - key = list(properties.keys())[0] - value = list(properties.values())[0] - key_split = key.split("__") - return [ - { - "key": key_split[0], - "value": value, - "operator": key_split[1] if len(key_split) > 1 else "exact", - "type": "event", - } - ] - - -def clean_entity_properties(properties: List[Dict] | None): - if properties is None: - return None - elif is_old_style_properties(properties): - 
return transform_old_style_properties(properties) - else: - return list(map(clean_property, properties)) - - -def clean_property_group_filter_value(value: Dict): - if value.get("type") in ("AND", "OR"): - value["values"] = map(clean_property_group_filter_value, value.get("values")) - return value - else: - return clean_property(value) - - -def clean_properties(properties: Dict): - properties["values"] = map(clean_property_group_filter_value, properties.get("values")) - return properties - - def clean_display(display: str): if display not in ChartDisplayType.__members__: return None @@ -299,29 +217,13 @@ def _sampling_factor(filter: Dict): return {"samplingFactor": filter.get("sampling_factor")} -def _filter_test_accounts(filter: Dict): - return {"filterTestAccounts": filter.get("filter_test_accounts")} - - def _properties(filter: Dict): raw_properties = filter.get("properties", None) - if raw_properties is None or len(raw_properties) == 0: - return {} - elif isinstance(raw_properties, list): - raw_properties = { - "type": "AND", - "values": [{"type": "AND", "values": raw_properties}], - } - return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))} - elif is_old_style_properties(raw_properties): - raw_properties = transform_old_style_properties(raw_properties) - raw_properties = { - "type": "AND", - "values": [{"type": "AND", "values": raw_properties}], - } - return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))} - else: - return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))} + return {"properties": clean_global_properties(raw_properties)} + + +def _filter_test_accounts(filter: Dict): + return {"filterTestAccounts": filter.get("filter_test_accounts")} def _breakdown_filter(_filter: Dict): diff --git a/posthog/hogql_queries/legacy_compatibility/test/test_clean_properties.py b/posthog/hogql_queries/legacy_compatibility/test/test_clean_properties.py new file mode 100644 index 0000000000000..e3e5fd91acd7b --- /dev/null +++ b/posthog/hogql_queries/legacy_compatibility/test/test_clean_properties.py @@ -0,0 +1,149 @@ +from posthog.hogql_queries.legacy_compatibility.clean_properties import clean_entity_properties, clean_global_properties +from posthog.test.base import BaseTest + + +class TestCleanGlobalProperties(BaseTest): + def test_handles_empty_properties(self): + properties: dict = {} + + result = clean_global_properties(properties) + + self.assertEqual(result, None) + + def test_handles_old_style_properties(self): + properties = {"utm_medium__icontains": "email"} + + result = clean_global_properties(properties) + + self.assertEqual( + result, + { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [{"key": "utm_medium", "operator": "icontains", "type": "event", "value": "email"}], + } + ], + }, + ) + + def test_handles_property_filter_lists(self): + properties = [{"key": "id", "type": "cohort", "value": 636, "operator": None}] + + result = clean_global_properties(properties) + + self.assertEqual( + result, + { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [{"key": "id", "type": "cohort", "value": 636}], + } + ], + }, + ) + + def test_handles_property_group_filters(self): + properties = { + "type": "AND", + "values": [{"type": "AND", "values": [{"key": "id", "type": "cohort", "value": 850, "operator": None}]}], + } + + result = clean_global_properties(properties) + + self.assertEqual( + result, + { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [{"key": "id", "type": "cohort", 
"value": 850}], + } + ], + }, + ) + + def test_handles_property_group_filters_values(self): + properties = { + "type": "AND", + "values": [{"key": "id", "type": "cohort", "value": 850, "operator": None}], + } + + result = clean_global_properties(properties) + + self.assertEqual( + result, + { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [{"key": "id", "type": "cohort", "value": 850}], + } + ], + }, + ) + + +class TestCleanEntityProperties(BaseTest): + def test_handles_empty_properties(self): + properties: dict = {} + + result = clean_entity_properties(properties) + + self.assertEqual(result, None) + + def test_handles_old_style_properties(self): + properties = {"utm_medium__icontains": "email"} + + result = clean_entity_properties(properties) + + self.assertEqual( + result, + [{"key": "utm_medium", "operator": "icontains", "type": "event", "value": "email"}], + ) + + def test_handles_property_filter_lists(self): + properties = [ + {"key": "$current_url", type: "event", "value": "https://hedgebox.net/signup/", "operator": "exact"}, + ] + + result = clean_entity_properties(properties) + + self.assertEqual( + result, + [ + {"key": "$current_url", type: "event", "value": "https://hedgebox.net/signup/", "operator": "exact"}, + ], + ) + + def test_handles_property_group_values(self): + properties = { + "type": "AND", + "values": [ + { + "key": "$current_url", + "operator": "exact", + "type": "event", + "value": "https://hedgebox.net/signup/", + }, + ], + } + + result = clean_entity_properties(properties) + + self.assertEqual( + result, + [ + { + "key": "$current_url", + "operator": "exact", + "type": "event", + "value": "https://hedgebox.net/signup/", + }, + ], + ) diff --git a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py index 9421ac41be854..9abc0f3506b8c 100644 --- a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py @@ -1167,7 +1167,7 @@ def test_series_properties(self): self.assertEqual( query.series, [ - EventsNode(event="$pageview", name="$pageview", properties=[]), + EventsNode(event="$pageview", name="$pageview", properties=None), EventsNode( event="$pageview", name="$pageview", diff --git a/posthog/hogql_queries/sessions_timeline_query_runner.py b/posthog/hogql_queries/sessions_timeline_query_runner.py index d920ec7cf94fd..cda9433d63efa 100644 --- a/posthog/hogql_queries/sessions_timeline_query_runner.py +++ b/posthog/hogql_queries/sessions_timeline_query_runner.py @@ -135,6 +135,7 @@ def calculate(self) -> SessionsTimelineQueryResponse: query_type="SessionsTimelineQuery", timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) assert query_result.results is not None timeline_entries_map: Dict[str, TimelineEntry] = {} diff --git a/posthog/hogql_queries/utils/query_date_range.py b/posthog/hogql_queries/utils/query_date_range.py index f2e5cef3d82a3..b6386ac85f4ed 100644 --- a/posthog/hogql_queries/utils/query_date_range.py +++ b/posthog/hogql_queries/utils/query_date_range.py @@ -1,9 +1,10 @@ import re from datetime import datetime, timedelta from functools import cached_property -from typing import Literal, Optional, Dict, List +from typing import Literal, Optional, Dict from zoneinfo import ZoneInfo +from dateutil.parser import parse from dateutil.relativedelta import relativedelta from posthog.hogql.errors import HogQLException @@ -116,36 
+117,38 @@ def interval_type(self) -> IntervalType: def interval_name(self) -> Literal["hour", "day", "week", "month"]: return self.interval_type.name - def all_values(self) -> List[str]: - start: datetime = self.date_from() - end: datetime = self.date_to() - interval = self.interval_name - - if interval == "hour": - start = start.replace(minute=0, second=0, microsecond=0) - elif interval == "day": - start = start.replace(hour=0, minute=0, second=0, microsecond=0) - elif interval == "week": + def align_with_interval(self, start: datetime) -> datetime: + if self.interval_name == "hour": + return start.replace(minute=0, second=0, microsecond=0) + elif self.interval_name == "day": + return start.replace(hour=0, minute=0, second=0, microsecond=0) + elif self.interval_name == "week": start = start.replace(hour=0, minute=0, second=0, microsecond=0) week_start_alignment_days = start.isoweekday() % 7 if self._team.week_start_day == WeekStartDay.MONDAY: week_start_alignment_days = start.weekday() start -= timedelta(days=week_start_alignment_days) - elif interval == "month": - start = start.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + return start + elif self.interval_name == "month": + return start.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + + def interval_relativedelta(self) -> relativedelta: + return relativedelta( + days=1 if self.interval_name == "day" else 0, + weeks=1 if self.interval_name == "week" else 0, + months=1 if self.interval_name == "month" else 0, + hours=1 if self.interval_name == "hour" else 0, + ) - values: List[str] = [] + def all_values(self) -> list[datetime]: + start = self.align_with_interval(self.date_from()) + end: datetime = self.date_to() + delta = self.interval_relativedelta() + + values: list[datetime] = [] while start <= end: - if interval == "hour": - values.append(start.strftime("%Y-%m-%d %H:%M:%S")) - else: - values.append(start.strftime("%Y-%m-%d")) - start += relativedelta( - days=1 if interval == "day" else 0, - weeks=1 if interval == "week" else 0, - months=1 if interval == "month" else 0, - hours=1 if interval == "hour" else 0, - ) + values.append(start) + start += delta return values def date_to_as_hogql(self) -> ast.Expr: @@ -257,6 +260,11 @@ def to_placeholders(self) -> Dict[str, ast.Expr]: else self.date_from_as_hogql(), } + def interval_bounds_from_str(self, time_frame: str) -> tuple[datetime, datetime]: + date_from = parse(time_frame, tzinfos={None: self._team.timezone_info}) + date_to = date_from + self.interval_relativedelta() + return date_from, date_to + class QueryDateRangeWithIntervals(QueryDateRange): def __init__( diff --git a/posthog/hogql_queries/utils/test/test_query_date_range.py b/posthog/hogql_queries/utils/test/test_query_date_range.py index fd38ef700e137..f377e06880bbe 100644 --- a/posthog/hogql_queries/utils/test/test_query_date_range.py +++ b/posthog/hogql_queries/utils/test/test_query_date_range.py @@ -61,32 +61,47 @@ def test_all_values(self): QueryDateRange( team=self.team, date_range=DateRange(date_from="-20h"), interval=IntervalType.day, now=now ).all_values(), - ["2021-08-24", "2021-08-25"], + [parser.isoparse("2021-08-24T00:00:00Z"), parser.isoparse("2021-08-25T00:00:00Z")], ) self.assertEqual( QueryDateRange( team=self.team, date_range=DateRange(date_from="-20d"), interval=IntervalType.week, now=now ).all_values(), - ["2021-08-01", "2021-08-08", "2021-08-15", "2021-08-22"], + [ + parser.isoparse("2021-08-01T00:00:00Z"), + parser.isoparse("2021-08-08T00:00:00Z"), + 
parser.isoparse("2021-08-15T00:00:00Z"), + parser.isoparse("2021-08-22T00:00:00Z"), + ], ) self.team.week_start_day = WeekStartDay.MONDAY self.assertEqual( QueryDateRange( team=self.team, date_range=DateRange(date_from="-20d"), interval=IntervalType.week, now=now ).all_values(), - ["2021-08-02", "2021-08-09", "2021-08-16", "2021-08-23"], + [ + parser.isoparse("2021-08-02T00:00:00Z"), + parser.isoparse("2021-08-09T00:00:00Z"), + parser.isoparse("2021-08-16T00:00:00Z"), + parser.isoparse("2021-08-23T00:00:00Z"), + ], ) self.assertEqual( QueryDateRange( team=self.team, date_range=DateRange(date_from="-50d"), interval=IntervalType.month, now=now ).all_values(), - ["2021-07-01", "2021-08-01"], + [parser.isoparse("2021-07-01T00:00:00Z"), parser.isoparse("2021-08-01T00:00:00Z")], ) self.assertEqual( QueryDateRange( team=self.team, date_range=DateRange(date_from="-3h"), interval=IntervalType.hour, now=now ).all_values(), - ["2021-08-24 21:00:00", "2021-08-24 22:00:00", "2021-08-24 23:00:00", "2021-08-25 00:00:00"], + [ + parser.isoparse("2021-08-24T21:00:00Z"), + parser.isoparse("2021-08-24T22:00:00Z"), + parser.isoparse("2021-08-24T23:00:00Z"), + parser.isoparse("2021-08-25T00:00:00Z"), + ], ) diff --git a/posthog/hogql_queries/web_analytics/test/test_web_overview.py b/posthog/hogql_queries/web_analytics/test/test_web_overview.py index 63a26ffea9233..dcafe660fc72d 100644 --- a/posthog/hogql_queries/web_analytics/test/test_web_overview.py +++ b/posthog/hogql_queries/web_analytics/test/test_web_overview.py @@ -1,6 +1,11 @@ +from typing import Optional +from unittest.mock import MagicMock, patch from freezegun import freeze_time from parameterized import parameterized +from posthog.clickhouse.client.execute import sync_execute +from posthog.hogql.constants import LimitContext +from posthog.hogql.query import INCREASED_MAX_EXECUTION_TIME from posthog.hogql_queries.web_analytics.web_overview import WebOverviewQueryRunner from posthog.schema import WebOverviewQuery, DateRange from posthog.test.base import ( @@ -36,14 +41,21 @@ def _create_events(self, data, event="$pageview"): ) return person_result - def _run_web_overview_query(self, date_from, date_to, use_sessions_table=False, compare=True): + def _run_web_overview_query( + self, + date_from: str, + date_to: str, + use_sessions_table: bool = False, + compare: bool = True, + limit_context: Optional[LimitContext] = None, + ): query = WebOverviewQuery( dateRange=DateRange(date_from=date_from, date_to=date_to), properties=[], compare=compare, useSessionsTable=use_sessions_table, ) - runner = WebOverviewQueryRunner(team=self.team, query=query) + runner = WebOverviewQueryRunner(team=self.team, query=query, limit_context=limit_context) return runner.calculate() @parameterized.expand([(True,), (False,)]) @@ -185,3 +197,10 @@ def test_correctly_counts_pageviews_in_long_running_session(self, use_sessions_t sessions = results[2] self.assertEqual(1, sessions.value) + + @patch("posthog.hogql.query.sync_execute", wraps=sync_execute) + def test_limit_is_context_aware(self, mock_sync_execute: MagicMock): + self._run_web_overview_query("2023-12-01", "2023-12-03", limit_context=LimitContext.QUERY_ASYNC) + + mock_sync_execute.assert_called_once() + self.assertIn(f" max_execution_time={INCREASED_MAX_EXECUTION_TIME},", mock_sync_execute.call_args[0][0]) diff --git a/posthog/hogql_queries/web_analytics/top_clicks.py b/posthog/hogql_queries/web_analytics/top_clicks.py index 3218e68975f7a..192d7b279b704 100644 --- a/posthog/hogql_queries/web_analytics/top_clicks.py +++ 
b/posthog/hogql_queries/web_analytics/top_clicks.py @@ -51,6 +51,7 @@ def calculate(self): team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) return WebTopClicksQueryResponse( diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index da4f98edcbf32..12ef703271c51 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -211,6 +211,7 @@ def _get_or_calculate_sample_ratio(self) -> SamplingRate: query=event_count, team=self.team, timings=self.timings, + limit_context=self.limit_context, ) if not response.results or not response.results[0] or not response.results[0][0]: diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py index 38388315c8f0b..2da015a60ac4e 100644 --- a/posthog/hogql_queries/web_analytics/web_overview.py +++ b/posthog/hogql_queries/web_analytics/web_overview.py @@ -285,6 +285,7 @@ def calculate(self): team=self.team, timings=self.timings, modifiers=self.modifiers, + limit_context=self.limit_context, ) assert response.results diff --git a/posthog/management/commands/backfill_sessions_table.py b/posthog/management/commands/backfill_sessions_table.py index 798a501eb5b60..c01f4b6159749 100644 --- a/posthog/management/commands/backfill_sessions_table.py +++ b/posthog/management/commands/backfill_sessions_table.py @@ -16,6 +16,10 @@ TARGET_TABLE = "sessions" +SETTINGS = { + "max_execution_time": 3600 # 1 hour +} + @dataclass class BackfillQuery: @@ -26,6 +30,7 @@ class BackfillQuery: def execute( self, dry_run: bool = True, + print_counts: bool = True, ) -> None: def source_column(column_name: str) -> str: return get_property_string_expr( @@ -108,13 +113,14 @@ def select_query(select_date: Optional[datetime] = None) -> str: """ # print the count of entries in the main sessions table - count_query = f"SELECT count(), uniq(session_id) FROM {TARGET_TABLE}" - [(sessions_row_count, sessions_event_count)] = sync_execute(count_query) - logger.info(f"{sessions_row_count} rows and {sessions_event_count} unique session_ids in sessions table") + if print_counts: + count_query = f"SELECT count(), uniq(session_id) FROM {TARGET_TABLE}" + [(sessions_row_count, sessions_event_count)] = sync_execute(count_query, settings=SETTINGS) + logger.info(f"{sessions_row_count} rows and {sessions_event_count} unique session_ids in sessions table") if dry_run: count_query = f"SELECT count(), uniq(session_id) FROM ({select_query()})" - [(events_count, sessions_count)] = sync_execute(count_query) + [(events_count, sessions_count)] = sync_execute(count_query, settings=SETTINGS) logger.info(f"{events_count} events and {sessions_count} sessions to backfill for") logger.info(f"The first select query would be:\n{select_query(self.start_date)}") return @@ -125,12 +131,14 @@ def select_query(select_date: Optional[datetime] = None) -> str: sync_execute( query=f"""INSERT INTO writable_sessions {select_query(select_date=date)} SETTINGS max_execution_time=3600""", workload=Workload.OFFLINE if self.use_offline_workload else Workload.DEFAULT, + settings=SETTINGS, ) # print the count of entries in the main sessions table - count_query = f"SELECT count(), uniq(session_id) FROM {TARGET_TABLE}" - [(sessions_row_count, sessions_event_count)] = sync_execute(count_query) - logger.info(f"{sessions_row_count} rows and 
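# Pattern note for the web-analytics hunks above: top_clicks,
# web_analytics_query_runner and web_overview now all forward limit_context
# into execute_hogql_query, so a query run with LimitContext.QUERY_ASYNC picks
# up INCREASED_MAX_EXECUTION_TIME -- the property the new
# test_limit_is_context_aware tests assert by checking for
# " max_execution_time=..." in the generated SQL.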
{sessions_event_count} unique session_ids in sessions table") + if print_counts: + count_query = f"SELECT count(), uniq(session_id) FROM {TARGET_TABLE}" + [(sessions_row_count, sessions_event_count)] = sync_execute(count_query, settings=SETTINGS) + logger.info(f"{sessions_row_count} rows and {sessions_event_count} unique session_ids in sessions table") class Command(BaseCommand): @@ -149,11 +157,25 @@ def add_arguments(self, parser): parser.add_argument( "--use-offline-workload", action="store_true", help="actually execute INSERT queries (default is dry-run)" ) + parser.add_argument( + "--print-counts", action="store_true", help="print events and session count beforehand and afterwards" + ) - def handle(self, *, live_run: bool, start_date: str, end_date: str, use_offline_workload: bool, **options): + def handle( + self, + *, + live_run: bool, + start_date: str, + end_date: str, + use_offline_workload: bool, + print_counts: bool, + **options, + ): logger.setLevel(logging.INFO) start_datetime = datetime.strptime(start_date, "%Y-%m-%d") end_datetime = datetime.strptime(end_date, "%Y-%m-%d") - BackfillQuery(start_datetime, end_datetime, use_offline_workload).execute(dry_run=not live_run) + BackfillQuery(start_datetime, end_datetime, use_offline_workload).execute( + dry_run=not live_run, print_counts=print_counts + ) diff --git a/posthog/migrations/0398_alter_externaldatasource_source_type.py b/posthog/migrations/0398_alter_externaldatasource_source_type.py new file mode 100644 index 0000000000000..af95cd44eef98 --- /dev/null +++ b/posthog/migrations/0398_alter_externaldatasource_source_type.py @@ -0,0 +1,25 @@ +# Generated by Django 4.1.13 on 2024-03-21 13:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0397_projects_backfill"), + ] + + operations = [ + migrations.AlterField( + model_name="externaldatasource", + name="source_type", + field=models.CharField( + choices=[ + ("Stripe", "Stripe"), + ("Hubspot", "Hubspot"), + ("Postgres", "Postgres"), + ("Zendesk", "Zendesk"), + ], + max_length=128, + ), + ), + ] diff --git a/posthog/migrations/0399_batchexportrun_records_total_count.py b/posthog/migrations/0399_batchexportrun_records_total_count.py new file mode 100644 index 0000000000000..b9301a92b4110 --- /dev/null +++ b/posthog/migrations/0399_batchexportrun_records_total_count.py @@ -0,0 +1,19 @@ +# Generated by Django 4.1.13 on 2024-03-25 14:13 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0398_alter_externaldatasource_source_type"), + ] + + operations = [ + migrations.AddField( + model_name="batchexportrun", + name="records_total_count", + field=models.IntegerField( + help_text="The total count of records that should be exported in this BatchExportRun.", null=True + ), + ), + ] diff --git a/posthog/models/property/property.py b/posthog/models/property/property.py index 4bd44646ec4a9..d0e0f94439cf5 100644 --- a/posthog/models/property/property.py +++ b/posthog/models/property/property.py @@ -202,7 +202,6 @@ class Property: total_periods: Optional[int] min_periods: Optional[int] negation: Optional[bool] = False - table: Optional[str] _data: Dict def __init__( @@ -225,7 +224,6 @@ def __init__( seq_time_value: Optional[int] = None, seq_time_interval: Optional[OperatorInterval] = None, negation: Optional[bool] = None, - table: Optional[str] = None, **kwargs, ) -> None: self.key = key @@ -243,7 +241,6 @@ def __init__( self.seq_time_value = 
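# Hedged usage sketch for the backfill command above: --use-offline-workload
# and --print-counts are taken from add_arguments, while the exact spellings
# of the live-run and date arguments are assumptions (only their handler
# kwargs are visible here):
#
#   python manage.py backfill_sessions_table --start-date 2024-01-01 \
#       --end-date 2024-02-01 --live-run --use-offline-workload --print-counts
#
# Dates are parsed with strptime("%Y-%m-%d"), dry_run remains the default
# unless live_run is set, and the count/INSERT queries now run with
# settings={"max_execution_time": 3600}.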
seq_time_value self.seq_time_interval = seq_time_interval self.negation = None if negation is None else str_to_bool(negation) - self.table = table if value is None and self.operator in ["is_set", "is_not_set"]: self.value = self.operator diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 66b4a3ed51415..afceba0ffd728 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -357,6 +357,25 @@ def _person_on_events_v2_querying_enabled(self) -> bool: return get_instance_setting("PERSON_ON_EVENTS_V2_ENABLED") + @property + def person_on_events_v3_querying_enabled(self) -> bool: + if settings.PERSON_ON_EVENTS_V3_OVERRIDE is not None: + return settings.PERSON_ON_EVENTS_V3_OVERRIDE + + return posthoganalytics.feature_enabled( + "persons-on-events-v3-reads-enabled", + str(self.uuid), + groups={"organization": str(self.organization_id)}, + group_properties={ + "organization": { + "id": str(self.organization_id), + "created_at": self.organization.created_at, + } + }, + only_evaluate_locally=True, + send_feature_flag_events=False, + ) + @property def strict_caching_enabled(self) -> bool: enabled_teams = get_list(get_instance_setting("STRICT_CACHING_TEAMS")) diff --git a/posthog/schema.py b/posthog/schema.py index dc77da163db17..17ad11fc4f236 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -121,14 +121,14 @@ class ChartAxis(BaseModel): class ChartDisplayType(str, Enum): ActionsLineGraph = "ActionsLineGraph" - ActionsLineGraphCumulative = "ActionsLineGraphCumulative" + ActionsBar = "ActionsBar" ActionsAreaGraph = "ActionsAreaGraph" - ActionsTable = "ActionsTable" + ActionsLineGraphCumulative = "ActionsLineGraphCumulative" + BoldNumber = "BoldNumber" ActionsPie = "ActionsPie" - ActionsBar = "ActionsBar" ActionsBarValue = "ActionsBarValue" + ActionsTable = "ActionsTable" WorldMap = "WorldMap" - BoldNumber = "BoldNumber" class CohortPropertyFilter(BaseModel): @@ -180,6 +180,10 @@ class DateRange(BaseModel): date_to: Optional[str] = None +class DatetimeDay(RootModel[AwareDatetime]): + root: AwareDatetime + + class Day(RootModel[int]): root: int @@ -418,6 +422,7 @@ class PersonsOnEventsMode(str, Enum): v1_enabled = "v1_enabled" v1_mixed = "v1_mixed" v2_enabled = "v2_enabled" + v3_enabled = "v3_enabled" class HogQLQueryModifiers(BaseModel): @@ -457,7 +462,7 @@ class DayItem(BaseModel): extra="forbid", ) label: str - value: Union[str, int] + value: Union[str, AwareDatetime, int] class IntervalItem(BaseModel): @@ -1364,7 +1369,6 @@ class PersonPropertyFilter(BaseModel): key: str label: Optional[str] = None operator: PropertyOperator - table: Optional[str] = None type: Literal["person"] = Field(default="person", description="Person properties") value: Optional[Union[str, float, List[Union[str, float]]]] = None diff --git a/posthog/settings/__init__.py b/posthog/settings/__init__.py index 455b7e8dc34a1..dd72e63bcc92a 100644 --- a/posthog/settings/__init__.py +++ b/posthog/settings/__init__.py @@ -96,6 +96,7 @@ # Only written in specific scripts - do not use outside of them. PERSON_ON_EVENTS_V2_OVERRIDE = get_from_env("PERSON_ON_EVENTS_V2_OVERRIDE", optional=True, type_cast=str_to_bool) +PERSON_ON_EVENTS_V3_OVERRIDE = get_from_env("PERSON_ON_EVENTS_V3_OVERRIDE", optional=True, type_cast=str_to_bool) # Whether to use insight queries converted to HogQL. 
HOGQL_INSIGHTS_OVERRIDE = get_from_env("HOGQL_INSIGHTS_OVERRIDE", optional=True, type_cast=str_to_bool) diff --git a/posthog/settings/feature_flags.py b/posthog/settings/feature_flags.py index 5e1ad234e6de4..371f497376663 100644 --- a/posthog/settings/feature_flags.py +++ b/posthog/settings/feature_flags.py @@ -8,5 +8,4 @@ "simplify-actions", "historical-exports-v2", "ingestion-warnings-enabled", - "role-based-access", ] diff --git a/posthog/tasks/tasks.py b/posthog/tasks/tasks.py index 5eff6afd33fe2..bead27cbd1eec 100644 --- a/posthog/tasks/tasks.py +++ b/posthog/tasks/tasks.py @@ -1,5 +1,5 @@ import time -from typing import Any, Optional +from typing import Optional from uuid import UUID from celery import shared_task @@ -9,6 +9,7 @@ from prometheus_client import Gauge from posthog.cloud_utils import is_cloud +from posthog.hogql.constants import LimitContext from posthog.metrics import pushed_metrics_registry from posthog.ph_client import get_ph_client from posthog.redis import get_client @@ -33,7 +34,12 @@ def redis_heartbeat() -> None: @shared_task(ignore_result=True, queue=CeleryQueue.ANALYTICS_QUERIES.value) def process_query_task( - team_id: str, query_id: str, query_json: Any, limit_context: Any = None, refresh_requested: bool = False + team_id: int, + user_id: int, + query_id: str, + query_json: dict, + limit_context: Optional[LimitContext] = None, + refresh_requested: bool = False, ) -> None: """ Kick off query @@ -43,6 +49,7 @@ def process_query_task( execute_process_query( team_id=team_id, + user_id=user_id, query_id=query_id, query_json=query_json, limit_context=limit_context, diff --git a/posthog/temporal/batch_exports/__init__.py b/posthog/temporal/batch_exports/__init__.py index 8debe181fb82f..33c1b200e6a97 100644 --- a/posthog/temporal/batch_exports/__init__.py +++ b/posthog/temporal/batch_exports/__init__.py @@ -5,9 +5,9 @@ ) from posthog.temporal.batch_exports.batch_exports import ( create_batch_export_backfill_model, - create_export_run, + finish_batch_export_run, + start_batch_export_run, update_batch_export_backfill_model_status, - update_export_run_status, ) from posthog.temporal.batch_exports.bigquery_batch_export import ( BigQueryBatchExportWorkflow, @@ -59,9 +59,10 @@ ACTIVITIES = [ backfill_schedule, create_batch_export_backfill_model, - create_export_run, + start_batch_export_run, create_table, drop_table, + finish_batch_export_run, get_schedule_frequency, insert_into_bigquery_activity, insert_into_http_activity, @@ -73,7 +74,6 @@ optimize_person_distinct_id_overrides, submit_mutation, update_batch_export_backfill_model_status, - update_export_run_status, wait_for_mutation, wait_for_table, ] diff --git a/posthog/temporal/batch_exports/batch_exports.py b/posthog/temporal/batch_exports/batch_exports.py index c776e1f245ef3..0e12fc14635b4 100644 --- a/posthog/temporal/batch_exports/batch_exports.py +++ b/posthog/temporal/batch_exports/batch_exports.py @@ -1,15 +1,10 @@ import collections.abc -import csv import dataclasses import datetime as dt -import gzip -import tempfile import typing import uuid from string import Template -import brotli -import orjson import pyarrow as pa from asgiref.sync import sync_to_async from django.conf import settings @@ -22,13 +17,13 @@ create_batch_export_backfill, create_batch_export_run, update_batch_export_backfill_status, - update_batch_export_run_status, + update_batch_export_run, ) from posthog.temporal.batch_exports.metrics import ( get_export_finished_metric, get_export_started_metric, ) -from posthog.temporal.common.clickhouse 
import ClickHouseClient +from posthog.temporal.common.clickhouse import ClickHouseClient, get_client from posthog.temporal.common.logger import bind_temporal_worker_logger SELECT_QUERY_TEMPLATE = Template( @@ -286,233 +281,75 @@ def get_data_interval(interval: str, data_interval_end: str | None) -> tuple[dt. return (data_interval_start_dt, data_interval_end_dt) -def json_dumps_bytes(d) -> bytes: - return orjson.dumps(d, default=str) - - -class BatchExportTemporaryFile: - """A TemporaryFile used to as an intermediate step while exporting data. - - This class does not implement the file-like interface but rather passes any calls - to the underlying tempfile.NamedTemporaryFile. We do override 'write' methods - to allow tracking bytes and records. - """ - - def __init__( - self, - mode: str = "w+b", - buffering=-1, - compression: str | None = None, - encoding: str | None = None, - newline: str | None = None, - suffix: str | None = None, - prefix: str | None = None, - dir: str | None = None, - *, - errors: str | None = None, - ): - self._file = tempfile.NamedTemporaryFile( - mode=mode, - encoding=encoding, - newline=newline, - buffering=buffering, - suffix=suffix, - prefix=prefix, - dir=dir, - errors=errors, - ) - self.compression = compression - self.bytes_total = 0 - self.records_total = 0 - self.bytes_since_last_reset = 0 - self.records_since_last_reset = 0 - self._brotli_compressor = None - - def __getattr__(self, name): - """Pass get attr to underlying tempfile.NamedTemporaryFile.""" - return self._file.__getattr__(name) - - def __enter__(self): - """Context-manager protocol enter method.""" - self._file.__enter__() - return self - - def __exit__(self, exc, value, tb): - """Context-manager protocol exit method.""" - return self._file.__exit__(exc, value, tb) - - def __iter__(self): - yield from self._file - - @property - def brotli_compressor(self): - if self._brotli_compressor is None: - self._brotli_compressor = brotli.Compressor() - return self._brotli_compressor - - def compress(self, content: bytes | str) -> bytes: - if isinstance(content, str): - encoded = content.encode("utf-8") - else: - encoded = content - - match self.compression: - case "gzip": - return gzip.compress(encoded) - case "brotli": - self.brotli_compressor.process(encoded) - return self.brotli_compressor.flush() - case None: - return encoded - case _: - raise ValueError(f"Unsupported compression: '{self.compression}'") - - def write(self, content: bytes | str): - """Write bytes to underlying file keeping track of how many bytes were written.""" - compressed_content = self.compress(content) - - if "b" in self.mode: - result = self._file.write(compressed_content) - else: - result = self._file.write(compressed_content.decode("utf-8")) - - self.bytes_total += result - self.bytes_since_last_reset += result - - return result - - def write_record_as_bytes(self, record: bytes): - result = self.write(record) - - self.records_total += 1 - self.records_since_last_reset += 1 - - return result - - def write_records_to_jsonl(self, records): - """Write records to a temporary file as JSONL.""" - if len(records) == 1: - jsonl_dump = orjson.dumps(records[0], option=orjson.OPT_APPEND_NEWLINE, default=str) - else: - jsonl_dump = b"\n".join(map(json_dumps_bytes, records)) - - result = self.write(jsonl_dump) - - self.records_total += len(records) - self.records_since_last_reset += len(records) - - return result - - def write_records_to_csv( - self, - records, - fieldnames: None | collections.abc.Sequence[str] = None, - extrasaction: 
typing.Literal["raise", "ignore"] = "ignore", - delimiter: str = ",", - quotechar: str = '"', - escapechar: str | None = "\\", - lineterminator: str = "\n", - quoting=csv.QUOTE_NONE, - ): - """Write records to a temporary file as CSV.""" - if len(records) == 0: - return - - if fieldnames is None: - fieldnames = list(records[0].keys()) - - writer = csv.DictWriter( - self, - fieldnames=fieldnames, - extrasaction=extrasaction, - delimiter=delimiter, - quotechar=quotechar, - escapechar=escapechar, - quoting=quoting, - lineterminator=lineterminator, - ) - writer.writerows(records) - - self.records_total += len(records) - self.records_since_last_reset += len(records) - - def write_records_to_tsv( - self, - records, - fieldnames: None | list[str] = None, - extrasaction: typing.Literal["raise", "ignore"] = "ignore", - quotechar: str = '"', - escapechar: str | None = "\\", - lineterminator: str = "\n", - quoting=csv.QUOTE_NONE, - ): - """Write records to a temporary file as TSV.""" - return self.write_records_to_csv( - records, - fieldnames=fieldnames, - extrasaction=extrasaction, - delimiter="\t", - quotechar=quotechar, - escapechar=escapechar, - quoting=quoting, - lineterminator=lineterminator, - ) - - def rewind(self): - """Rewind the file before reading it.""" - if self.compression == "brotli": - result = self._file.write(self.brotli_compressor.finish()) - - self.bytes_total += result - self.bytes_since_last_reset += result - - self._brotli_compressor = None - - self._file.seek(0) - - def reset(self): - """Reset underlying file by truncating it. - - Also resets the tracker attributes for bytes and records since last reset. - """ - self._file.seek(0) - self._file.truncate() - - self.bytes_since_last_reset = 0 - self.records_since_last_reset = 0 - - @dataclasses.dataclass -class CreateBatchExportRunInputs: - """Inputs to the create_export_run activity. +class StartBatchExportRunInputs: + """Inputs to the 'start_batch_export_run' activity. Attributes: team_id: The id of the team the BatchExportRun belongs to. batch_export_id: The id of the BatchExport this BatchExportRun belongs to. data_interval_start: Start of this BatchExportRun's data interval. data_interval_end: End of this BatchExportRun's data interval. + exclude_events: Optionally, any event names that should be excluded. + include_events: Optionally, the event names that should only be included in the export. """ team_id: int batch_export_id: str data_interval_start: str data_interval_end: str - status: str = BatchExportRun.Status.STARTING + exclude_events: list[str] | None = None + include_events: list[str] | None = None + + +RecordsTotalCount = int +BatchExportRunId = str @activity.defn -async def create_export_run(inputs: CreateBatchExportRunInputs) -> str: - """Activity that creates an BatchExportRun. +async def start_batch_export_run(inputs: StartBatchExportRunInputs) -> tuple[BatchExportRunId, RecordsTotalCount]: + """Activity that creates a BatchExportRun and returns the count of records to export. Intended to be used in all export workflows, usually at the start, to create a model instance to represent them in our database. + + Upon seeing a count of 0 records to export, batch export workflows should finish early + (i.e. without running the insert activity), as there will be nothing to export.
""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id) logger.info( - "Creating batch export for range %s - %s", + "Starting batch export for range %s - %s", inputs.data_interval_start, inputs.data_interval_end, ) + + async with get_client(team_id=inputs.team_id) as client: + if not await client.is_alive(): + raise ConnectionError("Cannot establish connection to ClickHouse") + + count = await get_rows_count( + client=client, + team_id=inputs.team_id, + interval_start=inputs.data_interval_start, + interval_end=inputs.data_interval_end, + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, + ) + + if count > 0: + logger.info( + "Batch export for range %s - %s will export %s rows", + inputs.data_interval_start, + inputs.data_interval_end, + count, + ) + else: + logger.info( + "Batch export for range %s - %s has no rows to export", + inputs.data_interval_start, + inputs.data_interval_end, + ) + # 'sync_to_async' type hints are fixed in asgiref>=3.4.1 # But one of our dependencies is pinned to asgiref==3.3.2. # Remove these comments once we upgrade. @@ -520,33 +357,51 @@ async def create_export_run(inputs: CreateBatchExportRunInputs) -> str: batch_export_id=uuid.UUID(inputs.batch_export_id), data_interval_start=inputs.data_interval_start, data_interval_end=inputs.data_interval_end, - status=inputs.status, + status=BatchExportRun.Status.STARTING, + records_total_count=count, ) - return str(run.id) + return str(run.id), count @dataclasses.dataclass -class UpdateBatchExportRunStatusInputs: - """Inputs to the update_export_run_status activity.""" +class FinishBatchExportRunInputs: + """Inputs to the 'finish_batch_export_run' activity. + + Attributes: + id: The id of the batch export run. This should be a valid UUID string. + team_id: The team id of the batch export. + status: The status this batch export is finishing with. + latest_error: The latest error message captured, if any. + records_completed: Number of records successfully exported. + records_total_count: Total count of records this run noted. + """ id: str - status: str team_id: int + status: str latest_error: str | None = None - records_completed: int = 0 + records_completed: int | None = None + records_total_count: int | None = None @activity.defn -async def update_export_run_status(inputs: UpdateBatchExportRunStatusInputs) -> None: - """Activity that updates the status of an BatchExportRun.""" +async def finish_batch_export_run(inputs: FinishBatchExportRunInputs) -> None: + """Activity that finishes a BatchExportRun. + + Finishing means a final update to the status of the BatchExportRun model. 
+ """ logger = await bind_temporal_worker_logger(team_id=inputs.team_id) - batch_export_run = await sync_to_async(update_batch_export_run_status)( + update_params = { + key: value + for key, value in dataclasses.asdict(inputs).items() + if key not in ("id", "team_id") and value is not None + } + batch_export_run = await sync_to_async(update_batch_export_run)( run_id=uuid.UUID(inputs.id), - status=inputs.status, - latest_error=inputs.latest_error, - records_completed=inputs.records_completed, + finished_at=dt.datetime.now(), + **update_params, ) if batch_export_run.status in (BatchExportRun.Status.FAILED, BatchExportRun.Status.FAILED_RETRYABLE): @@ -629,11 +484,15 @@ async def update_batch_export_backfill_model_status(inputs: UpdateBatchExportBac ) +RecordsCompleted = int +BatchExportActivity = collections.abc.Callable[..., collections.abc.Awaitable[RecordsCompleted]] + + async def execute_batch_export_insert_activity( - activity, + activity: BatchExportActivity, inputs, non_retryable_error_types: list[str], - update_inputs: UpdateBatchExportRunStatusInputs, + finish_inputs: FinishBatchExportRunInputs, start_to_close_timeout_seconds: int = 3600, heartbeat_timeout_seconds: int | None = 120, maximum_attempts: int = 10, @@ -650,7 +509,7 @@ async def execute_batch_export_insert_activity( activity: The 'insert_into_*' activity function to execute. inputs: The inputs to the activity. non_retryable_error_types: A list of errors to not retry on when executing the activity. - update_inputs: Inputs to the update_export_run_status to run at the end. + finish_inputs: Inputs to the 'finish_batch_export_run' to run at the end. start_to_close_timeout: A timeout for the 'insert_into_*' activity function. maximum_attempts: Maximum number of retries for the 'insert_into_*' activity function. Assuming the error that triggered the retry is not in non_retryable_error_types. 
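A minimal sketch of the partial-update pattern used by `finish_batch_export_run` above, where optional fields left as `None` are excluded so the database columns stay unchanged. `FinishInputs` and `build_update_params` are hypothetical names for illustration only, not part of this diff:

```python
import dataclasses
from typing import Optional


# Hypothetical stand-in for FinishBatchExportRunInputs, trimmed for illustration.
@dataclasses.dataclass
class FinishInputs:
    id: str
    team_id: int
    status: str
    latest_error: Optional[str] = None
    records_completed: Optional[int] = None


def build_update_params(inputs: FinishInputs) -> dict:
    # Skip identifier fields and anything the caller did not provide:
    # None means "leave this column unchanged".
    return {
        key: value
        for key, value in dataclasses.asdict(inputs).items()
        if key not in ("id", "team_id") and value is not None
    }


inputs = FinishInputs(id="run-uuid", team_id=1, status="Completed", records_completed=100)
print(build_update_params(inputs))  # {'status': 'Completed', 'records_completed': 100}
```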
@@ -673,30 +532,30 @@ async def execute_batch_export_insert_activity( heartbeat_timeout=dt.timedelta(seconds=heartbeat_timeout_seconds) if heartbeat_timeout_seconds else None, retry_policy=retry_policy, ) - update_inputs.records_completed = records_completed + finish_inputs.records_completed = records_completed except exceptions.ActivityError as e: if isinstance(e.cause, exceptions.CancelledError): - update_inputs.status = BatchExportRun.Status.CANCELLED + finish_inputs.status = BatchExportRun.Status.CANCELLED elif isinstance(e.cause, exceptions.ApplicationError) and e.cause.type not in non_retryable_error_types: - update_inputs.status = BatchExportRun.Status.FAILED_RETRYABLE + finish_inputs.status = BatchExportRun.Status.FAILED_RETRYABLE else: - update_inputs.status = BatchExportRun.Status.FAILED + finish_inputs.status = BatchExportRun.Status.FAILED - update_inputs.latest_error = str(e.cause) + finish_inputs.latest_error = str(e.cause) raise except Exception: - update_inputs.status = BatchExportRun.Status.FAILED - update_inputs.latest_error = "An unexpected error has ocurred" + finish_inputs.status = BatchExportRun.Status.FAILED + finish_inputs.latest_error = "An unexpected error has occurred" raise finally: - get_export_finished_metric(status=update_inputs.status.lower()).add(1) + get_export_finished_metric(status=finish_inputs.status.lower()).add(1) await workflow.execute_activity( - update_export_run_status, - update_inputs, + finish_batch_export_run, + finish_inputs, start_to_close_timeout=dt.timedelta(minutes=5), retry_policy=RetryPolicy( initial_interval=dt.timedelta(seconds=10), diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py index a0469de79bb9e..f9ddd29bd528f 100644 --- a/posthog/temporal/batch_exports/bigquery_batch_export.py +++ b/posthog/temporal/batch_exports/bigquery_batch_export.py @@ -12,23 +12,30 @@ from temporalio.common import RetryPolicy from posthog.batch_exports.models import BatchExportRun -from posthog.batch_exports.service import BatchExportField, BatchExportSchema, BigQueryBatchExportInputs +from posthog.batch_exports.service import ( + BatchExportField, + BatchExportSchema, + BigQueryBatchExportInputs, +) from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.batch_exports.batch_exports import ( - BatchExportTemporaryFile, - CreateBatchExportRunInputs, - UpdateBatchExportRunStatusInputs, - create_export_run, + FinishBatchExportRunInputs, + RecordsCompleted, + StartBatchExportRunInputs, default_fields, execute_batch_export_insert_activity, + finish_batch_export_run, get_data_interval, - get_rows_count, iter_records, + start_batch_export_run, ) from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, ) +from posthog.temporal.batch_exports.temporary_file import ( + BatchExportTemporaryFile, +) from posthog.temporal.batch_exports.utils import peek_first_and_rewind from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.logger import bind_temporal_worker_logger @@ -144,6 +151,7 @@ class BigQueryInsertInputs: include_events: list[str] | None = None use_json_type: bool = False batch_export_schema: BatchExportSchema | None = None + run_id: str | None = None @contextlib.contextmanager @@ -193,13 +201,16 @@ def bigquery_default_fields() -> list[BatchExportField]: @activity.defn -async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> int: +async def
insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to BigQuery.""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="BigQuery") logger.info( - "Exporting batch %s - %s", + "Batch exporting range %s - %s to BigQuery: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, + inputs.project_id, + inputs.dataset_id, + inputs.table_id, ) should_resume, details = await should_resume_from_activity_heartbeat(activity, BigQueryHeartbeatDetails, logger) @@ -215,25 +226,6 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> int: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") - count = await get_rows_count( - client=client, - team_id=inputs.team_id, - interval_start=data_interval_start, - interval_end=inputs.data_interval_end, - exclude_events=inputs.exclude_events, - include_events=inputs.include_events, - ) - - if count == 0: - logger.info( - "Nothing to export in batch %s - %s", - inputs.data_interval_start, - inputs.data_interval_end, - ) - return 0 - - logger.info("BatchExporting %s rows", count) - if inputs.batch_export_schema is None: fields = bigquery_default_fields() query_parameters = None @@ -378,15 +370,17 @@ async def run(self, inputs: BigQueryBatchExportInputs): """Workflow implementation to export data to BigQuery.""" data_interval_start, data_interval_end = get_data_interval(inputs.interval, inputs.data_interval_end) - create_export_run_inputs = CreateBatchExportRunInputs( + start_batch_export_run_inputs = StartBatchExportRunInputs( team_id=inputs.team_id, batch_export_id=inputs.batch_export_id, data_interval_start=data_interval_start.isoformat(), data_interval_end=data_interval_end.isoformat(), + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, ) - run_id = await workflow.execute_activity( - create_export_run, - create_export_run_inputs, + run_id, records_total_count = await workflow.execute_activity( + start_batch_export_run, + start_batch_export_run_inputs, start_to_close_timeout=dt.timedelta(minutes=5), retry_policy=RetryPolicy( initial_interval=dt.timedelta(seconds=10), @@ -396,10 +390,30 @@ async def run(self, inputs: BigQueryBatchExportInputs): ), ) - update_inputs = UpdateBatchExportRunStatusInputs( - id=run_id, status=BatchExportRun.Status.COMPLETED, team_id=inputs.team_id - ) + finish_inputs = FinishBatchExportRunInputs( + id=run_id, + status=BatchExportRun.Status.COMPLETED, + team_id=inputs.team_id, + ) + + if records_total_count == 0: + await workflow.execute_activity( + finish_batch_export_run, + finish_inputs, + start_to_close_timeout=dt.timedelta(minutes=5), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=0, + non_retryable_error_types=["NotNullViolation", "IntegrityError"], + ), + ) + return + insert_inputs = BigQueryInsertInputs( team_id=inputs.team_id, table_id=inputs.table_id, @@ -415,6 +429,7 @@ async def run(self, inputs: BigQueryBatchExportInputs): include_events=inputs.include_events, use_json_type=inputs.use_json_type, batch_export_schema=inputs.batch_export_schema, + run_id=run_id, ) await execute_batch_export_insert_activity( @@ -428,5 +443,5 @@ async def run(self, inputs: BigQueryBatchExportInputs): # Usually means the dataset or project doesn't exist.
"NotFound", ], - update_inputs=update_inputs, + finish_inputs=finish_inputs, ) diff --git a/posthog/temporal/batch_exports/http_batch_export.py b/posthog/temporal/batch_exports/http_batch_export.py index 8aca65c80ff38..993806c004c5e 100644 --- a/posthog/temporal/batch_exports/http_batch_export.py +++ b/posthog/temporal/batch_exports/http_batch_export.py @@ -9,24 +9,31 @@ from temporalio import activity, workflow from temporalio.common import RetryPolicy -from posthog.batch_exports.service import BatchExportField, BatchExportSchema, HttpBatchExportInputs +from posthog.batch_exports.service import ( + BatchExportField, + BatchExportSchema, + HttpBatchExportInputs, +) from posthog.models import BatchExportRun from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.batch_exports.batch_exports import ( - BatchExportTemporaryFile, - CreateBatchExportRunInputs, - UpdateBatchExportRunStatusInputs, - create_export_run, + FinishBatchExportRunInputs, + RecordsCompleted, + StartBatchExportRunInputs, execute_batch_export_insert_activity, + finish_batch_export_run, get_data_interval, - get_rows_count, iter_records, - json_dumps_bytes, + start_batch_export_run, ) from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, ) +from posthog.temporal.batch_exports.temporary_file import ( + BatchExportTemporaryFile, + json_dumps_bytes, +) from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.logger import bind_temporal_worker_logger @@ -97,6 +104,7 @@ class HttpInsertInputs: data_interval_end: str exclude_events: list[str] | None = None include_events: list[str] | None = None + run_id: str | None = None batch_export_schema: BatchExportSchema | None = None @@ -152,38 +160,20 @@ async def post_json_file_to_url(url, batch_file, session: aiohttp.ClientSession) @activity.defn -async def insert_into_http_activity(inputs: HttpInsertInputs) -> int: +async def insert_into_http_activity(inputs: HttpInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to an HTTP Endpoint.""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="HTTP") logger.info( - "Exporting batch %s - %s", + "Batch exporting range %s - %s to HTTP endpoint: %s", inputs.data_interval_start, inputs.data_interval_end, + inputs.url, ) async with get_client(team_id=inputs.team_id) as client: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") - count = await get_rows_count( - client=client, - team_id=inputs.team_id, - interval_start=inputs.data_interval_start, - interval_end=inputs.data_interval_end, - exclude_events=inputs.exclude_events, - include_events=inputs.include_events, - ) - - if count == 0: - logger.info( - "Nothing to export in batch %s - %s", - inputs.data_interval_start, - inputs.data_interval_end, - ) - return 0 - - logger.info("BatchExporting %s rows", count) - if inputs.batch_export_schema is not None: raise NotImplementedError("Batch export schema is not supported for HTTP export") @@ -327,15 +317,17 @@ async def run(self, inputs: HttpBatchExportInputs): """Workflow implementation to export data to an HTTP Endpoint.""" data_interval_start, data_interval_end = get_data_interval(inputs.interval, inputs.data_interval_end) - create_export_run_inputs = CreateBatchExportRunInputs( + start_batch_export_run_inputs = StartBatchExportRunInputs( team_id=inputs.team_id, batch_export_id=inputs.batch_export_id, 
data_interval_start=data_interval_start.isoformat(), data_interval_end=data_interval_end.isoformat(), + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, ) - run_id = await workflow.execute_activity( - create_export_run, - create_export_run_inputs, + run_id, records_total_count = await workflow.execute_activity( + start_batch_export_run, + start_batch_export_run_inputs, start_to_close_timeout=dt.timedelta(minutes=5), retry_policy=RetryPolicy( initial_interval=dt.timedelta(seconds=10), @@ -345,12 +337,26 @@ async def run(self, inputs: HttpBatchExportInputs): ), ) - update_inputs = UpdateBatchExportRunStatusInputs( + finish_inputs = FinishBatchExportRunInputs( id=run_id, status=BatchExportRun.Status.COMPLETED, team_id=inputs.team_id, ) + if records_total_count == 0: + await workflow.execute_activity( + finish_batch_export_run, + finish_inputs, + start_to_close_timeout=dt.timedelta(minutes=5), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=0, + non_retryable_error_types=["NotNullViolation", "IntegrityError"], + ), + ) + return + insert_inputs = HttpInsertInputs( team_id=inputs.team_id, url=inputs.url, @@ -360,6 +366,7 @@ async def run(self, inputs: HttpBatchExportInputs): exclude_events=inputs.exclude_events, include_events=inputs.include_events, batch_export_schema=inputs.batch_export_schema, + run_id=run_id, ) await execute_batch_export_insert_activity( @@ -368,7 +375,7 @@ async def run(self, inputs: HttpBatchExportInputs): non_retryable_error_types=[ "NonRetryableResponseError", ], - update_inputs=update_inputs, + finish_inputs=finish_inputs, # Disable heartbeat timeout until we add heartbeat support. heartbeat_timeout_seconds=None, ) diff --git a/posthog/temporal/batch_exports/postgres_batch_export.py b/posthog/temporal/batch_exports/postgres_batch_export.py index 5dbfc6faa4acf..54b3f316393c2 100644 --- a/posthog/temporal/batch_exports/postgres_batch_export.py +++ b/posthog/temporal/batch_exports/postgres_batch_export.py @@ -14,24 +14,31 @@ from temporalio.common import RetryPolicy from posthog.batch_exports.models import BatchExportRun -from posthog.batch_exports.service import BatchExportField, BatchExportSchema, PostgresBatchExportInputs +from posthog.batch_exports.service import ( + BatchExportField, + BatchExportSchema, + PostgresBatchExportInputs, +) from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.batch_exports.batch_exports import ( - BatchExportTemporaryFile, - CreateBatchExportRunInputs, - UpdateBatchExportRunStatusInputs, - create_export_run, + FinishBatchExportRunInputs, + RecordsCompleted, + StartBatchExportRunInputs, default_fields, execute_batch_export_insert_activity, + finish_batch_export_run, get_data_interval, - get_rows_count, iter_records, + start_batch_export_run, ) from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, ) -from posthog.temporal.batch_exports.utils import peek_first_and_rewind +from posthog.temporal.batch_exports.temporary_file import ( + BatchExportTemporaryFile, +) +from posthog.temporal.batch_exports.utils import peek_first_and_rewind, try_set_batch_export_run_to_running from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.logger import bind_temporal_worker_logger @@ -231,41 +238,28 @@ class PostgresInsertInputs: exclude_events: list[str] | None = None include_events: list[str] | None = None batch_export_schema: 
BatchExportSchema | None = None + run_id: str | None = None @activity.defn -async def insert_into_postgres_activity(inputs: PostgresInsertInputs) -> int: +async def insert_into_postgres_activity(inputs: PostgresInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to Postgres.""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="PostgreSQL") logger.info( - "Exporting batch %s - %s", + "Batch exporting range %s - %s to PostgreSQL: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, + inputs.database, + inputs.schema, + inputs.table_name, ) + await try_set_batch_export_run_to_running(run_id=inputs.run_id, logger=logger) + async with get_client(team_id=inputs.team_id) as client: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") - count = await get_rows_count( - client=client, - team_id=inputs.team_id, - interval_start=inputs.data_interval_start, - interval_end=inputs.data_interval_end, - exclude_events=inputs.exclude_events, - include_events=inputs.include_events, - ) - - if count == 0: - logger.info( - "Nothing to export in batch %s - %s", - inputs.data_interval_start, - inputs.data_interval_end, - ) - return 0 - - logger.info("BatchExporting %s rows", count) - if inputs.batch_export_schema is None: fields = postgres_default_fields() query_parameters = None @@ -383,15 +377,17 @@ async def run(self, inputs: PostgresBatchExportInputs): """Workflow implementation to export data to Postgres.""" data_interval_start, data_interval_end = get_data_interval(inputs.interval, inputs.data_interval_end) - create_export_run_inputs = CreateBatchExportRunInputs( + start_batch_export_run_inputs = StartBatchExportRunInputs( team_id=inputs.team_id, batch_export_id=inputs.batch_export_id, data_interval_start=data_interval_start.isoformat(), data_interval_end=data_interval_end.isoformat(), + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, ) - run_id = await workflow.execute_activity( - create_export_run, - create_export_run_inputs, + run_id, records_total_count = await workflow.execute_activity( + start_batch_export_run, + start_batch_export_run_inputs, start_to_close_timeout=dt.timedelta(minutes=5), retry_policy=RetryPolicy( initial_interval=dt.timedelta(seconds=10), @@ -401,12 +397,26 @@ async def run(self, inputs: PostgresBatchExportInputs): ), ) - update_inputs = UpdateBatchExportRunStatusInputs( + finish_inputs = FinishBatchExportRunInputs( id=run_id, status=BatchExportRun.Status.COMPLETED, team_id=inputs.team_id, ) + if records_total_count == 0: + await workflow.execute_activity( + finish_batch_export_run, + finish_inputs, + start_to_close_timeout=dt.timedelta(minutes=5), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=0, + non_retryable_error_types=["NotNullViolation", "IntegrityError"], + ), + ) + return + insert_inputs = PostgresInsertInputs( team_id=inputs.team_id, user=inputs.user, @@ -422,6 +432,7 @@ async def run(self, inputs: PostgresBatchExportInputs): exclude_events=inputs.exclude_events, include_events=inputs.include_events, batch_export_schema=inputs.batch_export_schema, + run_id=run_id, ) await execute_batch_export_insert_activity( @@ -436,7 +447,7 @@ async def run(self, inputs: PostgresBatchExportInputs): # Missing permissions to, e.g., insert into table. 
"InsufficientPrivilege", ], - update_inputs=update_inputs, + finish_inputs=finish_inputs, # Disable heartbeat timeout until we add heartbeat support. heartbeat_timeout_seconds=None, ) diff --git a/posthog/temporal/batch_exports/redshift_batch_export.py b/posthog/temporal/batch_exports/redshift_batch_export.py index bc1549cef838f..a71f292fcf30a 100644 --- a/posthog/temporal/batch_exports/redshift_batch_export.py +++ b/posthog/temporal/batch_exports/redshift_batch_export.py @@ -16,14 +16,15 @@ from posthog.batch_exports.service import BatchExportField, RedshiftBatchExportInputs from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.batch_exports.batch_exports import ( - CreateBatchExportRunInputs, - UpdateBatchExportRunStatusInputs, - create_export_run, + FinishBatchExportRunInputs, + RecordsCompleted, + StartBatchExportRunInputs, default_fields, execute_batch_export_insert_activity, + finish_batch_export_run, get_data_interval, - get_rows_count, iter_records, + start_batch_export_run, ) from posthog.temporal.batch_exports.metrics import get_rows_exported_metric from posthog.temporal.batch_exports.postgres_batch_export import ( @@ -271,7 +272,7 @@ class RedshiftInsertInputs(PostgresInsertInputs): @activity.defn -async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> int: +async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> RecordsCompleted: """Activity to insert data from ClickHouse to Redshift. This activity executes the following steps: @@ -289,34 +290,18 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs) -> int: """ logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="Redshift") logger.info( - "Exporting batch %s - %s", + "Batch exporting range %s - %s to Redshift: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, + inputs.database, + inputs.schema, + inputs.table_name, ) async with get_client(team_id=inputs.team_id) as client: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") - count = await get_rows_count( - client=client, - team_id=inputs.team_id, - interval_start=inputs.data_interval_start, - interval_end=inputs.data_interval_end, - exclude_events=inputs.exclude_events, - include_events=inputs.include_events, - ) - - if count == 0: - logger.info( - "Nothing to export in batch %s - %s", - inputs.data_interval_start, - inputs.data_interval_end, - ) - return 0 - - logger.info("BatchExporting %s rows", count) - if inputs.batch_export_schema is None: fields = redshift_default_fields() query_parameters = None @@ -421,15 +406,17 @@ async def run(self, inputs: RedshiftBatchExportInputs): """Workflow implementation to export data to Redshift.""" data_interval_start, data_interval_end = get_data_interval(inputs.interval, inputs.data_interval_end) - create_export_run_inputs = CreateBatchExportRunInputs( + start_batch_export_run_inputs = StartBatchExportRunInputs( team_id=inputs.team_id, batch_export_id=inputs.batch_export_id, data_interval_start=data_interval_start.isoformat(), data_interval_end=data_interval_end.isoformat(), + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, ) - run_id = await workflow.execute_activity( - create_export_run, - create_export_run_inputs, + run_id, records_total_count = await workflow.execute_activity( + start_batch_export_run, + start_batch_export_run_inputs, start_to_close_timeout=dt.timedelta(minutes=5), retry_policy=RetryPolicy( 
initial_interval=dt.timedelta(seconds=10), @@ -439,12 +426,26 @@ async def run(self, inputs: RedshiftBatchExportInputs): ), ) - update_inputs = UpdateBatchExportRunStatusInputs( + finish_inputs = FinishBatchExportRunInputs( id=run_id, status=BatchExportRun.Status.COMPLETED, team_id=inputs.team_id, ) + if records_total_count == 0: + await workflow.execute_activity( + finish_batch_export_run, + finish_inputs, + start_to_close_timeout=dt.timedelta(minutes=5), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=0, + non_retryable_error_types=["NotNullViolation", "IntegrityError"], + ), + ) + return + insert_inputs = RedshiftInsertInputs( team_id=inputs.team_id, user=inputs.user, @@ -461,6 +462,7 @@ async def run(self, inputs: RedshiftBatchExportInputs): include_events=inputs.include_events, properties_data_type=inputs.properties_data_type, batch_export_schema=inputs.batch_export_schema, + run_id=run_id, ) await execute_batch_export_insert_activity( @@ -475,7 +477,7 @@ async def run(self, inputs: RedshiftBatchExportInputs): # Missing permissions to, e.g., insert into table. "InsufficientPrivilege", ], - update_inputs=update_inputs, + finish_inputs=finish_inputs, # Disable heartbeat timeout until we add heartbeat support. heartbeat_timeout_seconds=None, ) diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index 4d99cbeffd7c3..a6420e95cb8b1 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -1,4 +1,5 @@ import asyncio +import collections.abc import contextlib import datetime as dt import io @@ -8,28 +9,43 @@ from dataclasses import dataclass import aioboto3 +import orjson +import pyarrow as pa from django.conf import settings from temporalio import activity, workflow from temporalio.common import RetryPolicy from posthog.batch_exports.models import BatchExportRun -from posthog.batch_exports.service import BatchExportField, BatchExportSchema, S3BatchExportInputs +from posthog.batch_exports.service import ( + BatchExportField, + BatchExportSchema, + S3BatchExportInputs, +) from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.batch_exports.batch_exports import ( - BatchExportTemporaryFile, - CreateBatchExportRunInputs, - UpdateBatchExportRunStatusInputs, - create_export_run, + FinishBatchExportRunInputs, + RecordsCompleted, + StartBatchExportRunInputs, default_fields, execute_batch_export_insert_activity, + finish_batch_export_run, get_data_interval, - get_rows_count, iter_records, + start_batch_export_run, ) from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, ) +from posthog.temporal.batch_exports.temporary_file import ( + BatchExportTemporaryFile, + BatchExportWriter, + FlushCallable, + JSONLBatchExportWriter, + ParquetBatchExportWriter, + UnsupportedFileFormatError, +) +from posthog.temporal.batch_exports.utils import peek_first_and_rewind, try_set_batch_export_run_to_running from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.logger import bind_temporal_worker_logger @@ -50,19 +66,31 @@ def get_allowed_template_variables(inputs) -> dict[str, str]: } +FILE_FORMAT_EXTENSIONS = { + "Parquet": "parquet", + "JSONLines": "jsonl", +} + +COMPRESSION_EXTENSIONS = { + "gzip": "gz", + "snappy": "sz", + "brotli": "br", + "zstd": "zst", + "lz4": "lz4", +} + + def
get_s3_key(inputs) -> str: """Return an S3 key given S3InsertInputs.""" template_variables = get_allowed_template_variables(inputs) key_prefix = inputs.prefix.format(**template_variables) + file_extension = FILE_FORMAT_EXTENSIONS[inputs.file_format] base_file_name = f"{inputs.data_interval_start}-{inputs.data_interval_end}" - match inputs.compression: - case "gzip": - file_name = base_file_name + ".jsonl.gz" - case "brotli": - file_name = base_file_name + ".jsonl.br" - case _: - file_name = base_file_name + ".jsonl" + if inputs.compression is not None: + file_name = base_file_name + f".{file_extension}.{COMPRESSION_EXTENSIONS[inputs.compression]}" + else: + file_name = base_file_name + f".{file_extension}" key = posixpath.join(key_prefix, file_name) @@ -311,6 +339,9 @@ class S3InsertInputs: kms_key_id: str | None = None batch_export_schema: BatchExportSchema | None = None endpoint_url: str | None = None + # TODO: In Python 3.11, this could be an enum.StrEnum. + file_format: str = "JSONLines" + run_id: str | None = None async def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3MultiPartUpload, str]: @@ -388,7 +419,7 @@ def s3_default_fields() -> list[BatchExportField]: @activity.defn -async def insert_into_s3_activity(inputs: S3InsertInputs) -> int: +async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted: """Activity to batch export data from PostHog's ClickHouse to S3. It currently only creates a single file per run, and uploads as a multipart upload. @@ -400,34 +431,18 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> int: """ logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="S3") logger.info( - "Exporting batch %s - %s", + "Batch exporting range %s - %s to S3: %s", inputs.data_interval_start, inputs.data_interval_end, + get_s3_key(inputs), ) + await try_set_batch_export_run_to_running(run_id=inputs.run_id, logger=logger) + async with get_client(team_id=inputs.team_id) as client: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") - count = await get_rows_count( - client=client, - team_id=inputs.team_id, - interval_start=inputs.data_interval_start, - interval_end=inputs.data_interval_end, - exclude_events=inputs.exclude_events, - include_events=inputs.include_events, - ) - - if count == 0: - logger.info( - "Nothing to export in batch %s - %s", - inputs.data_interval_start, - inputs.data_interval_end, - ) - return 0 - - logger.info("BatchExporting %s rows to S3", count) - s3_upload, interval_start = await initialize_and_resume_multipart_upload(inputs) if inputs.batch_export_schema is None: @@ -451,7 +466,7 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> int: last_uploaded_part_timestamp: str | None = None - async def worker_shutdown_handler(): + async def worker_shutdown_handler() -> None: """Handle the Worker shutting down by heart-beating our latest status.""" await activity.wait_for_worker_shutdown() logger.warn( @@ -466,50 +481,147 @@ async def worker_shutdown_handler(): asyncio.create_task(worker_shutdown_handler()) - record = None - async with s3_upload as s3_upload: - with BatchExportTemporaryFile(compression=inputs.compression) as local_results_file: + + async def flush_to_s3( + local_results_file, + records_since_last_flush: int, + bytes_since_last_flush: int, + last_inserted_at: dt.datetime, + last: bool, + ): + nonlocal last_uploaded_part_timestamp + + logger.debug( + "Uploading %s part %s containing %s records with size %s
bytes", + "last " if last else "", + s3_upload.part_number + 1, + records_since_last_flush, + bytes_since_last_flush, + ) + + await s3_upload.upload_part(local_results_file) + rows_exported.add(records_since_last_flush) + bytes_exported.add(bytes_since_last_flush) + + last_uploaded_part_timestamp = str(last_inserted_at) + activity.heartbeat(last_uploaded_part_timestamp, s3_upload.to_state()) + + first_record_batch, record_iterator = peek_first_and_rewind(record_iterator) + first_record_batch = cast_record_batch_json_columns(first_record_batch) + column_names = first_record_batch.column_names + column_names.pop(column_names.index("_inserted_at")) + + schema = pa.schema( + # NOTE: For some reason, some record batches mark fields as non-nullable, whereas other + # record batches have them as nullable. + # Until we figure it out, we set all fields to nullable. There are some fields we know + # are not nullable, but I'm opting for the more flexible option until we figure out why schemas differ + # between batches. + [field.with_nullable(True) for field in first_record_batch.select(column_names).schema] + ) + + writer = get_batch_export_writer( + inputs, + flush_callable=flush_to_s3, + max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES, + schema=schema, + ) + + async with writer.open_temporary_file(): rows_exported = get_rows_exported_metric() bytes_exported = get_bytes_exported_metric() - async def flush_to_s3(last_uploaded_part_timestamp: str, last=False): - logger.debug( - "Uploading %s part %s containing %s records with size %s bytes", - "last " if last else "", - s3_upload.part_number + 1, - local_results_file.records_since_last_reset, - local_results_file.bytes_since_last_reset, - ) + for record_batch in record_iterator: + record_batch = cast_record_batch_json_columns(record_batch) + + await writer.write_record_batch(record_batch) - await s3_upload.upload_part(local_results_file) - rows_exported.add(local_results_file.records_since_last_reset) - bytes_exported.add(local_results_file.bytes_since_last_reset) + await s3_upload.complete() - activity.heartbeat(last_uploaded_part_timestamp, s3_upload.to_state()) + return writer.records_total - for record_batch in record_iterator: - for record in record_batch.to_pylist(): - for json_column in ("properties", "person_properties", "set", "set_once"): - if (json_str := record.get(json_column, None)) is not None: - record[json_column] = json.loads(json_str) - inserted_at = record.pop("_inserted_at") +def get_batch_export_writer( + inputs: S3InsertInputs, flush_callable: FlushCallable, max_bytes: int, schema: pa.Schema | None = None +) -> BatchExportWriter: + """Return the `BatchExportWriter` corresponding to configured `file_format`. + + Raises: + UnsupportedFileFormatError: If no writer exists for given `file_format`.
+ """ + writer: BatchExportWriter + + if inputs.file_format == "Parquet": + writer = ParquetBatchExportWriter( + max_bytes=max_bytes, + flush_callable=flush_callable, + compression=inputs.compression, + schema=schema, + ) + elif inputs.file_format == "JSONLines": + writer = JSONLBatchExportWriter( + max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES, + flush_callable=flush_callable, + compression=inputs.compression, + ) + else: + raise UnsupportedFileFormatError(inputs.file_format, "S3") + + return writer - local_results_file.write_records_to_jsonl([record]) - if local_results_file.tell() > settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES: - last_uploaded_part_timestamp = str(inserted_at) - await flush_to_s3(last_uploaded_part_timestamp) - local_results_file.reset() +def cast_record_batch_json_columns( + record_batch: pa.RecordBatch, + json_columns: collections.abc.Sequence = ("properties", "person_properties", "set", "set_once"), +) -> pa.RecordBatch: + """Cast json_columns in record_batch to JsonType. - if local_results_file.tell() > 0 and record is not None: - last_uploaded_part_timestamp = str(inserted_at) - await flush_to_s3(last_uploaded_part_timestamp, last=True) + We return a new RecordBatch with any json_columns replaced by fields casted to JsonType. + Casting is not copying the underlying array buffers, so memory usage does not increase when creating + the new array or the new record batch. + """ + column_names = set(record_batch.column_names) + intersection = column_names & set(json_columns) + + casted_arrays = [] + for array in record_batch.select(intersection): + if pa.types.is_string(array.type): + casted_array = array.cast(JsonType()) + casted_arrays.append(casted_array) + + remaining_column_names = list(column_names - intersection) + return pa.RecordBatch.from_arrays( + record_batch.select(remaining_column_names).columns + casted_arrays, + names=remaining_column_names + list(intersection), + ) - await s3_upload.complete() - return local_results_file.records_total +class JsonScalar(pa.ExtensionScalar): + """Represents a JSON binary string.""" + + def as_py(self) -> dict | None: + if self.value: + return orjson.loads(self.value.as_py().encode("utf-8")) + else: + return None + + +class JsonType(pa.ExtensionType): + """Type for JSON binary strings.""" + + def __init__(self): + super().__init__(pa.string(), "json") + + def __arrow_ext_serialize__(self): + return b"" + + @classmethod + def __arrow_ext_deserialize__(self, storage_type, serialized): + return JsonType() + + def __arrow_ext_scalar_class__(self): + return JsonScalar @workflow.defn(name="s3-export") @@ -532,15 +644,17 @@ async def run(self, inputs: S3BatchExportInputs): """Workflow implementation to export data to S3 bucket.""" data_interval_start, data_interval_end = get_data_interval(inputs.interval, inputs.data_interval_end) - create_export_run_inputs = CreateBatchExportRunInputs( + start_batch_export_run_inputs = StartBatchExportRunInputs( team_id=inputs.team_id, batch_export_id=inputs.batch_export_id, data_interval_start=data_interval_start.isoformat(), data_interval_end=data_interval_end.isoformat(), + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, ) - run_id = await workflow.execute_activity( - create_export_run, - create_export_run_inputs, + run_id, records_total_count = await workflow.execute_activity( + start_batch_export_run, + start_batch_export_run_inputs, start_to_close_timeout=dt.timedelta(minutes=5), retry_policy=RetryPolicy( 
initial_interval=dt.timedelta(seconds=10), @@ -550,12 +664,26 @@ async def run(self, inputs: S3BatchExportInputs): ), ) - update_inputs = UpdateBatchExportRunStatusInputs( + finish_inputs = FinishBatchExportRunInputs( id=run_id, status=BatchExportRun.Status.COMPLETED, team_id=inputs.team_id, ) + if records_total_count == 0: + await workflow.execute_activity( + finish_batch_export_run, + finish_inputs, + start_to_close_timeout=dt.timedelta(minutes=5), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=0, + non_retryable_error_types=["NotNullViolation", "IntegrityError"], + ), + ) + return + insert_inputs = S3InsertInputs( bucket_name=inputs.bucket_name, region=inputs.region, @@ -572,6 +700,8 @@ async def run(self, inputs: S3BatchExportInputs): encryption=inputs.encryption, kms_key_id=inputs.kms_key_id, batch_export_schema=inputs.batch_export_schema, + file_format=inputs.file_format, + run_id=run_id, ) await execute_batch_export_insert_activity( @@ -585,5 +715,5 @@ async def run(self, inputs: S3BatchExportInputs): # An S3 bucket doesn't exist. "NoSuchBucket", ], - update_inputs=update_inputs, + finish_inputs=finish_inputs, ) diff --git a/posthog/temporal/batch_exports/snowflake_batch_export.py b/posthog/temporal/batch_exports/snowflake_batch_export.py index be94eca89a799..19b090340a9c9 100644 --- a/posthog/temporal/batch_exports/snowflake_batch_export.py +++ b/posthog/temporal/batch_exports/snowflake_batch_export.py @@ -15,23 +15,30 @@ from temporalio.common import RetryPolicy from posthog.batch_exports.models import BatchExportRun -from posthog.batch_exports.service import BatchExportField, BatchExportSchema, SnowflakeBatchExportInputs +from posthog.batch_exports.service import ( + BatchExportField, + BatchExportSchema, + SnowflakeBatchExportInputs, +) from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.batch_exports.batch_exports import ( - BatchExportTemporaryFile, - CreateBatchExportRunInputs, - UpdateBatchExportRunStatusInputs, - create_export_run, + FinishBatchExportRunInputs, + RecordsCompleted, + StartBatchExportRunInputs, default_fields, execute_batch_export_insert_activity, + finish_batch_export_run, get_data_interval, - get_rows_count, iter_records, + start_batch_export_run, ) from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, ) +from posthog.temporal.batch_exports.temporary_file import ( + BatchExportTemporaryFile, +) from posthog.temporal.batch_exports.utils import peek_first_and_rewind from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.logger import bind_temporal_worker_logger @@ -108,6 +115,7 @@ class SnowflakeInsertInputs: exclude_events: list[str] | None = None include_events: list[str] | None = None batch_export_schema: BatchExportSchema | None = None + run_id: str | None = None def use_namespace(connection: SnowflakeConnection, database: str, schema: str) -> None: @@ -388,16 +396,19 @@ async def copy_loaded_files_to_snowflake_table( @activity.defn -async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs) -> int: +async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to Snowflake. TODO: We're using JSON here, it's not the most efficient way to do this. 
""" logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="Snowflake") logger.info( - "Exporting batch %s - %s", + "Batch exporting range %s - %s to Snowflake: %s.%s.%s", inputs.data_interval_start, inputs.data_interval_end, + inputs.database, + inputs.schema, + inputs.table_name, ) should_resume, details = await should_resume_from_activity_heartbeat(activity, SnowflakeHeartbeatDetails, logger) @@ -415,25 +426,6 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs) -> int: if not await client.is_alive(): raise ConnectionError("Cannot establish connection to ClickHouse") - count = await get_rows_count( - client=client, - team_id=inputs.team_id, - interval_start=data_interval_start, - interval_end=inputs.data_interval_end, - exclude_events=inputs.exclude_events, - include_events=inputs.include_events, - ) - - if count == 0: - logger.info( - "Nothing to export in batch %s - %s", - inputs.data_interval_start, - inputs.data_interval_end, - ) - return 0 - - logger.info("BatchExporting %s rows", count) - rows_exported = get_rows_exported_metric() bytes_exported = get_bytes_exported_metric() @@ -467,7 +459,7 @@ async def flush_to_snowflake( record_iterator = iter_records( client=client, team_id=inputs.team_id, - interval_start=inputs.data_interval_start, + interval_start=data_interval_start, interval_end=inputs.data_interval_end, exclude_events=inputs.exclude_events, include_events=inputs.include_events, @@ -577,15 +569,17 @@ async def run(self, inputs: SnowflakeBatchExportInputs): """Workflow implementation to export data to Snowflake table.""" data_interval_start, data_interval_end = get_data_interval(inputs.interval, inputs.data_interval_end) - create_export_run_inputs = CreateBatchExportRunInputs( + start_batch_export_run_inputs = StartBatchExportRunInputs( team_id=inputs.team_id, batch_export_id=inputs.batch_export_id, data_interval_start=data_interval_start.isoformat(), data_interval_end=data_interval_end.isoformat(), + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, ) - run_id = await workflow.execute_activity( - create_export_run, - create_export_run_inputs, + run_id, records_total_count = await workflow.execute_activity( + start_batch_export_run, + start_batch_export_run_inputs, start_to_close_timeout=dt.timedelta(minutes=5), retry_policy=RetryPolicy( initial_interval=dt.timedelta(seconds=10), @@ -595,12 +589,26 @@ async def run(self, inputs: SnowflakeBatchExportInputs): ), ) - update_inputs = UpdateBatchExportRunStatusInputs( + finish_inputs = FinishBatchExportRunInputs( id=run_id, status=BatchExportRun.Status.COMPLETED, team_id=inputs.team_id, ) + if records_total_count == 0: + await workflow.execute_activity( + finish_batch_export_run, + finish_inputs, + start_to_close_timeout=dt.timedelta(minutes=5), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=0, + non_retryable_error_types=["NotNullViolation", "IntegrityError"], + ), + ) + return + insert_inputs = SnowflakeInsertInputs( team_id=inputs.team_id, user=inputs.user, @@ -616,6 +624,7 @@ async def run(self, inputs: SnowflakeBatchExportInputs): exclude_events=inputs.exclude_events, include_events=inputs.include_events, batch_export_schema=inputs.batch_export_schema, + run_id=run_id, ) await execute_batch_export_insert_activity( @@ -630,5 +639,5 @@ async def run(self, inputs: SnowflakeBatchExportInputs): # Raised by Snowflake with an incorrect account name. 
"ForbiddenError", ], - update_inputs=update_inputs, + finish_inputs=finish_inputs, ) diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py new file mode 100644 index 0000000000000..46cfb26b418ca --- /dev/null +++ b/posthog/temporal/batch_exports/temporary_file.py @@ -0,0 +1,558 @@ +"""This module contains a temporary file to stage data in batch exports.""" + +import abc +import collections.abc +import contextlib +import csv +import datetime as dt +import gzip +import tempfile +import typing + +import brotli +import orjson +import pyarrow as pa +import pyarrow.parquet as pq + + +def replace_broken_unicode(obj): + if isinstance(obj, str): + return obj.encode("utf-8", "replace").decode("utf-8") + elif isinstance(obj, list): + return [replace_broken_unicode(item) for item in obj] + elif isinstance(obj, dict): + return {replace_broken_unicode(key): replace_broken_unicode(value) for key, value in obj.items()} + else: + return obj + + +def json_dumps_bytes(d) -> bytes: + try: + return orjson.dumps(d, default=str) + except orjson.JSONEncodeError: + # orjson is very strict about invalid unicode. This slow path protects us against + # things we've observed in practice, like single surrogate codes, e.g. "\ud83d" + cleaned_d = replace_broken_unicode(d) + return orjson.dumps(cleaned_d, default=str) + + +class BatchExportTemporaryFile: + """A TemporaryFile used to as an intermediate step while exporting data. + + This class does not implement the file-like interface but rather passes any calls + to the underlying tempfile.NamedTemporaryFile. We do override 'write' methods + to allow tracking bytes and records. + """ + + def __init__( + self, + mode: str = "w+b", + buffering=-1, + compression: str | None = None, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ): + self._file = tempfile.NamedTemporaryFile( + mode=mode, + encoding=encoding, + newline=newline, + buffering=buffering, + suffix=suffix, + prefix=prefix, + dir=dir, + errors=errors, + ) + self.compression = compression + self.bytes_total = 0 + self.records_total = 0 + self.bytes_since_last_reset = 0 + self.records_since_last_reset = 0 + self._brotli_compressor = None + + def __getattr__(self, name): + """Pass get attr to underlying tempfile.NamedTemporaryFile.""" + return self._file.__getattr__(name) + + def __enter__(self): + """Context-manager protocol enter method.""" + self._file.__enter__() + return self + + def __exit__(self, exc, value, tb): + """Context-manager protocol exit method.""" + return self._file.__exit__(exc, value, tb) + + def __iter__(self): + yield from self._file + + @property + def brotli_compressor(self): + if self._brotli_compressor is None: + self._brotli_compressor = brotli.Compressor() + return self._brotli_compressor + + def finish_brotli_compressor(self): + """Flush remaining brotli bytes.""" + # TODO: Move compression out of `BatchExportTemporaryFile` to a standard class for all writers. 
+ if self.compression != "brotli": + raise ValueError(f"Compression is '{self.compression}', not 'brotli'") + + result = self._file.write(self.brotli_compressor.finish()) + self.bytes_total += result + self.bytes_since_last_reset += result + self._brotli_compressor = None + + def compress(self, content: bytes | str) -> bytes: + if isinstance(content, str): + encoded = content.encode("utf-8") + else: + encoded = content + + match self.compression: + case "gzip": + return gzip.compress(encoded) + case "brotli": + self.brotli_compressor.process(encoded) + return self.brotli_compressor.flush() + case None: + return encoded + case _: + raise ValueError(f"Unsupported compression: '{self.compression}'") + + def write(self, content: bytes | str): + """Write bytes to underlying file keeping track of how many bytes were written.""" + compressed_content = self.compress(content) + + if "b" in self.mode: + result = self._file.write(compressed_content) + else: + result = self._file.write(compressed_content.decode("utf-8")) + + self.bytes_total += result + self.bytes_since_last_reset += result + + return result + + def write_record_as_bytes(self, record: bytes): + result = self.write(record) + + self.records_total += 1 + self.records_since_last_reset += 1 + + return result + + def write_records_to_jsonl(self, records): + """Write records to a temporary file as JSONL.""" + if len(records) == 1: + try: + jsonl_dump = orjson.dumps(records[0], option=orjson.OPT_APPEND_NEWLINE, default=str) + except orjson.JSONEncodeError: + # orjson is very strict about invalid unicode. This slow path protects us against + # things we've observed in practice, like single surrogate codes, e.g. "\ud83d" + cleaned_record = replace_broken_unicode(records[0]) + jsonl_dump = orjson.dumps(cleaned_record, option=orjson.OPT_APPEND_NEWLINE, default=str) + else: + jsonl_dump = b"\n".join(map(json_dumps_bytes, records)) + + result = self.write(jsonl_dump) + + self.records_total += len(records) + self.records_since_last_reset += len(records) + + return result + + def write_records_to_csv( + self, + records, + fieldnames: None | collections.abc.Sequence[str] = None, + extrasaction: typing.Literal["raise", "ignore"] = "ignore", + delimiter: str = ",", + quotechar: str = '"', + escapechar: str | None = "\\", + lineterminator: str = "\n", + quoting=csv.QUOTE_NONE, + ): + """Write records to a temporary file as CSV.""" + if len(records) == 0: + return + + if fieldnames is None: + fieldnames = list(records[0].keys()) + + writer = csv.DictWriter( + self, + fieldnames=fieldnames, + extrasaction=extrasaction, + delimiter=delimiter, + quotechar=quotechar, + escapechar=escapechar, + quoting=quoting, + lineterminator=lineterminator, + ) + writer.writerows(records) + + self.records_total += len(records) + self.records_since_last_reset += len(records) + + def write_records_to_tsv( + self, + records, + fieldnames: None | list[str] = None, + extrasaction: typing.Literal["raise", "ignore"] = "ignore", + quotechar: str = '"', + escapechar: str | None = "\\", + lineterminator: str = "\n", + quoting=csv.QUOTE_NONE, + ): + """Write records to a temporary file as TSV.""" + return self.write_records_to_csv( + records, + fieldnames=fieldnames, + extrasaction=extrasaction, + delimiter="\t", + quotechar=quotechar, + escapechar=escapechar, + quoting=quoting, + lineterminator=lineterminator, + ) + + def rewind(self): + """Rewind the file before reading it.""" + self._file.seek(0) + + def reset(self): + """Reset underlying file by truncating it. 
+ + Also resets the tracker attributes for bytes and records since last reset. + """ + self._file.seek(0) + self._file.truncate() + + self.bytes_since_last_reset = 0 + self.records_since_last_reset = 0 + + +LastInsertedAt = dt.datetime +IsLast = bool +RecordsSinceLastFlush = int +BytesSinceLastFlush = int +FlushCallable = collections.abc.Callable[ + [BatchExportTemporaryFile, RecordsSinceLastFlush, BytesSinceLastFlush, LastInsertedAt, IsLast], + collections.abc.Awaitable[None], +] + + +class UnsupportedFileFormatError(Exception): + """Raised when a writer for an unsupported file format is requested.""" + + def __init__(self, file_format: str, destination: str): + super().__init__(f"{file_format} is not a supported format for {destination} batch exports.") + + +class BatchExportWriter(abc.ABC): + """A temporary file writer to be used by batch export workflows. + + Subclasses should define `_write_record_batch` with the particular intricacies + of the format they are writing as. + + Actual writing calls are passed to the underlying `batch_export_file`. + + Attributes: + _batch_export_file: The temporary file we are writing to. + max_bytes: Flush the temporary file with the provided `flush_callable` + upon reaching or surpassing this threshold. Keep in mind we write on a RecordBatch + per RecordBatch basis, which means the threshold will be surpassed by at most the + size of a RecordBatch before a flush occurs. + flush_callable: A callback to flush the temporary file when `max_bytes` is reached. + The temporary file will be reset after calling `flush_callable`. When calling + `flush_callable` the following positional arguments will be passed: The temporary file + that must be flushed, the number of records since the last flush, the number of bytes + since the last flush, the latest recorded `_inserted_at`, and a `bool` indicating if + this is the last flush (when exiting the context manager). + file_kwargs: Optional keyword arguments passed when initializing `_batch_export_file`. + last_inserted_at: Latest `_inserted_at` written. This attribute leaks some implementation + details, as we are assuming assume `_inserted_at` is present, as it's added to all + batch export queries. + records_total: The total number of records (not RecordBatches!) written. + records_since_last_flush: The number of records written since last flush. + bytes_total: The total number of bytes written. + bytes_since_last_flush: The number of bytes written since last flush. + """ + + def __init__( + self, + flush_callable: FlushCallable, + max_bytes: int, + file_kwargs: collections.abc.Mapping[str, typing.Any] | None = None, + ): + self.flush_callable = flush_callable + self.max_bytes = max_bytes + self.file_kwargs: collections.abc.Mapping[str, typing.Any] = file_kwargs or {} + + self._batch_export_file: BatchExportTemporaryFile | None = None + self.reset_writer_tracking() + + def reset_writer_tracking(self): + """Reset this writer's tracking state.""" + self.last_inserted_at: dt.datetime | None = None + self.records_total = 0 + self.records_since_last_flush = 0 + self.bytes_total = 0 + self.bytes_since_last_flush = 0 + + @contextlib.asynccontextmanager + async def open_temporary_file(self): + """Explicitly open the temporary file this writer is writing to. + + The underlying `BatchExportTemporaryFile` is only accessible within this context manager. 
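For orientation, a coroutine satisfying the `FlushCallable` alias above can be as small as the following sketch (`upload_part` is a hypothetical destination call, not part of this diff):

import datetime as dt

async def flush_to_destination(
    batch_export_file: BatchExportTemporaryFile,
    records_since_last_flush: int,
    bytes_since_last_flush: int,
    last_inserted_at: dt.datetime,
    is_last: bool,
) -> None:
    # The writer seeks to the start of the file before calling us, so the
    # staged bytes can be streamed straight to the destination.
    await upload_part(batch_export_file, size=bytes_since_last_flush)  # hypothetical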
This helps + us separate the lifetime of the underlying temporary file from the writer: The writer may still be + accessed even after the temporary file is closed, while on the other hand we ensure the file and all + its data is flushed and not leaked outside the context. Any relevant tracking information is copied + to the writer. + """ + self.reset_writer_tracking() + + with BatchExportTemporaryFile(**self.file_kwargs) as temp_file: + self._batch_export_file = temp_file + + try: + yield + finally: + self.track_bytes_written(temp_file) + + if self.last_inserted_at is not None and self.bytes_since_last_flush > 0: + # `bytes_since_last_flush` should be 0 unless: + # 1. The last batch wasn't flushed as it didn't reach `max_bytes`. + # 2. The last batch was flushed but there was another write after the last call to + # `write_record_batch`. For example, footer bytes. + await self.flush(self.last_inserted_at, is_last=True) + + self._batch_export_file = None + + @property + def batch_export_file(self): + """Property for underlying temporary file. + + Raises: + ValueError: if attempting to access the temporary file before it has been opened. + """ + if self._batch_export_file is None: + raise ValueError("Batch export file is closed. Did you forget to call 'open_temporary_file'?") + return self._batch_export_file + + @abc.abstractmethod + def _write_record_batch(self, record_batch: pa.RecordBatch) -> None: + """Write a record batch to the underlying `BatchExportTemporaryFile`. + + Subclasses must override this to provide the actual implementation according to the supported + file format. + """ + pass + + def track_records_written(self, record_batch: pa.RecordBatch) -> None: + """Update this writer's state with the number of records in `record_batch`.""" + self.records_total += record_batch.num_rows + self.records_since_last_flush += record_batch.num_rows + + def track_bytes_written(self, batch_export_file: BatchExportTemporaryFile) -> None: + """Update this writer's state with the bytes in `batch_export_file`.""" + self.bytes_total = batch_export_file.bytes_total + self.bytes_since_last_flush = batch_export_file.bytes_since_last_reset + + async def write_record_batch(self, record_batch: pa.RecordBatch) -> None: + """Issue a record batch write tracking progress and flushing if required.""" + record_batch = record_batch.sort_by("_inserted_at") + last_inserted_at = record_batch.column("_inserted_at")[-1].as_py() + + column_names = record_batch.column_names + column_names.pop(column_names.index("_inserted_at")) + + self._write_record_batch(record_batch.select(column_names)) + + self.last_inserted_at = last_inserted_at + self.track_records_written(record_batch) + self.track_bytes_written(self.batch_export_file) + + if self.bytes_since_last_flush >= self.max_bytes: + await self.flush(last_inserted_at) + + async def flush(self, last_inserted_at: dt.datetime, is_last: bool = False) -> None: + """Call the provided `flush_callable` and reset underlying file. + + The underlying batch export temporary file will be reset after calling `flush_callable`. 
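Putting the pieces together, an `insert_*` activity would drive a writer roughly like this (a sketch only; `record_batches` stands in for the record batch iterator produced by the batch export query, and `JSONLBatchExportWriter` is defined further below):

writer = JSONLBatchExportWriter(
    max_bytes=50 * 1024 * 1024,  # flush roughly every 50 MiB
    flush_callable=flush_to_destination,
)

async with writer.open_temporary_file():
    # Each batch must contain the `_inserted_at` column added by batch export queries.
    for record_batch in record_batches:
        await writer.write_record_batch(record_batch)
# On exiting the context manager, any unflushed tail is flushed with is_last=True.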
+ """ + if is_last is True and self.batch_export_file.compression == "brotli": + self.batch_export_file.finish_brotli_compressor() + + self.batch_export_file.seek(0) + + await self.flush_callable( + self.batch_export_file, + self.records_since_last_flush, + self.bytes_since_last_flush, + last_inserted_at, + is_last, + ) + self.batch_export_file.reset() + + self.records_since_last_flush = 0 + self.bytes_since_last_flush = 0 + + +class JSONLBatchExportWriter(BatchExportWriter): + """A `BatchExportWriter` for JSONLines format. + + Attributes: + default: The default function to use to cast non-serializable Python objects to serializable objects. + By default, non-serializable objects will be cast to string via `str()`. + """ + + def __init__( + self, + max_bytes: int, + flush_callable: FlushCallable, + compression: None | str = None, + default: typing.Callable = str, + ): + super().__init__( + max_bytes=max_bytes, + flush_callable=flush_callable, + file_kwargs={"compression": compression}, + ) + + self.default = default + + def write(self, content: bytes) -> int: + """Write a single row of JSONL.""" + try: + n = self.batch_export_file.write(orjson.dumps(content, default=str) + b"\n") + except orjson.JSONEncodeError: + # orjson is very strict about invalid unicode. This slow path protects us against + # things we've observed in practice, like single surrogate codes, e.g. "\ud83d" + cleaned_content = replace_broken_unicode(content) + n = self.batch_export_file.write(orjson.dumps(cleaned_content, default=str) + b"\n") + return n + + def _write_record_batch(self, record_batch: pa.RecordBatch) -> None: + """Write records to a temporary file as JSONL.""" + for record in record_batch.to_pylist(): + self.write(record) + + +class CSVBatchExportWriter(BatchExportWriter): + """A `BatchExportWriter` for CSV format.""" + + def __init__( + self, + max_bytes: int, + flush_callable: FlushCallable, + field_names: collections.abc.Sequence[str], + extras_action: typing.Literal["raise", "ignore"] = "ignore", + delimiter: str = ",", + quote_char: str = '"', + escape_char: str | None = "\\", + line_terminator: str = "\n", + quoting=csv.QUOTE_NONE, + compression: str | None = None, + ): + super().__init__( + max_bytes=max_bytes, + flush_callable=flush_callable, + file_kwargs={"compression": compression}, + ) + self.field_names = field_names + self.extras_action: typing.Literal["raise", "ignore"] = extras_action + self.delimiter = delimiter + self.quote_char = quote_char + self.escape_char = escape_char + self.line_terminator = line_terminator + self.quoting = quoting + + self._csv_writer: csv.DictWriter | None = None + + @property + def csv_writer(self) -> csv.DictWriter: + if self._csv_writer is None: + self._csv_writer = csv.DictWriter( + self.batch_export_file, + fieldnames=self.field_names, + extrasaction=self.extras_action, + delimiter=self.delimiter, + quotechar=self.quote_char, + escapechar=self.escape_char, + quoting=self.quoting, + lineterminator=self.line_terminator, + ) + + return self._csv_writer + + def _write_record_batch(self, record_batch: pa.RecordBatch) -> None: + """Write records to a temporary file as CSV.""" + self.csv_writer.writerows(record_batch.to_pylist()) + + +class ParquetBatchExportWriter(BatchExportWriter): + """A `BatchExportWriter` for Apache Parquet format. + + We utilize and wrap a `pyarrow.parquet.ParquetWriter` to do the actual writing. 
We default to their + defaults for most parameters; however this class could be extended with more attributes to pass along + to `pyarrow.parquet.ParquetWriter`. + + See the pyarrow docs for more details on what parameters can the writer be configured with: + https://arrow.apache.org/docs/python/generated/pyarrow.parquet.ParquetWriter.html + + In contrast to other writers, instead of us handling compression we let `pyarrow.parquet.ParquetWriter` + handle it, so `BatchExportTemporaryFile` is always initialized with `compression=None`. + + Attributes: + schema: The schema used by the Parquet file. Should match the schema of written RecordBatches. + compression: Compression codec passed to underlying `pyarrow.parquet.ParquetWriter`. + """ + + def __init__( + self, + max_bytes: int, + flush_callable: FlushCallable, + schema: pa.Schema, + compression: str | None = "snappy", + ): + super().__init__( + max_bytes=max_bytes, + flush_callable=flush_callable, + file_kwargs={"compression": None}, # ParquetWriter handles compression + ) + self.schema = schema + self.compression = compression + + self._parquet_writer: pq.ParquetWriter | None = None + + @property + def parquet_writer(self) -> pq.ParquetWriter: + if self._parquet_writer is None: + self._parquet_writer = pq.ParquetWriter( + self.batch_export_file, + schema=self.schema, + compression="none" if self.compression is None else self.compression, + ) + return self._parquet_writer + + @contextlib.asynccontextmanager + async def open_temporary_file(self): + """Ensure underlying Parquet writer is closed before flushing and closing temporary file.""" + async with super().open_temporary_file(): + try: + yield + finally: + if self._parquet_writer is not None: + self._parquet_writer.writer.close() + self._parquet_writer = None + + def _write_record_batch(self, record_batch: pa.RecordBatch) -> None: + """Write records to a temporary file as Parquet.""" + + self.parquet_writer.write_batch(record_batch.select(self.parquet_writer.schema.names)) diff --git a/posthog/temporal/batch_exports/utils.py b/posthog/temporal/batch_exports/utils.py index bdb2b9001feed..9cd68c60e8b94 100644 --- a/posthog/temporal/batch_exports/utils.py +++ b/posthog/temporal/batch_exports/utils.py @@ -1,5 +1,10 @@ +import asyncio import collections.abc import typing +import uuid + +from posthog.batch_exports.models import BatchExportRun +from posthog.batch_exports.service import update_batch_export_run T = typing.TypeVar("T") @@ -24,3 +29,33 @@ def rewind_gen() -> collections.abc.Generator[T, None, None]: yield i return (first, rewind_gen()) + + +async def try_set_batch_export_run_to_running(run_id: str | None, logger, timeout: float = 10.0) -> None: + """Try to set a batch export run to 'RUNNING' status, but do nothing if we fail or if 'run_id' is 'None'. + + This is intended to be used within a batch export's 'insert_*' activity. These activities cannot afford + to fail if our database is experiencing issues, as we should strive to not let issues in our infrastructure + propagate to users. So, we do a best effort update and swallow the exception if we fail. + + Even if we fail to update the status here, the 'finish_batch_export_run' activity at the end of each batch + export will retry indefinitely and wait for postgres to recover, eventually making a final update with + the status. This means that, worse case, the batch export status won't be displayed as 'RUNNING' while running. 
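A call site inside an `insert_*` activity would look roughly like this (a sketch; the `run_id` field on the inputs matches the `run_id=run_id` addition to `SnowflakeInsertInputs` earlier in this diff):

logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="Snowflake")

# Best effort only: if postgres is struggling, this logs a warning and moves on
# instead of failing the whole export.
await try_set_batch_export_run_to_running(run_id=inputs.run_id, logger=logger)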
+ """ + if run_id is None: + return + + try: + await asyncio.wait_for( + asyncio.to_thread( + update_batch_export_run, + uuid.UUID(run_id), + status=BatchExportRun.Status.RUNNING, + ), + timeout=timeout, + ) + except Exception as e: + logger.warn( + "Unexpected error trying to set batch export to 'RUNNING' status. Run will continue but displayed status may not be accurate until run finishes", + exc_info=e, + ) diff --git a/posthog/temporal/common/utils.py b/posthog/temporal/common/utils.py index 1b61a356dc898..022c8270d7748 100644 --- a/posthog/temporal/common/utils.py +++ b/posthog/temporal/common/utils.py @@ -119,10 +119,9 @@ async def should_resume_from_activity_heartbeat( heartbeat_details = heartbeat_type.from_activity(activity) except EmptyHeartbeatError: - # We don't log this as a warning/error because it's the expected exception when heartbeat is empty. + # We don't log this as it's the expected exception when heartbeat is empty. heartbeat_details = None received = False - logger.debug("Did not receive details from previous activity execution") except NotEnoughHeartbeatValuesError: heartbeat_details = None diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index db99eeb1de315..bf78c99e9d9e0 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -10,6 +10,7 @@ # TODO: remove dependency from posthog.temporal.batch_exports.base import PostHogWorkflow +from posthog.temporal.data_imports.pipelines.zendesk.credentials import ZendeskCredentialsToken from posthog.warehouse.data_load.source_templates import create_warehouse_templates_for_source from posthog.warehouse.data_load.validate_schema import validate_schema_and_update_table @@ -220,7 +221,20 @@ async def run_external_data_job(inputs: ExternalDataJobInputs) -> TSchemaTables: schema=schema, table_names=endpoints, ) + elif model.pipeline.source_type == ExternalDataSource.Type.ZENDESK: + from posthog.temporal.data_imports.pipelines.zendesk.helpers import zendesk_support + credentials = ZendeskCredentialsToken() + credentials.token = model.pipeline.job_inputs.get("zendesk_api_key") + credentials.subdomain = model.pipeline.job_inputs.get("zendesk_subdomain") + credentials.email = model.pipeline.job_inputs.get("zendesk_email_address") + + data_support = zendesk_support(credentials=credentials, endpoints=tuple(endpoints), team_id=inputs.team_id) + # Uncomment to support zendesk chat and talk + # data_chat = zendesk_chat() + # data_talk = zendesk_talk() + + source = data_support else: raise ValueError(f"Source type {model.pipeline.source_type} not supported") diff --git a/posthog/temporal/data_imports/pipelines/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline.py index 5297f2e39ac29..6a922d2d96a67 100644 --- a/posthog/temporal/data_imports/pipelines/pipeline.py +++ b/posthog/temporal/data_imports/pipelines/pipeline.py @@ -1,4 +1,5 @@ from dataclasses import dataclass +from typing import Literal from uuid import UUID import dlt @@ -9,7 +10,7 @@ import os from posthog.settings.base_variables import TEST from structlog.typing import FilteringBoundLogger -from dlt.sources import DltResource +from dlt.sources import DltSource @dataclass @@ -23,9 +24,9 @@ class PipelineInputs: class DataImportPipeline: - loader_file_format = "parquet" + loader_file_format: Literal["parquet"] = "parquet" - def __init__(self, inputs: PipelineInputs, source: DltResource, logger: FilteringBoundLogger): + def __init__(self, 
inputs: PipelineInputs, source: DltSource, logger: FilteringBoundLogger): self.inputs = inputs self.logger = logger self.source = source @@ -47,6 +48,7 @@ def _get_destination(self): credentials = { "aws_access_key_id": settings.AIRBYTE_BUCKET_KEY, "aws_secret_access_key": settings.AIRBYTE_BUCKET_SECRET, + "region_name": settings.AIRBYTE_BUCKET_REGION, } return dlt.destinations.filesystem( diff --git a/posthog/temporal/data_imports/pipelines/schemas.py b/posthog/temporal/data_imports/pipelines/schemas.py index 371f8087b7966..1caea1364899a 100644 --- a/posthog/temporal/data_imports/pipelines/schemas.py +++ b/posthog/temporal/data_imports/pipelines/schemas.py @@ -1,3 +1,4 @@ +from posthog.temporal.data_imports.pipelines.zendesk.settings import BASE_ENDPOINTS, SUPPORT_ENDPOINTS from posthog.warehouse.models import ExternalDataSource from posthog.temporal.data_imports.pipelines.stripe.settings import ENDPOINTS as STRIPE_ENDPOINTS from posthog.temporal.data_imports.pipelines.hubspot.settings import ENDPOINTS as HUBSPOT_ENDPOINTS @@ -5,5 +6,8 @@ PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING = { ExternalDataSource.Type.STRIPE: STRIPE_ENDPOINTS, ExternalDataSource.Type.HUBSPOT: HUBSPOT_ENDPOINTS, + ExternalDataSource.Type.ZENDESK: tuple( + list(BASE_ENDPOINTS) + [resource for resource, endpoint_url, data_key, cursor_paginated in SUPPORT_ENDPOINTS] + ), ExternalDataSource.Type.POSTGRES: (), } diff --git a/posthog/temporal/data_imports/pipelines/zendesk/api_helpers.py b/posthog/temporal/data_imports/pipelines/zendesk/api_helpers.py new file mode 100644 index 0000000000000..c6e4eb4809ee7 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/zendesk/api_helpers.py @@ -0,0 +1,103 @@ +from typing import Optional, TypedDict, Dict + +from dlt.common import pendulum +from dlt.common.time import ensure_pendulum_datetime +from dlt.common.typing import DictStrAny, DictStrStr, TDataItem + + +class TCustomFieldInfo(TypedDict): + title: str + options: DictStrStr + + +def _parse_date_or_none(value: Optional[str]) -> Optional[pendulum.DateTime]: + if not value: + return None + return ensure_pendulum_datetime(value) + + +def process_ticket( + ticket: DictStrAny, + custom_fields: Dict[str, TCustomFieldInfo], + pivot_custom_fields: bool = True, +) -> DictStrAny: + """ + Helper function that processes a ticket object and returns a dictionary of ticket data. + + Args: + ticket: The ticket dict object returned by a Zendesk API call. + custom_fields: A dictionary containing all the custom fields available for tickets. + pivot_custom_fields: A boolean indicating whether to pivot all custom fields or not. + Defaults to True. + + Returns: + DictStrAny: A dictionary containing cleaned data about a ticket. + """ + # Commented out due to how slow this processing code is, and how often it'd break the pipeline. + # to be revisited on whether we want/need this pre-processing and figure out the best way to do it. + + # pivot custom field if indicated as such + # get custom fields + # pivoted_fields = set() + # for custom_field in ticket.get("custom_fields", []): + # if pivot_custom_fields: + # cus_field_id = str(custom_field["id"]) + # field = custom_fields.get(cus_field_id, None) + # if field is None: + # logger.warning( + # "Custom field with ID %s does not exist in fields state. 
It may have been created after the pipeline run started.", + # cus_field_id, + # ) + # custom_field["ticket_id"] = ticket["id"] + # continue + + # pivoted_fields.add(cus_field_id) + # field_name = field["title"] + # current_value = custom_field["value"] + # options = field["options"] + # # Map dropdown values to labels + # if not current_value or not options: + # ticket[field_name] = current_value + # elif isinstance(current_value, list): # Multiple choice field has a list of values + # ticket[field_name] = [options.get(key, key) for key in current_value] + # else: + # ticket[field_name] = options.get(current_value) + # else: + # custom_field["ticket_id"] = ticket["id"] + # # delete fields that are not needed for pivoting + # if pivot_custom_fields: + # ticket["custom_fields"] = [f for f in ticket.get("custom_fields", []) if str(f["id"]) not in pivoted_fields] + # if not ticket.get("custom_fields"): + # del ticket["custom_fields"] + # del ticket["fields"] + + # modify dates to return datetime objects instead + ticket["updated_at"] = _parse_date_or_none(ticket["updated_at"]) + ticket["created_at"] = _parse_date_or_none(ticket["created_at"]) + ticket["due_at"] = _parse_date_or_none(ticket["due_at"]) + return ticket + + +def process_ticket_field(field: DictStrAny, custom_fields_state: Dict[str, TCustomFieldInfo]) -> TDataItem: + """Update custom field mapping in dlt state for the given field.""" + # grab id and update state dict + # if the id is new, add a new key to indicate that this is the initial value for title + # New dropdown options are added to existing field but existing options are not changed + return_dict = field.copy() + field_id = str(field["id"]) + + options = field.get("custom_field_options", []) + new_options = {o["value"]: o["name"] for o in options} + existing_field = custom_fields_state.get(field_id) + if existing_field: + existing_options = existing_field["options"] + if return_options := return_dict.get("custom_field_options"): + for item in return_options: + item["name"] = existing_options.get(item["value"], item["name"]) + for key, value in new_options.items(): + if key not in existing_options: + existing_options[key] = value + else: + custom_fields_state[field_id] = dict(title=field["title"], options=new_options) + return_dict["initial_title"] = field["title"] + return return_dict diff --git a/posthog/temporal/data_imports/pipelines/zendesk/credentials.py b/posthog/temporal/data_imports/pipelines/zendesk/credentials.py new file mode 100644 index 0000000000000..1f8110ae9b911 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/zendesk/credentials.py @@ -0,0 +1,49 @@ +""" +This module handles how credentials are read in dlt sources +""" +from typing import ClassVar, List, Union +from dlt.common.configuration import configspec +from dlt.common.configuration.specs import CredentialsConfiguration +from dlt.common.typing import TSecretValue + + +@configspec +class ZendeskCredentialsBase(CredentialsConfiguration): + """ + The Base version of all the ZendeskCredential classes. 
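With the custom-field pivoting commented out, `process_ticket` above currently reduces to date normalization, e.g. (hypothetical payload trimmed to the fields the helper touches):

from dlt.common import pendulum

ticket = {
    "id": 35436,
    "created_at": "2024-02-01T09:30:00Z",
    "updated_at": "2024-03-01T10:00:00Z",
    "due_at": None,
}
processed = process_ticket(ticket, custom_fields={})

assert isinstance(processed["updated_at"], pendulum.DateTime)
assert processed["due_at"] is None  # absent dates stay None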
+ """ + + subdomain: str + __config_gen_annotations__: ClassVar[List[str]] = [] + + +@configspec +class ZendeskCredentialsEmailPass(ZendeskCredentialsBase): + """ + This class is used to store credentials for Email + Password Authentication + """ + + email: str + password: TSecretValue + + +@configspec +class ZendeskCredentialsOAuth(ZendeskCredentialsBase): + """ + This class is used to store credentials for OAuth Token Authentication + """ + + oauth_token: TSecretValue + + +@configspec +class ZendeskCredentialsToken(ZendeskCredentialsBase): + """ + This class is used to store credentials for Token Authentication + """ + + email: str + token: TSecretValue + + +TZendeskCredentials = Union[ZendeskCredentialsEmailPass, ZendeskCredentialsToken, ZendeskCredentialsOAuth] diff --git a/posthog/temporal/data_imports/pipelines/zendesk/helpers.py b/posthog/temporal/data_imports/pipelines/zendesk/helpers.py new file mode 100644 index 0000000000000..a3e0328c8ab28 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/zendesk/helpers.py @@ -0,0 +1,444 @@ +from typing import Iterator, List, Optional, Iterable, Tuple +from itertools import chain + +import dlt +from dlt.common import pendulum +from dlt.common.time import ensure_pendulum_datetime +from dlt.common.typing import TDataItem, TAnyDateTime, TDataItems +from dlt.sources import DltResource + +from .api_helpers import process_ticket, process_ticket_field +from .talk_api import PaginationType, ZendeskAPIClient +from .credentials import TZendeskCredentials, ZendeskCredentialsOAuth + +from .settings import ( + DEFAULT_START_DATE, + CUSTOM_FIELDS_STATE_KEY, + SUPPORT_ENDPOINTS, + TALK_ENDPOINTS, + INCREMENTAL_TALK_ENDPOINTS, + SUPPORT_EXTRA_ENDPOINTS, +) + + +@dlt.source(max_table_nesting=0) +def zendesk_talk( + credentials: TZendeskCredentials = dlt.secrets.value, + start_date: Optional[TAnyDateTime] = DEFAULT_START_DATE, + end_date: Optional[TAnyDateTime] = None, +) -> Iterable[DltResource]: + """ + Retrieves data from Zendesk Talk for phone calls and voicemails. + + `start_date` argument can be used on its own or together with `end_date`. When both are provided + data is limited to items updated in that time range. + The range is "half-open", meaning elements equal and higher than `start_date` and elements lower than `end_date` are included. + All resources opt-in to use Airflow scheduler if run as Airflow task + + Args: + credentials: The credentials for authentication. Defaults to the value in the `dlt.secrets` object. + start_date: The start time of the range for which to load. Defaults to January 1st 2000. + end_date: The end time of the range for which to load data. + If end time is not provided, the incremental loading will be enabled and after initial run, only new data will be retrieved + Yields: + DltResource: Data resources from Zendesk Talk. 
+ """ + + # use the credentials to authenticate with the ZendeskClient + zendesk_client = ZendeskAPIClient(credentials) + start_date_obj = ensure_pendulum_datetime(start_date) + end_date_obj = ensure_pendulum_datetime(end_date) if end_date else None + + # regular endpoints + for key, talk_endpoint, item_name, cursor_paginated in TALK_ENDPOINTS: + yield dlt.resource( + talk_resource( + zendesk_client, + key, + item_name or talk_endpoint, + PaginationType.CURSOR if cursor_paginated else PaginationType.OFFSET, + ), + name=key, + write_disposition="replace", + ) + + # adding incremental endpoints + for key, talk_incremental_endpoint in INCREMENTAL_TALK_ENDPOINTS.items(): + yield dlt.resource( + talk_incremental_resource, + name=f"{key}_incremental", + primary_key="id", + write_disposition="merge", + )( + zendesk_client=zendesk_client, + talk_endpoint_name=key, + talk_endpoint=talk_incremental_endpoint, + updated_at=dlt.sources.incremental[str]( + "updated_at", + initial_value=start_date_obj.isoformat(), + end_value=end_date_obj.isoformat() if end_date_obj else None, + allow_external_schedulers=True, + ), + ) + + +def talk_resource( + zendesk_client: ZendeskAPIClient, + talk_endpoint_name: str, + talk_endpoint: str, + pagination_type: PaginationType, +) -> Iterator[TDataItem]: + """ + Loads data from a Zendesk Talk endpoint. + + Args: + zendesk_client: An instance of ZendeskAPIClient for making API calls to Zendesk Talk. + talk_endpoint_name: The name of the talk_endpoint. + talk_endpoint: The actual URL ending of the endpoint. + pagination: Type of pagination type used by endpoint + + Yields: + TDataItem: Dictionary containing the data from the endpoint. + """ + # send query and process it + yield from zendesk_client.get_pages(talk_endpoint, talk_endpoint_name, pagination_type) + + +def talk_incremental_resource( + zendesk_client: ZendeskAPIClient, + talk_endpoint_name: str, + talk_endpoint: str, + updated_at: dlt.sources.incremental[str], +) -> Iterator[TDataItem]: + """ + Loads data from a Zendesk Talk endpoint with incremental loading. + + Args: + zendesk_client: An instance of ZendeskAPIClient for making API calls to Zendesk Talk. + talk_endpoint_name: The name of the talk_endpoint. + talk_endpoint: The actual URL ending of the endpoint. + updated_at: Source for the last updated timestamp. + + Yields: + TDataItem: Dictionary containing the data from the endpoint. + """ + # send the request and process it + for page in zendesk_client.get_pages( + talk_endpoint, + talk_endpoint_name, + PaginationType.START_TIME, + params={"start_time": ensure_pendulum_datetime(updated_at.last_value).int_timestamp}, + ): + yield page + if updated_at.end_out_of_range: + return + + +@dlt.source(max_table_nesting=0) +def zendesk_chat( + credentials: ZendeskCredentialsOAuth = dlt.secrets.value, + start_date: Optional[TAnyDateTime] = DEFAULT_START_DATE, + end_date: Optional[TAnyDateTime] = None, +) -> Iterable[DltResource]: + """ + Retrieves data from Zendesk Chat for chat interactions. + + `start_date` argument can be used on its own or together with `end_date`. When both are provided + data is limited to items updated in that time range. + The range is "half-open", meaning elements equal and higher than `start_date` and elements lower than `end_date` are included. + All resources opt-in to use Airflow scheduler if run as Airflow task + + Args: + credentials: The credentials for authentication. Defaults to the value in the `dlt.secrets` object. + start_date: The start time of the range for which to load. 
Defaults to January 1st 2000. + end_date: The end time of the range for which to load data. + If end time is not provided, the incremental loading will be enabled and after initial run, only new data will be retrieved + + Yields: + DltResource: Data resources from Zendesk Chat. + """ + + # Authenticate + zendesk_client = ZendeskAPIClient(credentials, url_prefix="https://www.zopim.com") + start_date_obj = ensure_pendulum_datetime(start_date) + end_date_obj = ensure_pendulum_datetime(end_date) if end_date else None + + yield dlt.resource(chats_table_resource, name="chats", write_disposition="merge")( + zendesk_client, + dlt.sources.incremental[str]( + "update_timestamp|updated_timestamp", + initial_value=start_date_obj.isoformat(), + end_value=end_date_obj.isoformat() if end_date_obj else None, + allow_external_schedulers=True, + ), + ) + + +def chats_table_resource( + zendesk_client: ZendeskAPIClient, + update_timestamp: dlt.sources.incremental[str], +) -> Iterator[TDataItems]: + """ + Resource for Chats + + Args: + zendesk_client: The Zendesk API client instance, used to make calls to Zendesk API. + update_timestamp: Incremental source specifying the timestamp for incremental loading. + + Yields: + dict: A dictionary representing each row of data. + """ + chat_pages = zendesk_client.get_pages( + "/api/v2/incremental/chats", + "chats", + PaginationType.START_TIME, + params={ + "start_time": ensure_pendulum_datetime(update_timestamp.last_value).int_timestamp, + "fields": "chats(*)", + }, + ) + for page in chat_pages: + yield page + + if update_timestamp.end_out_of_range: + return + + +@dlt.source(max_table_nesting=0) +def zendesk_support( + team_id: int, + credentials: TZendeskCredentials = dlt.secrets.value, + endpoints: Tuple[str, ...] = (), + pivot_ticket_fields: bool = True, + start_date: Optional[TAnyDateTime] = DEFAULT_START_DATE, + end_date: Optional[TAnyDateTime] = None, +) -> Iterable[DltResource]: + """ + Retrieves data from Zendesk Support for tickets, users, brands, organizations, and groups. + + `start_date` argument can be used on its own or together with `end_date`. When both are provided + data is limited to items updated in that time range. + The range is "half-open", meaning elements equal and higher than `start_date` and elements lower than `end_date` are included. + All resources opt-in to use Airflow scheduler if run as Airflow task + + Args: + credentials: The credentials for authentication. Defaults to the value in the `dlt.secrets` object. + load_all: Whether to load extra resources for the API. Defaults to True. + pivot_ticket_fields: Whether to pivot the custom fields in tickets. Defaults to True. + start_date: The start time of the range for which to load. Defaults to January 1st 2000. + end_date: The end time of the range for which to load data. + If end time is not provided, the incremental loading will be enabled and after initial run, only new data will be retrieved + + Returns: + Sequence[DltResource]: Multiple dlt resources. 
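As a usage sketch, this source plugs into a dlt pipeline like any other (pipeline names and endpoint selection are hypothetical):

import dlt

pipeline = dlt.pipeline(
    pipeline_name="zendesk_support",
    destination="filesystem",
    dataset_name="zendesk",
)
source = zendesk_support(team_id=1, credentials=credentials, endpoints=("tickets", "brands"))
pipeline.run(source, loader_file_format="parquet")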
+ """ + + start_date_obj = ensure_pendulum_datetime(start_date) + end_date_obj = ensure_pendulum_datetime(end_date) if end_date else None + + start_date_ts = start_date_obj.int_timestamp + start_date_iso_str = start_date_obj.isoformat() + end_date_ts: Optional[int] = None + end_date_iso_str: Optional[str] = None + if end_date_obj: + end_date_ts = end_date_obj.int_timestamp + end_date_iso_str = end_date_obj.isoformat() + + @dlt.resource(name="ticket_events", primary_key="id", write_disposition="append") + def ticket_events( + zendesk_client: ZendeskAPIClient, + timestamp: dlt.sources.incremental[int] = dlt.sources.incremental( # noqa: B008 + "timestamp", + initial_value=start_date_ts, + end_value=end_date_ts, + allow_external_schedulers=True, + ), + ) -> Iterator[TDataItem]: + # URL For ticket events + # 'https://d3v-dlthub.zendesk.com/api/v2/incremental/ticket_events.json?start_time=946684800' + event_pages = zendesk_client.get_pages( + "/api/v2/incremental/ticket_events.json", + "ticket_events", + PaginationType.STREAM, + params={"start_time": timestamp.last_value}, + ) + for page in event_pages: + yield page + if timestamp.end_out_of_range: + return + + @dlt.resource( + name="tickets", + primary_key="id", + write_disposition="merge", + columns={ + "tags": {"data_type": "complex"}, + "custom_fields": {"data_type": "complex"}, + }, + ) + def ticket_table( + zendesk_client: ZendeskAPIClient, + pivot_fields: bool = True, + updated_at: dlt.sources.incremental[pendulum.DateTime] = dlt.sources.incremental( # noqa: B008 + "updated_at", + initial_value=start_date_obj, + end_value=end_date_obj, + allow_external_schedulers=True, + ), + ) -> Iterator[TDataItem]: + """ + Resource for tickets table. Uses DLT state to handle column renaming of custom fields to prevent changing the names of said columns. + This resource uses pagination, loading and side loading to make API calls more efficient. + + Args: + zendesk_client: The Zendesk API client instance, used to make calls to Zendesk API. + pivot_fields: Indicates whether to pivot the custom fields in tickets. Defaults to True. + per_page: The number of Ticket objects to load per page. Defaults to 1000. + updated_at: Incremental source for the 'updated_at' column. + Defaults to dlt.sources.incremental("updated_at", initial_value=start_date). + + Yields: + TDataItem: Dictionary containing the ticket data. + """ + # grab the custom fields from dlt state if any + if pivot_fields: + load_ticket_fields_state(zendesk_client) + fields_dict = dlt.current.source_state().setdefault(CUSTOM_FIELDS_STATE_KEY, {}) + # include_objects = ["users", "groups", "organisation", "brands"] + ticket_pages = zendesk_client.get_pages( + "/api/v2/incremental/tickets", + "tickets", + PaginationType.STREAM, + params={"start_time": updated_at.last_value.int_timestamp}, + ) + for page in ticket_pages: + yield [process_ticket(ticket, fields_dict, pivot_custom_fields=pivot_fields) for ticket in page] + + # stop loading when using end_value and end is reached + if updated_at.end_out_of_range: + return + + @dlt.resource(name="ticket_metric_events", primary_key="id", write_disposition="append") + def ticket_metric_table( + zendesk_client: ZendeskAPIClient, + time: dlt.sources.incremental[str] = dlt.sources.incremental( # noqa: B008 + "time", + initial_value=start_date_iso_str, + end_value=end_date_iso_str, + allow_external_schedulers=True, + ), + ) -> Iterator[TDataItem]: + """ + Resource for ticket metric events table. 
Returns all the ticket metric events from the starting date, + with the default starting date being January 1st of the current year. + + Args: + zendesk_client: The Zendesk API client instance, used to make calls to Zendesk API. + time: Incremental source for the 'time' column, + indicating the starting date for retrieving ticket metric events. + Defaults to dlt.sources.incremental("time", initial_value=start_date_iso_str). + + Yields: + TDataItem: Dictionary containing the ticket metric event data. + """ + # "https://example.zendesk.com/api/v2/incremental/ticket_metric_events?start_time=1332034771" + metric_event_pages = zendesk_client.get_pages( + "/api/v2/incremental/ticket_metric_events", + "ticket_metric_events", + PaginationType.CURSOR, + params={ + "start_time": ensure_pendulum_datetime(time.last_value).int_timestamp, + }, + ) + for page in metric_event_pages: + yield page + + if time.end_out_of_range: + return + + def ticket_fields_table(zendesk_client: ZendeskAPIClient) -> Iterator[TDataItem]: + """ + Loads ticket fields data from Zendesk API. + + Args: + zendesk_client: The Zendesk API client instance, used to make calls to Zendesk API. + + Yields: + TDataItem: Dictionary containing the ticket fields data. + """ + # get dlt state + ticket_custom_fields = dlt.current.source_state().setdefault(CUSTOM_FIELDS_STATE_KEY, {}) + # get all custom fields and update state if needed, otherwise just load dicts into tables + all_fields = list( + chain.from_iterable( + zendesk_client.get_pages("/api/v2/ticket_fields.json", "ticket_fields", PaginationType.OFFSET) + ) + ) + # all_fields = zendesk_client.ticket_fields() + for field in all_fields: + yield process_ticket_field(field, ticket_custom_fields) + + def load_ticket_fields_state( + zendesk_client: ZendeskAPIClient, + ) -> None: + for _ in ticket_fields_table(zendesk_client): + pass + + ticket_fields_resource = dlt.resource(name="ticket_fields", write_disposition="replace")(ticket_fields_table) + + # Authenticate + zendesk_client = ZendeskAPIClient(credentials) + + all_endpoints = SUPPORT_ENDPOINTS + SUPPORT_EXTRA_ENDPOINTS + resource_list: List[DltResource] = [] + + for endpoint in endpoints: + # loading base tables + if endpoint == "ticket_fields": + resource_list.append(ticket_fields_resource(zendesk_client=zendesk_client)) + elif endpoint == "ticket_events": + resource_list.append(ticket_events(zendesk_client=zendesk_client)) + elif endpoint == "tickets": + resource_list.append(ticket_table(zendesk_client=zendesk_client, pivot_fields=pivot_ticket_fields)) + elif endpoint == "ticket_metric_events": + resource_list.append(ticket_metric_table(zendesk_client=zendesk_client)) + else: + # other tables to be loaded + for resource, endpoint_url, data_key, cursor_paginated in all_endpoints: + if endpoint == resource: + resource_list.append( + dlt.resource( + basic_resource(zendesk_client, endpoint_url, data_key or resource, cursor_paginated), + name=resource, + write_disposition="replace", + ) + ) + break + + return resource_list + + +def basic_resource( + zendesk_client: ZendeskAPIClient, + endpoint_url: str, + data_key: str, + cursor_paginated: bool, +) -> Iterator[TDataItem]: + """ + Basic loader for most endpoints offered by Zenpy. Supports pagination. Expects to be called as a DLT Resource. + + Args: + zendesk_client: The Zendesk API client instance, used to make calls to Zendesk API. + resource: The Zenpy endpoint to retrieve data from, usually directly linked to a Zendesk API endpoint. 
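For the generic endpoints, the dispatch loop above turns each `SUPPORT_ENDPOINTS` tuple into a replace-disposition resource; e.g. the `("groups", "/api/v2/groups.json", None, True)` entry becomes (assuming `zendesk_client` is already in scope):

resource = dlt.resource(
    basic_resource(zendesk_client, "/api/v2/groups.json", "groups", True),  # data_key None falls back to the resource name
    name="groups",
    write_disposition="replace",
)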
+ cursor_paginated: Tells to use CURSOR pagination or OFFSET/no pagination + + Yields: + TDataItem: Dictionary containing the resource data. + """ + + pages = zendesk_client.get_pages( + endpoint_url, + data_key, + PaginationType.CURSOR if cursor_paginated else PaginationType.OFFSET, + ) + yield from pages diff --git a/posthog/temporal/data_imports/pipelines/zendesk/settings.py b/posthog/temporal/data_imports/pipelines/zendesk/settings.py new file mode 100644 index 0000000000000..aa44df7c20297 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/zendesk/settings.py @@ -0,0 +1,73 @@ +"""Zendesk source settings and constants""" + +from dlt.common import pendulum + +DEFAULT_START_DATE = pendulum.datetime(year=2000, month=1, day=1) +PAGE_SIZE = 100 +INCREMENTAL_PAGE_SIZE = 1000 + + +CUSTOM_FIELDS_STATE_KEY = "ticket_custom_fields_v2" + +# Resources that will always get pulled +BASE_ENDPOINTS = ["ticket_fields", "ticket_events", "tickets", "ticket_metric_events"] + +# Tuples of (Resource name, endpoint URL, data_key, supports pagination) +# data_key is the key which data list is nested under in responses +# if the data key is None it is assumed to be the same as the resource name +# The last element of the tuple says if endpoint supports cursor pagination +SUPPORT_ENDPOINTS = [ + ("users", "/api/v2/users.json", "users", True), + ("sla_policies", "/api/v2/slas/policies.json", None, False), + ("groups", "/api/v2/groups.json", None, True), + ("organizations", "/api/v2/organizations.json", None, True), + ("brands", "/api/v2/brands.json", None, True), +] + +SUPPORT_EXTRA_ENDPOINTS = [ + ("activities", "/api/v2/activities.json", None, True), + ("automations", "/api/v2/automations.json", None, True), + ("custom_agent_roles", "/api/v2/custom_roles.json", "custom_roles", False), + ("dynamic_content", "/api/v2/dynamic_content/items.json", "items", True), + ("group_memberships", "/api/v2/group_memberships.json", None, True), + ("job_status", "/api/v2/job_statuses.json", "job_statuses", True), + ("macros", "/api/v2/macros.json", None, True), + ("organization_fields", "/api/v2/organization_fields.json", None, True), + ("organization_memberships", "/api/v2/organization_memberships.json", None, True), + ("recipient_addresses", "/api/v2/recipient_addresses.json", None, True), + ("requests", "/api/v2/requests.json", None, True), + ("satisfaction_ratings", "/api/v2/satisfaction_ratings.json", None, True), + ("sharing_agreements", "/api/v2/sharing_agreements.json", None, False), + ("skips", "/api/v2/skips.json", None, True), + ("suspended_tickets", "/api/v2/suspended_tickets.json", None, True), + ("targets", "/api/v2/targets.json", None, False), + ("ticket_forms", "/api/v2/ticket_forms.json", None, False), + ("ticket_metrics", "/api/v2/ticket_metrics.json", None, True), + ("triggers", "/api/v2/triggers.json", None, True), + ("user_fields", "/api/v2/user_fields.json", None, True), + ("views", "/api/v2/views.json", None, True), + ("tags", "/api/v2/tags.json", None, True), +] + +TALK_ENDPOINTS = [ + ("calls", "/api/v2/channels/voice/calls", None, False), + ("addresses", "/api/v2/channels/voice/addresses", None, False), + ("greeting_categories", "/api/v2/channels/voice/greeting_categories", None, False), + ("greetings", "/api/v2/channels/voice/greetings", None, False), + ("ivrs", "/api/v2/channels/voice/ivr", None, False), + ("phone_numbers", "/api/v2/channels/voice/phone_numbers", None, False), + ("settings", "/api/v2/channels/voice/settings", None, False), + ("lines", "/api/v2/channels/voice/lines", None, 
False), + ("agents_activity", "/api/v2/channels/voice/stats/agents_activity", None, False), + ( + "current_queue_activity", + "/api/v2/channels/voice/stats/current_queue_activity", + None, + False, + ), +] + +INCREMENTAL_TALK_ENDPOINTS = { + "calls": "/api/v2/channels/voice/stats/incremental/calls.json", + "legs": "/api/v2/channels/voice/stats/incremental/legs.json", +} diff --git a/posthog/temporal/data_imports/pipelines/zendesk/talk_api.py b/posthog/temporal/data_imports/pipelines/zendesk/talk_api.py new file mode 100644 index 0000000000000..5db9a28eafc74 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/zendesk/talk_api.py @@ -0,0 +1,114 @@ +from enum import Enum +from typing import Dict, Iterator, Optional, Tuple, Any +from dlt.common.typing import DictStrStr, TDataItems, TSecretValue +from dlt.sources.helpers.requests import client + +from . import settings +from .credentials import ( + ZendeskCredentialsEmailPass, + ZendeskCredentialsOAuth, + ZendeskCredentialsToken, + TZendeskCredentials, +) + + +class PaginationType(Enum): + OFFSET = 0 + CURSOR = 1 + STREAM = 2 + START_TIME = 3 + + +class ZendeskAPIClient: + """ + API client used to make requests to Zendesk talk, support and chat API + """ + + subdomain: str = "" + url: str = "" + headers: Optional[DictStrStr] + auth: Optional[Tuple[str, TSecretValue]] + + def __init__(self, credentials: TZendeskCredentials, url_prefix: Optional[str] = None) -> None: + """ + Initializer for the API client which is then used to make API calls to the ZendeskAPI + + Args: + credentials: ZendeskCredentials object which contains the necessary credentials to authenticate to ZendeskAPI + """ + # oauth token is the preferred way to authenticate, followed by api token and then email + password combo + # fill headers and auth for every possibility of credentials given, raise error if credentials are of incorrect type + if isinstance(credentials, ZendeskCredentialsOAuth): + self.headers = {"Authorization": f"Bearer {credentials.oauth_token}"} + self.auth = None + elif isinstance(credentials, ZendeskCredentialsToken): + self.headers = None + self.auth = (f"{credentials.email}/token", credentials.token) + elif isinstance(credentials, ZendeskCredentialsEmailPass): + self.auth = (credentials.email, credentials.password) + self.headers = None + else: + raise TypeError( + "Wrong credentials type provided to ZendeskAPIClient. The credentials need to be of type: ZendeskCredentialsOAuth, ZendeskCredentialsToken or ZendeskCredentialsEmailPass" + ) + + # If url_prefix is set it overrides the default API URL (e.g. chat api uses zopim.com domain) + if url_prefix: + self.url = url_prefix + else: + self.subdomain = credentials.subdomain + self.url = f"https://{self.subdomain}.zendesk.com" + + def get_pages( + self, + endpoint: str, + data_point_name: str, + pagination: PaginationType, + params: Optional[Dict[str, Any]] = None, + ) -> Iterator[TDataItems]: + """ + Makes a request to a paginated endpoint and returns a generator of data items per page. + + Args: + endpoint: The url to the endpoint, e.g. /api/v2/calls + data_point_name: The key which data items are nested under in the response object (e.g. 
calls) + params: Optional dict of query params to include in the request + pagination: Type of pagination type used by endpoint + + Returns: + Generator of pages, each page is a list of dict data items + """ + + # update the page size to enable cursor pagination + params = params or {} + if pagination == PaginationType.CURSOR: + params["page[size]"] = settings.PAGE_SIZE + elif pagination == PaginationType.STREAM: + params["per_page"] = settings.INCREMENTAL_PAGE_SIZE + elif pagination == PaginationType.START_TIME: + params["limit"] = settings.INCREMENTAL_PAGE_SIZE + + # make request and keep looping until there is no next page + get_url = f"{self.url}{endpoint}" + while get_url: + response = client.get(get_url, headers=self.headers, auth=self.auth, params=params) + response.raise_for_status() + response_json = response.json() + result = response_json[data_point_name] + yield result + + get_url = None + if pagination == PaginationType.CURSOR: + if response_json["meta"]["has_more"]: + get_url = response_json["links"]["next"] + elif pagination == PaginationType.OFFSET: + get_url = response_json.get("next_page", None) + elif pagination == PaginationType.STREAM: + # See https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/#json-format + if not response_json["end_of_stream"]: + get_url = response_json["next_page"] + elif pagination == PaginationType.START_TIME: + if response_json["count"] > 0: + get_url = response_json["next_page"] + + params = {} diff --git a/posthog/temporal/tests/batch_exports/test_batch_exports.py b/posthog/temporal/tests/batch_exports/test_batch_exports.py index 0afbfcabb71cb..756c07e442e4f 100644 --- a/posthog/temporal/tests/batch_exports/test_batch_exports.py +++ b/posthog/temporal/tests/batch_exports/test_batch_exports.py @@ -1,6 +1,4 @@ -import csv import datetime as dt -import io import json import operator from random import randint @@ -9,11 +7,9 @@ from django.test import override_settings from posthog.temporal.batch_exports.batch_exports import ( - BatchExportTemporaryFile, get_data_interval, get_rows_count, iter_records, - json_dumps_bytes, ) from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse @@ -558,181 +554,3 @@ def test_get_data_interval(interval, data_interval_end, expected): """Test get_data_interval returns the expected data interval tuple.""" result = get_data_interval(interval, data_interval_end) assert result == expected - - -@pytest.mark.parametrize( - "to_write", - [ - (b"",), - (b"", b""), - (b"12345",), - (b"12345", b"12345"), - (b"abbcccddddeeeee",), - (b"abbcccddddeeeee", b"abbcccddddeeeee"), - ], -) -def test_batch_export_temporary_file_tracks_bytes(to_write): - """Test the bytes written by BatchExportTemporaryFile match expected.""" - with BatchExportTemporaryFile() as be_file: - for content in to_write: - be_file.write(content) - - assert be_file.bytes_total == sum(len(content) for content in to_write) - assert be_file.bytes_since_last_reset == sum(len(content) for content in to_write) - - be_file.reset() - - assert be_file.bytes_total == sum(len(content) for content in to_write) - assert be_file.bytes_since_last_reset == 0 - - -TEST_RECORDS = [ - [], - [ - {"id": "record-1", "property": "value", "property_int": 1}, - {"id": "record-2", "property": "another-value", "property_int": 2}, - { - "id": "record-3", - "property": {"id": "nested-record", "property": "nested-value"}, - "property_int": 3, - }, - ], -] - - -@pytest.mark.parametrize( - "records", - TEST_RECORDS, -) -def 
test_batch_export_temporary_file_write_records_to_jsonl(records): - """Test JSONL records written by BatchExportTemporaryFile match expected.""" - jsonl_dump = b"\n".join(map(json_dumps_bytes, records)) - - with BatchExportTemporaryFile() as be_file: - be_file.write_records_to_jsonl(records) - - assert be_file.bytes_total == len(jsonl_dump) - assert be_file.bytes_since_last_reset == len(jsonl_dump) - assert be_file.records_total == len(records) - assert be_file.records_since_last_reset == len(records) - - be_file.seek(0) - lines = be_file.readlines() - assert len(lines) == len(records) - - for line_index, jsonl_record in enumerate(lines): - json_loaded = json.loads(jsonl_record) - assert json_loaded == records[line_index] - - be_file.reset() - - assert be_file.bytes_total == len(jsonl_dump) - assert be_file.bytes_since_last_reset == 0 - assert be_file.records_total == len(records) - assert be_file.records_since_last_reset == 0 - - -@pytest.mark.parametrize( - "records", - TEST_RECORDS, -) -def test_batch_export_temporary_file_write_records_to_csv(records): - """Test CSV written by BatchExportTemporaryFile match expected.""" - in_memory_file_obj = io.StringIO() - writer = csv.DictWriter( - in_memory_file_obj, - fieldnames=records[0].keys() if len(records) > 0 else [], - delimiter=",", - quotechar='"', - escapechar="\\", - lineterminator="\n", - quoting=csv.QUOTE_NONE, - ) - writer.writerows(records) - - with BatchExportTemporaryFile(mode="w+") as be_file: - be_file.write_records_to_csv(records) - - assert be_file.bytes_total == in_memory_file_obj.tell() - assert be_file.bytes_since_last_reset == in_memory_file_obj.tell() - assert be_file.records_total == len(records) - assert be_file.records_since_last_reset == len(records) - - be_file.seek(0) - reader = csv.reader( - be_file._file, - delimiter=",", - quotechar='"', - escapechar="\\", - quoting=csv.QUOTE_NONE, - ) - - rows = [row for row in reader] - assert len(rows) == len(records) - - for row_index, csv_record in enumerate(rows): - for value_index, value in enumerate(records[row_index].values()): - # Everything returned by csv.reader is a str. - # This means type information is lost when writing to CSV - # but this just a limitation of the format. 
- assert csv_record[value_index] == str(value) - - be_file.reset() - - assert be_file.bytes_total == in_memory_file_obj.tell() - assert be_file.bytes_since_last_reset == 0 - assert be_file.records_total == len(records) - assert be_file.records_since_last_reset == 0 - - -@pytest.mark.parametrize( - "records", - TEST_RECORDS, -) -def test_batch_export_temporary_file_write_records_to_tsv(records): - """Test TSV written by BatchExportTemporaryFile match expected.""" - in_memory_file_obj = io.StringIO() - writer = csv.DictWriter( - in_memory_file_obj, - fieldnames=records[0].keys() if len(records) > 0 else [], - delimiter="\t", - quotechar='"', - escapechar="\\", - lineterminator="\n", - quoting=csv.QUOTE_NONE, - ) - writer.writerows(records) - - with BatchExportTemporaryFile(mode="w+") as be_file: - be_file.write_records_to_tsv(records) - - assert be_file.bytes_total == in_memory_file_obj.tell() - assert be_file.bytes_since_last_reset == in_memory_file_obj.tell() - assert be_file.records_total == len(records) - assert be_file.records_since_last_reset == len(records) - - be_file.seek(0) - reader = csv.reader( - be_file._file, - delimiter="\t", - quotechar='"', - escapechar="\\", - quoting=csv.QUOTE_NONE, - ) - - rows = [row for row in reader] - assert len(rows) == len(records) - - for row_index, csv_record in enumerate(rows): - for value_index, value in enumerate(records[row_index].values()): - # Everything returned by csv.reader is a str. - # This means type information is lost when writing to CSV - # but this just a limitation of the format. - assert csv_record[value_index] == str(value) - - be_file.reset() - - assert be_file.bytes_total == in_memory_file_obj.tell() - assert be_file.bytes_since_last_reset == 0 - assert be_file.records_total == len(records) - assert be_file.records_since_last_reset == 0 diff --git a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py index b2c46f6344dbc..3652c1caf19aa 100644 --- a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py @@ -21,9 +21,9 @@ from posthog.batch_exports.service import BatchExportSchema, BigQueryBatchExportInputs from posthog.temporal.batch_exports.batch_exports import ( - create_export_run, + finish_batch_export_run, iter_records, - update_export_run_status, + start_batch_export_run, ) from posthog.temporal.batch_exports.bigquery_batch_export import ( BigQueryBatchExportWorkflow, @@ -33,6 +33,7 @@ insert_into_bigquery_activity, ) from posthog.temporal.common.clickhouse import ClickHouseClient +from posthog.temporal.tests.batch_exports.utils import mocked_start_batch_export_run from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import ( acreate_batch_export, @@ -433,9 +434,9 @@ async def test_bigquery_export_workflow( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[BigQueryBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_bigquery_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -454,6 +455,7 @@ async def test_bigquery_export_workflow( run = runs[0] assert run.status == "Completed" assert run.records_completed == 100 + assert run.records_total_count == 100 ingested_timestamp = frozen_time().replace(tzinfo=dt.timezone.utc) 
assert_clickhouse_records_in_bigquery( @@ -495,9 +497,9 @@ async def insert_into_bigquery_activity_mocked(_: BigQueryInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[BigQueryBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_bigquery_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -546,9 +548,9 @@ class RefreshError(Exception): task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[BigQueryBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_bigquery_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -567,7 +569,8 @@ class RefreshError(Exception): run = runs[0] assert run.status == "Failed" assert run.latest_error == "RefreshError: A useful error message" - assert run.records_completed == 0 + assert run.records_completed is None + assert run.records_total_count == 1 async def test_bigquery_export_workflow_handles_cancellation(ateam, bigquery_batch_export, interval): @@ -595,9 +598,9 @@ async def never_finish_activity(_: BigQueryInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[BigQueryBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, never_finish_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): diff --git a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py index 6267577472125..451e3e03c4484 100644 --- a/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_http_batch_export_workflow.py @@ -16,9 +16,9 @@ from temporalio.worker import UnsandboxedWorkflowRunner, Worker from posthog.temporal.batch_exports.batch_exports import ( - create_export_run, + finish_batch_export_run, iter_records, - update_export_run_status, + start_batch_export_run, ) from posthog.temporal.batch_exports.http_batch_export import ( HeartbeatDetails, @@ -31,6 +31,7 @@ insert_into_http_activity, ) from posthog.temporal.common.clickhouse import ClickHouseClient +from posthog.temporal.tests.batch_exports.utils import mocked_start_batch_export_run from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import ( acreate_batch_export, @@ -345,9 +346,9 @@ async def test_http_export_workflow( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[HttpBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_http_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -405,9 +406,9 @@ async def insert_into_http_activity_mocked(_: HttpInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[HttpBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_http_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -426,7 +427,8 @@ async def insert_into_http_activity_mocked(_: HttpInsertInputs) -> str: run = runs[0] assert run.status == "FailedRetryable" assert run.latest_error == "ValueError: A useful error message" - assert run.records_completed == 0 + assert run.records_completed is None + 
assert run.records_total_count == 1 async def test_http_export_workflow_handles_insert_activity_non_retryable_errors(ateam, http_batch_export, interval): @@ -455,9 +457,9 @@ class NonRetryableResponseError(Exception): task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[HttpBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_http_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -476,6 +478,8 @@ class NonRetryableResponseError(Exception): run = runs[0] assert run.status == "Failed" assert run.latest_error == "NonRetryableResponseError: A useful error message" + assert run.records_completed is None + assert run.records_total_count == 1 async def test_http_export_workflow_handles_cancellation(ateam, http_batch_export, interval): @@ -503,9 +507,9 @@ async def never_finish_activity(_: HttpInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[HttpBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, never_finish_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): diff --git a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py index c486cc2747fcc..d63e04a7812d7 100644 --- a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py @@ -1,8 +1,8 @@ import asyncio import datetime as dt import json +import uuid from random import randint -from uuid import uuid4 import psycopg import pytest @@ -18,9 +18,9 @@ from posthog.batch_exports.service import BatchExportSchema from posthog.temporal.batch_exports.batch_exports import ( - create_export_run, + finish_batch_export_run, iter_records, - update_export_run_status, + start_batch_export_run, ) from posthog.temporal.batch_exports.postgres_batch_export import ( PostgresBatchExportInputs, @@ -30,6 +30,7 @@ postgres_default_fields, ) from posthog.temporal.common.clickhouse import ClickHouseClient +from posthog.temporal.tests.batch_exports.utils import mocked_start_batch_export_run from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import ( acreate_batch_export, @@ -348,7 +349,7 @@ async def test_postgres_export_workflow( event_name=event_name, ) - workflow_id = str(uuid4()) + workflow_id = str(uuid.uuid4()) inputs = PostgresBatchExportInputs( team_id=ateam.pk, batch_export_id=str(postgres_batch_export.id), @@ -364,9 +365,9 @@ async def test_postgres_export_workflow( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[PostgresBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_postgres_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -386,6 +387,7 @@ async def test_postgres_export_workflow( run = runs[0] assert run.status == "Completed" assert run.records_completed == 100 + assert run.records_total_count == 100 await assert_clickhouse_records_in_postgres( postgres_connection=postgres_connection, @@ -404,7 +406,7 @@ async def test_postgres_export_workflow_handles_insert_activity_errors(ateam, po """Test that Postgres Export Workflow can gracefully handle errors when inserting Postgres data.""" data_interval_end = 
dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") - workflow_id = str(uuid4()) + workflow_id = str(uuid.uuid4()) inputs = PostgresBatchExportInputs( team_id=ateam.pk, batch_export_id=str(postgres_batch_export.id), @@ -423,9 +425,9 @@ async def insert_into_postgres_activity_mocked(_: PostgresInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[PostgresBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_postgres_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -444,6 +446,8 @@ async def insert_into_postgres_activity_mocked(_: PostgresInsertInputs) -> str: run = runs[0] assert run.status == "FailedRetryable" assert run.latest_error == "ValueError: A useful error message" + assert run.records_completed is None + assert run.records_total_count == 1 async def test_postgres_export_workflow_handles_insert_activity_non_retryable_errors( @@ -452,7 +456,7 @@ async def test_postgres_export_workflow_handles_insert_activity_non_retryable_er """Test that Postgres Export Workflow can gracefully handle non-retryable errors when inserting Postgres data.""" data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") - workflow_id = str(uuid4()) + workflow_id = str(uuid.uuid4()) inputs = PostgresBatchExportInputs( team_id=ateam.pk, batch_export_id=str(postgres_batch_export.id), @@ -474,9 +478,9 @@ class InsufficientPrivilege(Exception): task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[PostgresBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_postgres_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -495,14 +499,15 @@ class InsufficientPrivilege(Exception): run = runs[0] assert run.status == "Failed" assert run.latest_error == "InsufficientPrivilege: A useful error message" - assert run.records_completed == 0 + assert run.records_completed is None + assert run.records_total_count == 1 async def test_postgres_export_workflow_handles_cancellation(ateam, postgres_batch_export, interval): """Test that Postgres Export Workflow can gracefully handle cancellations when inserting Postgres data.""" data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") - workflow_id = str(uuid4()) + workflow_id = str(uuid.uuid4()) inputs = PostgresBatchExportInputs( team_id=ateam.pk, batch_export_id=str(postgres_batch_export.id), @@ -523,9 +528,9 @@ async def never_finish_activity(_: PostgresInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[PostgresBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, never_finish_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -548,3 +553,5 @@ async def never_finish_activity(_: PostgresInsertInputs) -> str: run = runs[0] assert run.status == "Cancelled" assert run.latest_error == "Cancelled" + assert run.records_completed is None + assert run.records_total_count == 1 diff --git a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py index 173bed3a69bb3..eb454a7be3a4a 100644 --- a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py @@ -20,9 
+20,9 @@ from posthog.batch_exports.service import BatchExportSchema from posthog.temporal.batch_exports.batch_exports import ( - create_export_run, + finish_batch_export_run, iter_records, - update_export_run_status, + start_batch_export_run, ) from posthog.temporal.batch_exports.redshift_batch_export import ( RedshiftBatchExportInputs, @@ -33,6 +33,7 @@ remove_escaped_whitespace_recursive, ) from posthog.temporal.common.clickhouse import ClickHouseClient +from posthog.temporal.tests.batch_exports.utils import mocked_start_batch_export_run from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import ( acreate_batch_export, @@ -412,9 +413,9 @@ async def test_redshift_export_workflow( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[RedshiftBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_redshift_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -488,9 +489,9 @@ async def insert_into_redshift_activity_mocked(_: RedshiftInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[RedshiftBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_redshift_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -509,6 +510,8 @@ async def insert_into_redshift_activity_mocked(_: RedshiftInsertInputs) -> str: run = runs[0] assert run.status == "FailedRetryable" assert run.latest_error == "ValueError: A useful error message" + assert run.records_completed is None + assert run.records_total_count == 1 async def test_redshift_export_workflow_handles_insert_activity_non_retryable_errors( @@ -539,9 +542,9 @@ class InsufficientPrivilege(Exception): task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[RedshiftBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_redshift_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -560,4 +563,5 @@ class InsufficientPrivilege(Exception): run = runs[0] assert run.status == "Failed" assert run.latest_error == "InsufficientPrivilege: A useful error message" - assert run.records_completed == 0 + assert run.records_completed is None + assert run.records_total_count == 1 diff --git a/posthog/temporal/tests/batch_exports/test_run_updates.py b/posthog/temporal/tests/batch_exports/test_run_updates.py index fc03d26cbda0a..7269b3455d8f1 100644 --- a/posthog/temporal/tests/batch_exports/test_run_updates.py +++ b/posthog/temporal/tests/batch_exports/test_run_updates.py @@ -11,10 +11,10 @@ Team, ) from posthog.temporal.batch_exports.batch_exports import ( - CreateBatchExportRunInputs, - UpdateBatchExportRunStatusInputs, - create_export_run, - update_export_run_status, + FinishBatchExportRunInputs, + StartBatchExportRunInputs, + finish_batch_export_run, + start_batch_export_run, ) @@ -74,58 +74,64 @@ def batch_export(destination, team): @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio -async def test_create_export_run(activity_environment, team, batch_export): - """Test the create_export_run activity. +async def test_start_batch_export_run(activity_environment, team, batch_export): + """Test the 'start_batch_export_run' activity. - We check if an BatchExportRun is created after the activity runs. 
+ We check if a 'BatchExportRun' is created after the activity runs. """ start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) - inputs = CreateBatchExportRunInputs( + inputs = StartBatchExportRunInputs( team_id=team.id, batch_export_id=str(batch_export.id), data_interval_start=start.isoformat(), data_interval_end=end.isoformat(), ) - run_id = await activity_environment.run(create_export_run, inputs) + run_id, records_total_count = await activity_environment.run(start_batch_export_run, inputs) runs = BatchExportRun.objects.filter(id=run_id) assert await sync_to_async(runs.exists)() # type:ignore run = await sync_to_async(runs.first)() # type:ignore + assert run is not None assert run.data_interval_start == start assert run.data_interval_end == end + assert run.records_total_count == records_total_count @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio -async def test_update_export_run_status(activity_environment, team, batch_export): +async def test_finish_batch_export_run(activity_environment, team, batch_export): """Test the export_run_status activity.""" start = dt.datetime(2023, 4, 24, tzinfo=dt.timezone.utc) end = dt.datetime(2023, 4, 25, tzinfo=dt.timezone.utc) - inputs = CreateBatchExportRunInputs( + inputs = StartBatchExportRunInputs( team_id=team.id, batch_export_id=str(batch_export.id), data_interval_start=start.isoformat(), data_interval_end=end.isoformat(), ) - run_id = await activity_environment.run(create_export_run, inputs) + run_id, records_total_count = await activity_environment.run(start_batch_export_run, inputs) runs = BatchExportRun.objects.filter(id=run_id) run = await sync_to_async(runs.first)() # type:ignore + assert run is not None assert run.status == "Starting" + assert run.records_total_count == records_total_count - update_inputs = UpdateBatchExportRunStatusInputs( + finish_inputs = FinishBatchExportRunInputs( id=str(run_id), status="Completed", team_id=inputs.team_id, ) - await activity_environment.run(update_export_run_status, update_inputs) + await activity_environment.run(finish_batch_export_run, finish_inputs) runs = BatchExportRun.objects.filter(id=run_id) run = await sync_to_async(runs.first)() # type:ignore + assert run is not None assert run.status == "Completed" + assert run.records_total_count == records_total_count diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index e04e345d11245..a58fb54d67901 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -10,10 +10,12 @@ import aioboto3 import botocore.exceptions import brotli +import pyarrow.parquet as pq import pytest import pytest_asyncio from django.conf import settings from django.test import override_settings +from pyarrow import fs from temporalio import activity from temporalio.client import WorkflowFailureError from temporalio.common import RetryPolicy @@ -22,11 +24,12 @@ from posthog.batch_exports.service import BatchExportSchema from posthog.temporal.batch_exports.batch_exports import ( - create_export_run, + finish_batch_export_run, iter_records, - update_export_run_status, + start_batch_export_run, ) from posthog.temporal.batch_exports.s3_batch_export import ( + FILE_FORMAT_EXTENSIONS, HeartbeatDetails, S3BatchExportInputs, S3BatchExportWorkflow, @@ -36,6 +39,7 @@ s3_default_fields, ) from 
posthog.temporal.common.clickhouse import ClickHouseClient +from posthog.temporal.tests.batch_exports.utils import mocked_start_batch_export_run from posthog.temporal.tests.utils.events import ( generate_test_events_in_clickhouse, ) @@ -107,6 +111,15 @@ def s3_key_prefix(): return f"posthog-events-{str(uuid4())}" +@pytest.fixture +def file_format(request) -> str: + """S3 file format.""" + try: + return request.param + except AttributeError: + return "JSONLines" + + async def delete_all_from_s3(minio_client, bucket_name: str, key_prefix: str): """Delete all objects in bucket_name under key_prefix.""" response = await minio_client.list_objects_v2(Bucket=bucket_name, Prefix=key_prefix) @@ -138,6 +151,61 @@ async def minio_client(bucket_name): await minio_client.delete_bucket(Bucket=bucket_name) +async def read_parquet_from_s3(bucket_name: str, key: str, json_columns) -> list: + async with aioboto3.Session().client("sts") as sts: + try: + await sts.get_caller_identity() + except botocore.exceptions.NoCredentialsError: + s3 = fs.S3FileSystem( + access_key="object_storage_root_user", + secret_key="object_storage_root_password", + endpoint_override=settings.OBJECT_STORAGE_ENDPOINT, + ) + + else: + if os.getenv("S3_TEST_BUCKET") is not None: + s3 = fs.S3FileSystem() + else: + s3 = fs.S3FileSystem( + access_key="object_storage_root_user", + secret_key="object_storage_root_password", + endpoint_override=settings.OBJECT_STORAGE_ENDPOINT, + ) + + table = pq.read_table(f"{bucket_name}/{key}", filesystem=s3) + + parquet_data = [] + for batch in table.to_batches(): + for record in batch.to_pylist(): + casted_record = {} + for k, v in record.items(): + if isinstance(v, dt.datetime): + # We read data from ClickHouse as strings, but Parquet already casts them to datetimes. + # To facilitate comparison, we isoformat the datetimes. + casted_record[k] = v.isoformat() + elif k in json_columns and v is not None: + # Parquet doesn't have a variable map type, so JSON fields are just strings. + casted_record[k] = json.loads(v) + else: + casted_record[k] = v + parquet_data.append(casted_record) + + return parquet_data + + +def read_s3_data_as_json(data: bytes, compression: str | None) -> list: + match compression: + case "gzip": + data = gzip.decompress(data) + case "brotli": + data = brotli.decompress(data) + case _: + pass + + json_data = [json.loads(line) for line in data.decode("utf-8").split("\n") if line] + return json_data + + async def assert_clickhouse_records_in_s3( s3_compatible_client, clickhouse_client: ClickHouseClient, @@ -150,6 +218,7 @@ async def assert_clickhouse_records_in_s3( include_events: list[str] | None = None, batch_export_schema: BatchExportSchema | None = None, compression: str | None = None, + file_format: str = "JSONLines", ): """Assert ClickHouse records are written to JSON in key_prefix in S3 bucket_name. @@ -175,28 +244,24 @@ async def assert_clickhouse_records_in_s3( # Get the object. key = objects["Contents"][0].get("Key") assert key - s3_object = await s3_compatible_client.get_object(Bucket=bucket_name, Key=key) - data = await s3_object["Body"].read() - # Check that the data is correct.
- match compression: - case "gzip": - data = gzip.decompress(data) - case "brotli": - data = brotli.decompress(data) - case _: - pass + json_columns = ("properties", "person_properties", "set", "set_once") - json_data = [json.loads(line) for line in data.decode("utf-8").split("\n") if line] - # Pull out the fields we inserted only + if file_format == "Parquet": + s3_data = await read_parquet_from_s3(bucket_name, key, json_columns) + + elif file_format == "JSONLines": + s3_object = await s3_compatible_client.get_object(Bucket=bucket_name, Key=key) + data = await s3_object["Body"].read() + s3_data = read_s3_data_as_json(data, compression) + else: + raise ValueError(f"Unsupported file format: {file_format}") if batch_export_schema is not None: schema_column_names = [field["alias"] for field in batch_export_schema["fields"]] else: schema_column_names = [field["alias"] for field in s3_default_fields()] - json_columns = ("properties", "person_properties", "set", "set_once") - expected_records = [] for record_batch in iter_records( client=clickhouse_client, @@ -225,9 +290,9 @@ async def assert_clickhouse_records_in_s3( expected_records.append(expected_record) - assert len(json_data) == len(expected_records) - assert json_data[0] == expected_records[0] - assert json_data == expected_records + assert len(s3_data) == len(expected_records) + assert s3_data[0] == expected_records[0] + assert s3_data == expected_records TEST_S3_SCHEMAS: list[BatchExportSchema | None] = [ @@ -255,6 +320,7 @@ async def assert_clickhouse_records_in_s3( @pytest.mark.parametrize("compression", [None, "gzip", "brotli"], indirect=True) @pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) @pytest.mark.parametrize("batch_export_schema", TEST_S3_SCHEMAS) +@pytest.mark.parametrize("file_format", FILE_FORMAT_EXTENSIONS.keys()) async def test_insert_into_s3_activity_puts_data_into_s3( clickhouse_client, bucket_name, @@ -262,6 +328,7 @@ async def test_insert_into_s3_activity_puts_data_into_s3( activity_environment, compression, exclude_events, + file_format, batch_export_schema: BatchExportSchema | None, ): """Test that the insert_into_s3_activity function ends up with data into S3. 
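The JSONLines branch above delegates to the read_s3_data_as_json helper added earlier in this file, which transparently decompresses before parsing. A minimal standalone sketch of that contract (sample records invented for illustration):

import gzip
import json

records = [{"event": "test-event-0"}, {"event": "test-event-1"}]
jsonl = b"\n".join(json.dumps(record).encode("utf-8") for record in records)

# The helper parses back to the same records whether or not the payload is compressed.
assert read_s3_data_as_json(jsonl, None) == records
assert read_s3_data_as_json(gzip.compress(jsonl), "gzip") == records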
@@ -339,12 +406,15 @@ async def test_insert_into_s3_activity_puts_data_into_s3( compression=compression, exclude_events=exclude_events, batch_export_schema=batch_export_schema, + file_format=file_format, ) with override_settings( BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2 ): # 5MB, the minimum for Multipart uploads - await activity_environment.run(insert_into_s3_activity, insert_inputs) + records_exported = await activity_environment.run(insert_into_s3_activity, insert_inputs) + + assert records_exported == 10005 await assert_clickhouse_records_in_s3( s3_compatible_client=minio_client, @@ -358,6 +428,7 @@ async def test_insert_into_s3_activity_puts_data_into_s3( exclude_events=exclude_events, include_events=None, compression=compression, + file_format=file_format, ) @@ -371,6 +442,7 @@ async def s3_batch_export( exclude_events, temporal_client, encryption, + file_format, ): destination_data = { "type": "S3", @@ -385,6 +457,7 @@ async def s3_batch_export( "exclude_events": exclude_events, "encryption": encryption, "kms_key_id": os.getenv("S3_TEST_KMS_KEY_ID") if encryption == "aws:kms" else None, + "file_format": file_format, }, } @@ -410,6 +483,7 @@ async def s3_batch_export( @pytest.mark.parametrize("compression", [None, "gzip", "brotli"], indirect=True) @pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) @pytest.mark.parametrize("batch_export_schema", TEST_S3_SCHEMAS) +@pytest.mark.parametrize("file_format", FILE_FORMAT_EXTENSIONS.keys(), indirect=True) async def test_s3_export_workflow_with_minio_bucket( clickhouse_client, minio_client, @@ -421,6 +495,7 @@ async def test_s3_export_workflow_with_minio_bucket( exclude_events, s3_key_prefix, batch_export_schema, + file_format, ): """Test S3BatchExport Workflow end-to-end by using a local MinIO bucket instead of S3. @@ -476,9 +551,9 @@ async def test_s3_export_workflow_with_minio_bucket( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_s3_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -508,6 +583,7 @@ async def test_s3_export_workflow_with_minio_bucket( batch_export_schema=batch_export_schema, exclude_events=exclude_events, compression=compression, + file_format=file_format, ) @@ -537,6 +613,7 @@ async def s3_client(bucket_name, s3_key_prefix): @pytest.mark.parametrize("encryption", [None, "AES256", "aws:kms"], indirect=True) @pytest.mark.parametrize("bucket_name", [os.getenv("S3_TEST_BUCKET")], indirect=True) @pytest.mark.parametrize("batch_export_schema", TEST_S3_SCHEMAS) +@pytest.mark.parametrize("file_format", FILE_FORMAT_EXTENSIONS.keys(), indirect=True) async def test_s3_export_workflow_with_s3_bucket( s3_client, clickhouse_client, @@ -549,6 +626,7 @@ async def test_s3_export_workflow_with_s3_bucket( exclude_events, ateam, batch_export_schema, + file_format, ): """Test S3 Export Workflow end-to-end by using an S3 bucket. 
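The file_format fixture near the top of this file only returns something other than "JSONLines" when a test opts in via indirect parametrization; that is how the workflow tests here sweep every supported format. A minimal sketch of the pattern (hypothetical test name, assuming the fixture and the FILE_FORMAT_EXTENSIONS import above):

@pytest.mark.parametrize("file_format", FILE_FORMAT_EXTENSIONS.keys(), indirect=True)
def test_file_format_is_supported(file_format):
    # pytest routes each parametrized value into the fixture as request.param.
    assert file_format in FILE_FORMAT_EXTENSIONS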
@@ -613,9 +691,9 @@ async def test_s3_export_workflow_with_s3_bucket( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_s3_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -646,6 +724,7 @@ async def test_s3_export_workflow_with_s3_bucket( exclude_events=exclude_events, include_events=None, compression=compression, + file_format=file_format, ) @@ -696,9 +775,9 @@ async def test_s3_export_workflow_with_minio_bucket_and_a_lot_of_data( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_s3_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -771,9 +850,9 @@ async def test_s3_export_workflow_defaults_to_timestamp_on_null_inserted_at( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_s3_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -792,6 +871,7 @@ async def test_s3_export_workflow_defaults_to_timestamp_on_null_inserted_at( run = runs[0] assert run.status == "Completed" assert run.records_completed == 100 + assert run.records_total_count == 100 await assert_clickhouse_records_in_s3( s3_compatible_client=minio_client, @@ -856,9 +936,9 @@ async def test_s3_export_workflow_with_minio_bucket_and_custom_key_prefix( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_s3_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -877,6 +957,7 @@ async def test_s3_export_workflow_with_minio_bucket_and_custom_key_prefix( run = runs[0] assert run.status == "Completed" assert run.records_completed == 100 + assert run.records_total_count == 100 expected_key_prefix = s3_key_prefix.format( table="events", @@ -931,9 +1012,9 @@ async def insert_into_s3_activity_mocked(_: S3InsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_s3_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -952,7 +1033,8 @@ async def insert_into_s3_activity_mocked(_: S3InsertInputs) -> str: run = runs[0] assert run.status == "FailedRetryable" assert run.latest_error == "ValueError: A useful error message" - assert run.records_completed == 0 + assert run.records_completed is None + assert run.records_total_count == 1 async def test_s3_export_workflow_handles_insert_activity_non_retryable_errors(ateam, s3_batch_export, interval): @@ -984,9 +1066,9 @@ class ParamValidationError(Exception): task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_s3_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -1036,9 +1118,9 @@ async def never_finish_activity(_: S3InsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, 
never_finish_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -1064,11 +1146,7 @@ async def never_finish_activity(_: S3InsertInputs) -> str: # We don't care about these for the next test, just need something to be defined. -base_inputs = { - "bucket_name": "test", - "region": "test", - "team_id": 1, -} +base_inputs = {"bucket_name": "test", "region": "test", "team_id": 1} @pytest.mark.parametrize( @@ -1206,6 +1284,49 @@ async def never_finish_activity(_: S3InsertInputs) -> str: ), "nested/prefix/2023-01-01 00:00:00-2023-01-01 01:00:00.jsonl.br", ), + ( + S3InsertInputs( + prefix="/nested/prefix/", + data_interval_start="2023-01-01 00:00:00", + data_interval_end="2023-01-01 01:00:00", + file_format="Parquet", + compression="snappy", + **base_inputs, # type: ignore + ), + "nested/prefix/2023-01-01 00:00:00-2023-01-01 01:00:00.parquet.sz", + ), + ( + S3InsertInputs( + prefix="/nested/prefix/", + data_interval_start="2023-01-01 00:00:00", + data_interval_end="2023-01-01 01:00:00", + file_format="Parquet", + **base_inputs, # type: ignore + ), + "nested/prefix/2023-01-01 00:00:00-2023-01-01 01:00:00.parquet", + ), + ( + S3InsertInputs( + prefix="/nested/prefix/", + data_interval_start="2023-01-01 00:00:00", + data_interval_end="2023-01-01 01:00:00", + compression="gzip", + file_format="Parquet", + **base_inputs, # type: ignore + ), + "nested/prefix/2023-01-01 00:00:00-2023-01-01 01:00:00.parquet.gz", + ), + ( + S3InsertInputs( + prefix="/nested/prefix/", + data_interval_start="2023-01-01 00:00:00", + data_interval_end="2023-01-01 01:00:00", + compression="brotli", + file_format="Parquet", + **base_inputs, # type: ignore + ), + "nested/prefix/2023-01-01 00:00:00-2023-01-01 01:00:00.parquet.br", + ), ], ) def test_get_s3_key(inputs, expected): @@ -1271,7 +1392,7 @@ def assert_heartbeat_details(*details): endpoint_url=settings.OBJECT_STORAGE_ENDPOINT, ) - with override_settings(BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES=5 * 1024**2): + with override_settings(BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES=1, CLICKHOUSE_MAX_BLOCK_SIZE_DEFAULT=1): await activity_environment.run(insert_into_s3_activity, insert_inputs) # This checks that the assert_heartbeat_details function was actually called. 
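The heartbeat assertions above rely on Temporal activity heartbeats carrying resumption details while insert_into_s3_activity uploads parts. A rough sketch of the pattern being exercised (illustrative helper, not the PR's implementation; the production details travel in HeartbeatDetails):

from temporalio import activity

def heartbeat_upload_progress(last_uploaded_part_timestamp: str, upload_state: dict) -> None:
    # Each heartbeat records how far the multipart upload got, so a retried
    # activity attempt can resume from the last uploaded part instead of
    # re-exporting the whole interval.
    activity.heartbeat(last_uploaded_part_timestamp, upload_state)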
diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py index f8c12a3d1369f..fffbb50534530 100644 --- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py @@ -26,9 +26,9 @@ from posthog.batch_exports.service import BatchExportSchema from posthog.temporal.batch_exports.batch_exports import ( - create_export_run, + finish_batch_export_run, iter_records, - update_export_run_status, + start_batch_export_run, ) from posthog.temporal.batch_exports.snowflake_batch_export import ( SnowflakeBatchExportInputs, @@ -39,6 +39,7 @@ snowflake_default_fields, ) from posthog.temporal.common.clickhouse import ClickHouseClient +from posthog.temporal.tests.batch_exports.utils import mocked_start_batch_export_run from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import ( acreate_batch_export, @@ -407,9 +408,9 @@ async def test_snowflake_export_workflow_exports_events( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_snowflake_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -475,9 +476,9 @@ async def test_snowflake_export_workflow_without_events(ateam, snowflake_batch_e task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_snowflake_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -558,9 +559,9 @@ async def test_snowflake_export_workflow_raises_error_on_put_fail( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_snowflake_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -624,9 +625,9 @@ async def test_snowflake_export_workflow_raises_error_on_copy_fail( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_snowflake_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -675,9 +676,9 @@ async def insert_into_snowflake_activity_mocked(_: SnowflakeInsertInputs) -> str task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, insert_into_snowflake_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -696,7 +697,8 @@ async def insert_into_snowflake_activity_mocked(_: SnowflakeInsertInputs) -> str run = runs[0] assert run.status == "FailedRetryable" assert run.latest_error == "ValueError: A useful error message" - assert run.records_completed == 0 + assert run.records_completed is None + assert run.records_total_count == 1 async def test_snowflake_export_workflow_handles_insert_activity_non_retryable_errors(ateam, snowflake_batch_export): @@ -722,9 +724,9 @@ class ForbiddenError(Exception): task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - 
create_export_run, + mocked_start_batch_export_run, insert_into_snowflake_activity_mocked, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -743,6 +745,8 @@ class ForbiddenError(Exception): run = runs[0] assert run.status == "Failed" assert run.latest_error == "ForbiddenError: A useful error message" + assert run.records_completed is None + assert run.records_total_count == 1 async def test_snowflake_export_workflow_handles_cancellation_mocked(ateam, snowflake_batch_export): @@ -770,9 +774,9 @@ async def never_finish_activity(_: SnowflakeInsertInputs) -> str: task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + mocked_start_batch_export_run, never_finish_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -1087,9 +1091,9 @@ async def test_snowflake_export_workflow( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_snowflake_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -1172,9 +1176,9 @@ async def test_snowflake_export_workflow_with_many_files( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_snowflake_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): @@ -1242,9 +1246,9 @@ async def test_snowflake_export_workflow_handles_cancellation( task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[SnowflakeBatchExportWorkflow], activities=[ - create_export_run, + start_batch_export_run, insert_into_snowflake_activity, - update_export_run_status, + finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), ): diff --git a/posthog/temporal/tests/batch_exports/test_temporary_file.py b/posthog/temporal/tests/batch_exports/test_temporary_file.py new file mode 100644 index 0000000000000..8995486ec90e4 --- /dev/null +++ b/posthog/temporal/tests/batch_exports/test_temporary_file.py @@ -0,0 +1,398 @@ +import csv +import datetime as dt +import io +import json + +import pyarrow as pa +import pyarrow.parquet as pq +import pytest + +from posthog.temporal.batch_exports.temporary_file import ( + BatchExportTemporaryFile, + CSVBatchExportWriter, + JSONLBatchExportWriter, + ParquetBatchExportWriter, + json_dumps_bytes, +) + + +@pytest.mark.parametrize( + "to_write", + [ + (b"",), + (b"", b""), + (b"12345",), + (b"12345", b"12345"), + (b"abbcccddddeeeee",), + (b"abbcccddddeeeee", b"abbcccddddeeeee"), + ], +) +def test_batch_export_temporary_file_tracks_bytes(to_write): + """Test the bytes written by BatchExportTemporaryFile match expected.""" + with BatchExportTemporaryFile() as be_file: + for content in to_write: + be_file.write(content) + + assert be_file.bytes_total == sum(len(content) for content in to_write) + assert be_file.bytes_since_last_reset == sum(len(content) for content in to_write) + + be_file.reset() + + assert be_file.bytes_total == sum(len(content) for content in to_write) + assert be_file.bytes_since_last_reset == 0 + + +TEST_RECORDS = [ + [], + [ + {"id": "record-1", "property": "value", "property_int": 1}, + {"id": "record-2", "property": "another-value", "property_int": 2}, + { + "id": "record-3", + "property": {"id": "nested-record", "property": 
"nested-value"}, + "property_int": 3, + }, + ], +] + + +@pytest.mark.parametrize( + "records", + TEST_RECORDS, +) +def test_batch_export_temporary_file_write_records_to_jsonl(records): + """Test JSONL records written by BatchExportTemporaryFile match expected.""" + jsonl_dump = b"\n".join(map(json_dumps_bytes, records)) + + with BatchExportTemporaryFile() as be_file: + be_file.write_records_to_jsonl(records) + + assert be_file.bytes_total == len(jsonl_dump) + assert be_file.bytes_since_last_reset == len(jsonl_dump) + assert be_file.records_total == len(records) + assert be_file.records_since_last_reset == len(records) + + be_file.seek(0) + lines = be_file.readlines() + assert len(lines) == len(records) + + for line_index, jsonl_record in enumerate(lines): + json_loaded = json.loads(jsonl_record) + assert json_loaded == records[line_index] + + be_file.reset() + + assert be_file.bytes_total == len(jsonl_dump) + assert be_file.bytes_since_last_reset == 0 + assert be_file.records_total == len(records) + assert be_file.records_since_last_reset == 0 + + +def test_batch_export_temporary_file_write_records_to_jsonl_invalid_unicode(): + with BatchExportTemporaryFile() as be_file: + be_file.write_records_to_jsonl(["hello\ud83dworld"]) + + be_file.seek(0) + # Invalid single surrogate is replaced with a question mark. + assert json.loads(be_file.readlines()[0]) == "hello?world" + + +@pytest.mark.parametrize( + "records", + TEST_RECORDS, +) +def test_batch_export_temporary_file_write_records_to_csv(records): + """Test CSV written by BatchExportTemporaryFile match expected.""" + in_memory_file_obj = io.StringIO() + writer = csv.DictWriter( + in_memory_file_obj, + fieldnames=records[0].keys() if len(records) > 0 else [], + delimiter=",", + quotechar='"', + escapechar="\\", + lineterminator="\n", + quoting=csv.QUOTE_NONE, + ) + writer.writerows(records) + + with BatchExportTemporaryFile(mode="w+") as be_file: + be_file.write_records_to_csv(records) + + assert be_file.bytes_total == in_memory_file_obj.tell() + assert be_file.bytes_since_last_reset == in_memory_file_obj.tell() + assert be_file.records_total == len(records) + assert be_file.records_since_last_reset == len(records) + + be_file.seek(0) + reader = csv.reader( + be_file._file, + delimiter=",", + quotechar='"', + escapechar="\\", + quoting=csv.QUOTE_NONE, + ) + + rows = [row for row in reader] + assert len(rows) == len(records) + + for row_index, csv_record in enumerate(rows): + for value_index, value in enumerate(records[row_index].values()): + # Everything returned by csv.reader is a str. + # This means type information is lost when writing to CSV + # but this just a limitation of the format. 
+ assert csv_record[value_index] == str(value) + + be_file.reset() + + assert be_file.bytes_total == in_memory_file_obj.tell() + assert be_file.bytes_since_last_reset == 0 + assert be_file.records_total == len(records) + assert be_file.records_since_last_reset == 0 + + +@pytest.mark.parametrize( + "records", + TEST_RECORDS, +) +def test_batch_export_temporary_file_write_records_to_tsv(records): + """Test TSV written by BatchExportTemporaryFile matches expected.""" + in_memory_file_obj = io.StringIO() + writer = csv.DictWriter( + in_memory_file_obj, + fieldnames=records[0].keys() if len(records) > 0 else [], + delimiter="\t", + quotechar='"', + escapechar="\\", + lineterminator="\n", + quoting=csv.QUOTE_NONE, + ) + writer.writerows(records) + + with BatchExportTemporaryFile(mode="w+") as be_file: + be_file.write_records_to_tsv(records) + + assert be_file.bytes_total == in_memory_file_obj.tell() + assert be_file.bytes_since_last_reset == in_memory_file_obj.tell() + assert be_file.records_total == len(records) + assert be_file.records_since_last_reset == len(records) + + be_file.seek(0) + reader = csv.reader( + be_file._file, + delimiter="\t", + quotechar='"', + escapechar="\\", + quoting=csv.QUOTE_NONE, + ) + + rows = [row for row in reader] + assert len(rows) == len(records) + + for row_index, csv_record in enumerate(rows): + for value_index, value in enumerate(records[row_index].values()): + # Everything returned by csv.reader is a str. + # This means type information is lost when writing to CSV + # but this is just a limitation of the format. + assert csv_record[value_index] == str(value) + + be_file.reset() + + assert be_file.bytes_total == in_memory_file_obj.tell() + assert be_file.bytes_since_last_reset == 0 + assert be_file.records_total == len(records) + assert be_file.records_since_last_reset == 0 + + +TEST_RECORD_BATCHES = [ + pa.RecordBatch.from_pydict( + { + "event": pa.array(["test-event-0", "test-event-1", "test-event-2"]), + "properties": pa.array(['{"prop_0": 1, "prop_1": 2}', "{}", "null"]), + "_inserted_at": pa.array([0, 1, 2]), + } + ) +] + + +@pytest.mark.parametrize( + "record_batch", + TEST_RECORD_BATCHES, +) +@pytest.mark.asyncio +async def test_jsonl_writer_writes_record_batches(record_batch): + """Test record batches are written as valid JSONL.""" + in_memory_file_obj = io.BytesIO() + inserted_ats_seen = [] + + async def store_in_memory_on_flush( + batch_export_file, records_since_last_flush, bytes_since_last_flush, last_inserted_at, is_last + ): + in_memory_file_obj.write(batch_export_file.read()) + inserted_ats_seen.append(last_inserted_at) + + writer = JSONLBatchExportWriter(max_bytes=1, flush_callable=store_in_memory_on_flush) + + record_batch = record_batch.sort_by("_inserted_at") + async with writer.open_temporary_file(): + await writer.write_record_batch(record_batch) + + # Rewind: the flush callback leaves the buffer positioned at its end. + in_memory_file_obj.seek(0) + lines = in_memory_file_obj.readlines() + for index, line in enumerate(lines): + written_jsonl = json.loads(line) + + single_record_batch = record_batch.slice(offset=index, length=1) + # The writer drops '_inserted_at', so exclude it from the expected record too. + expected_jsonl = {k: v for k, v in single_record_batch.to_pylist()[0].items() if k != "_inserted_at"} + + assert "_inserted_at" not in written_jsonl + assert written_jsonl == expected_jsonl + + assert inserted_ats_seen == [record_batch.column("_inserted_at")[-1].as_py()] + + +@pytest.mark.parametrize( + "record_batch", + TEST_RECORD_BATCHES, +) +@pytest.mark.asyncio +async def test_csv_writer_writes_record_batches(record_batch): + """Test record batches are written as valid CSV.""" + in_memory_file_obj = io.StringIO() + inserted_ats_seen = [] + + async def 
store_in_memory_on_flush( + batch_export_file, records_since_last_flush, bytes_since_last_flush, last_inserted_at, is_last + ): + in_memory_file_obj.write(batch_export_file.read().decode("utf-8")) + inserted_ats_seen.append(last_inserted_at) + + schema_columns = [column_name for column_name in record_batch.column_names if column_name != "_inserted_at"] + writer = CSVBatchExportWriter(max_bytes=1, field_names=schema_columns, flush_callable=store_in_memory_on_flush) + + record_batch = record_batch.sort_by("_inserted_at") + async with writer.open_temporary_file(): + await writer.write_record_batch(record_batch) + + # Rewind: the flush callback leaves the buffer positioned at its end. + in_memory_file_obj.seek(0) + reader = csv.reader( + in_memory_file_obj, + delimiter=",", + quotechar='"', + escapechar="\\", + quoting=csv.QUOTE_NONE, + ) + for index, written_csv_row in enumerate(reader): + single_record_batch = record_batch.slice(offset=index, length=1) + # csv.reader yields each row as a list of strings without the '_inserted_at' column. + expected_csv = [str(value) for key, value in single_record_batch.to_pylist()[0].items() if key != "_inserted_at"] + + assert "_inserted_at" not in written_csv_row + assert written_csv_row == expected_csv + + assert inserted_ats_seen == [record_batch.column("_inserted_at")[-1].as_py()] + + +@pytest.mark.parametrize( + "record_batch", + TEST_RECORD_BATCHES, +) +@pytest.mark.asyncio +async def test_parquet_writer_writes_record_batches(record_batch): + """Test record batches are written as valid Parquet.""" + in_memory_file_obj = io.BytesIO() + inserted_ats_seen = [] + + async def store_in_memory_on_flush( + batch_export_file, records_since_last_flush, bytes_since_last_flush, last_inserted_at, is_last + ): + in_memory_file_obj.write(batch_export_file.read()) + inserted_ats_seen.append(last_inserted_at) + + schema_columns = [column_name for column_name in record_batch.column_names if column_name != "_inserted_at"] + + writer = ParquetBatchExportWriter( + max_bytes=1, + flush_callable=store_in_memory_on_flush, + schema=record_batch.select(schema_columns).schema, + ) + + record_batch = record_batch.sort_by("_inserted_at") + async with writer.open_temporary_file(): + await writer.write_record_batch(record_batch) + + written_parquet = pq.read_table(in_memory_file_obj) + + for index, written_row_as_dict in enumerate(written_parquet.to_pylist()): + single_record_batch = record_batch.slice(offset=index, length=1) + expected_row_as_dict = single_record_batch.select(schema_columns).to_pylist()[0] + + assert "_inserted_at" not in written_row_as_dict + assert written_row_as_dict == expected_row_as_dict + + # NOTE: Parquet gets flushed twice due to the extra flush at the end for footer bytes, so our mock function + # will see this value twice. 
+ assert inserted_ats_seen == [ + record_batch.column("_inserted_at")[-1].as_py(), + record_batch.column("_inserted_at")[-1].as_py(), + ] + + +@pytest.mark.parametrize( + "record_batch", + TEST_RECORD_BATCHES, +) +@pytest.mark.asyncio +async def test_writing_out_of_scope_of_temporary_file_raises(record_batch): + """Test attempting a write out of temporary file scope raises a `ValueError`.""" + + async def do_nothing(*args, **kwargs): + pass + + schema_columns = [column_name for column_name in record_batch.column_names if column_name != "_inserted_at"] + writer = ParquetBatchExportWriter( + max_bytes=10, + flush_callable=do_nothing, + schema=record_batch.select(schema_columns).schema, + ) + + async with writer.open_temporary_file(): + pass + + with pytest.raises(ValueError, match="Batch export file is closed"): + await writer.write_record_batch(record_batch) + + +@pytest.mark.parametrize( + "record_batch", + TEST_RECORD_BATCHES, +) +@pytest.mark.asyncio +async def test_flushing_parquet_writer_resets_underlying_file(record_batch): + """Test flushing a writer resets underlying file.""" + flush_counter = 0 + + async def track_flushes(*args, **kwargs): + nonlocal flush_counter + flush_counter += 1 + + schema_columns = [column_name for column_name in record_batch.column_names if column_name != "_inserted_at"] + writer = ParquetBatchExportWriter( + max_bytes=10000000, + flush_callable=track_flushes, + schema=record_batch.select(schema_columns).schema, + ) + + async with writer.open_temporary_file(): + await writer.write_record_batch(record_batch) + + assert writer.batch_export_file.tell() > 0 + assert writer.bytes_since_last_flush > 0 + assert writer.bytes_since_last_flush == writer.batch_export_file.bytes_since_last_reset + assert writer.records_since_last_flush == record_batch.num_rows + + await writer.flush(dt.datetime.now()) + + assert flush_counter == 1 + assert writer.batch_export_file.tell() == 0 + assert writer.bytes_since_last_flush == 0 + assert writer.bytes_since_last_flush == writer.batch_export_file.bytes_since_last_reset + assert writer.records_since_last_flush == 0 + + assert flush_counter == 2 diff --git a/posthog/temporal/tests/batch_exports/utils.py b/posthog/temporal/tests/batch_exports/utils.py new file mode 100644 index 0000000000000..7c7140983bc7f --- /dev/null +++ b/posthog/temporal/tests/batch_exports/utils.py @@ -0,0 +1,22 @@ +import uuid + +from asgiref.sync import sync_to_async +from temporalio import activity + +from posthog.batch_exports.models import BatchExportRun +from posthog.batch_exports.service import create_batch_export_run +from posthog.temporal.batch_exports.batch_exports import StartBatchExportRunInputs + + +@activity.defn(name="start_batch_export_run") +async def mocked_start_batch_export_run(inputs: StartBatchExportRunInputs) -> tuple[str, int]: + """Create a run and return some count >0 to avoid early return.""" + run = await sync_to_async(create_batch_export_run)( + batch_export_id=uuid.UUID(inputs.batch_export_id), + data_interval_start=inputs.data_interval_start, + data_interval_end=inputs.data_interval_end, + status=BatchExportRun.Status.STARTING, + records_total_count=1, + ) + + return str(run.id), 1 diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 1586f1051379e..6eb06abcad70f 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -136,6 +136,8 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Response: 
new_source_model = self._handle_stripe_source(request, *args, **kwargs) elif source_type == ExternalDataSource.Type.HUBSPOT: new_source_model = self._handle_hubspot_source(request, *args, **kwargs) + elif source_type == ExternalDataSource.Type.ZENDESK: + new_source_model = self._handle_zendesk_source(request, *args, **kwargs) elif source_type == ExternalDataSource.Type.POSTGRES: try: new_source_model, table_names = self._handle_postgres_source(request, *args, **kwargs) @@ -190,6 +192,33 @@ def _handle_stripe_source(self, request: Request, *args: Any, **kwargs: Any) -> return new_source_model + def _handle_zendesk_source(self, request: Request, *args: Any, **kwargs: Any) -> ExternalDataSource: + payload = request.data["payload"] + api_key = payload.get("api_key") + subdomain = payload.get("subdomain") + email_address = payload.get("email_address") + prefix = request.data.get("prefix", None) + source_type = request.data["source_type"] + + # TODO: remove dummy vars + new_source_model = ExternalDataSource.objects.create( + source_id=str(uuid.uuid4()), + connection_id=str(uuid.uuid4()), + destination_id=str(uuid.uuid4()), + team=self.team, + status="Running", + source_type=source_type, + job_inputs={ + "zendesk_login_method": "api_key", # We should support the Zendesk OAuth flow in the future; storing the login method now keeps that change backwards compatible + "zendesk_api_key": api_key, + "zendesk_subdomain": subdomain, + "zendesk_email_address": email_address, + }, + prefix=prefix, + ) + + return new_source_model + def _handle_hubspot_source(self, request: Request, *args: Any, **kwargs: Any) -> ExternalDataSource: payload = request.data["payload"] code = payload.get("code") diff --git a/posthog/warehouse/models/external_data_source.py b/posthog/warehouse/models/external_data_source.py index df668c5abfc54..0a044c0b06315 100644 --- a/posthog/warehouse/models/external_data_source.py +++ b/posthog/warehouse/models/external_data_source.py @@ -12,6 +12,7 @@ class Type(models.TextChoices): STRIPE = "Stripe", "Stripe" HUBSPOT = "Hubspot", "Hubspot" POSTGRES = "Postgres", "Postgres" + ZENDESK = "Zendesk", "Zendesk" class Status(models.TextChoices): RUNNING = "Running", "Running" diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index 91c6f61709d6e..23cc5a7ce9541 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -1,4 +1,4 @@ -from typing import Dict +from typing import Dict, Optional from django.db import models from posthog.client import sync_execute @@ -175,6 +175,17 @@ def hogql_definition(self) -> S3Table: structure=", ".join(structure), ) + def get_clickhouse_column_type(self, column_name: str) -> Optional[str]: + clickhouse_type = self.columns.get(column_name, None) + + if isinstance(clickhouse_type, dict) and self.columns[column_name].get("clickhouse"): + clickhouse_type = self.columns[column_name].get("clickhouse") + + if clickhouse_type is not None and clickhouse_type.startswith("Nullable("): + clickhouse_type = clickhouse_type.replace("Nullable(", "")[:-1] + + return clickhouse_type + def _safe_expose_ch_error(self, err): err = wrap_query_error(err) for key, value in ExtractErrors.items(): diff --git a/requirements.in b/requirements.in index 800abfa76f036..e108c9bc6e8c2 100644 --- a/requirements.in +++ b/requirements.in @@ -101,5 +101,5 @@ phonenumberslite==8.13.6 openai==1.10.0 tiktoken==0.6.0 nh3==0.2.14 -hogql-parser==1.0.3 +hogql-parser==1.0.4 urllib3[secure,socks]==1.26.18 diff --git a/requirements.txt b/requirements.txt index a08984a29ff22..9d40bf3f4bc26 100644
--- a/requirements.txt +++ b/requirements.txt @@ -291,7 +291,7 @@ h11==0.13.0 # wsproto hexbytes==1.0.0 # via dlt -hogql-parser==1.0.3 +hogql-parser==1.0.4 # via -r requirements.in httpcore==1.0.2 # via httpx diff --git a/unit.json b/unit.json index 472189f0c2880..0b8de8774edf1 100644 --- a/unit.json +++ b/unit.json @@ -6,7 +6,7 @@ }, "listeners": { "*:8000": { - "pass": "routes/posthog" + "pass": "applications/posthog" }, "*:8001": { "pass": "routes/metrics" @@ -16,13 +16,6 @@ } }, "routes": { - "posthog": [ - { - "action": { - "pass": "applications/posthog" - } - } - ], "metrics": [ { "match": {