diff --git a/.github/actions/run-backend-tests/action.yml b/.github/actions/run-backend-tests/action.yml
index 7bbe4b5147942..34c0a34cebeda 100644
--- a/.github/actions/run-backend-tests/action.yml
+++ b/.github/actions/run-backend-tests/action.yml
@@ -167,6 +167,12 @@ runs:
                 --durations=100 --durations-min=1.0 --store-durations \
                 $PYTEST_ARGS
 
+        # Uncomment this code to create an ssh-able console so you can debug issues with github actions
+        # (Consider changing the timeout in ci-backend.yml to have more time)
+        # - name: Setup tmate session
+        #   if: failure()
+        #   uses: mxschmitt/action-tmate@v3
+
     - name: Run /decide read replica tests
       id: run-decide-read-replica-tests
       if: ${{ inputs.segment == 'Core' && inputs.group == 1 && inputs.person-on-events != 'true' }}
diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml
index 6155740676e03..fb15ed052a75a 100644
--- a/.github/workflows/ci-backend.yml
+++ b/.github/workflows/ci-backend.yml
@@ -224,6 +224,7 @@ jobs:
     django:
         needs: changes
+        # increase for tmate testing
        timeout-minutes: 30
        name: Django tests – ${{ matrix.segment }} (persons-on-events ${{ matrix.person-on-events && 'on' || 'off' }}), Py ${{ matrix.python-version }}, ${{ matrix.clickhouse-server-image }} (${{matrix.group}}/${{ matrix.concurrency }})
diff --git a/docker-compose.dev-full.yml b/docker-compose.dev-full.yml
index 96db120b76660..606d5f6a3bc9f 100644
--- a/docker-compose.dev-full.yml
+++ b/docker-compose.dev-full.yml
@@ -47,6 +47,8 @@ services:
            - ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
            - ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
            - ./docker/clickhouse/users-dev.xml:/etc/clickhouse-server/users.xml
+            - ./docker/clickhouse/user_defined_function.xml:/etc/clickhouse-server/user_defined_function.xml
+            - ./posthog/user_scripts:/var/lib/clickhouse/user_scripts
        depends_on:
            - kafka
            - zookeeper
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index d21b1af2ee75d..e9d698c2d3584 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -61,10 +61,15 @@ services:
            - '9440:9440'
            - '9009:9009'
        volumes:
+            # this new entrypoint file is to fix a bug detailed here https://github.com/ClickHouse/ClickHouse/pull/59991
+            # revert this when we upgrade clickhouse
+            - ./docker/clickhouse/entrypoint.sh:/entrypoint.sh
            - ./posthog/idl:/idl
            - ./docker/clickhouse/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
            - ./docker/clickhouse/config.xml:/etc/clickhouse-server/config.xml
            - ./docker/clickhouse/users-dev.xml:/etc/clickhouse-server/users.xml
+            - ./docker/clickhouse/user_defined_function.xml:/etc/clickhouse-server/user_defined_function.xml
+            - ./posthog/user_scripts:/var/lib/clickhouse/user_scripts
        extra_hosts:
            - 'host.docker.internal:host-gateway'
        depends_on:
diff --git a/docker-compose.hobby.yml b/docker-compose.hobby.yml
index 0b00d510ad754..7df84ca8035cd 100644
--- a/docker-compose.hobby.yml
+++ b/docker-compose.hobby.yml
@@ -121,6 +121,7 @@ services:
            OBJECT_STORAGE_ENDPOINT: http://objectstorage:19000
            OBJECT_STORAGE_ENABLED: true
            CDP_REDIS_HOST: redis7
+            CDP_REDIS_PORT: 6379
        depends_on:
            - db
            - redis
diff --git a/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh b/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh
index 0d4f4332c98ad..4141e3345d05b 100755
--- a/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh
+++ b/docker/clickhouse/docker-entrypoint-initdb.d/init-db.sh
@@ -1,4 +1,5 @@
 #!/bin/bash
 set -e
 
+apk add python3
 cp -r /idl/* /var/lib/clickhouse/format_schemas/
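Note: the `apk add python3` added above is what lets ClickHouse run the executable UDFs mounted at /var/lib/clickhouse/user_scripts and declared in docker/clickhouse/user_defined_function.xml below. As a rough sketch only (assumed shape, not PostHog's actual implementation; see posthog/user_scripts/aggregate_funnel.py for the real logic), an executable UDF in JSONEachRow mode reads one JSON object per input row on stdin and emits one JSON object per row whose key matches the declared return_name:

    #!/usr/bin/env python3
    # Hypothetical ClickHouse executable UDF speaking the JSONEachRow protocol.
    import json
    import sys

    for line in sys.stdin:
        row = json.loads(line)
        # ... aggregate row["value"] into funnel results here ...
        print(json.dumps({"result": []}))  # "result" matches <return_name> in the XML
        sys.stdout.flush()  # flush per row so ClickHouse doesn't block waiting for output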
diff --git a/docker/clickhouse/entrypoint.sh b/docker/clickhouse/entrypoint.sh
new file mode 100755
index 0000000000000..7d247ab14ea2a
--- /dev/null
+++ b/docker/clickhouse/entrypoint.sh
@@ -0,0 +1,207 @@
+#!/bin/bash
+
+set -eo pipefail
+shopt -s nullglob
+
+DO_CHOWN=1
+if [ "${CLICKHOUSE_DO_NOT_CHOWN:-0}" = "1" ]; then
+    DO_CHOWN=0
+fi
+
+CLICKHOUSE_UID="${CLICKHOUSE_UID:-"$(id -u clickhouse)"}"
+CLICKHOUSE_GID="${CLICKHOUSE_GID:-"$(id -g clickhouse)"}"
+
+# support --user
+if [ "$(id -u)" = "0" ]; then
+    USER=$CLICKHOUSE_UID
+    GROUP=$CLICKHOUSE_GID
+else
+    USER="$(id -u)"
+    GROUP="$(id -g)"
+    DO_CHOWN=0
+fi
+
+# set some vars
+CLICKHOUSE_CONFIG="${CLICKHOUSE_CONFIG:-/etc/clickhouse-server/config.xml}"
+
+# get CH directories locations
+DATA_DIR="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=path || true)"
+TMP_DIR="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=tmp_path || true)"
+USER_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=user_files_path || true)"
+LOG_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=logger.log || true)"
+LOG_DIR=""
+if [ -n "$LOG_PATH" ]; then LOG_DIR="$(dirname "$LOG_PATH")"; fi
+ERROR_LOG_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=logger.errorlog || true)"
+ERROR_LOG_DIR=""
+if [ -n "$ERROR_LOG_PATH" ]; then ERROR_LOG_DIR="$(dirname "$ERROR_LOG_PATH")"; fi
+FORMAT_SCHEMA_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=format_schema_path || true)"
+
+# There could be many disks declared in config
+readarray -t DISKS_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.path' || true)
+readarray -t DISKS_METADATA_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.metadata_path' || true)
+
+CLICKHOUSE_USER="${CLICKHOUSE_USER:-default}"
+CLICKHOUSE_PASSWORD="${CLICKHOUSE_PASSWORD:-}"
+CLICKHOUSE_DB="${CLICKHOUSE_DB:-}"
+CLICKHOUSE_ACCESS_MANAGEMENT="${CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT:-0}"
+
+for dir in "$DATA_DIR" \
+  "$ERROR_LOG_DIR" \
+  "$LOG_DIR" \
+  "$TMP_DIR" \
+  "$USER_PATH" \
+  "$FORMAT_SCHEMA_PATH" \
+  "${DISKS_PATHS[@]}" \
+  "${DISKS_METADATA_PATHS[@]}"
+do
+    # check if variable not empty
+    [ -z "$dir" ] && continue
+    # ensure directories exist
+    if [ "$DO_CHOWN" = "1" ]; then
+        mkdir="mkdir"
+    else
+        # if DO_CHOWN=0 it means that the system does not map root user to "admin" permissions
+        # it mainly happens on NFS mounts where root==nobody for security reasons
+        # thus mkdir MUST run with user id/gid and not from nobody that has zero permissions
+        mkdir="/usr/bin/clickhouse su "${USER}:${GROUP}" mkdir"
+    fi
+    if ! $mkdir -p "$dir"; then
+        echo "Couldn't create necessary directory: $dir"
+        exit 1
+    fi
+
+    if [ "$DO_CHOWN" = "1" ]; then
+        # ensure proper directories permissions
+        # but skip it if the directory already has proper permissions, because recursive chown may be slow
+        if [ "$(stat -c %u "$dir")" != "$USER" ] || [ "$(stat -c %g "$dir")" != "$GROUP" ]; then
+            chown -R "$USER:$GROUP" "$dir"
+        fi
+    fi
+done
+
+# if clickhouse user is defined - create it (user "default" already exists out of the box)
+if [ -n "$CLICKHOUSE_USER" ] && [ "$CLICKHOUSE_USER" != "default" ] || [ -n "$CLICKHOUSE_PASSWORD" ] || [ "$CLICKHOUSE_ACCESS_MANAGEMENT" != "0" ]; then
+    echo "$0: create new user '$CLICKHOUSE_USER' instead of 'default'"
+    cat <<EOT > /etc/clickhouse-server/users.d/default-user.xml
+    <clickhouse>
+      <!-- Docs: <https://clickhouse.com/docs/en/operations/settings/settings_users/> -->
+      <users>
+        <!-- Remove default user -->
+        <default remove="remove">
+        </default>
+
+        <${CLICKHOUSE_USER}>
+          <profile>default</profile>
+          <networks>
+            <ip>::/0</ip>
+          </networks>
+          <password>${CLICKHOUSE_PASSWORD}</password>
+          <quota>default</quota>
+          <access_management>${CLICKHOUSE_ACCESS_MANAGEMENT}</access_management>
+        </${CLICKHOUSE_USER}>
+      </users>
+    </clickhouse>
+EOT
+fi
+
+CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS="${CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS:-}"
+
+# checking $DATA_DIR for initialization
+if [ -d "${DATA_DIR%/}/data" ]; then
+    DATABASE_ALREADY_EXISTS='true'
+fi
+
+# run initialization if flag CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS is not empty or data directory is empty
+if [[ -n "${CLICKHOUSE_ALWAYS_RUN_INITDB_SCRIPTS}" || -z "${DATABASE_ALREADY_EXISTS}" ]]; then
+    RUN_INITDB_SCRIPTS='true'
+fi
+
+if [ -n "${RUN_INITDB_SCRIPTS}" ]; then
+    if [ -n "$(ls /docker-entrypoint-initdb.d/)" ] || [ -n "$CLICKHOUSE_DB" ]; then
+        # port is needed to check if clickhouse-server is ready for connections
+        HTTP_PORT="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=http_port --try)"
+        HTTPS_PORT="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key=https_port --try)"
+
+        if [ -n "$HTTP_PORT" ]; then
+            URL="http://127.0.0.1:$HTTP_PORT/ping"
+        else
+            URL="https://127.0.0.1:$HTTPS_PORT/ping"
+        fi
+
+        # Listen only on localhost until the initialization is done
+        /usr/bin/clickhouse su "${USER}:${GROUP}" /usr/bin/clickhouse-server --config-file="$CLICKHOUSE_CONFIG" -- --listen_host=127.0.0.1 &
+        pid="$!"
+
+        # check if clickhouse is ready to accept connections
+        # will try to send ping clickhouse via http_port (max 1000 retries by default, with 1 sec timeout and 1 sec delay between retries)
+        tries=${CLICKHOUSE_INIT_TIMEOUT:-1000}
+        while ! wget --spider --no-check-certificate -T 1 -q "$URL" 2>/dev/null; do
+            if [ "$tries" -le "0" ]; then
+                echo >&2 'ClickHouse init process failed.'
+                exit 1
+            fi
+            tries=$(( tries-1 ))
+            sleep 1
+        done
+
+        clickhouseclient=( clickhouse-client --multiquery --host "127.0.0.1" -u "$CLICKHOUSE_USER" --password "$CLICKHOUSE_PASSWORD" )
+
+        echo
+
+        # create default database, if defined
+        if [ -n "$CLICKHOUSE_DB" ]; then
+            echo "$0: create database '$CLICKHOUSE_DB'"
+            "${clickhouseclient[@]}" -q "CREATE DATABASE IF NOT EXISTS $CLICKHOUSE_DB";
+        fi
+
+        for f in /docker-entrypoint-initdb.d/*; do
+            case "$f" in
+                *.sh)
+                    if [ -x "$f" ]; then
+                        echo "$0: running $f"
+                        "$f"
+                    else
+                        echo "$0: sourcing $f"
+                        # shellcheck source=/dev/null
+                        . "$f"
+                    fi
+                    ;;
+                *.sql) echo "$0: running $f"; "${clickhouseclient[@]}" < "$f" ; echo ;;
+                *.sql.gz) echo "$0: running $f"; gunzip -c "$f" | "${clickhouseclient[@]}"; echo ;;
+                *) echo "$0: ignoring $f" ;;
+            esac
+            echo
+        done
+
+        if ! kill -s TERM "$pid" || ! wait "$pid"; then
+            echo >&2 'Finishing of ClickHouse init process failed.'
+            exit 1
+        fi
+    fi
+else
+    echo "ClickHouse Database directory appears to contain a database; Skipping initialization"
+fi
+
+# if no args passed to `docker run` or first argument starts with `--`, then the user is passing clickhouse-server arguments
+if [[ $# -lt 1 ]] || [[ "$1" == "--"* ]]; then
+    # Watchdog is launched by default, but does not send SIGINT to the main process,
+    # so the container can't be finished by ctrl+c
+    CLICKHOUSE_WATCHDOG_ENABLE=${CLICKHOUSE_WATCHDOG_ENABLE:-0}
+    export CLICKHOUSE_WATCHDOG_ENABLE
+
+    # An option for easy restarting and replacing clickhouse-server in a container, especially in Kubernetes.
+    # For example, you can replace the clickhouse-server binary with another one and restart it while keeping the container running.
+    if [[ "${CLICKHOUSE_DOCKER_RESTART_ON_EXIT:-0}" -eq "1" ]]; then
+        while true; do
+            # This runs the server as a child process of the shell script:
+            /usr/bin/clickhouse su "${USER}:${GROUP}" /usr/bin/clickhouse-server --config-file="$CLICKHOUSE_CONFIG" "$@" ||:
+            echo >&2 'ClickHouse Server exited, and the environment variable CLICKHOUSE_DOCKER_RESTART_ON_EXIT is set to 1. Restarting the server.'
+        done
+    else
+        # This replaces the shell script with the server:
+        exec /usr/bin/clickhouse su "${USER}:${GROUP}" /usr/bin/clickhouse-server --config-file="$CLICKHOUSE_CONFIG" "$@"
+    fi
+fi
+
+# Otherwise, we assume the user wants to run their own process, for example a `bash` shell to explore this image
+exec "$@"
diff --git a/docker/clickhouse/user_defined_function.xml b/docker/clickhouse/user_defined_function.xml
new file mode 100644
index 0000000000000..9617d2495ced1
--- /dev/null
+++ b/docker/clickhouse/user_defined_function.xml
@@ -0,0 +1,287 @@
+<functions>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel</name>
+        <return_type>Array(Tuple(Int8, Nullable(String), Array(Float64)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Nullable(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel.py</command>
+    </function>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_cohort</name>
+        <return_type>Array(Tuple(Int8, UInt64, Array(Float64)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(UInt64)</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), UInt64, Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_cohort.py</command>
+    </function>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_array</name>
+        <return_type>Array(Tuple(Int8, Array(String), Array(Float64)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Array(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_array.py</command>
+    </function>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_test</name>
+        <return_type>String</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_test.py</command>
+    </function>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_trends</name>
+        <return_type>Array(Tuple(DateTime, Int8, Nullable(String)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Nullable(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), Nullable(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_trends.py</command>
+    </function>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_array_trends</name>
+        <return_type>Array(Tuple(DateTime, Int8, Array(String)))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_array_trends.py</command>
+    </function>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_cohort_trends</name>
+        <return_type>Array(Tuple(DateTime, Int8, UInt64))</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(UInt64)</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), UInt64, Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_cohort_trends.py</command>
+    </function>
+    <function>
+        <type>executable</type>
+        <name>aggregate_funnel_array_trends_test</name>
+        <return_type>String</return_type>
+        <return_name>result</return_name>
+        <argument>
+            <type>UInt8</type>
+            <name>from_step</name>
+        </argument>
+        <argument>
+            <type>UInt8</type>
+            <name>num_steps</name>
+        </argument>
+        <argument>
+            <type>UInt64</type>
+            <name>conversion_window_limit</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>breakdown_attribution_type</name>
+        </argument>
+        <argument>
+            <type>String</type>
+            <name>funnel_order_type</name>
+        </argument>
+        <argument>
+            <type>Array(Array(String))</type>
+            <name>prop_vals</name>
+        </argument>
+        <argument>
+            <type>Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8)))</type>
+            <name>value</name>
+        </argument>
+        <format>JSONEachRow</format>
+        <command>aggregate_funnel_array_trends_test.py</command>
+    </function>
+</functions>
\ No newline at end of file
diff --git a/ee/billing/quota_limiting.py b/ee/billing/quota_limiting.py
index fbf35a09e2dd3..e5f3d641b5364 100644
--- a/ee/billing/quota_limiting.py
+++ b/ee/billing/quota_limiting.py
@@ -122,7 +122,7 @@ def org_quota_limited_until(
     if organization.never_drop_data or trust_score == 15:
         return None
 
-    team_tokens = get_team_attribute_by_quota_resource(organization, resource)
+    team_tokens = get_team_attribute_by_quota_resource(organization)
     team_being_limited = any(x in previously_quota_limited_team_tokens for x in team_tokens)
 
     if team_being_limited:
@@ -134,7 +134,7 @@ def org_quota_limited_until(
 
     if posthoganalytics.feature_enabled(
         QUOTA_LIMIT_DATA_RETENTION_FLAG,
-        organization.id,
+        str(organization.id),
         groups={"organization": str(organization.id)},
         group_properties={"organization": {"id": str(organization.id)}},
     ):
@@ -237,7 +237,7 @@ def sync_org_quota_limits(organization: Organization):
         previously_quota_limited_team_tokens = list_limited_team_attributes(
             resource, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY
         )
-        team_attributes = get_team_attribute_by_quota_resource(organization, resource)
+        team_attributes = get_team_attribute_by_quota_resource(organization)
         result = org_quota_limited_until(organization, resource, previously_quota_limited_team_tokens)
 
         if result:
@@ -264,24 +264,14 @@ def sync_org_quota_limits(organization: Organization):
             remove_limited_team_tokens(resource, team_attributes, QuotaLimitingCaches.QUOTA_LIMITING_SUSPENDED_KEY)
 
 
-def get_team_attribute_by_quota_resource(organization: Organization, resource: QuotaResource):
-    if resource in [QuotaResource.EVENTS, QuotaResource.RECORDINGS]:
-        team_tokens: list[str] = [x for x in list(organization.teams.values_list("api_token", flat=True)) if x]
+def get_team_attribute_by_quota_resource(organization: Organization):
+    team_tokens: list[str] = [x for x in list(organization.teams.values_list("api_token", flat=True)) if x]
 
-        if not team_tokens:
-            capture_exception(Exception(f"quota_limiting: No team tokens found for organization: {organization.id}"))
-            return
+    if not team_tokens:
+        capture_exception(Exception(f"quota_limiting: No team tokens found for organization: {organization.id}"))
+        return
 
-        return team_tokens
-
-    if resource == QuotaResource.ROWS_SYNCED:
-        team_ids: list[str] = [x for x in list(organization.teams.values_list("id", flat=True)) if x]
-
-        if not team_ids:
-            capture_exception(Exception(f"quota_limiting: No team ids found for organization: {organization.id}"))
-            return
-
-        return team_ids
+    return team_tokens
 
 
 def set_org_usage_summary(
diff --git a/ee/billing/test/test_quota_limiting.py b/ee/billing/test/test_quota_limiting.py
index 3e8b5105767d3..fedf7b15a54ec 100644
--- a/ee/billing/test/test_quota_limiting.py
+++ b/ee/billing/test/test_quota_limiting.py
@@ -69,9 +69,9 @@ def test_quota_limiting_feature_flag_enabled(self, patch_feature_enabled, patch_
         quota_limited_orgs, quota_limiting_suspended_orgs = update_all_org_billing_quotas()
         patch_feature_enabled.assert_called_with(
             QUOTA_LIMIT_DATA_RETENTION_FLAG,
-            self.organization.id,
+            str(self.organization.id),
             groups={"organization": org_id},
-            group_properties={"organization": {"id": org_id}},
+            group_properties={"organization": {"id": str(org_id)}},
         )
         patch_capture.assert_called_once_with(
             org_id,
@@ -92,7 +92,7 @@ def test_quota_limiting_feature_flag_enabled(self, patch_feature_enabled, patch_
         patch_capture.reset_mock()
 
         # Add this org to the redis cache.
-        team_tokens = get_team_attribute_by_quota_resource(self.organization, QuotaResource.EVENTS)
+        team_tokens = get_team_attribute_by_quota_resource(self.organization)
         add_limited_team_tokens(
             QuotaResource.EVENTS,
             {x: 1612137599 for x in team_tokens},
@@ -101,7 +101,7 @@ def test_quota_limiting_feature_flag_enabled(self, patch_feature_enabled, patch_
         quota_limited_orgs, quota_limiting_suspended_orgs = update_all_org_billing_quotas()
         patch_feature_enabled.assert_called_with(
             QUOTA_LIMIT_DATA_RETENTION_FLAG,
-            self.organization.id,
+            str(self.organization.id),
             groups={"organization": org_id},
             group_properties={"organization": {"id": org_id}},
         )
@@ -715,7 +715,7 @@ def test_sync_org_quota_limits(self):
         # rows_synced uses teams, not tokens
         assert sorted(
             list_limited_team_attributes(QuotaResource.ROWS_SYNCED, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY)
-        ) == sorted(["1337", str(self.team.pk), str(other_team.pk)])
+        ) == sorted(["1337", str(self.team.api_token), str(other_team.api_token)])
 
         self.organization.usage["events"]["usage"] = 80
         self.organization.usage["rows_synced"]["usage"] = 36
@@ -748,7 +748,7 @@ def test_sync_org_quota_limits(self):
             list_limited_team_attributes(
                 QuotaResource.ROWS_SYNCED, QuotaLimitingCaches.QUOTA_LIMITING_SUSPENDED_KEY
             )
-        ) == sorted([str(self.team.pk), str(other_team.pk)])
+        ) == sorted([str(self.team.api_token), str(other_team.api_token)])
 
         self.organization.usage["events"]["usage"] = 80
         self.organization.usage["rows_synced"]["usage"] = 36
diff --git a/ee/hogai/__init__.py b/ee/hogai/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ee/hogai/generate_trends_agent.py b/ee/hogai/generate_trends_agent.py
new file mode 100644
index 0000000000000..9980ff82dbeba
--- /dev/null
+++ b/ee/hogai/generate_trends_agent.py
@@ -0,0 +1,55 @@
+from typing import Literal, Optional
+
+from langchain_core.output_parsers.openai_tools import PydanticToolsParser
+from langchain_core.prompts import ChatPromptTemplate
+from langchain_openai import ChatOpenAI
+from pydantic import BaseModel, Field
+
+from ee.hogai.system_prompt import trends_system_prompt
+from ee.hogai.team_prompt import TeamPrompt
+from ee.hogai.trends_function import TrendsFunction
+from posthog.models.team.team import Team
+from posthog.schema import ExperimentalAITrendsQuery
+
+
+class output_insight_schema(BaseModel):
+    reasoning_steps: Optional[list[str]] = None
+    answer: ExperimentalAITrendsQuery
+
+
+class ChatMessage(BaseModel):
+    role: Literal["user", "assistant"]
+    content: str = Field(..., max_length=2500)
+
+
+class Conversation(BaseModel):
+    messages: list[ChatMessage] = Field(..., max_length=20)
+    session_id: str
+
+
+class GenerateTrendsAgent:
+    _team: Team
+
+    def __init__(self, team: Team):
+        self._team = team
+
+    def bootstrap(self, messages: list[ChatMessage], user_prompt: str | None = None):
+        llm = ChatOpenAI(model="gpt-4o-2024-08-06", stream_usage=True).bind_tools(
+            [TrendsFunction().generate_function()], tool_choice="output_insight_schema"
+        )
+        user_prompt = (
+            user_prompt
+            or "Answer to my question:\n{{question}}\n" + TeamPrompt(self._team).generate_prompt()
+        )
+
+        prompts = ChatPromptTemplate.from_messages(
+            [
+                ("system", trends_system_prompt),
+                ("user", user_prompt),
+                *[(message.role, message.content) for message in messages[1:]],
+            ],
+            template_format="mustache",
+        )
+
+        chain = prompts | llm | PydanticToolsParser(tools=[output_insight_schema])  # type: ignore
+        return chain
diff --git a/ee/hogai/hardcoded_definitions.py b/ee/hogai/hardcoded_definitions.py
new file mode 100644
index 0000000000000..ee13c49c3ca63
--- /dev/null
+++ b/ee/hogai/hardcoded_definitions.py
@@ -0,0 +1,1027 @@
+hardcoded_prop_defs: dict = {
+    "events": {
+        "": {
+            "label": "All events",
+            "description": "This is a wildcard that matches all events.",
+        },
+        "$pageview": {
+            "label": "Pageview",
+            "description": "When a user loads (or reloads) a page.",
+        },
+        "$pageleave": {
+            "label": "Pageleave",
+            "description": "When a user leaves a page.",
+        },
+        "$autocapture": {
+            "label": "Autocapture",
+            "description": "User interactions that were automatically captured.",
+            "examples": ["clicked button"],
+        },
+        "$copy_autocapture": {
+            "label": "Clipboard autocapture",
+            "description": "Selected text automatically captured when a user copies or cuts.",
+        },
+        "$screen": {
+            "label": "Screen",
+            "description": "When a user loads a screen in a mobile app.",
+        },
+        "$set": {
+            "label": "Set",
+            "description": "Setting person properties.",
+        },
+        "$opt_in": {
+            "label": "Opt In",
+            "description": "When a user opts into analytics.",
+        },
+        "$feature_flag_called": {
+            "label": "Feature Flag Called",
+            "description": (
+                'The feature flag that was called.\n\nWarning! This only works in combination with the $feature_flag event. If you want to filter other events, try "Active Feature Flags".'
+ ), + "examples": ["beta-feature"], + }, + "$feature_view": { + "label": "Feature View", + "description": "When a user views a feature.", + }, + "$feature_interaction": { + "label": "Feature Interaction", + "description": "When a user interacts with a feature.", + }, + "$capture_metrics": { + "label": "Capture Metrics", + "description": "Metrics captured with values pertaining to your systems at a specific point in time", + }, + "$identify": { + "label": "Identify", + "description": "A user has been identified with properties", + }, + "$create_alias": { + "label": "Alias", + "description": "An alias ID has been added to a user", + }, + "$merge_dangerously": { + "label": "Merge", + "description": "An alias ID has been added to a user", + }, + "$groupidentify": { + "label": "Group Identify", + "description": "A group has been identified with properties", + }, + "$rageclick": { + "label": "Rageclick", + "description": "A user has rapidly and repeatedly clicked in a single place", + }, + "$exception": { + "label": "Exception", + "description": "Automatically captured exceptions from the client Sentry integration", + }, + "$web_vitals": { + "label": "Web vitals", + "description": "Automatically captured web vitals data", + }, + "Application Opened": { + "label": "Application Opened", + "description": "When a user opens the app either for the first time or from the foreground.", + }, + "Application Backgrounded": { + "label": "Application Backgrounded", + "description": "When a user puts the app in the background.", + }, + "Application Updated": { + "label": "Application Updated", + "description": "When a user upgrades the app.", + }, + "Application Installed": { + "label": "Application Installed", + "description": "When a user installs the app.", + }, + "Application Became Active": { + "label": "Application Became Active", + "description": "When a user puts the app in the foreground.", + }, + "Deep Link Opened": { + "label": "Deep Link Opened", + "description": "When a user opens the app via a deep link.", + }, + }, + "elements": { + "tag_name": { + "label": "Tag Name", + "description": "HTML tag name of the element which you want to filter.", + "examples": ["a", "button", "input"], + }, + "selector": { + "label": "CSS Selector", + "description": "Select any element by CSS selector.", + "examples": ["div > a", "table td:nth-child(2)", ".my-class"], + }, + "text": { + "label": "Text", + "description": "Filter on the inner text of the HTML element.", + }, + "href": { + "label": "Target (href)", + "description": "Filter on the href attribute of the element.", + "examples": ["https://posthog.com/about"], + }, + }, + "metadata": { + "distinct_id": { + "label": "Distinct ID", + "description": "The current distinct ID of the user", + "examples": ["16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767"], + }, + }, + "event_properties": { + "distinct_id": {}, + "$session_duration": {}, + "$copy_type": { + "label": "Copy Type", + "description": "Type of copy event.", + "examples": ["copy", "cut"], + }, + "$selected_content": { + "label": "Copied content", + "description": "The content that was selected when the user copied or cut.", + }, + "$set": { + "label": "Set", + "description": "Person properties to be set", + }, + "$set_once": { + "label": "Set Once", + "description": "Person properties to be set if not set already (i.e. 
first-touch)", + }, + "$pageview_id": { + "label": "Pageview ID", + "description": "PostHog's internal ID for matching events to a pageview.", + "system": True, + }, + "$autocapture_disabled_server_side": { + "label": "Autocapture Disabled Server-Side", + "description": "If autocapture has been disabled server-side.", + "system": True, + }, + "$console_log_recording_enabled_server_side": { + "label": "Console Log Recording Enabled Server-Side", + "description": "If console log recording has been enabled server-side.", + "system": True, + }, + "$session_recording_recorder_version_server_side": { + "label": "Session Recording Recorder Version Server-Side", + "description": "The version of the session recording recorder that is enabled server-side.", + "examples": ["v2"], + "system": True, + }, + "$feature_flag_payloads": { + "label": "Feature Flag Payloads", + "description": "Feature flag payloads active in the environment.", + }, + "$capture_failed_request": { + "label": "Capture Failed Request", + "description": "", + }, + "$sentry_exception": { + "label": "Sentry exception", + "description": "Raw Sentry exception data", + "system": True, + }, + "$sentry_exception_message": { + "label": "Sentry exception message", + }, + "$sentry_exception_type": { + "label": "Sentry exception type", + "description": "Class name of the exception object", + }, + "$sentry_tags": { + "label": "Sentry tags", + "description": "Tags sent to Sentry along with the exception", + }, + "$exception_type": { + "label": "Exception type", + "description": 'Exception categorized into types. E.g. "Error"', + }, + "$exception_message": { + "label": "Exception Message", + "description": "The message detected on the error.", + }, + "$exception_source": { + "label": "Exception source", + "description": "The source of the exception. E.g. 
JS file.", + }, + "$exception_lineno": { + "label": "Exception source line number", + "description": "Which line in the exception source that caused the exception.", + }, + "$exception_colno": { + "label": "Exception source column number", + "description": "Which column of the line in the exception source that caused the exception.", + }, + "$exception_DOMException_code": { + "label": "DOMException code", + "description": "If a DOMException was thrown, it also has a DOMException code.", + }, + "$exception_is_synthetic": { + "label": "Exception is synthetic", + "description": "Whether this was detected as a synthetic exception", + }, + "$exception_stack_trace_raw": { + "label": "Exception raw stack trace", + "description": "The exception's stack trace, as a string.", + }, + "$exception_handled": { + "label": "Exception was handled", + "description": "Whether this was a handled or unhandled exception", + }, + "$exception_personURL": { + "label": "Exception person URL", + "description": "The PostHog person that experienced the exception", + }, + "$ce_version": { + "label": "$ce_version", + "description": "", + "system": True, + }, + "$anon_distinct_id": { + "label": "Anon Distinct ID", + "description": "If the user was previously anonymous, their anonymous ID will be set here.", + "examples": ["16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767"], + "system": True, + }, + "$event_type": { + "label": "Event Type", + "description": "When the event is an $autocapture event, this specifies what the action was against the element.", + "examples": ["click", "submit", "change"], + }, + "$insert_id": { + "label": "Insert ID", + "description": "Unique insert ID for the event.", + "system": True, + }, + "$time": { + "label": "$time (deprecated)", + "description": "Use the HogQL field `timestamp` instead. 
This field was previously set on some client side events.", + "system": True, + "examples": ["1681211521.345"], + }, + "$device_id": { + "label": "Device ID", + "description": "Unique ID for that device, consistent even if users are logging in/out.", + "examples": ["16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767"], + "system": True, + }, + "$geoip_city_name": { + "label": "City Name", + "description": "Name of the city matched to this event's IP address.", + "examples": ["Sydney", "Chennai", "Brooklyn"], + }, + "$geoip_country_name": { + "label": "Country Name", + "description": "Name of the country matched to this event's IP address.", + "examples": ["Australia", "India", "United States"], + }, + "$geoip_country_code": { + "label": "Country Code", + "description": "Code of the country matched to this event's IP address.", + "examples": ["AU", "IN", "US"], + }, + "$geoip_continent_name": { + "label": "Continent Name", + "description": "Name of the continent matched to this event's IP address.", + "examples": ["Oceania", "Asia", "North America"], + }, + "$geoip_continent_code": { + "label": "Continent Code", + "description": "Code of the continent matched to this event's IP address.", + "examples": ["OC", "AS", "NA"], + }, + "$geoip_postal_code": { + "label": "Postal Code", + "description": "Approximated postal code matched to this event's IP address.", + "examples": ["2000", "600004", "11211"], + }, + "$geoip_latitude": { + "label": "Latitude", + "description": "Approximated latitude matched to this event's IP address.", + "examples": ["-33.8591", "13.1337", "40.7"], + }, + "$geoip_longitude": { + "label": "Longitude", + "description": "Approximated longitude matched to this event's IP address.", + "examples": ["151.2", "80.8008", "-73.9"], + }, + "$geoip_time_zone": { + "label": "Timezone", + "description": "Timezone matched to this event's IP address.", + "examples": ["Australia/Sydney", "Asia/Kolkata", "America/New_York"], + }, + "$geoip_subdivision_1_name": { + "label": "Subdivision 1 Name", + "description": "Name of the subdivision matched to this event's IP address.", + "examples": ["New South Wales", "Tamil Nadu", "New York"], + }, + "$geoip_subdivision_1_code": { + "label": "Subdivision 1 Code", + "description": "Code of the subdivision matched to this event's IP address.", + "examples": ["NSW", "TN", "NY"], + }, + "$geoip_subdivision_2_name": { + "label": "Subdivision 2 Name", + "description": "Name of the second subdivision matched to this event's IP address.", + }, + "$geoip_subdivision_2_code": { + "label": "Subdivision 2 Code", + "description": "Code of the second subdivision matched to this event's IP address.", + }, + "$geoip_subdivision_3_name": { + "label": "Subdivision 3 Name", + "description": "Name of the third subdivision matched to this event's IP address.", + }, + "$geoip_subdivision_3_code": { + "label": "Subdivision 3 Code", + "description": "Code of the third subdivision matched to this event's IP address.", + }, + "$geoip_disable": { + "label": "GeoIP Disabled", + "description": "Whether to skip GeoIP processing for the event.", + }, + "$el_text": { + "label": "Element Text", + "description": "The text of the element that was clicked. 
Only sent with Autocapture events.", + "examples": ["Click here!"], + }, + "$app_build": { + "label": "App Build", + "description": "The build number for the app.", + }, + "$app_name": { + "label": "App Name", + "description": "The name of the app.", + }, + "$app_namespace": { + "label": "App Namespace", + "description": "The namespace of the app as identified in the app store.", + "examples": ["com.posthog.app"], + }, + "$app_version": { + "label": "App Version", + "description": "The version of the app.", + }, + "$device_manufacturer": { + "label": "Device Manufacturer", + "description": "The manufacturer of the device", + "examples": ["Apple", "Samsung"], + }, + "$device_name": { + "label": "Device Name", + "description": "Name of the device", + "examples": ["iPhone 12 Pro", "Samsung Galaxy 10"], + }, + "$locale": { + "label": "Locale", + "description": "The locale of the device", + "examples": ["en-US", "de-DE"], + }, + "$os_name": { + "label": "OS Name", + "description": "The Operating System name", + "examples": ["iOS", "Android"], + }, + "$os_version": { + "label": "OS Version", + "description": "The Operating System version.", + "examples": ["15.5"], + }, + "$timezone": { + "label": "Timezone", + "description": "The timezone as reported by the device", + }, + "$touch_x": { + "label": "Touch X", + "description": "The location of a Touch event on the X axis", + }, + "$touch_y": { + "label": "Touch Y", + "description": "The location of a Touch event on the Y axis", + }, + "$plugins_succeeded": { + "label": "Plugins Succeeded", + "description": "Plugins that successfully processed the event, e.g. edited properties (plugin method processEvent).", + }, + "$groups": { + "label": "Groups", + "description": "Relevant groups", + }, + "$group_0": { + "label": "Group 1", + "system": True, + }, + "$group_1": { + "label": "Group 2", + "system": True, + }, + "$group_2": { + "label": "Group 3", + "system": True, + }, + "$group_3": { + "label": "Group 4", + "system": True, + }, + "$group_4": { + "label": "Group 5", + "system": True, + }, + "$group_set": { + "label": "Group Set", + "description": "Group properties to be set", + }, + "$group_key": { + "label": "Group Key", + "description": "Specified group key", + }, + "$group_type": { + "label": "Group Type", + "description": "Specified group type", + }, + "$window_id": { + "label": "Window ID", + "description": "Unique window ID for session recording disambiguation", + "system": True, + }, + "$session_id": { + "label": "Session ID", + "description": "Unique session ID for session recording disambiguation", + "system": True, + }, + "$plugins_failed": { + "label": "Plugins Failed", + "description": "Plugins that failed to process the event (plugin method processEvent).", + }, + "$plugins_deferred": { + "label": "Plugins Deferred", + "description": "Plugins to which the event was handed off post-ingestion, e.g. 
for export (plugin method onEvent).", + }, + "$$plugin_metrics": { + "label": "Plugin Metric", + "description": "Performance metrics for a given plugin.", + }, + "$creator_event_uuid": { + "label": "Creator Event ID", + "description": "Unique ID for the event, which created this person.", + "examples": ["16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767"], + }, + "utm_source": { + "label": "UTM Source", + "description": "UTM source tag.", + "examples": ["Google", "Bing", "Twitter", "Facebook"], + }, + "$initial_utm_source": { + "label": "Initial UTM Source", + "description": "UTM source tag.", + "examples": ["Google", "Bing", "Twitter", "Facebook"], + }, + "utm_medium": { + "label": "UTM Medium", + "description": "UTM medium tag.", + "examples": ["Social", "Organic", "Paid", "Email"], + }, + "utm_campaign": { + "label": "UTM Campaign", + "description": "UTM campaign tag.", + "examples": ["feature launch", "discount"], + }, + "utm_name": { + "label": "UTM Name", + "description": "UTM campaign tag, sent via Segment.", + "examples": ["feature launch", "discount"], + }, + "utm_content": { + "label": "UTM Content", + "description": "UTM content tag.", + "examples": ["bottom link", "second button"], + }, + "utm_term": { + "label": "UTM Term", + "description": "UTM term tag.", + "examples": ["free goodies"], + }, + "$performance_page_loaded": { + "label": "Page Loaded", + "description": "The time taken until the browser's page load event in milliseconds.", + }, + "$performance_raw": { + "label": "Browser Performance", + "description": "The browser performance entries for navigation (the page), paint, and resources. That were available when the page view event fired", + "system": True, + }, + "$had_persisted_distinct_id": { + "label": "$had_persisted_distinct_id", + "description": "", + "system": True, + }, + "$sentry_event_id": { + "label": "Sentry Event ID", + "description": "This is the Sentry key for an event.", + "examples": ["byroc2ar9ee4ijqp"], + "system": True, + }, + "$timestamp": { + "label": "Timestamp", + "description": "Time the event happened.", + "examples": ["2023-05-20T15:30:00Z"], + }, + "$sent_at": { + "label": "Sent At", + "description": "Time the event was sent to PostHog. Used for correcting the event timestamp when the device clock is off.", + "examples": ["2023-05-20T15:31:00Z"], + }, + "$browser": { + "label": "Browser", + "description": "Name of the browser the user has used.", + "examples": ["Chrome", "Firefox"], + }, + "$os": { + "label": "OS", + "description": "The operating system of the user.", + "examples": ["Windows", "Mac OS X"], + }, + "$browser_language": { + "label": "Browser Language", + "description": "Language.", + "examples": ["en", "en-US", "cn", "pl-PL"], + }, + "$current_url": { + "label": "Current URL", + "description": "The URL visited at the time of the event.", + "examples": ["https://example.com/interesting-article?parameter=true"], + }, + "$browser_version": { + "label": "Browser Version", + "description": "The version of the browser that was used. Used in combination with Browser.", + "examples": ["70", "79"], + }, + "$raw_user_agent": { + "label": "Raw User Agent", + "description": "PostHog process information like browser, OS, and device type from the user agent string. 
This is the raw user agent string.", + "examples": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko)"], + }, + "$user_agent": { + "label": "Raw User Agent", + "description": "Some SDKs (like Android) send the raw user agent as $user_agent.", + "examples": ["Dalvik/2.1.0 (Linux; U; Android 11; Pixel 3 Build/RQ2A.210505.002)"], + }, + "$screen_height": { + "label": "Screen Height", + "description": "The height of the user's entire screen (in pixels).", + "examples": ["2160", "1050"], + }, + "$screen_width": { + "label": "Screen Width", + "description": "The width of the user's entire screen (in pixels).", + "examples": ["1440", "1920"], + }, + "$screen_name": { + "label": "Screen Name", + "description": "The name of the active screen.", + }, + "$viewport_height": { + "label": "Viewport Height", + "description": "The height of the user's actual browser window (in pixels).", + "examples": ["2094", "1031"], + }, + "$viewport_width": { + "label": "Viewport Width", + "description": "The width of the user's actual browser window (in pixels).", + "examples": ["1439", "1915"], + }, + "$lib": { + "label": "Library", + "description": "What library was used to send the event.", + "examples": ["web", "posthog-ios"], + }, + "$lib_custom_api_host": { + "label": "Library Custom API Host", + "description": "The custom API host used to send the event.", + "examples": ["https://ph.example.com"], + }, + "$lib_version": { + "label": "Library Version", + "description": "Version of the library used to send the event. Used in combination with Library.", + "examples": ["1.0.3"], + }, + "$lib_version__major": { + "label": "Library Version (Major)", + "description": "Major version of the library used to send the event.", + "examples": [1], + }, + "$lib_version__minor": { + "label": "Library Version (Minor)", + "description": "Minor version of the library used to send the event.", + "examples": [0], + }, + "$lib_version__patch": { + "label": "Library Version (Patch)", + "description": "Patch version of the library used to send the event.", + "examples": [3], + }, + "$referrer": { + "label": "Referrer URL", + "description": "URL of where the user came from.", + "examples": ["https://google.com/search?q=posthog&rlz=1C..."], + }, + "$referring_domain": { + "label": "Referring Domain", + "description": "Domain of where the user came from.", + "examples": ["google.com", "facebook.com"], + }, + "$user_id": { + "label": "User ID", + "description": "This variable will be set to the distinct ID if you've called posthog.identify('distinct id'). 
If the user is anonymous, it'll be empty.", + }, + "$ip": { + "label": "IP Address", + "description": "IP address for this user when the event was sent.", + "examples": ["203.0.113.0"], + }, + "$host": { + "label": "Host", + "description": "The hostname of the Current URL.", + "examples": ["example.com", "localhost:8000"], + }, + "$pathname": { + "label": "Path Name", + "description": "The path of the Current URL, which means everything in the url after the domain.", + "examples": ["/pricing", "/about-us/team"], + }, + "$search_engine": { + "label": "Search Engine", + "description": "The search engine the user came in from (if any).", + "examples": ["Google", "DuckDuckGo"], + }, + "$active_feature_flags": { + "label": "Active Feature Flags", + "description": "Keys of the feature flags that were active while this event was sent.", + "examples": ["['beta-feature']"], + }, + "$enabled_feature_flags": { + "label": "Enabled Feature Flags", + "description": "Keys and multivariate values of the feature flags that were active while this event was sent.", + "examples": ['{"flag": "value"}'], + }, + "$feature_flag_response": { + "label": "Feature Flag Response", + "description": "What the call to feature flag responded with.", + "examples": ["true", "false"], + }, + "$feature_flag": { + "label": "Feature Flag", + "description": 'The feature flag that was called.\n\nWarning! This only works in combination with the $feature_flag_called event. If you want to filter other events, try "Active Feature Flags".', + "examples": ["beta-feature"], + }, + "$survey_response": { + "label": "Survey Response", + "description": "The response value for the first question in the survey.", + "examples": ["I love it!", 5, "['choice 1', 'choice 3']"], + }, + "$survey_name": { + "label": "Survey Name", + "description": "The name of the survey.", + "examples": ["Product Feedback for New Product", "Home page NPS"], + }, + "$survey_questions": { + "label": "Survey Questions", + "description": "The questions asked in the survey.", + }, + "$survey_id": { + "label": "Survey ID", + "description": "The unique identifier for the survey.", + }, + "$survey_iteration": { + "label": "Survey Iteration Number", + "description": "The iteration number for the survey.", + }, + "$survey_iteration_start_date": { + "label": "Survey Iteration Start Date", + "description": "The start date for the current iteration of the survey.", + }, + "$device": { + "label": "Device", + "description": "The mobile device that was used.", + "examples": ["iPad", "iPhone", "Android"], + }, + "$sentry_url": { + "label": "Sentry URL", + "description": "Direct link to the exception in Sentry", + "examples": ["https://sentry.io/..."], + }, + "$device_type": { + "label": "Device Type", + "description": "The type of device that was used.", + "examples": ["Mobile", "Tablet", "Desktop"], + }, + "$screen_density": { + "label": "Screen density", + "description": 'The logical density of the display. This is a scaling factor for the Density Independent Pixel unit, where one DIP is one pixel on an approximately 160 dpi screen (for example a 240x320, 1.5"x2" screen), providing the baseline of the system\'s display. 
Thus on a 160dpi screen this density value will be 1; on a 120 dpi screen it would be .75; etc.',
+            "examples": [2.75],
+        },
+        "$device_model": {
+            "label": "Device Model",
+            "description": "The model of the device that was used.",
+            "examples": ["iPhone9,3", "SM-G965W"],
+        },
+        "$network_wifi": {
+            "label": "Network WiFi",
+            "description": "Whether the user was on WiFi when the event was sent.",
+            "examples": ["true", "false"],
+        },
+        "$network_bluetooth": {
+            "label": "Network Bluetooth",
+            "description": "Whether the user was on Bluetooth when the event was sent.",
+            "examples": ["true", "false"],
+        },
+        "$network_cellular": {
+            "label": "Network Cellular",
+            "description": "Whether the user was on cellular when the event was sent.",
+            "examples": ["true", "false"],
+        },
+        "$client_session_initial_referring_host": {
+            "label": "Referrer Host",
+            "description": "Host that the user came from. (First-touch, session-scoped)",
+            "examples": ["google.com", "facebook.com"],
+        },
+        "$client_session_initial_pathname": {
+            "label": "Initial Path",
+            "description": "Path that the user started their session on. (First-touch, session-scoped)",
+            "examples": ["/register", "/some/landing/page"],
+        },
+        "$client_session_initial_utm_source": {
+            "label": "Initial UTM Source",
+            "description": "UTM Source. (First-touch, session-scoped)",
+            "examples": ["Google", "Bing", "Twitter", "Facebook"],
+        },
+        "$client_session_initial_utm_campaign": {
+            "label": "Initial UTM Campaign",
+            "description": "UTM Campaign. (First-touch, session-scoped)",
+            "examples": ["feature launch", "discount"],
+        },
+        "$client_session_initial_utm_medium": {
+            "label": "Initial UTM Medium",
+            "description": "UTM Medium. (First-touch, session-scoped)",
+            "examples": ["Social", "Organic", "Paid", "Email"],
+        },
+        "$client_session_initial_utm_content": {
+            "label": "Initial UTM Content",
+            "description": "UTM Content. (First-touch, session-scoped)",
+            "examples": ["bottom link", "second button"],
+        },
+        "$client_session_initial_utm_term": {
+            "label": "Initial UTM Term",
+            "description": "UTM Term. (First-touch, session-scoped)",
+            "examples": ["free goodies"],
+        },
+        "$network_carrier": {
+            "label": "Network Carrier",
+            "description": "The network carrier that the user is on.",
+            "examples": ["cricket", "telecom"],
+        },
+        "from_background": {
+            "label": "From Background",
+            "description": "Whether the app was opened for the first time or from the background.",
+            "examples": ["true", "false"],
+        },
+        "url": {
+            "label": "URL",
+            "description": "The deep link URL that the app was opened from.",
+            "examples": ["https://open.my.app"],
+        },
+        "referring_application": {
+            "label": "Referrer Application",
+            "description": "The namespace of the app that made the request.",
+            "examples": ["com.posthog.app"],
+        },
+        "version": {
+            "label": "App Version",
+            "description": "The version of the app",
+            "examples": ["1.0.0"],
+        },
+        "previous_version": {
+            "label": "App Previous Version",
+            "description": "The previous version of the app",
+            "examples": ["1.0.0"],
+        },
+        "build": {
+            "label": "App Build",
+            "description": "The build number for the app",
+            "examples": ["1"],
+        },
+        "previous_build": {
+            "label": "App Previous Build",
+            "description": "The previous build number for the app",
+            "examples": ["1"],
+        },
+        "gclid": {
+            "label": "gclid",
+            "description": "Google Click ID",
+        },
+        "rdt_cid": {
+            "label": "rdt_cid",
+            "description": "Reddit Click ID",
+        },
+        "gad_source": {
+            "label": "gad_source",
+            "description": "Google Ads Source",
+        },
+        "gclsrc": {
+            "label": "gclsrc",
+            "description": "Google Click Source",
+        },
+        "dclid": {
+            "label": "dclid",
+            "description": "DoubleClick ID",
+        },
+        "gbraid": {
+            "label": "gbraid",
+            "description": "Google Ads, web to app",
+        },
+        "wbraid": {
+            "label": "wbraid",
+            "description": "Google Ads, app to web",
+        },
+        "fbclid": {
+            "label": "fbclid",
+            "description": "Facebook Click ID",
+        },
+        "msclkid": {
+            "label": "msclkid",
+            "description": "Microsoft Click ID",
+        },
+        "twclid": {
+            "label": "twclid",
+            "description": "Twitter Click ID",
+        },
+        "li_fat_id": {
+            "label": "li_fat_id",
+            "description": "LinkedIn First-Party Ad Tracking ID",
+        },
+        "mc_cid": {
+            "label": "mc_cid",
+            "description": "Mailchimp Campaign ID",
+        },
+        "igshid": {
+            "label": "igshid",
+            "description": "Instagram Share ID",
+        },
+        "ttclid": {
+            "label": "ttclid",
+            "description": "TikTok Click ID",
+        },
+        "$is_identified": {
+            "label": "Is Identified",
+            "description": "When the person was identified",
+        },
+        "$web_vitals_enabled_server_side": {
+            "label": "Web vitals enabled server side",
+            "description": "Whether web vitals was enabled in remote config",
+        },
+        "$web_vitals_FCP_event": {
+            "label": "Web vitals FCP measure event details",
+        },
+        "$web_vitals_FCP_value": {
+            "label": "Web vitals FCP value",
+        },
+        "$web_vitals_LCP_event": {
+            "label": "Web vitals LCP measure event details",
+        },
+        "$web_vitals_LCP_value": {
+            "label": "Web vitals LCP value",
+        },
+        "$web_vitals_INP_event": {
+            "label": "Web vitals INP measure event details",
+        },
+        "$web_vitals_INP_value": {
+            "label": "Web vitals INP value",
+        },
+        "$web_vitals_CLS_event": {
+            "label": "Web vitals CLS measure event details",
+        },
+        "$web_vitals_CLS_value": {
+            "label": "Web vitals CLS value",
+        },
+    },
+    "numerical_event_properties": {},
+    "person_properties": {},
+    "session_properties": {
+        "$session_duration": {
+            "label": "Session duration",
+            "description": "The duration of the session being tracked. Learn more about how PostHog tracks sessions in our documentation.\n\nNote, if the duration is formatted as a single number (not 'HH:MM:SS'), it's in seconds.",
+            "examples": ["01:04:12"],
+            "type": "Numeric",
+        },
+        "$start_timestamp": {
+            "label": "Start timestamp",
+            "description": "The timestamp of the first event from this session.",
+            "examples": ["2023-05-20T15:30:00Z"],
+            "type": "DateTime",
+        },
+        "$end_timestamp": {
+            "label": "End timestamp",
+            "description": "The timestamp of the last event from this session",
+            "examples": ["2023-05-20T16:30:00Z"],
+            "type": "DateTime",
+        },
+        "$entry_current_url": {
+            "label": "Entry URL",
+            "description": "The first URL visited in this session",
+            "examples": ["https://example.com/interesting-article?parameter=true"],
+            "type": "String",
+        },
+        "$entry_pathname": {
+            "label": "Entry pathname",
+            "description": "The first pathname visited in this session",
+            "examples": ["/interesting-article?parameter=true"],
+            "type": "String",
+        },
+        "$end_current_url": {
+            "label": "End URL",
+            "description": "The last URL visited in this session",
+            "examples": ["https://example.com/interesting-article?parameter=true"],
+            "type": "String",
+        },
+        "$end_pathname": {
+            "label": "End pathname",
+            "description": "The last pathname visited in this session",
+            "examples": ["/interesting-article?parameter=true"],
+            "type": "String",
+        },
+        "$exit_current_url": {
+            "label": "Exit URL",
+            "description": "The last URL visited in this session",
+            "examples": ["https://example.com/interesting-article?parameter=true"],
+            "type": "String",
+        },
+        "$exit_pathname": {
+            "label": "Exit pathname",
+            "description": "The last pathname visited in this session",
+            "examples": ["/interesting-article?parameter=true"],
+            "type": "String",
+        },
+        "$pageview_count": {
+            "label": "Pageview count",
+            "description": "The number of page view events in this session",
+            "examples": ["123"],
+            "type": "Numeric",
+        },
+        "$autocapture_count": {
+            "label": "Autocapture count",
+            "description": "The number of autocapture events in this session",
+            "examples": ["123"],
+            "type": "Numeric",
+        },
+        "$screen_count": {
+            "label": "Screen count",
+            "description": "The number of screen events in this session",
+            "examples": ["123"],
+            "type": "Numeric",
+        },
+        "$channel_type": {
+            "label": "Channel type",
+            "description": "What type of acquisition channel this traffic came from.",
+            "examples": ["Paid Search", "Organic Video", "Direct"],
+            "type": "String",
+        },
+        "$is_bounce": {
+            "label": "Is bounce",
+            "description": "Whether the session was a bounce.",
+            "examples": ["true", "false"],
+            "type": "Boolean",
+        },
+    },
+    "groups": {
+        "$group_key": {
+            "label": "Group Key",
+            "description": "Specified group key",
+        },
+    },
+    "replay": {
+        "snapshot_source": {
+            "label": "Platform",
+            "description": "Platform the session was recorded on",
+            "examples": ["web", "mobile"],
+        },
+        "console_log_level": {
+            "label": "Log level",
+            "description": "Level of console logs captured",
+            "examples": ["info", "warn", "error"],
+        },
+        "console_log_query": {
+            "label": "Console log",
+            "description": "Text of console logs captured",
+        },
+        "visited_page": {
+            "label": "Visited page",
+            "description": "URL a user visited during their session",
+        },
+    },
+}
diff --git a/ee/hogai/system_prompt.py b/ee/hogai/system_prompt.py
new file mode 100644
index 0000000000000..fb00b35825867
--- /dev/null
+++ b/ee/hogai/system_prompt.py
@@ -0,0 +1,77 @@
+trends_system_prompt = """
acting as a top-tier data engineer, your task is to write queries of trends insights for customers using a JSON schema. + +Follow these instructions to create a query: +* Identify the events or actions the user wants to analyze. +* Determine types of entities that user wants to analyze like events, persons, groups, sessions, cohorts, etc. +* Determine a vistualization type that best suits the user's needs. +* Determine if the user wants to name the series or use the default names. +* Choose the date range and the interval the user wants to analyze. +* Determine if the user wants to compare the results to a previous period or use smoothing. +* Determine if the user wants to use property filters for all series. +* Determine math types for all series. +* Determine property filters for individual series. +* Check operators of property filters for individual and all series. Make sure the operators correspond to the user's request. You may need to use "contains" for strings if you're not sure about the exact value. +* Determine if the user wants to use a breakdown filter. +* Determine if the user wants to filter out internal and test users. If the user didn't specify, filter out internal and test users by default. +* Determine if the user wants to use sampling factor. +* Determine if it's useful to show a legend, values of series, units, y-axis scale type, etc. +* Use your judgement if there are any other parameters that the user might want to adjust that aren't listed here. + +Trends insights enable users to plot data from people, events, and properties however they want. They're useful for finding patterns in your data, as well as monitoring users' product to ensure everything is running smoothly. For example, using trends, users can analyze: +- How product's most important metrics change over time. +- Long-term patterns, or cycles in product's usage. +- How a specific change affects usage. +- The usage of different features side-by-side. +- How the properties of events vary using aggregation (sum, average, etc). +- Users can also visualize the same data points in a variety of ways. + +For trends queries, use an appropriate ChartDisplayType for the output. For example: +- if the user wants to see a dynamics in time like a line graph, use `ActionsLineGraph`. +- if the user wants to see cumulative dynamics across time, use `ActionsLineGraphCumulative`. +- if the user asks a question where you can answer with a single number, use `BoldNumber`. +- if the user wants a table, use `ActionsTable`. +- if the data is categorical, use `ActionsBar`. +- if the data is easy to understand in a pie chart, use `ActionsPie`. +- if the user has only one series and they want to see data from particular countries, use `WorldMap`. + +The user might want to get insights for groups. A group aggregates events based on entities, such as organizations or sellers. The user might provide a list of group names and their numeric indexes. Instead of a group's name, always use its numeric index. + +Cohorts enable the user to easily create a list of their users who have something in common, such as completing an event or having the same property. The user might want to use cohorts for filtering events. Instead of a cohort's name, always use its ID. + +If you want to apply Y-Axis unit, make sure it will display data correctly. Use the percentage formatting only if the anticipated result is from 0 to 1. + +Learn on these examples: +Q: How many users do I have? 
+A: {"dateRange":{"date_from":"all"},"interval":"month","kind":"TrendsQuery","series":[{"event":"user signed up","kind":"EventsNode","math":"total"}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"BoldNumber"}} +Q: Show a bar chart of the organic search traffic for the last month grouped by week. +A: {"dateRange":{"date_from":"-30d","date_to":null,"explicitDate":false},"interval":"week","kind":"TrendsQuery","series":[{"event":"$pageview","kind":"EventsNode","math":"dau","properties":[{"key":"$referring_domain","operator":"icontains","type":"event","value":"google"},{"key":"utm_source","operator":"is_not_set","type":"event","value":"is_not_set"}]}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"ActionsBar"}} +Q: insight created unique users & first-time users for the last 12m) +A: {"dateRange":{"date_from":"-12m","date_to":""},"filterTestAccounts":true,"interval":"month","kind":"TrendsQuery","series":[{"event":"insight created","kind":"EventsNode","math":"dau","custom_name":"insight created"},{"event":"insight created","kind":"EventsNode","math":"first_time_for_user","custom_name":"insight created"}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"ActionsLineGraph"}} +Q: What are the top 10 referring domains for the last month? +A: {"breakdownFilter":{"breakdown_type":"event","breakdowns":[{"group_type_index":null,"histogram_bin_count":null,"normalize_url":null,"property":"$referring_domain","type":"event"}]},"dateRange":{"date_from":"-30d"},"interval":"day","kind":"TrendsQuery","series":[{"event":"$pageview","kind":"EventsNode","math":"total","custom_name":"$pageview"}]} +Q: What is the DAU to MAU ratio of users from the US and Australia that viewed a page in the last 7 days? Compare it to the previous period. +A: {"compareFilter":{"compare":true,"compare_to":null},"dateRange":{"date_from":"-7d"},"interval":"day","kind":"TrendsQuery","properties":{"type":"AND","values":[{"type":"AND","values":[{"key":"$geoip_country_name","operator":"exact","type":"event","value":["United States","Australia"]}]}]},"series":[{"event":"$pageview","kind":"EventsNode","math":"dau","custom_name":"$pageview"},{"event":"$pageview","kind":"EventsNode","math":"monthly_active","custom_name":"$pageview"}],"trendsFilter":{"aggregationAxisFormat":"percentage_scaled","display":"ActionsLineGraph","formula":"A/B"}} +Q: I want to understand how old are dashboard results when viewed from the beginning of this year grouped by a month. Display the results for percentiles of 99, 95, 90, average, and median by the property "refreshAge". 
+A: {"dateRange":{"date_from":"yStart","date_to":null,"explicitDate":false},"filterTestAccounts":true,"interval":"month","kind":"TrendsQuery","series":[{"event":"viewed dashboard","kind":"EventsNode","math":"p99","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"p95","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"p90","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"avg","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"median","math_property":"refreshAge","custom_name":"viewed dashboard"}],"trendsFilter":{"aggregationAxisFormat":"duration","display":"ActionsLineGraph"}} +Q: organizations joined in the last 30 days by day from the google search +A: {"dateRange":{"date_from":"-30d"},"filterTestAccounts":false,"interval":"day","kind":"TrendsQuery","properties":{"type":"AND","values":[{"type":"OR","values":[{"key":"$initial_utm_source","operator":"exact","type":"person","value":["google"]}]}]},"series":[{"event":"user signed up","kind":"EventsNode","math":"unique_group","math_group_type_index":0,"name":"user signed up","properties":[{"key":"is_organization_first_user","operator":"exact","type":"person","value":["true"]}]}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"ActionsLineGraph"}} +Q: trends for the last two weeks of the onboarding completed event by unique projects with a session duration more than 5 minutes and the insight analyzed event by unique projects with a breakdown by event's Country Name. exclude the US. +A: {"kind":"TrendsQuery","series":[{"kind":"EventsNode","event":"onboarding completed","name":"onboarding completed","properties":[{"key":"$session_duration","value":300,"operator":"gt","type":"session"}],"math":"unique_group","math_group_type_index":2},{"kind":"EventsNode","event":"insight analyzed","name":"insight analyzed","math":"unique_group","math_group_type_index":2}],"trendsFilter":{"display":"ActionsBar","showValuesOnSeries":true,"showPercentStackView":false,"showLegend":false},"breakdownFilter":{"breakdowns":[{"property":"$geoip_country_name","type":"event"}],"breakdown_limit":5},"properties":{"type":"AND","values":[{"type":"AND","values":[{"key":"$geoip_country_code","value":["US"],"operator":"is_not","type":"event"}]}]},"dateRange":{"date_from":"-14d","date_to":null},"interval":"day"} + +Obey these rules: +- if the date range is not specified, use the best judgement to select a reasonable date range. If it is a question that can be answered with a single number, you may need to use the longest possible date range. +- Filter internal users by default if the user doesn't specify. +- Only use events and properties defined by the user. You can't create new events or property definitions. + +For your reference, there is a description of the data model. + +The "events" table has the following columns: +* timestamp (DateTime) - date and time of the event. Events are sorted by timestamp in ascending order. +* uuid (UUID) - unique identifier of the event. +* person_id (UUID) - unique identifier of the person who performed the event. +* event (String) - name of the event. +* properties (custom type) - additional properties of the event. Properties can be of multiple types: String, Int, Decimal, Float, and Bool. A property can be an array of thosee types. 
A property always has only ONE type. If the property starts with a $, it is a system-defined property. If the property doesn't start with a $, it is a user-defined property. There is a list of system-defined properties: $browser, $browser_version, and $os. User-defined properties can have any name. + +Remember, your efforts will be rewarded with a $100 tip if you manage to implement a perfect query that follows the user's instructions and returns the desired result. Do not hallucinate. +""" diff --git a/ee/hogai/team_prompt.py b/ee/hogai/team_prompt.py new file mode 100644 index 0000000000000..6ab987b992363 --- /dev/null +++ b/ee/hogai/team_prompt.py @@ -0,0 +1,137 @@ +import collections +from datetime import timedelta + +from django.utils import timezone + +from posthog.models.cohort.cohort import Cohort +from posthog.models.event_definition import EventDefinition +from posthog.models.group_type_mapping import GroupTypeMapping +from posthog.models.property_definition import PropertyDefinition +from posthog.models.team.team import Team + +from .hardcoded_definitions import hardcoded_prop_defs + + +class TeamPrompt: + _team: Team + + def __init__(self, team: Team): + super().__init__() + self._team = team + + @classmethod + def get_properties_tag_name(cls, property_name: str) -> str: + return f"list of {property_name.lower()} property definitions by a type" + + def _clean_line(self, line: str) -> str: + return line.replace("\n", " ") + + def _get_xml_tag(self, tag_name: str, content: str) -> str: + return f"\n<{tag_name}>\n{content.strip()}\n</{tag_name}>\n" + + def _generate_cohorts_prompt(self) -> str: + cohorts = Cohort.objects.filter(team=self._team, last_calculation__gte=timezone.now() - timedelta(days=60)) + return self._get_xml_tag( + "list of defined cohorts", + "\n".join([f'name "{cohort.name}", ID {cohort.id}' for cohort in cohorts]), + )
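+ # For example, with a single (hypothetical) cohort this renders as: + # + # <list of defined cohorts> + # name "Power users", ID 42 + # </list of defined cohorts>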
+ def _generate_events_prompt(self) -> str: + event_description_mapping = { + "$identify": "Identifies an anonymous user. This event doesn't show how many users you have but rather how many users used an account." + } + + tags: list[str] = [] + for event in EventDefinition.objects.filter( + team=self._team, last_seen_at__gte=timezone.now() - timedelta(days=60) + ): + event_tag = event.name + if event.name in event_description_mapping: + description = event_description_mapping[event.name] + event_tag += f" - {description}" + elif event.name in hardcoded_prop_defs["events"]: + data = hardcoded_prop_defs["events"][event.name] + event_tag += f" - {data['label']}. {data['description']}" + if "examples" in data: + event_tag += f" Examples: {data['examples']}." + tags.append(self._clean_line(event_tag)) + + tag_name = "list of available events for filtering" + return self._get_xml_tag(tag_name, "\n".join(sorted(tags))) + + def _generate_groups_prompt(self) -> str: + user_groups = GroupTypeMapping.objects.filter(team=self._team).order_by("group_type_index") + return self._get_xml_tag( + "list of defined groups", + "\n".join([f'name "{group.group_type}", index {group.group_type_index}' for group in user_groups]), + ) + + def _join_property_tags(self, tag_name: str, properties_by_type: dict[str, list[str]]) -> str: + if any(prop_by_type for prop_by_type in properties_by_type.values()): + tags = "\n".join( + self._get_xml_tag(prop_type, "\n".join(tags)) for prop_type, tags in properties_by_type.items() + ) + return self._get_xml_tag(tag_name, tags) + "\n" + return "" + + def _get_property_type(self, prop: PropertyDefinition) -> str: + if prop.name.startswith("$feature/"): + return "feature" + return PropertyDefinition.Type(prop.type).label.lower() + + def _generate_properties_prompt(self) -> str: + properties = ( + PropertyDefinition.objects.filter(team=self._team) + .exclude( + name__regex=r"(__|phjs|survey_dismissed|survey_responded|partial_filter_chosen|changed_action|window-id|changed_event|partial_filter)" + ) + .distinct("name") + ).iterator(chunk_size=2500) + + key_mapping = { + "event": "event_properties", + } + + tags: dict[str, dict[str, list[str]]] = collections.defaultdict(lambda: collections.defaultdict(list)) + + for prop in properties: + category = self._get_property_type(prop) + property_type = prop.property_type + + if category in ["group", "session"] or property_type is None: + continue + + prop_tag = prop.name + + if category in key_mapping and prop.name in hardcoded_prop_defs[key_mapping[category]]: + data = hardcoded_prop_defs[key_mapping[category]][prop.name] + if "label" in data: + prop_tag += f" - {data['label']}." + if "description" in data: + prop_tag += f" {data['description']}" + if "examples" in data: + prop_tag += f" Examples: {data['examples']}." + + tags[category][property_type].append(self._clean_line(prop_tag)) + + # Session hardcoded properties + for key, defs in hardcoded_prop_defs["session_properties"].items(): + prop_tag = f"{key} - {defs['label']}. {defs['description']}." + if "examples" in defs: + prop_tag += f" Examples: {defs['examples']}."
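+ # Group by the hardcoded value type ("String", "Numeric", ...), mirroring how event properties are bucketed above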
+ tags["session"][defs["type"]].append(self._clean_line(prop_tag)) + + prompt = "\n".join( + [self._join_property_tags(self.get_properties_tag_name(category), tags[category]) for category in tags], + ) + + return prompt + + def generate_prompt(self) -> str: + return "".join( + [ + self._generate_groups_prompt(), + self._generate_events_prompt(), + self._generate_properties_prompt(), + ] + ) diff --git a/ee/hogai/trends_function.py b/ee/hogai/trends_function.py new file mode 100644 index 0000000000000..6f57b47506578 --- /dev/null +++ b/ee/hogai/trends_function.py @@ -0,0 +1,71 @@ +import json +from functools import cached_property +from typing import Any + +from ee.hogai.team_prompt import TeamPrompt +from posthog.models.property_definition import PropertyDefinition +from posthog.schema import ExperimentalAITrendsQuery + + +class TrendsFunction: + def _replace_value_in_dict(self, item: Any, original_schema: Any): + if isinstance(item, list): + return [self._replace_value_in_dict(i, original_schema) for i in item] + elif isinstance(item, dict): + if list(item.keys()) == ["$ref"]: + definitions = item["$ref"][2:].split("/") + res = original_schema.copy() + for definition in definitions: + res = res[definition] + return res + else: + return {key: self._replace_value_in_dict(i, original_schema) for key, i in item.items()} + else: + return item + + @cached_property + def _flat_schema(self): + schema = ExperimentalAITrendsQuery.model_json_schema() + + # Patch `numeric` types + schema["$defs"]["MathGroupTypeIndex"]["type"] = "number" + + # Clean up the property filters + for key, title in ( + ("EventPropertyFilter", PropertyDefinition.Type.EVENT.label), + ("PersonPropertyFilter", PropertyDefinition.Type.PERSON.label), + ("SessionPropertyFilter", PropertyDefinition.Type.SESSION.label), + ("FeaturePropertyFilter", "feature"), + ("CohortPropertyFilter", "cohort"), + ): + property_schema = schema["$defs"][key] + property_schema["properties"]["key"]["description"] = ( + f"Use one of the properties the user has provided in the <{TeamPrompt.get_properties_tag_name(title)}> tag." + ) + + for _ in range(100): + if "$ref" not in json.dumps(schema): + break + schema = self._replace_value_in_dict(schema.copy(), schema.copy()) + del schema["$defs"] + return schema + + def generate_function(self): + return { + "type": "function", + "function": { + "name": "output_insight_schema", + "description": "Outputs the JSON schema of a product analytics insight", + "parameters": { + "type": "object", + "properties": { + "reasoning_steps": { + "type": "array", + "items": {"type": "string"}, + "description": "The reasoning steps leading to the final conclusion that will be shown to the user. 
diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py index 28d3353c0576f..8947e1c270ee4 100644 --- a/ee/session_recordings/session_recording_playlist.py +++ b/ee/session_recordings/session_recording_playlist.py @@ -258,7 +258,7 @@ def modify_recordings( return response.Response({"success": True}) if request.method == "DELETE": - playlist_item = SessionRecordingPlaylistItem.objects.get(playlist=playlist, recording=session_recording_id) # type: ignore + playlist_item = SessionRecordingPlaylistItem.objects.get(playlist=playlist, recording=session_recording_id) if playlist_item: playlist_item.delete() diff --git a/frontend/__snapshots__/components-command-bar--actions--dark.png b/frontend/__snapshots__/components-command-bar--actions--dark.png index 44ab7e3741e09..62965c348e773 100644 Binary files a/frontend/__snapshots__/components-command-bar--actions--dark.png and b/frontend/__snapshots__/components-command-bar--actions--dark.png differ diff --git a/frontend/__snapshots__/components-command-bar--actions--light.png b/frontend/__snapshots__/components-command-bar--actions--light.png index de75dfff4c388..4857817df6f2e 100644 Binary files a/frontend/__snapshots__/components-command-bar--actions--light.png and b/frontend/__snapshots__/components-command-bar--actions--light.png differ diff --git a/frontend/__snapshots__/components-command-bar--search--dark.png b/frontend/__snapshots__/components-command-bar--search--dark.png index 83c14db3090ab..599e3c20f7aea 100644 Binary files a/frontend/__snapshots__/components-command-bar--search--dark.png and b/frontend/__snapshots__/components-command-bar--search--dark.png differ diff --git a/frontend/__snapshots__/components-command-bar--search--light.png b/frontend/__snapshots__/components-command-bar--search--light.png index beb9e54a072c0..75bd57cddaff1 100644 Binary files a/frontend/__snapshots__/components-command-bar--search--light.png and b/frontend/__snapshots__/components-command-bar--search--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png index 1b8480c6221a3..3f48b36bd39f5 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png index 8b41666303c28..79db1daa2edf6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png index 105b30153eae0..95204eb281d62 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png index 0a0cb2ed46c66..3c87296382dea 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png differ diff --git a/frontend/public/services/loops.png b/frontend/public/services/loops.png new file mode 100644 index 0000000000000..822763a10e108 Binary files /dev/null and b/frontend/public/services/loops.png differ diff --git a/frontend/public/services/rudderstack.png b/frontend/public/services/rudderstack.png new file mode 100644 index 0000000000000..11ebb99b677b5 Binary files /dev/null and b/frontend/public/services/rudderstack.png differ diff --git a/frontend/public/services/vitally.png b/frontend/public/services/vitally.png new file mode 100644 index 0000000000000..867ed5e10e908 Binary files /dev/null and b/frontend/public/services/vitally.png differ diff --git a/frontend/src/layout/navigation-3000/components/TopBar.scss b/frontend/src/layout/navigation-3000/components/TopBar.scss index 1628b3135ea86..3f05046fa8599 100644 --- a/frontend/src/layout/navigation-3000/components/TopBar.scss +++ b/frontend/src/layout/navigation-3000/components/TopBar.scss @@ -49,9 +49,14 @@ .TopBar3000__trail { display: flex; align-items: center; - height: 1rem; margin-top: calc(0.25rem * (1 - var(--breadcrumbs-compaction-rate))); overflow: visible; + + .TopBar3000:not(.TopBar3000--compact) & { + // 1rem of trail height ensures nice tight spacing in the full or transitioning state, + // but we don't want it in the compact state, as it causes title edit buttons to be cut off at the top and bottom + height: 1rem; + } } .TopBar3000__here { @@ -65,7 +70,12 @@ font-size: 1rem; font-weight: 700; line-height: 1.2; - visibility: var(--breadcrumbs-title-large-visibility); + + .TopBar3000--compact & { + // Setting visibility wouldn't strictly be necessary, but for some reason, without it, + // breadcrumb positioning becomes borked when entering title editing mode + visibility: hidden; + } > * { position: absolute; @@ -90,7 +100,12 @@ &.TopBar3000__breadcrumb--here { flex-shrink: 1; cursor: default; - visibility: var(--breadcrumbs-title-small-visibility); + + .TopBar3000--full & { + // Setting visibility wouldn't strictly be necessary, but for some reason, without it, + // breadcrumb positioning becomes borked when entering title editing mode + visibility: hidden; + } > * { opacity: 1; diff --git a/frontend/src/layout/navigation-3000/components/TopBar.tsx b/frontend/src/layout/navigation-3000/components/TopBar.tsx index 805e52ad04e74..705f3d155bdc4 100644 --- a/frontend/src/layout/navigation-3000/components/TopBar.tsx +++ b/frontend/src/layout/navigation-3000/components/TopBar.tsx @@ -66,17 +66,13 @@ export function TopBar(): JSX.Element | null { return breadcrumbs.length ? (
{mobileLayout && ( diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx index b1eddf98ad684..b21c10bede17a 100644 --- a/frontend/src/layout/navigation-3000/navigationLogic.tsx +++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx @@ -15,6 +15,7 @@ import { IconRewindPlay, IconRocket, IconServer, + IconSparkles, IconTestTube, IconToggle, IconWarning, @@ -420,6 +421,15 @@ export const navigation3000Logic = kea<navigation3000LogicType>([ }, ] + if (featureFlags[FEATURE_FLAGS.ARTIFICIAL_HOG]) { + sectionOne.splice(1, 0, { + identifier: Scene.Max, + label: 'Max AI', + icon: <IconSparkles />, + to: urls.max(), + }) + } + return [ sectionOne, [ diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 175698ad85794..b49144f9ada51 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -766,6 +766,11 @@ class ApiRequest { return apiRequest } + // Chat + public chat(teamId?: TeamType['id']): ApiRequest { + return this.projectsDetail(teamId).addPathComponent('query').addPathComponent('chat') + } + // Notebooks public notebooks(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('notebooks') @@ -2309,6 +2314,14 @@ const api = { .create({ ...options, data: { query, client_query_id: queryId, refresh: refreshParam } }) }, + chatURL: (): string => { return new ApiRequest().chat().assembleFullUrl() }, + + async chat(data: any): Promise<Response> { return await api.createResponse(this.chatURL(), data) }, + /** Fetch data from specified URL. The result already is JSON-parsed. */ async get(url: string, options?: ApiMethodOptions): Promise<any> { const res = await api.getResponse(url, options) diff --git a/frontend/src/lib/colors.ts b/frontend/src/lib/colors.ts index 1413b241800a0..cc9530b5bc524 100644 --- a/frontend/src/lib/colors.ts +++ b/frontend/src/lib/colors.ts @@ -1,3 +1,5 @@ +import { captureException } from '@sentry/react' + import { LifecycleToggle } from '~/types' import { LemonTagType } from './lemon-ui/LemonTag' @@ -39,7 +41,9 @@ export const tagColors: LemonTagType[] = [ export function getColorVar(variable: string): string { const colorValue = getComputedStyle(document.body).getPropertyValue('--' + variable) if (!colorValue) { - throw new Error(`Couldn't find color variable --${variable}`) + captureException(new Error(`Couldn't find color variable --${variable}`)) + // Fall back to black or white depending on the theme + return document.body.getAttribute('theme') === 'light' ? '#000' : '#fff' } return colorValue.trim() } diff --git a/frontend/src/lib/components/CommandBar/ActionResult.tsx b/frontend/src/lib/components/CommandBar/ActionResult.tsx index cff7c24e09d2a..de97368c3ff66 100644 --- a/frontend/src/lib/components/CommandBar/ActionResult.tsx +++ b/frontend/src/lib/components/CommandBar/ActionResult.tsx @@ -30,7 +30,7 @@ export const ActionResult = ({ result, focused }: SearchResultProps): JSX.Elemen )} >
{ diff --git a/frontend/src/lib/components/CommandBar/ActionResults.tsx b/frontend/src/lib/components/CommandBar/ActionResults.tsx index c104546e9210f..df7c8003b2281 100644 --- a/frontend/src/lib/components/CommandBar/ActionResults.tsx +++ b/frontend/src/lib/components/CommandBar/ActionResults.tsx @@ -14,7 +14,7 @@ type ResultsGroupProps = { const ResultsGroup = ({ scope, results, activeResultIndex }: ResultsGroupProps): JSX.Element => { return ( <> -
+
{getNameFromActionScope(scope)}
{results.map((result) => ( diff --git a/frontend/src/lib/components/CommandBar/SearchResult.tsx b/frontend/src/lib/components/CommandBar/SearchResult.tsx index 242dc374f65e2..354470759518e 100644 --- a/frontend/src/lib/components/CommandBar/SearchResult.tsx +++ b/frontend/src/lib/components/CommandBar/SearchResult.tsx @@ -42,7 +42,7 @@ export const SearchResult = ({ result, resultIndex, focused }: SearchResultProps return (
{ diff --git a/frontend/src/lib/components/CommandBar/index.scss b/frontend/src/lib/components/CommandBar/index.scss index 593086bb81634..02aa24cb7a11d 100644 --- a/frontend/src/lib/components/CommandBar/index.scss +++ b/frontend/src/lib/components/CommandBar/index.scss @@ -1,7 +1,7 @@ .LemonInput.CommandBar__input { - height: 2.75rem; - padding-right: 0.375rem; - padding-left: 0.75rem; + height: 3rem; + padding-right: 0.5rem; + padding-left: 1rem; border-color: transparent !important; border-radius: 0; } diff --git a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx index ba614fd81ad7f..33c2f5b9c51b0 100644 --- a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx +++ b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx @@ -27,8 +27,8 @@ import { standardAnimations, } from './sprites/sprites' -const xFrames = SPRITE_SHEET_WIDTH / SPRITE_SIZE -const FPS = 24 +export const X_FRAMES = SPRITE_SHEET_WIDTH / SPRITE_SIZE +export const FPS = 24 const GRAVITY_PIXELS = 10 const MAX_JUMP_COUNT = 2 @@ -592,8 +592,8 @@ export class HedgehogActor { width: SPRITE_SIZE, height: SPRITE_SIZE, backgroundImage: `url(${baseSpritePath()}/${this.animation.img}.png)`, - backgroundPosition: `-${(this.animationFrame % xFrames) * SPRITE_SIZE}px -${ - Math.floor(this.animationFrame / xFrames) * SPRITE_SIZE + backgroundPosition: `-${(this.animationFrame % X_FRAMES) * SPRITE_SIZE}px -${ + Math.floor(this.animationFrame / X_FRAMES) * SPRITE_SIZE }px`, filter: imageFilter as any, }} diff --git a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx index 7a24d4b69c194..337dc6744b1bf 100644 --- a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx +++ b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddyRender.tsx @@ -1,35 +1,77 @@ +import { useEffect, useRef, useState } from 'react' + import { HedgehogConfig } from '~/types' +import { FPS, X_FRAMES } from './HedgehogBuddy' import { COLOR_TO_FILTER_MAP } from './hedgehogBuddyLogic' -import { baseSpriteAccessoriesPath, baseSpritePath, standardAccessories } from './sprites/sprites' +import { + baseSpriteAccessoriesPath, + baseSpritePath, + SPRITE_SIZE, + standardAccessories, + standardAnimations, +} from './sprites/sprites' -export type HedgehogBuddyStaticProps = Partial & { size?: number | string } +export type HedgehogBuddyStaticProps = Partial & { size?: number | string; waveOnAppearance?: boolean } // Takes a range of options and renders a static hedgehog -export function HedgehogBuddyStatic({ accessories, color, size }: HedgehogBuddyStaticProps): JSX.Element { +export function HedgehogBuddyStatic({ + accessories, + color, + size, + waveOnAppearance, +}: HedgehogBuddyStaticProps): JSX.Element { const imgSize = size ?? 60 const accessoryInfos = accessories?.map((x) => standardAccessories[x]) const filter = color ? COLOR_TO_FILTER_MAP[color] : null + const [animationIteration, setAnimationIteration] = useState(waveOnAppearance ? 
1 : 0) + const [_, setTimerLoop] = useState(0) + const animationFrameRef = useRef(0) + + useEffect(() => { + if (animationIteration) { + setTimerLoop(0) + let timer: any = null + const loop = (): void => { + if (animationFrameRef.current < standardAnimations.wave.frames) { + animationFrameRef.current++ + timer = setTimeout(loop, 1000 / FPS) + } else { + animationFrameRef.current = 0 + } + setTimerLoop((x) => x + 1) + } + loop() + return () => { + clearTimeout(timer) + } + } + }, [animationIteration]) + return (
setAnimationIteration((x) => x + 1) : undefined} > - @@ -37,7 +79,7 @@ export function HedgehogBuddyStatic({ accessories, color, size }: HedgehogBuddyS void className?: string dataAttr?: string + onHeaderClick?: () => void } function LemonCollapsePanel({ @@ -106,13 +107,17 @@ function LemonCollapsePanel({ className, dataAttr, onChange, + onHeaderClick, }: LemonCollapsePanelProps): JSX.Element { const { height: contentHeight, ref: contentRef } = useResizeObserver({ box: 'border-box' }) return (
onChange(!isExpanded)} + onClick={() => { + onHeaderClick && onHeaderClick() + onChange(!isExpanded) + }} icon={isExpanded ? : } className="LemonCollapsePanel__header" {...(dataAttr ? { 'data-attr': dataAttr } : {})} diff --git a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss index 0c47f978b6726..70ee5864f4978 100644 --- a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss +++ b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss @@ -81,6 +81,14 @@ } } + &.LemonInput--large { + --lemon-input-height: 3rem; + + .LemonIcon { + font-size: 1.5rem; + } + } + &.LemonInput--has-content { > .LemonIcon { color: var(--primary-3000); diff --git a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx index da51f8a6891c9..5f9117b9e41b3 100644 --- a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx +++ b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.tsx @@ -44,7 +44,7 @@ interface LemonInputPropsBase /** Special case - show a transparent background rather than white */ transparentBackground?: boolean /** Size of the element. Default: `'medium'`. */ - size?: 'xsmall' | 'small' | 'medium' + size?: 'xsmall' | 'small' | 'medium' | 'large' onPressEnter?: (event: React.KeyboardEvent) => void 'data-attr'?: string 'aria-label'?: string diff --git a/frontend/src/lib/lemon-ui/icons/categories.ts b/frontend/src/lib/lemon-ui/icons/categories.ts index d7e29e9a5327b..c57ef8d09c6ef 100644 --- a/frontend/src/lib/lemon-ui/icons/categories.ts +++ b/frontend/src/lib/lemon-ui/icons/categories.ts @@ -51,6 +51,7 @@ export const OBJECTS = { 'IconGear', 'IconGearFilled', 'IconStack', + 'IconSparkles', ], People: ['IconPeople', 'IconPeopleFilled', 'IconPerson', 'IconProfile', 'IconUser', 'IconGroups'], 'Business & Finance': ['IconStore', 'IconCart', 'IconReceipt', 'IconPiggyBank', 'IconHandMoney'], diff --git a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx index c1d66c0d78583..8bbce53ef67ed 100644 --- a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx +++ b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx @@ -9,6 +9,7 @@ import { AnimationType } from 'lib/animations/animations' import { Animation } from 'lib/components/Animation/Animation' import { useCallback, useState } from 'react' import { DatabaseTableTreeWithItems } from 'scenes/data-warehouse/external/DataWarehouseTables' +import { InsightErrorState } from 'scenes/insights/EmptyStates' import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber' import { urls } from 'scenes/urls' @@ -104,6 +105,8 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX sourceFeatures, response, responseLoading, + responseError, + queryCancelled, isChartSettingsPanelOpen, } = useValues(dataVisualizationLogic) @@ -202,7 +205,27 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX 'pt-[46px]': showEditingUI, })} > - {component} + {visualizationType !== ChartDisplayType.ActionsTable && responseError ? ( +
+ +
+ ) : ( + component + )}
diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 10e539b1d61ac..90be24bd22c33 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -1,6 +1,134 @@ { "$schema": "http://json-schema.org/draft-07/schema#", "definitions": { + "AIActionsNode": { + "additionalProperties": false, + "properties": { + "custom_name": { + "type": "string" + }, + "event": { + "description": "The event or `null` for all events.", + "type": ["string", "null"] + }, + "fixedProperties": { + "items": { + "$ref": "#/definitions/AIPropertyFilter" + }, + "type": "array" + }, + "kind": { + "const": "EventsNode", + "type": "string" + }, + "math": { + "$ref": "#/definitions/MathType" + }, + "math_group_type_index": { + "enum": [0, 1, 2, 3, 4], + "type": "number" + }, + "math_property": { + "type": "string" + }, + "name": { + "type": "string" + }, + "orderBy": { + "description": "Columns to order by", + "items": { + "type": "string" + }, + "type": "array" + }, + "properties": { + "items": { + "$ref": "#/definitions/AIPropertyFilter" + }, + "type": "array" + }, + "response": { + "type": "object" + } + }, + "required": ["kind"], + "type": "object" + }, + "AIEventsNode": { + "additionalProperties": false, + "properties": { + "custom_name": { + "type": "string" + }, + "event": { + "description": "The event or `null` for all events.", + "type": ["string", "null"] + }, + "fixedProperties": { + "items": { + "$ref": "#/definitions/AIPropertyFilter" + }, + "type": "array" + }, + "kind": { + "const": "EventsNode", + "type": "string" + }, + "math": { + "$ref": "#/definitions/MathType" + }, + "math_group_type_index": { + "enum": [0, 1, 2, 3, 4], + "type": "number" + }, + "math_property": { + "type": "string" + }, + "name": { + "type": "string" + }, + "orderBy": { + "description": "Columns to order by", + "items": { + "type": "string" + }, + "type": "array" + }, + "properties": { + "items": { + "$ref": "#/definitions/AIPropertyFilter" + }, + "type": "array" + }, + "response": { + "type": "object" + } + }, + "required": ["kind"], + "type": "object" + }, + "AIPropertyFilter": { + "anyOf": [ + { + "$ref": "#/definitions/EventPropertyFilter" + }, + { + "$ref": "#/definitions/PersonPropertyFilter" + }, + { + "$ref": "#/definitions/SessionPropertyFilter" + }, + { + "$ref": "#/definitions/CohortPropertyFilter" + }, + { + "$ref": "#/definitions/GroupPropertyFilter" + }, + { + "$ref": "#/definitions/FeaturePropertyFilter" + } + ] + }, "ActionsNode": { "additionalProperties": false, "properties": { @@ -1107,6 +1235,9 @@ "description": "Generated HogQL query.", "type": "string" }, + "isUdf": { + "type": "boolean" + }, "is_cached": { "type": "boolean" }, @@ -4181,6 +4312,92 @@ "required": ["columns", "hogql", "results", "types"], "type": "object" }, + "ExperimentalAITrendsQuery": { + "additionalProperties": false, + "properties": { + "aggregation_group_type_index": { + "description": "Groups aggregation", + "type": "integer" + }, + "breakdownFilter": { + "additionalProperties": false, + "description": "Breakdown of the events and actions", + "properties": { + "breakdown_hide_other_aggregation": { + "type": ["boolean", "null"] + }, + "breakdown_histogram_bin_count": { + "type": "integer" + }, + "breakdown_limit": { + "type": "integer" + }, + "breakdowns": { + "items": { + "$ref": "#/definitions/Breakdown" + }, + "maxLength": 3, + "type": "array" + } + }, + "type": "object" + }, + "compareFilter": { + "$ref": "#/definitions/CompareFilter", + "description": "Compare to date range" + }, 
+ "dateRange": { + "$ref": "#/definitions/InsightDateRange", + "description": "Date range for the query" + }, + "filterTestAccounts": { + "default": false, + "description": "Exclude internal and test users by applying the respective filters", + "type": "boolean" + }, + "interval": { + "$ref": "#/definitions/IntervalType", + "default": "day", + "description": "Granularity of the response. Can be one of `hour`, `day`, `week` or `month`" + }, + "kind": { + "const": "TrendsQuery", + "type": "string" + }, + "properties": { + "default": [], + "description": "Property filters for all series", + "items": { + "$ref": "#/definitions/AIPropertyFilter" + }, + "type": "array" + }, + "samplingFactor": { + "description": "Sampling rate", + "type": ["number", "null"] + }, + "series": { + "description": "Events and actions to include", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/AIEventsNode" + }, + { + "$ref": "#/definitions/AIActionsNode" + } + ] + }, + "type": "array" + }, + "trendsFilter": { + "$ref": "#/definitions/TrendsFilter", + "description": "Properties specific to the trends insight" + } + }, + "required": ["kind", "series"], + "type": "object" + }, "FeaturePropertyFilter": { "additionalProperties": false, "properties": { @@ -4724,6 +4941,9 @@ "layout": { "$ref": "#/definitions/FunnelLayout", "default": "vertical" + }, + "useUdf": { + "type": "boolean" } }, "type": "object" @@ -4870,6 +5090,9 @@ "description": "Generated HogQL query.", "type": "string" }, + "isUdf": { + "type": "boolean" + }, "modifiers": { "$ref": "#/definitions/HogQLQueryModifiers", "description": "Modifiers used when performing the query" @@ -8093,6 +8316,9 @@ "description": "Generated HogQL query.", "type": "string" }, + "isUdf": { + "type": "boolean" + }, "modifiers": { "$ref": "#/definitions/HogQLQueryModifiers", "description": "Modifiers used when performing the query" diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 0a6f14f2fb8ed..761a26fe22489 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -8,14 +8,17 @@ import { BreakdownType, ChartDisplayCategory, ChartDisplayType, + CohortPropertyFilter, CountPerActorMathType, DurationType, EventPropertyFilter, EventType, + FeaturePropertyFilter, FilterLogicalOperator, FilterType, FunnelsFilterType, GroupMathType, + GroupPropertyFilter, HogQLMathType, InsightShortId, InsightType, @@ -826,6 +829,79 @@ export interface TrendsQuery extends InsightsQueryBase { compareFilter?: CompareFilter } +export type AIPropertyFilter = + | EventPropertyFilter + | PersonPropertyFilter + // | ElementPropertyFilter + | SessionPropertyFilter + | CohortPropertyFilter + // | RecordingPropertyFilter + // | LogEntryPropertyFilter + // | HogQLPropertyFilter + // | EmptyPropertyFilter + // | DataWarehousePropertyFilter + // | DataWarehousePersonPropertyFilter + | GroupPropertyFilter + | FeaturePropertyFilter + +export interface AIEventsNode + extends Omit { + properties?: AIPropertyFilter[] + fixedProperties?: AIPropertyFilter[] +} + +export interface AIActionsNode + extends Omit { + properties?: AIPropertyFilter[] + fixedProperties?: AIPropertyFilter[] +} + +export interface ExperimentalAITrendsQuery { + kind: NodeKind.TrendsQuery + /** + * Granularity of the response. 
Can be one of `hour`, `day`, `week` or `month` + * + * @default day + */ + interval?: IntervalType + /** Events and actions to include */ + series: (AIEventsNode | AIActionsNode)[] + /** Properties specific to the trends insight */ + trendsFilter?: TrendsFilter + /** Breakdown of the events and actions */ + breakdownFilter?: Omit< + BreakdownFilter, + | 'breakdown' + | 'breakdown_type' + | 'breakdown_normalize_url' + | 'histogram_bin_count' + | 'breakdown_group_type_index' + > + /** Compare to date range */ + compareFilter?: CompareFilter + /** Date range for the query */ + dateRange?: InsightDateRange + /** + * Exclude internal and test users by applying the respective filters + * + * @default false + */ + filterTestAccounts?: boolean + /** + * Property filters for all series + * + * @default [] + */ + properties?: AIPropertyFilter[] + + /** + * Groups aggregation + */ + aggregation_group_type_index?: integer + /** Sampling rate */ + samplingFactor?: number | null +} + /** `FunnelsFilterType` minus everything inherited from `FilterType` and persons modal related params */ export type FunnelsFilterLegacy = Omit< FunnelsFilterType, @@ -871,6 +947,7 @@ export type FunnelsFilter = { hiddenLegendBreakdowns?: string[] /** @default total */ funnelStepReference?: FunnelsFilterLegacy['funnel_step_reference'] + useUdf?: boolean } export interface FunnelsQuery extends InsightsQueryBase { @@ -897,7 +974,9 @@ export type FunnelTrendsResults = Record[] export interface FunnelsQueryResponse extends AnalyticsQueryResponseBase< FunnelStepsResults | FunnelStepsBreakdownResults | FunnelTimeToConvertResults | FunnelTrendsResults - > {} + > { + isUdf?: boolean +} export type CachedFunnelsQueryResponse = CachedQueryResponse diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index 23bc70635693a..0824fc5438068 100644 --- a/frontend/src/scenes/appScenes.ts +++ b/frontend/src/scenes/appScenes.ts @@ -45,6 +45,7 @@ export const appScenes: Record any> = { [Scene.OrganizationCreateFirst]: () => import('./organization/Create'), [Scene.OrganizationCreationConfirm]: () => import('./organization/ConfirmOrganization/ConfirmOrganization'), [Scene.ProjectHomepage]: () => import('./project-homepage/ProjectHomepage'), + [Scene.Max]: () => import('./max/Max'), [Scene.ProjectCreateFirst]: () => import('./project/Create'), [Scene.SystemStatus]: () => import('./instance/SystemStatus'), [Scene.ToolbarLaunch]: () => import('./toolbar-launch/ToolbarLaunch'), diff --git a/frontend/src/scenes/dashboard/dashboardTemplateVariablesLogic.ts b/frontend/src/scenes/dashboard/dashboardTemplateVariablesLogic.ts index 39b45617e66ac..da23d22466d25 100644 --- a/frontend/src/scenes/dashboard/dashboardTemplateVariablesLogic.ts +++ b/frontend/src/scenes/dashboard/dashboardTemplateVariablesLogic.ts @@ -1,4 +1,4 @@ -import { actions, kea, path, props, propsChanged, reducers } from 'kea' +import { actions, kea, listeners, path, props, propsChanged, reducers, selectors } from 'kea' import { isEmptyObject } from 'lib/utils' import { DashboardTemplateVariableType, FilterType, Optional } from '~/types' @@ -24,6 +24,11 @@ export const dashboardTemplateVariablesLogic = kea ({ index }), + incrementActiveVariableIndex: true, + possiblyIncrementActiveVariableIndex: true, + resetVariable: (variableId: string) => ({ variableId }), + goToNextUntouchedActiveVariableIndex: true, }), reducers({ variables: [ @@ -41,14 +46,64 @@ export const dashboardTemplateVariablesLogic = kea { if (v.name === variableName && filterGroup?.events?.length 
&& filterGroup.events[0]) { - return { ...v, default: filterGroup.events[0] } + return { ...v, default: filterGroup.events[0], touched: true } } - return v + return { ...v } + }) + }, + resetVariable: (state, { variableId }) => { + return state.map((v: DashboardTemplateVariableType) => { + if (v.id === variableId) { + return { ...v, default: FALLBACK_EVENT, touched: false } + } + return { ...v } }) }, }, ], + activeVariableIndex: [ + 0, + { + setActiveVariableIndex: (_, { index }) => index, + incrementActiveVariableIndex: (state) => state + 1, + }, + ], }), + selectors(() => ({ + activeVariable: [ + (s) => [s.variables, s.activeVariableIndex], + (variables: DashboardTemplateVariableType[], activeVariableIndex: number) => { + return variables[activeVariableIndex] + }, + ], + allVariablesAreTouched: [ + (s) => [s.variables], + (variables: DashboardTemplateVariableType[]) => { + return variables.every((v) => v.touched) + }, + ], + })), + listeners(({ actions, props, values }) => ({ + possiblyIncrementActiveVariableIndex: () => { + if (props.variables.length > 0 && values.activeVariableIndex < props.variables.length - 1) { + actions.incrementActiveVariableIndex() + } + }, + goToNextUntouchedActiveVariableIndex: () => { + let nextIndex = values.variables.findIndex((v, i) => !v.touched && i > values.activeVariableIndex) + if (nextIndex !== -1) { + actions.setActiveVariableIndex(nextIndex) + return + } + if (nextIndex == -1) { + nextIndex = values.variables.findIndex((v) => !v.touched) + if (nextIndex == -1) { + nextIndex = values.activeVariableIndex + } + } + actions.setActiveVariableIndex(nextIndex) + }, + })), propsChanged(({ actions, props }, oldProps) => { if (props.variables !== oldProps.variables) { actions.setVariables(props.variables) diff --git a/frontend/src/scenes/data-management/database/databaseTableListLogic.ts b/frontend/src/scenes/data-management/database/databaseTableListLogic.ts index 409d9a2c03983..9718eb83c76b9 100644 --- a/frontend/src/scenes/data-management/database/databaseTableListLogic.ts +++ b/frontend/src/scenes/data-management/database/databaseTableListLogic.ts @@ -109,7 +109,9 @@ export const databaseTableListLogic = kea([ } return Object.values(database.tables) - .filter((n): n is DatabaseSchemaDataWarehouseTable => n.type === 'data_warehouse') + .filter( + (n): n is DatabaseSchemaDataWarehouseTable => n.type === 'data_warehouse' || n.type == 'view' + ) .reduce((acc, cur) => { acc[cur.name] = database.tables[cur.name] as DatabaseSchemaDataWarehouseTable return acc diff --git a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx index f385f82c5b402..5f4b9fbc628aa 100644 --- a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx +++ b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx @@ -567,6 +567,45 @@ export const SOURCE_DETAILS: Record = { ], caption: 'Select an existing Salesforce account to link to PostHog or create a new connection', }, + Vitally: { + name: 'Vitally', + fields: [ + { + name: 'secret_token', + label: 'Secret token', + type: 'text', + required: true, + placeholder: 'sk_live_...', + }, + { + type: 'select', + name: 'region', + label: 'Vitally region', + required: true, + defaultValue: 'EU', + options: [ + { + label: 'EU', + value: 'EU', + }, + { + label: 'US', + value: 'US', + fields: [ + { + name: 'subdomain', + label: 'Vitally subdomain', + type: 'text', + required: true, + placeholder: '', + }, + ], + }, + ], + }, + ], + caption: '', + }, } export const 
buildKeaFormDefaultFromSourceDetails = ( diff --git a/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx b/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx index e983ce363bd95..4d1fc0f20b4cd 100644 --- a/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx +++ b/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx @@ -14,6 +14,7 @@ import IconSalesforce from 'public/services/salesforce.png' import IconSnowflake from 'public/services/snowflake.png' import IconMSSQL from 'public/services/sql-azure.png' import IconStripe from 'public/services/stripe.png' +import IconVitally from 'public/services/vitally.png' import IconZendesk from 'public/services/zendesk.png' import { urls } from 'scenes/urls' @@ -27,6 +28,7 @@ const StatusTagSetting = { Completed: 'success', Error: 'danger', Failed: 'danger', + 'Billing limits': 'danger', } export function DataWarehouseManagedSourcesTable(): JSX.Element { @@ -188,6 +190,7 @@ export function RenderDataWarehouseSourceIcon({ azure: Iconazure, Salesforce: IconSalesforce, MSSQL: IconMSSQL, + Vitally: IconVitally, }[type] return ( @@ -202,7 +205,7 @@ export function RenderDataWarehouseSourceIcon({ } > - {type} + {type}
diff --git a/frontend/src/scenes/data-warehouse/settings/source/Schemas.tsx b/frontend/src/scenes/data-warehouse/settings/source/Schemas.tsx index 2a3fa469ac658..56c5e12963dd8 100644 --- a/frontend/src/scenes/data-warehouse/settings/source/Schemas.tsx +++ b/frontend/src/scenes/data-warehouse/settings/source/Schemas.tsx @@ -47,6 +47,7 @@ const StatusTagSetting = { Completed: 'success', Error: 'danger', Failed: 'danger', + 'Billing limits': 'danger', } export const SchemaTable = ({ schemas, isLoading }: SchemaTableProps): JSX.Element => { diff --git a/frontend/src/scenes/data-warehouse/settings/source/Syncs.tsx b/frontend/src/scenes/data-warehouse/settings/source/Syncs.tsx index a86a41ec867ae..c283e32c7b54b 100644 --- a/frontend/src/scenes/data-warehouse/settings/source/Syncs.tsx +++ b/frontend/src/scenes/data-warehouse/settings/source/Syncs.tsx @@ -11,7 +11,7 @@ const StatusTagSetting: Record = { Running: 'primary', Completed: 'success', Failed: 'danger', - Cancelled: 'default', + 'Billing limits': 'danger', } interface SyncsProps { diff --git a/frontend/src/scenes/max/Max.scss b/frontend/src/scenes/max/Max.scss new file mode 100644 index 0000000000000..68671f6310558 --- /dev/null +++ b/frontend/src/scenes/max/Max.scss @@ -0,0 +1,3 @@ +.InsightVizDisplay { + flex: 1; +} diff --git a/frontend/src/scenes/max/Max.tsx b/frontend/src/scenes/max/Max.tsx new file mode 100644 index 0000000000000..594ca3344aed3 --- /dev/null +++ b/frontend/src/scenes/max/Max.tsx @@ -0,0 +1,157 @@ +import './Max.scss' + +import { LemonButton, LemonInput, Spinner } from '@posthog/lemon-ui' +import clsx from 'clsx' +import { useActions, useValues } from 'kea' +import { HedgehogBuddyStatic } from 'lib/components/HedgehogBuddy/HedgehogBuddyRender' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { uuid } from 'lib/utils' +import React, { useState } from 'react' +import { SceneExport } from 'scenes/sceneTypes' +import { userLogic } from 'scenes/userLogic' + +import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' +import { Query } from '~/queries/Query/Query' +import { NodeKind } from '~/queries/schema' + +import { maxLogic } from './maxLogic' + +export const scene: SceneExport = { + component: Max, + logic: maxLogic, +} + +function Message({ + role, + children, + className, +}: React.PropsWithChildren<{ role: 'user' | 'assistant'; className?: string }>): JSX.Element { + return ( +
+ {children} +
+ ) +} + +export function Max(): JSX.Element | null { + const { user } = useValues(userLogic) + const { featureFlags } = useValues(featureFlagLogic) + + const logic = maxLogic({ + sessionId: uuid(), + }) + const { thread, threadLoading } = useValues(logic) + const { askMax } = useActions(logic) + + const [question, setQuestion] = useState('') + + if (!featureFlags[FEATURE_FLAGS.ARTIFICIAL_HOG]) { + return null + } + + return ( + <> +
+ {thread.map((message, index) => { + if (message.role === 'user' || typeof message.content === 'string') { + return ( + + {message.content || No text} + + ) + } + + const query = { + kind: NodeKind.InsightVizNode, + source: message.content?.answer, + } + + return ( + + {message.content?.reasoning_steps && ( + +
    + {message.content.reasoning_steps.map((step, index) => ( +
  • {step}
  • + ))} +
+
+ )} + {message.status === 'completed' && message.content?.answer && ( + +
+ +
+ + Edit Query + +
+ )} +
+ ) + })} + {threadLoading && ( + +
+ Let me think… + +
+
+ )} +
+
+
+ +
+ setQuestion(value)} + placeholder="Hey, I'm Max! What would you like to know about your product?" + fullWidth + size="large" + autoFocus + onPressEnter={() => { + askMax(question) + setQuestion('') + }} + disabled={threadLoading} + suffix={ + { + askMax(question) + setQuestion('') + }} + disabledReason={threadLoading ? 'Thinking…' : undefined} + > + Ask Max + + } + /> +
+ + ) +} diff --git a/frontend/src/scenes/max/maxLogic.ts b/frontend/src/scenes/max/maxLogic.ts new file mode 100644 index 0000000000000..a0a863e98e0eb --- /dev/null +++ b/frontend/src/scenes/max/maxLogic.ts @@ -0,0 +1,164 @@ +import { actions, kea, listeners, path, props, reducers } from 'kea' +import api from 'lib/api' + +import { ExperimentalAITrendsQuery } from '~/queries/schema' + +import type { maxLogicType } from './maxLogicType' + +export interface MaxLogicProps { + sessionId: string +} + +interface TrendGenerationResult { + reasoning_steps?: string[] + answer?: ExperimentalAITrendsQuery +} + +export interface ThreadMessage { + role: 'user' | 'assistant' + content?: string | TrendGenerationResult + status?: 'loading' | 'completed' | 'error' +} + +export const maxLogic = kea([ + path(['scenes', 'max', 'maxLogic']), + props({} as MaxLogicProps), + actions({ + askMax: (prompt: string) => ({ prompt }), + setThreadLoaded: true, + addMessage: (message: ThreadMessage) => ({ message }), + replaceMessage: (index: number, message: ThreadMessage) => ({ index, message }), + setMessageStatus: (index: number, status: ThreadMessage['status']) => ({ index, status }), + }), + reducers({ + thread: [ + [] as ThreadMessage[], + { + addMessage: (state, { message }) => [...state, message], + replaceMessage: (state, { message, index }) => [ + ...state.slice(0, index), + message, + ...state.slice(index + 1), + ], + setMessageStatus: (state, { index, status }) => [ + ...state.slice(0, index), + { + ...state[index], + status, + }, + ...state.slice(index + 1), + ], + }, + ], + threadLoading: [ + false, + { + askMax: () => true, + setThreadLoaded: () => false, + }, + ], + }), + listeners(({ actions, values, props }) => ({ + askMax: async ({ prompt }) => { + actions.addMessage({ role: 'user', content: prompt }) + const newIndex = values.thread.length + + try { + const response = await api.chat({ + session_id: props.sessionId, + messages: values.thread.map(({ role, content }) => ({ + role, + content: typeof content === 'string' ? content : JSON.stringify(content), + })), + }) + const reader = response.body?.getReader() + const decoder = new TextDecoder() + + if (reader) { + let firstChunk = true + + while (true) { + const { done, value } = await reader.read() + if (done) { + actions.setMessageStatus(newIndex, 'completed') + break + } + + const text = decoder.decode(value) + const parsedResponse = parseResponse(text) + + if (firstChunk) { + firstChunk = false + + if (parsedResponse) { + actions.addMessage({ role: 'assistant', content: parsedResponse, status: 'loading' }) + } + } else if (parsedResponse) { + actions.replaceMessage(newIndex, { + role: 'assistant', + content: parsedResponse, + status: 'loading', + }) + } + } + } + } catch { + actions.setMessageStatus(values.thread.length - 1 === newIndex ? newIndex : newIndex - 1, 'error') + } + + actions.setThreadLoaded() + }, + })), +]) + +/** + * Parses the generation result from the API. Some generation chunks might be sent in batches. 
+ * @param response + */ +function parseResponse(response: string, recursive = true): TrendGenerationResult | null { + try { + const parsed = JSON.parse(response) + return parsed as TrendGenerationResult + } catch { + if (!recursive) { + return null + } + + // A chunk may contain several concatenated JSON objects (or trailing partial output), + // so scan for balanced top-level {...} spans. + const results: [number, number][] = [] + let pair: [number, number] = [0, 0] + let seq = 0 + + for (let i = 0; i < response.length; i++) { + const char = response[i] + + if (char === '{') { + if (seq === 0) { + pair[0] = i + } + seq += 1 + } + + if (char === '}') { + seq -= 1 + // Record a span only when a top-level object actually closes. Pushing on every + // character at depth zero would also record bogus [0, 0] pairs for text between objects. + if (seq === 0) { + pair[1] = i + results.push(pair) + pair = [0, 0] + } + } + } + + // The last complete object reflects the freshest generation state. + const lastPair = results.pop() + + if (lastPair) { + const [left, right] = lastPair + return parseResponse(response.slice(left, right + 1), false) + } + + return null + } +}
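`parseResponse` falls back to brace scanning because one streamed chunk can carry several concatenated JSON objects, and only the last complete one reflects the current generation state. Here is the same idea as a runnable Python sketch (an illustration of the TypeScript helper above, sharing its known limitation: braces inside string values are not special-cased):

```python
import json


def last_complete_json(stream: str):
    """Return the last balanced top-level {...} object in a chunked stream, or None."""
    depth = 0
    start = 0
    spans: list[tuple[int, int]] = []
    for i, char in enumerate(stream):
        if char == "{":
            if depth == 0:
                start = i
            depth += 1
        elif char == "}":
            depth -= 1
            if depth == 0:  # a top-level object just closed
                spans.append((start, i))
    if not spans:
        return None
    left, right = spans[-1]
    return json.loads(stream[left : right + 1])


# Two batched objects plus a trailing partial chunk: the last complete one wins.
print(last_complete_json('{"reasoning_steps": ["a"]}{"reasoning_steps": ["a", "b"]}{"ans'))
# {'reasoning_steps': ['a', 'b']}
```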
OnboardingDashboardTemplateConfigureStep = ({ )}
- +

+ For each action below, select an element on your site that indicates when that action is + taken, or enter a custom event name that you'll send using{' '} + + posthog.capture() + {' '} + (no need to send it now). +
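The custom-event path this copy describes boils down to calling posthog.capture from the user's own code once they are ready; a minimal sketch, with an invented event name:

// In the user's product code (not PostHog) — fires the custom event whose name
// was entered for the template variable. Until it is sent, the tile simply shows no data.
import posthog from 'posthog-js'

posthog.capture('signup_completed')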

+ Create dashboard diff --git a/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateVariables.tsx b/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateVariables.tsx new file mode 100644 index 0000000000000..e22f822eedcfe --- /dev/null +++ b/frontend/src/scenes/onboarding/productAnalyticsSteps/DashboardTemplateVariables.tsx @@ -0,0 +1,186 @@ +import { IconCheckCircle, IconInfo, IconTrash } from '@posthog/icons' +import { LemonBanner, LemonButton, LemonCollapse, LemonInput, LemonLabel } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { useEffect, useState } from 'react' +import { dashboardTemplateVariablesLogic } from 'scenes/dashboard/dashboardTemplateVariablesLogic' +import { newDashboardLogic } from 'scenes/dashboard/newDashboardLogic' + +import { DashboardTemplateVariableType } from '~/types' + +function VariableSelector({ + variable, + hasSelectedSite, +}: { + variable: DashboardTemplateVariableType + hasSelectedSite: boolean +}): JSX.Element { + const { activeDashboardTemplate } = useValues(newDashboardLogic) + const theDashboardTemplateVariablesLogic = dashboardTemplateVariablesLogic({ + variables: activeDashboardTemplate?.variables || [], + }) + const { setVariable, resetVariable, goToNextUntouchedActiveVariableIndex, incrementActiveVariableIndex } = + useActions(theDashboardTemplateVariablesLogic) + const { allVariablesAreTouched, variables, activeVariableIndex } = useValues(theDashboardTemplateVariablesLogic) + const [customEventName, setCustomEventName] = useState(null) + const [showCustomEventField, setShowCustomEventField] = useState(false) + + const FALLBACK_EVENT = { + id: '$other_event', + math: 'dau', + type: 'events', + } + + return ( +
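+ {/* One variable's body: its description, the current selection state, then the element / custom-event pickers */}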
+
+

+ {variable.description} +

+
+ {variable.touched && !customEventName && ( +
+
+ {' '} + Selected +

.md-invite-button

+
+
+ } + type="tertiary" + size="small" + onClick={() => resetVariable(variable.id)} + /> +
+
+ )} + {showCustomEventField && ( +
+ Custom event name +

+ Set the name that you'll use for a custom event (e.g. a backend event) instead of selecting an + event from your site. You can change this later if needed.
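For the backend case this note mentions, a posthog-node sketch (key and identifiers invented for illustration):

// Server-side capture of the same event name entered above.
import { PostHog } from 'posthog-node'

const client = new PostHog('phc_your_project_api_key') // assumption: key comes from env/config
client.capture({ distinctId: 'user_123', event: 'signup_completed' })
await client.shutdown() // flush queued events before the process exits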

+
+ { + if (v) { + setCustomEventName(v) + setVariable(variable.name, { + events: [{ id: v, math: 'dau', type: 'events' }], + }) + } else { + setCustomEventName(null) + resetVariable(variable.id) + } + }} + onBlur={() => { + if (customEventName) { + setVariable(variable.name, { + events: [{ id: customEventName, math: 'dau', type: 'events' }], + }) + } else { + resetVariable(variable.id) + setShowCustomEventField(false) + } + }} + /> +
+ } + type="tertiary" + size="small" + onClick={() => { + resetVariable(variable.id) + setCustomEventName(null) + setShowCustomEventField(false) + }} + /> +
+
+
+ )} + {!hasSelectedSite ? ( + Please select a site to continue. + ) : ( +
+ {variable.touched ? ( + <> + {!allVariablesAreTouched || + (allVariablesAreTouched && variables.length !== activeVariableIndex + 1) ? ( + + !allVariablesAreTouched + ? goToNextUntouchedActiveVariableIndex() + : variables.length !== activeVariableIndex + 1 + ? incrementActiveVariableIndex() + : null + } + > + Continue + + ) : null} + + ) : ( +
+ { + setShowCustomEventField(false) + setVariable(variable.name, { events: [FALLBACK_EVENT] }) + }} + > + Select from site + + setShowCustomEventField(true)}> + Use custom event + +
+ )} +
+ )} +
+ ) +} + +export function DashboardTemplateVariables({ hasSelectedSite }: { hasSelectedSite: boolean }): JSX.Element { + const { activeDashboardTemplate } = useValues(newDashboardLogic) + const theDashboardTemplateVariablesLogic = dashboardTemplateVariablesLogic({ + variables: activeDashboardTemplate?.variables || [], + }) + const { variables, activeVariableIndex } = useValues(theDashboardTemplateVariablesLogic) + const { setVariables, setActiveVariableIndex } = useActions(theDashboardTemplateVariablesLogic) + + // TODO: onboarding-dashboard-templates: this is a hack, I'm not sure why it's not set properly initially. + useEffect(() => { + setVariables(activeDashboardTemplate?.variables || []) + }, [activeDashboardTemplate]) + + return ( +
+ ({ + key: v.id, + header: ( +
+ {v.name} + {v.touched && } +
+ ), + content: , + className: 'p-4 bg-white', + onHeaderClick: () => { + setActiveVariableIndex(i) + }, + }))} + embedded + size="small" + /> +
+ ) +} diff --git a/frontend/src/scenes/persons/PersonDisplay.tsx b/frontend/src/scenes/persons/PersonDisplay.tsx index b37d51fba17de..ce0593f9eb6bb 100644 --- a/frontend/src/scenes/persons/PersonDisplay.tsx +++ b/frontend/src/scenes/persons/PersonDisplay.tsx @@ -64,6 +64,17 @@ export function PersonDisplay({ const notebookNode = useNotebookNode() + const handleClick = (e: React.MouseEvent): void => { + if (visible && href && !noLink && person?.properties) { + router.actions.push(href) + } else if (visible && !person?.properties) { + e.preventDefault() + } else { + setVisible(true) + } + return + } + let content = ( {withIcon && } @@ -72,26 +83,13 @@ export function PersonDisplay({ ) content = ( - { - if (visible && href && !noLink) { - router.actions.push(href) - } else { - setVisible(true) - } - } - : undefined - } - > - {noLink || !href ? ( + + {noLink || !href || (visible && !person?.properties) ? ( content ) : ( { + onClick={(e: React.MouseEvent): void => { if (!noPopover && !notebookNode) { e.preventDefault() return diff --git a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx index dacfed655a513..2c46f62d4bf4b 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx @@ -36,6 +36,7 @@ import { PipelineTab, PropertyFilterType, PropertyGroupFilter, + PropertyGroupFilterValue, } from '~/types' import { EmailTemplate } from './email-templater/emailTemplaterLogic' @@ -448,10 +449,14 @@ export const hogFunctionConfigurationLogic = kea [s.configuration], (configuration): TrendsQuery => { - const properties: PropertyGroupFilter = { + const seriesProperties: PropertyGroupFilterValue = { type: FilterLogicalOperator.Or, values: [], } + const properties: PropertyGroupFilter = { + type: FilterLogicalOperator.And, + values: [seriesProperties], + } for (const event of configuration.filters?.events ?? []) { const eventProperties: AnyPropertyFilter[] = [...(event.properties ?? [])] if (event.id) { @@ -466,7 +471,7 @@ export const hogFunctionConfigurationLogic = kea 0) { + const globalProperties: PropertyGroupFilterValue = { + type: FilterLogicalOperator.And, + values: [], + } + for (const property of configuration.filters?.properties ?? []) { + globalProperties.values.push(property as AnyPropertyFilter) + } + properties.values.push(globalProperties) + } return { kind: NodeKind.TrendsQuery, diff --git a/frontend/src/scenes/sceneTypes.ts b/frontend/src/scenes/sceneTypes.ts index 5f519137e32b0..adddb012a10b3 100644 --- a/frontend/src/scenes/sceneTypes.ts +++ b/frontend/src/scenes/sceneTypes.ts @@ -48,6 +48,7 @@ export enum Scene { DataWarehouseRedirect = 'DataWarehouseRedirect', OrganizationCreateFirst = 'OrganizationCreate', ProjectHomepage = 'ProjectHomepage', + Max = 'Max', ProjectCreateFirst = 'ProjectCreate', SystemStatus = 'SystemStatus', AsyncMigrations = 'AsyncMigrations', diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index 9b4019886e9b2..7581e424bd2fd 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -253,6 +253,12 @@ export const sceneConfigurations: Record = { projectBased: true, name: 'Homepage', }, + [Scene.Max]: { + projectBased: true, + name: 'Max', + layout: 'app-raw', + hideProjectNotice: true, // FIXME: Currently doesn't render well... 
+ }, [Scene.IntegrationsRedirect]: { name: 'Integrations redirect', }, @@ -343,7 +349,7 @@ export const sceneConfigurations: Record = { }, [Scene.Notebook]: { projectBased: true, - hideProjectNotice: true, // Currently doesn't render well... + hideProjectNotice: true, // FIXME: Currently doesn't render well... name: 'Notebook', layout: 'app-raw', activityScope: ActivityScope.NOTEBOOK, }, @@ -517,6 +523,7 @@ export const routes: Record = { [urls.annotations()]: Scene.DataManagement, [urls.annotation(':id')]: Scene.DataManagement, [urls.projectHomepage()]: Scene.ProjectHomepage, + [urls.max()]: Scene.Max, [urls.projectCreateFirst()]: Scene.ProjectCreateFirst, [urls.organizationBilling()]: Scene.Billing, [urls.organizationCreateFirst()]: Scene.OrganizationCreateFirst, diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx index a58bc3e739ead..b677c8fec155e 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx @@ -170,7 +170,9 @@ const MenuActions = (): JSX.Element => { useActions(sessionRecordingPlayerLogic) const { fetchSimilarRecordings } = useActions(sessionRecordingDataLogic(logicProps)) - const hasMobileExport = useFeatureFlag('SESSION_REPLAY_EXPORT_MOBILE_DATA') + // Keep the hook call unconditional so React hook order stays stable across renders, + // then apply the impersonation override on top + const hasMobileExportFlag = useFeatureFlag('SESSION_REPLAY_EXPORT_MOBILE_DATA') + const hasMobileExport = window.IMPERSONATED_SESSION || hasMobileExportFlag const hasSimilarRecordings = useFeatureFlag('REPLAY_SIMILAR_RECORDINGS') const onDelete = (): void => { diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index 953a180473e94..d5035a871c430 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -1,6 +1,6 @@ import posthogEE from '@posthog/ee/exports' import { customEvent, EventType, eventWithTime } from '@rrweb/types' -import { captureException } from '@sentry/react' +import { captureException, captureMessage } from '@sentry/react' import { actions, afterMount, @@ -25,7 +25,8 @@ import { chainToElements } from 'lib/utils/elements-chain' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import posthog from 'posthog-js' -import { NodeKind } from '~/queries/schema' +import { HogQLQuery, NodeKind } from '~/queries/schema' +import { hogql } from '~/queries/utils' import { AnyPropertyFilter, EncodedRecordingSnapshot, @@ -486,23 +487,34 @@ export const sessionRecordingDataLogic = kea([ return values.sessionEventsData } - const { person } = values.sessionPlayerData + if (!event.id) { + captureMessage('event id not available for matching', { + tags: { feature: 'session-recording-load-full-event-data' }, + extra: { event }, + }) + return values.sessionEventsData + } let loadedProperties: Record = existingEvent.properties - // TODO: Move this to an optimised HogQL query when available...
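+ // Fetch the full event row by uuid, bounded to a ±1s window on timestamp so the
+ // scan stays narrow; the hogql`...` tagged template handles escaping of the
+ // interpolated values.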
+ try { - const res: any = await api.query({ - kind: 'EventsQuery', - select: ['properties', 'timestamp'], - orderBy: ['timestamp ASC'], - limit: 100, - personId: String(person?.id), - after: dayjs(event.timestamp).subtract(1000, 'ms').format(), - before: dayjs(event.timestamp).add(1000, 'ms').format(), - event: existingEvent.event, - }) + const query: HogQLQuery = { + kind: NodeKind.HogQLQuery, + query: hogql`SELECT properties, uuid + FROM events + WHERE timestamp > ${dayjs(event.timestamp).subtract(1000, 'ms')} + AND timestamp < ${dayjs(event.timestamp).add(1000, 'ms')} + AND event = ${event.event} + AND uuid = ${event.id}`, + } + const response = await api.query(query) + if (response.error) { + throw new Error(response.error) + } - const result = res.results.find((x: any) => x[1] === event.timestamp) + const result = response.results.find((x: any) => { + return x[1] === event.id + }) if (result) { loadedProperties = JSON.parse(result[0]) @@ -512,7 +524,9 @@ export const sessionRecordingDataLogic = kea([ } catch (e) { // NOTE: This is not ideal but should happen so rarely that it is tolerable. existingEvent.fullyLoaded = true - captureException(e) + captureException(e, { + tags: { feature: 'session-recording-load-full-event-data' }, + }) } // here we map the events list because we want the result to be a new instance to trigger downstream recalculation diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index ca6c98088952a..cc046586fc768 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -156,6 +156,7 @@ export const urls = { organizationCreateFirst: (): string => '/create-organization', projectCreateFirst: (): string => '/organization/create-project', projectHomepage: (): string => '/', + max: (): string => '/max', settings: (section: SettingSectionId | SettingLevelId = 'project', setting?: SettingId): string => combineUrl(`/settings/${section}`, undefined, setting).url, organizationCreationConfirm: (): string => '/organization/confirm-creation', diff --git a/frontend/src/types.ts b/frontend/src/types.ts index cbc91482a2bae..d1aaa92a6007f 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -1800,6 +1800,7 @@ export interface DashboardTemplateVariableType { type: 'event' default: Record required: boolean + touched?: boolean } export type DashboardLayoutSize = 'sm' | 'xs' @@ -3861,6 +3862,7 @@ export const externalDataSources = [ 'Zendesk', 'Snowflake', 'Salesforce', + 'Vitally', ] as const export type ExternalDataSourceType = (typeof externalDataSources)[number] @@ -3924,7 +3926,7 @@ export interface ExternalDataSourceSchema extends SimpleExternalDataSourceSchema export interface ExternalDataJob { id: string created_at: string - status: 'Running' | 'Failed' | 'Completed' | 'Cancelled' + status: 'Running' | 'Failed' | 'Completed' | 'Billing limits' schema: SimpleExternalDataSourceSchema rows_synced: number latest_error: string diff --git a/hogvm/__tests__/__snapshots__/arrays.hoge b/hogvm/__tests__/__snapshots__/arrays.hoge index 4afcc23ad537a..37cfb21b32bde 100644 --- a/hogvm/__tests__/__snapshots__/arrays.hoge +++ b/hogvm/__tests__/__snapshots__/arrays.hoge @@ -24,4 +24,7 @@ 35, 32, "------", 2, "print", 1, 35, 36, 2, 33, 0, 2, "has", 2, 2, "print", 1, 35, 36, 2, 33, 2, 2, "has", 2, 2, "print", 1, 35, 36, 2, 32, "banana", 2, "has", 2, 2, "print", 1, 35, 32, "banananas", 32, "banana", 2, "has", 2, 2, "print", 1, 35, 32, "banananas", 32, "foo", 2, "has", 2, 2, "print", 1, 35, 32, "1", 32, "2", 43, 2, 32, "1", 2, "has", -2, 2, 
"print", 1, 35, 35, 35, 35] +2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 33, 1, 2, "indexOf", 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, +33, 2, 2, "indexOf", 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 33, 3, 2, "indexOf", 2, 2, "print", 1, 35, 33, 1, +33, 2, 33, 3, 43, 3, 33, 4, 2, "indexOf", 2, 2, "print", 1, 35, 52, "lambda", 1, 0, 6, 33, 2, 36, 0, 13, 38, 53, 0, 33, +1, 33, 2, 33, 3, 33, 4, 33, 5, 43, 5, 2, "arrayCount", 2, 2, "print", 1, 35, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/arrays.stdout b/hogvm/__tests__/__snapshots__/arrays.stdout index d582bb4c45219..84e5d1b160aff 100644 --- a/hogvm/__tests__/__snapshots__/arrays.stdout +++ b/hogvm/__tests__/__snapshots__/arrays.stdout @@ -46,3 +46,8 @@ false false false true +1 +2 +3 +0 +3 diff --git a/hogvm/__tests__/__snapshots__/strings.hoge b/hogvm/__tests__/__snapshots__/strings.hoge index 0c3118d3a043e..2143350e53df1 100644 --- a/hogvm/__tests__/__snapshots__/strings.hoge +++ b/hogvm/__tests__/__snapshots__/strings.hoge @@ -8,4 +8,9 @@ "print", 1, 35, 32, "banana", 32, "n", 2, "like", 2, 2, "print", 1, 35, 32, "banana", 32, "naan", 2, "like", 2, 2, "print", 1, 35, 32, "banana", 32, "N", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "n", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "naan", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "N", 2, "notLike", 2, 2, -"print", 1, 35, 32, "banana", 32, "NO", 2, "notILike", 2, 2, "print", 1, 35] +"print", 1, 35, 32, "banana", 32, "NO", 2, "notILike", 2, 2, "print", 1, 35, 32, "abc", 32, "a", 2, "position", 2, 2, +"print", 1, 35, 32, "abc", 32, "b", 2, "position", 2, 2, "print", 1, 35, 32, "abc", 32, "c", 2, "position", 2, 2, +"print", 1, 35, 32, "abc", 32, "d", 2, "position", 2, 2, "print", 1, 35, 32, "AbC", 32, "a", 2, +"positionCaseInsensitive", 2, 2, "print", 1, 35, 32, "AbC", 32, "b", 2, "positionCaseInsensitive", 2, 2, "print", 1, 35, +32, "AbC", 32, "c", 2, "positionCaseInsensitive", 2, 2, "print", 1, 35, 32, "AbC", 32, "d", 2, +"positionCaseInsensitive", 2, 2, "print", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/strings.stdout b/hogvm/__tests__/__snapshots__/strings.stdout index 4c5bfc110b5b7..57808971dd3f3 100644 --- a/hogvm/__tests__/__snapshots__/strings.stdout +++ b/hogvm/__tests__/__snapshots__/strings.stdout @@ -16,3 +16,11 @@ true false true true +1 +2 +3 +0 +1 +2 +3 +0 diff --git a/hogvm/__tests__/arrays.hog b/hogvm/__tests__/arrays.hog index 6f934807566d9..67d07c451a1ff 100644 --- a/hogvm/__tests__/arrays.hog +++ b/hogvm/__tests__/arrays.hog @@ -64,3 +64,10 @@ print(has(arr, 'banana')) print(has('banananas', 'banana')) print(has('banananas', 'foo')) print(has(['1', '2'], '1')) + +print(indexOf([1,2,3], 1)) // 1 +print(indexOf([1,2,3], 2)) // 2 +print(indexOf([1,2,3], 3)) // 3 +print(indexOf([1,2,3], 4)) // 0 + +print(arrayCount(x -> x > 2, [1,2,3,4,5])) // 3 \ No newline at end of file diff --git a/hogvm/__tests__/strings.hog b/hogvm/__tests__/strings.hog index 322e08bdc51ef..e8d0eab7b8e93 100644 --- a/hogvm/__tests__/strings.hog +++ b/hogvm/__tests__/strings.hog @@ -16,3 +16,11 @@ print(ilike('banana', 'n')) print(ilike('banana', 'naan')) print(notLike('banana', 'N')) print(notILike('banana', 'NO')) +print(position('abc', 'a')) // 1 +print(position('abc', 'b')) // 2 +print(position('abc', 'c')) // 3 +print(position('abc', 'd')) // 0 +print(positionCaseInsensitive('AbC', 'a')) // 1 +print(positionCaseInsensitive('AbC', 'b')) // 2 +print(positionCaseInsensitive('AbC', 'c')) // 3 +print(positionCaseInsensitive('AbC', 'd')) 
// 0 diff --git a/hogvm/python/stl/__init__.py b/hogvm/python/stl/__init__.py index feaae8a899aeb..a20b39d8c00bf 100644 --- a/hogvm/python/stl/__init__.py +++ b/hogvm/python/stl/__init__.py @@ -386,6 +386,20 @@ def _typeof(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]] "replaceAll": STLFunction( fn=lambda args, team, stdout, timeout: args[0].replace(args[1], args[2]), minArgs=3, maxArgs=3 ), + "position": STLFunction( + fn=lambda args, team, stdout, timeout: (args[0].index(str(args[1])) + 1) + if isinstance(args[0], str) and str(args[1]) in args[0] + else 0, + minArgs=2, + maxArgs=2, + ), + "positionCaseInsensitive": STLFunction( + fn=lambda args, team, stdout, timeout: (args[0].lower().index(str(args[1]).lower()) + 1) + if isinstance(args[0], str) and str(args[1]).lower() in args[0].lower() + else 0, + minArgs=2, + maxArgs=2, + ), "trim": STLFunction(fn=trim, minArgs=1, maxArgs=2), "trimLeft": STLFunction(fn=trimLeft, minArgs=1, maxArgs=2), "trimRight": STLFunction(fn=trimRight, minArgs=1, maxArgs=2), @@ -398,6 +412,13 @@ def _typeof(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]] ), "keys": STLFunction(fn=keys, minArgs=1, maxArgs=1), "values": STLFunction(fn=values, minArgs=1, maxArgs=1), + "indexOf": STLFunction( + fn=lambda args, team, stdout, timeout: (args[0].index(args[1]) + 1) + if isinstance(args[0], list) and args[1] in args[0] + else 0, + minArgs=2, + maxArgs=2, + ), "arrayPushBack": STLFunction(fn=arrayPushBack, minArgs=2, maxArgs=2), "arrayPushFront": STLFunction(fn=arrayPushFront, minArgs=2, maxArgs=2), "arrayPopBack": STLFunction(fn=arrayPopBack, minArgs=1, maxArgs=1), diff --git a/hogvm/python/stl/bytecode.py b/hogvm/python/stl/bytecode.py index c84573e89e300..748128e075dce 100644 --- a/hogvm/python/stl/bytecode.py +++ b/hogvm/python/stl/bytecode.py @@ -1,6 +1,7 @@ # This file is generated by hogvm/stl/compile.py # fmt: off BYTECODE_STL: dict[str, tuple[list[str], list]] = { + "arrayCount": (["func", "arr"], [33, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 31, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 7, 33, 1, 36, 2, 6, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -38, 35, 35, 35, 35, 35, 36, 2, 38, 35]), "arrayExists": (["func", "arr"], [36, 1, 36, 2, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 36, 5, 36, 4, 16, 40, 26, 36, 3, 36, 4, 45, 37, 6, 36, 6, 36, 0, 54, 1, 40, 2, 29, 38, 36, 4, 33, 1, 6, 37, 4, 39, -33, 35, 35, 35, 35, 35, 30, 38]), "arrayFilter": (["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 33, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 9, 36, 2, 36, 7, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -40, 35, 35, 35, 35, 35, 36, 2, 38, 35]), "arrayMap": (["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 29, 36, 4, 36, 5, 45, 37, 7, 36, 2, 36, 7, 36, 0, 54, 1, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -36, 35, 35, 35, 35, 35, 36, 2, 38, 35]), diff --git a/hogvm/stl/src/arrayCount.hog b/hogvm/stl/src/arrayCount.hog new file mode 100644 index 0000000000000..da2cac89c27fa --- /dev/null +++ b/hogvm/stl/src/arrayCount.hog @@ -0,0 +1,9 @@ +fn arrayCount(func, arr) { + let count := 0 + for (let i in arr) { + if (func(i)) { + count := count + 1 + } + } + return count +} diff --git a/hogvm/typescript/package.json b/hogvm/typescript/package.json index 8a52d73906343..c02fa2faf7f9c 100644 --- a/hogvm/typescript/package.json 
+++ b/hogvm/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@posthog/hogvm", - "version": "1.0.44", + "version": "1.0.46", "description": "PostHog Hog Virtual Machine", "types": "dist/index.d.ts", "source": "src/index.ts", diff --git a/hogvm/typescript/src/stl/bytecode.ts b/hogvm/typescript/src/stl/bytecode.ts index d966188adaeaa..50e417c8ccb90 100644 --- a/hogvm/typescript/src/stl/bytecode.ts +++ b/hogvm/typescript/src/stl/bytecode.ts @@ -1,5 +1,6 @@ // This file is generated by hogvm/stl/compile.py export const BYTECODE_STL: Record = { + "arrayCount": [["func", "arr"], [33, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 31, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 7, 33, 1, 36, 2, 6, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -38, 35, 35, 35, 35, 35, 36, 2, 38, 35]], "arrayExists": [["func", "arr"], [36, 1, 36, 2, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 36, 5, 36, 4, 16, 40, 26, 36, 3, 36, 4, 45, 37, 6, 36, 6, 36, 0, 54, 1, 40, 2, 29, 38, 36, 4, 33, 1, 6, 37, 4, 39, -33, 35, 35, 35, 35, 35, 30, 38]], "arrayFilter": [["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 33, 36, 4, 36, 5, 45, 37, 7, 36, 7, 36, 0, 54, 1, 40, 9, 36, 2, 36, 7, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -40, 35, 35, 35, 35, 35, 36, 2, 38, 35]], "arrayMap": [["func", "arr"], [43, 0, 36, 1, 36, 3, 2, "values", 1, 33, 1, 36, 4, 2, "length", 1, 31, 36, 6, 36, 5, 16, 40, 29, 36, 4, 36, 5, 45, 37, 7, 36, 2, 36, 7, 36, 0, 54, 1, 2, "arrayPushBack", 2, 37, 2, 36, 5, 33, 1, 6, 37, 5, 39, -36, 35, 35, 35, 35, 35, 36, 2, 38, 35]], diff --git a/hogvm/typescript/src/stl/stl.ts b/hogvm/typescript/src/stl/stl.ts index 1e8282730900b..bb2fcb5280a6b 100644 --- a/hogvm/typescript/src/stl/stl.ts +++ b/hogvm/typescript/src/stl/stl.ts @@ -294,6 +294,28 @@ export const STL: Record = { minArgs: 3, maxArgs: 3, }, + position: { + fn: ([str, elem]) => { + if (typeof str === 'string') { + return str.indexOf(String(elem)) + 1 + } else { + return 0 + } + }, + minArgs: 2, + maxArgs: 2, + }, + positionCaseInsensitive: { + fn: ([str, elem]) => { + if (typeof str === 'string') { + return str.toLowerCase().indexOf(String(elem).toLowerCase()) + 1 + } else { + return 0 + } + }, + minArgs: 2, + maxArgs: 2, + }, trim: { fn: ([str, char]) => { if (char === null || char === undefined) { @@ -418,6 +440,17 @@ export const STL: Record = { minArgs: 1, maxArgs: 1, }, + indexOf: { + fn: ([arrOrString, elem]) => { + if (Array.isArray(arrOrString)) { + return arrOrString.indexOf(elem) + 1 + } else { + return 0 + } + }, + minArgs: 2, + maxArgs: 2, + }, arrayPushBack: { fn: ([arr, item]) => { if (!Array.isArray(arr)) { diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 85b48ed0ed16f..54f01686c7aee 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0465_datawarehouse_stripe_account +posthog: 0466_alter_externaldatasource_source_type sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 5b6421e96761f..a3805439479ff 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -27,6 +27,30 @@ posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Un 
posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 
"module" to "SourceInfo" has incompatible type Module | None; expected Module [arg-type] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload] posthog/utils.py:0: note: Possible overload variants: posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any] @@ -87,8 +111,6 @@ posthog/batch_exports/service.py:0: error: Argument 4 to "backfill_export" has i posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/hog_functions/hog_function.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] -posthog/models/hog_functions/hog_function.py:0: error: Argument 2 to "get_hog_function_status" has incompatible type "UUID"; expected "str" [arg-type] -posthog/models/hog_functions/hog_function.py:0: error: Argument 2 to "patch_hog_function_status" has incompatible type "UUID"; expected "str" [arg-type] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "type[User]", base class "BaseManager" defined the type as "type[_T]") [assignment] posthog/models/user.py:0: error: Cannot override class variable (previously declared on base class "AbstractBaseUser") with instance variable [misc] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "None", base class "AbstractUser" defined the type as "CharField[str | int | Combinable, str]") [assignment] @@ -234,16 +256,13 @@ posthog/hogql/printer.py:0: error: "FieldOrTable" has no attribute "name" [attr posthog/hogql/printer.py:0: error: "FieldOrTable" has no attribute "name" [attr-defined] posthog/hogql/printer.py:0: error: Argument 2 to "_get_materialized_column" of "_Printer" has incompatible type "str | int"; expected "str" [arg-type] posthog/hogql/printer.py:0: error: Argument 1 to "_print_identifier" of "_Printer" has incompatible type "str | None"; expected "str" [arg-type] -posthog/user_permissions.py:0: error: Key expression in dictionary comprehension has incompatible type "UUID"; expected type "int" [misc] posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "Level | None") [return-value] posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "Level | None") [return-value] posthog/user_permissions.py:0: error: Incompatible return value 
type (got "int", expected "RestrictionLevel") [return-value] -posthog/tasks/warehouse.py:0: error: Argument 1 to "cancel_external_data_workflow" has incompatible type "str | None"; expected "str" [arg-type] posthog/tasks/update_survey_iteration.py:0: error: Incompatible types in assignment (expression has type "ForeignKey[Any, _ST] | Any", variable has type "FeatureFlag | Combinable | None") [assignment] posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "save" [union-attr] -posthog/tasks/update_survey_iteration.py:0: error: Incompatible type for "key" of "FeatureFlag" (got "UUID", expected "str | int | Combinable") [misc] posthog/permissions.py:0: error: Argument 2 to "feature_enabled" has incompatible type "str | None"; expected "str" [arg-type] posthog/models/event/util.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "datetime") [assignment] posthog/models/event/util.py:0: error: Module has no attribute "utc" [attr-defined] @@ -264,8 +283,6 @@ posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/api/shared.py:0: error: Incompatible return value type (got "int | None", expected "Level | None") [return-value] -ee/billing/quota_limiting.py:0: error: Argument 2 to "feature_enabled" has incompatible type "UUID"; expected "str" [arg-type] -ee/billing/quota_limiting.py:0: error: List comprehension has incompatible type List[int]; expected List[str] [misc] ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] ee/billing/quota_limiting.py:0: error: "object" has no attribute "get" [attr-defined] ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] @@ -311,7 +328,6 @@ posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has i posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type] posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment] posthog/api/utils.py:0: error: Argument 1 to "UUID" has incompatible type "int | str"; expected "str | None" [arg-type] -posthog/api/email_verification.py:0: error: Argument 2 to "feature_enabled" has incompatible type "UUID"; expected "str" [arg-type] posthog/queries/trends/util.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | None"; expected "str" [arg-type] posthog/hogql/property.py:0: error: Incompatible type for lookup 'id': (got "str | int | list[str]", expected "str | int") [misc] posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc] @@ -354,18 +370,12 @@ ee/clickhouse/queries/funnels/funnel_correlation.py:0: error: Statement is unrea posthog/api/insight.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] 
posthog/api/dashboards/dashboard.py:0: error: Argument 1 to "dashboard_queryset" of "DashboardTile" has incompatible type "DashboardTile_RelatedManager"; expected "QuerySet[Any, Any]" [arg-type] posthog/api/person.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] -posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Cannot determine type of "group_properties_filter_group" [has-type] posthog/caching/insight_caching_state.py:0: error: Argument "params" to "execute" of "CursorWrapper" has incompatible type "list[object]"; expected "Sequence[bool | int | float | Decimal | str | <6 more items> | None] | Mapping[str, bool | int | float | Decimal | str | <6 more items> | None] | None" [arg-type] posthog/api/cohort.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] -posthog/api/cohort.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/api/cohort.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/cohort.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/caching/insights_api.py:0: error: Unsupported operand types for >= ("datetime" and "None") [operator] posthog/caching/insights_api.py:0: note: Right operand is of type "datetime | None" @@ -373,8 +383,6 @@ posthog/api/feature_flag.py:0: error: Item "Sequence[Any]" of "Any | Sequence[An posthog/api/feature_flag.py:0: error: Item "None" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr] posthog/api/feature_flag.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/api/feature_flag.py:0: error: Argument 2 to "get_all_feature_flags" has incompatible type "str | None"; expected "str" [arg-type] -posthog/api/feature_flag.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/api/feature_flag.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Argument 1 to "append" of "list" has incompatible 
type "EventPropertyFilter"; expected "Expr" [arg-type] posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] @@ -398,8 +406,6 @@ posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "SelectU posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "sample" [union-attr] posthog/hogql_queries/insights/funnels/funnels_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/survey.py:0: error: Incompatible types in assignment (expression has type "Any | Sequence[Any] | None", variable has type "Survey | None") [assignment] -posthog/api/survey.py:0: error: Argument "item_id" to "log_activity" has incompatible type "UUID"; expected "int | str | UUIDT | None" [arg-type] -posthog/api/survey.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/survey.py:0: error: Item "list[_ErrorFullDetails]" of "_FullDetailDict | list[_ErrorFullDetails] | dict[str, _ErrorFullDetails]" has no attribute "get" [union-attr] posthog/api/survey.py:0: error: Item "object" of "object | Any" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql_queries/web_analytics/web_overview.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] @@ -410,7 +416,6 @@ posthog/api/user.py:0: error: "User" has no attribute "social_auth" [attr-defin ee/clickhouse/queries/related_actors_query.py:0: error: Argument 1 to "_query_related_groups" of "RelatedActorsQuery" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type] ee/api/test/base.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "License") [assignment] ee/api/test/base.py:0: error: "setUpTestData" undefined in superclass [misc] -posthog/warehouse/external_data_source/jobs.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "Status") [assignment] posthog/warehouse/external_data_source/jobs.py:0: error: Incompatible type for lookup 'id': (got "UUID | None", expected "UUID | str") [misc] posthog/warehouse/api/test/test_table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_key" [union-attr] posthog/warehouse/api/test/test_table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_secret" [union-attr] @@ -431,9 +436,23 @@ posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "delete" [union-attr] -posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/test/activity_logging/test_activity_logging.py:0: error: Argument 
"organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/tasks/test/test_update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator] posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants: @@ -576,7 +595,6 @@ posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | Non posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] posthog/api/notebook.py:0: error: Incompatible types in assignment (expression has type "int", variable has type "str | None") [assignment] -posthog/api/exports.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "DataWarehouseCredential | Combinable | None") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: 
Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment] @@ -589,21 +607,6 @@ posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: d posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "dict[str, list[tuple[str, str]]]"; expected "list[Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "module" to "SourceInfo" has 
incompatible type Module | None; expected Module [arg-type] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a return type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] @@ -762,26 +765,6 @@ posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] -posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] -posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] posthog/session_recordings/session_recording_api.py:0: error: Argument "team_id" to "get_realtime_snapshots" has incompatible type "int"; expected "str" 
[arg-type] posthog/session_recordings/session_recording_api.py:0: error: Value of type variable "SupportsRichComparisonT" of "sorted" cannot be "str | None" [type-var] posthog/session_recordings/session_recording_api.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] @@ -823,10 +806,8 @@ posthog/warehouse/external_data_source/source.py:0: error: Argument 1 to "_creat posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment] posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] posthog/api/plugin.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | int | Combinable") [assignment] posthog/api/plugin.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | int | Combinable") [assignment] @@ -836,10 +817,7 @@ posthog/api/plugin.py:0: error: Incompatible type for "file_name" of "PluginAtta posthog/api/plugin.py:0: error: Incompatible type for "file_size" of "PluginAttachment" (got "int | None", expected "float | int | str | Combinable") [misc] posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] -posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] @@ -908,7 +886,6 @@ posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "s posthog/api/test/batch_exports/test_update.py:0: error: Unsupported target for indexed assignment ("Collection[str]") [index] posthog/api/test/batch_exports/test_update.py:0: error: Unsupported target for indexed 
assignment ("Collection[str]") [index] posthog/api/test/batch_exports/test_update.py:0: error: Dict entry 1 has incompatible type "str": "dict[str, Collection[str]]"; expected "str": "str" [dict-item] -posthog/api/test/batch_exports/test_update.py:0: error: Argument 3 to "get_batch_export_ok" has incompatible type "UUID"; expected "int" [arg-type] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] diff --git a/package.json b/package.json index 8a67dc54ba8c3..f6837fc45cf67 100644 --- a/package.json +++ b/package.json @@ -77,8 +77,8 @@ "@medv/finder": "^3.1.0", "@microlink/react-json-view": "^1.21.3", "@monaco-editor/react": "4.6.0", - "@posthog/hogvm": "^1.0.44", - "@posthog/icons": "0.7.3", + "@posthog/hogvm": "^1.0.46", + "@posthog/icons": "0.8.1", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", "@rrweb/types": "2.0.0-alpha.13", diff --git a/plugin-server/package.json b/plugin-server/package.json index 75a137b2e1924..b6b9147b14bbf 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -53,7 +53,7 @@ "@maxmind/geoip2-node": "^3.4.0", "@posthog/clickhouse": "^1.7.0", "@posthog/cyclotron": "file:../rust/cyclotron-node", - "@posthog/hogvm": "^1.0.44", + "@posthog/hogvm": "^1.0.46", "@posthog/plugin-scaffold": "1.4.4", "@sentry/node": "^7.49.0", "@sentry/profiling-node": "^0.3.0", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index 08f08046ad14c..dd40fb3bf24e3 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -47,8 +47,8 @@ dependencies: specifier: file:../rust/cyclotron-node version: file:../rust/cyclotron-node '@posthog/hogvm': - specifier: ^1.0.44 - version: 1.0.44(luxon@3.4.4) + specifier: ^1.0.46 + version: 1.0.46(luxon@3.4.4) '@posthog/plugin-scaffold': specifier: 1.4.4 version: 1.4.4 @@ -3116,8 +3116,8 @@ packages: engines: {node: '>=12'} dev: false - /@posthog/hogvm@1.0.44(luxon@3.4.4): - resolution: {integrity: sha512-Ss7gTPyvPyviNipVQOqnsCa66IMmMf+DEg7iX/vQMcWDuFwvHNbdze1iwFVoXCjLci+h8SW2rOMPB0S5A2jJXg==} + /@posthog/hogvm@1.0.46(luxon@3.4.4): + resolution: {integrity: sha512-oTXytxHImxcAUYvK+QVI5nRc3YnVSbn01rFlOrpeTYjW5oqYKMR8nYsjhHGLt5p0pnfeBhmQjBM5mJtVs5bA2Q==} peerDependencies: luxon: ^3.4.4 dependencies: diff --git a/plugin-server/src/cdp/cdp-consumers.ts b/plugin-server/src/cdp/cdp-consumers.ts index 984cd03a23cfd..8c4eec5e11951 100644 --- a/plugin-server/src/cdp/cdp-consumers.ts +++ b/plugin-server/src/cdp/cdp-consumers.ts @@ -44,6 +44,7 @@ import { createInvocation, gzipObject, prepareLogEntriesForClickhouse, + serializeInvocation, unGzipObject, } from './utils' @@ -217,12 +218,7 @@ abstract class CdpConsumerBase { // For now we just enqueue to kafka // For kafka style this is overkill to enqueue this way but it simplifies migrating to the new system - const serializedInvocation: HogFunctionInvocationSerialized = { - ...invocation, - hogFunctionId: invocation.hogFunction.id, - } - - delete (serializedInvocation as any).hogFunction + const serializedInvocation = serializeInvocation(invocation) const request: HogFunctionInvocationSerializedCompressed = { state: await gzipObject(serializedInvocation), @@ -233,7 +229,7 @@ abstract class CdpConsumerBase { this.messagesToProduce.push({ topic: 
KAFKA_CDP_FUNCTION_CALLBACKS, value: request, - key: invocation.hogFunction.id, + key: `${invocation.hogFunction.id}:${invocation.id}`, }) } @@ -241,20 +237,9 @@ abstract class CdpConsumerBase { await runInstrumentedFunction({ statsKey: `cdpConsumer.handleEachBatch.produceResults`, func: async () => { - console.log('Processing invocations results', results.length) - await Promise.all( results.map(async (result) => { // Tricky: We want to pull all the logs out as we don't want them to be passed around to any subsequent functions - if (result.finished || result.error) { - this.produceAppMetric({ - team_id: result.invocation.teamId, - app_source_id: result.invocation.hogFunction.id, - metric_kind: result.error ? 'failure' : 'success', - metric_name: result.error ? 'failed' : 'succeeded', - count: 1, - }) - } this.produceLogs(result) @@ -274,8 +259,16 @@ abstract class CdpConsumerBase { }) } - if (!result.finished) { - // If it isn't finished then we need to put it back on the queue + if (result.finished || result.error) { + this.produceAppMetric({ + team_id: result.invocation.teamId, + app_source_id: result.invocation.hogFunction.id, + metric_kind: result.error ? 'failure' : 'success', + metric_name: result.error ? 'failed' : 'succeeded', + count: 1, + }) + } else { + // Means there is follow up so we enqueue it await this.queueInvocation(result.invocation) } }) @@ -553,8 +546,15 @@ export class CdpFunctionCallbackConsumer extends CdpConsumerBase { // NOTE: In the future this service will never do fetching (unless we decide we want to do it in node at some point) // This is just "for now" to support the transition to cyclotron const fetchQueue = invocations.filter((item) => item.queue === 'fetch') - const fetchResults = await this.runManyWithHeartbeat(fetchQueue, (item) => - this.fetchExecutor.execute(item) + + const fetchResults = await Promise.all( + fetchQueue.map((item) => { + return runInstrumentedFunction({ + statsKey: `cdpConsumer.handleEachBatch.fetchExecutor.execute`, + func: () => this.fetchExecutor.execute(item), + timeout: 1000, + }) + }) ) const hogQueue = invocations.filter((item) => item.queue === 'hog') @@ -605,11 +605,11 @@ export class CdpFunctionCallbackConsumer extends CdpConsumerBase { invocationSerialized.queueParameters = item.asyncFunctionResponse } - const hogFunction = - invocationSerialized.hogFunction ?? - (invocationSerialized.hogFunctionId - ? this.hogFunctionManager.getHogFunction(invocationSerialized.hogFunctionId) - : undefined) + const hogFunctionId = + invocationSerialized.hogFunctionId ?? invocationSerialized.hogFunction?.id + const hogFunction = hogFunctionId + ? 
this.hogFunctionManager.getHogFunction(hogFunctionId) + : undefined if (!hogFunction) { status.error('Error finding hog function', { @@ -633,12 +633,6 @@ export class CdpFunctionCallbackConsumer extends CdpConsumerBase { }) ) - invocations.forEach((item) => { - if (!item.hogFunction?.id) { - console.error('No hog function id', item) - } - }) - return invocations }, }) diff --git a/plugin-server/src/cdp/fetch-executor.ts b/plugin-server/src/cdp/fetch-executor.ts index b2e99ef0a1836..89900215ec1fd 100644 --- a/plugin-server/src/cdp/fetch-executor.ts +++ b/plugin-server/src/cdp/fetch-executor.ts @@ -12,7 +12,7 @@ import { HogFunctionQueueParametersFetchRequest, HogFunctionQueueParametersFetchResponse, } from './types' -import { gzipObject } from './utils' +import { gzipObject, serializeInvocation } from './utils' export const BUCKETS_KB_WRITTEN = [0, 128, 512, 1024, 2024, 4096, 10240, Infinity] @@ -52,7 +52,7 @@ export class FetchExecutor { if (this.hogHookEnabledForTeams(invocation.teamId)) { // This is very temporary until we are committed to Cyclotron const payload: HogFunctionInvocationAsyncRequest = { - state: await gzipObject(invocation), + state: await gzipObject(serializeInvocation(invocation)), teamId: invocation.teamId, hogFunctionId: invocation.hogFunction.id, asyncFunctionRequest: { diff --git a/plugin-server/src/cdp/types.ts b/plugin-server/src/cdp/types.ts index 3c4eed47c6d41..9d277bc4edfa8 100644 --- a/plugin-server/src/cdp/types.ts +++ b/plugin-server/src/cdp/types.ts @@ -47,20 +47,6 @@ export interface HogFunctionFilters { bytecode?: HogBytecode } -// We have a "parsed" clickhouse event type to make it easier to work with calls from kafka as well as those from the frontend -export interface ParsedClickhouseEvent { - uuid: string - event: string - team_id: number - distinct_id: string - person_id?: string - timestamp: string - created_at: string - properties: Record<string, any> - person_created_at?: string - person_properties: Record<string, any> -} - export type GroupType = { id: string // the "key" of the group type: string @@ -105,6 +91,10 @@ export type HogFunctionFilterGlobals = { event: string timestamp: string elements_chain: string + elements_chain_href: string + elements_chain_texts: string[] + elements_chain_ids: string[] + elements_chain_elements: string[] properties: Record<string, any> person?: { diff --git a/plugin-server/src/cdp/utils.ts b/plugin-server/src/cdp/utils.ts index da1d64273f7aa..375baa91a94e3 100644 --- a/plugin-server/src/cdp/utils.ts +++ b/plugin-server/src/cdp/utils.ts @@ -1,6 +1,7 @@ // NOTE: PostIngestionEvent is our context event - it should never be sent directly to an output, but rather transformed into a lightweight schema import { DateTime } from 'luxon' +import RE2 from 're2' import { gunzip, gzip } from 'zlib' import { RawClickHouseEvent, Team, TimestampFormat } from '../types' @@ -12,9 +13,9 @@ import { HogFunctionInvocation, HogFunctionInvocationGlobals, HogFunctionInvocationResult, + HogFunctionInvocationSerialized, HogFunctionLogEntrySerialized, HogFunctionType, - ParsedClickhouseEvent, } from './types' export const PERSON_DEFAULT_DISPLAY_NAME_PROPERTIES = [ @@ -38,26 +39,6 @@ const getPersonDisplayName = (team: Team, distinctId: string, properties: Record return (customIdentifier || distinctId)?.trim() } -export function convertToParsedClickhouseEvent(event: RawClickHouseEvent): ParsedClickhouseEvent { - const properties = event.properties ?
JSON.parse(event.properties) : {} - if (event.elements_chain) { - properties['$elements_chain'] = event.elements_chain - } - - return { - uuid: event.uuid, - event: event.event, - team_id: event.team_id, - distinct_id: event.distinct_id, - person_id: event.person_id, - timestamp: clickHouseTimestampToISO(event.timestamp), - created_at: clickHouseTimestampToISO(event.created_at), - properties: properties, - person_created_at: event.person_created_at ? clickHouseTimestampToISO(event.person_created_at) : undefined, - person_properties: event.person_properties ? JSON.parse(event.person_properties) : {}, - } -} - // that we can keep to as a contract export function convertToHogFunctionInvocationGlobals( event: RawClickHouseEvent, @@ -107,6 +88,46 @@ export function convertToHogFunctionInvocationGlobals( return context } +function getElementsChainHref(elementsChain: string): string { + // Adapted from SQL: extract(elements_chain, '(?::|\")href="(.*?)"'), + const hrefRegex = new RE2(/(?::|")href="(.*?)"/) + const hrefMatch = hrefRegex.exec(elementsChain) + return hrefMatch ? hrefMatch[1] : '' +} + +function getElementsChainTexts(elementsChain: string): string[] { + // Adapted from SQL: arrayDistinct(extractAll(elements_chain, '(?::|\")text="(.*?)"')), + const textRegex = new RE2(/(?::|")text="(.*?)"/g) + const textMatches = new Set<string>() + let textMatch + while ((textMatch = textRegex.exec(elementsChain)) !== null) { + textMatches.add(textMatch[1]) + } + return Array.from(textMatches) +} + +function getElementsChainIds(elementsChain: string): string[] { + // Adapted from SQL: arrayDistinct(extractAll(elements_chain, '(?::|\")attr_id="(.*?)"')), + const idRegex = new RE2(/(?::|")attr_id="(.*?)"/g) + const idMatches = new Set<string>() + let idMatch + while ((idMatch = idRegex.exec(elementsChain)) !== null) { + idMatches.add(idMatch[1]) + } + return Array.from(idMatches) +} + +function getElementsChainElements(elementsChain: string): string[] { + // Adapted from SQL: arrayDistinct(extractAll(elements_chain, '(?:^|;)(a|button|form|input|select|textarea|label)(?:\\.|$|:)')) + const elementRegex = new RE2(/(?:^|;)(a|button|form|input|select|textarea|label)(?:\.|$|:)/g) + const elementMatches = new Set<string>() + let elementMatch + while ((elementMatch = elementRegex.exec(elementsChain)) !== null) { + elementMatches.add(elementMatch[1]) + } + return Array.from(elementMatches) +} + export function convertToHogFunctionFilterGlobal(globals: HogFunctionInvocationGlobals): HogFunctionFilterGlobals { const groups: Record<string, any> = {} @@ -116,14 +137,53 @@ export function convertToHogFunctionFilterGlobal(globals: HogFunctionInvocationG } } - return { + const elementsChain = globals.event.properties['$elements_chain'] + const response = { event: globals.event.name, - elements_chain: globals.event.properties['$elements_chain'], + elements_chain: elementsChain, + elements_chain_href: '', + elements_chain_texts: [] as string[], + elements_chain_ids: [] as string[], + elements_chain_elements: [] as string[], timestamp: globals.event.timestamp, properties: globals.event.properties, person: globals.person ? { properties: globals.person.properties } : undefined, ...groups, + } satisfies HogFunctionFilterGlobals + + // The elements_chain_* fields are stored as materialized columns in ClickHouse. + // We use the same formula to calculate them here.
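As an aside before the lazy-getter wiring that follows: the four extraction helpers above are direct ports of the ClickHouse materialized-column formulas quoted in their comments. A minimal standalone Python sketch of the same logic can be used to sanity-check them — stdlib `re` stands in for RE2, order-preserving dedupe stands in for a `Set`, and the function name and dict shape are illustrative, not part of this PR:

import re

def elements_chain_columns(elements_chain: str) -> dict:
    """Pure-Python port of the elements_chain_* formulas (assumes re and RE2 agree on these patterns)."""
    def dedupe(xs: list) -> list:
        # arrayDistinct equivalent: drop duplicates, keep first-seen order
        return list(dict.fromkeys(xs))

    href = re.search(r'(?::|")href="(.*?)"', elements_chain)
    return {
        "elements_chain_href": href.group(1) if href else "",
        "elements_chain_texts": dedupe(re.findall(r'(?::|")text="(.*?)"', elements_chain)),
        "elements_chain_ids": dedupe(re.findall(r'(?::|")attr_id="(.*?)"', elements_chain)),
        "elements_chain_elements": dedupe(
            re.findall(r"(?:^|;)(a|button|form|input|select|textarea|label)(?:\.|$|:)", elements_chain)
        ),
    }

# e.g. elements_chain_columns('a.Link:attr_id="home"text="Home"')
# -> {'elements_chain_href': '', 'elements_chain_texts': ['Home'],
#     'elements_chain_ids': ['home'], 'elements_chain_elements': ['a']}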
+ if (elementsChain) { + const cache: Record<string, any> = {} + Object.defineProperties(response, { + elements_chain_href: { + get: () => { + cache.elements_chain_href ??= getElementsChainHref(elementsChain) + return cache.elements_chain_href + }, + }, + elements_chain_texts: { + get: () => { + cache.elements_chain_texts ??= getElementsChainTexts(elementsChain) + return cache.elements_chain_texts + }, + }, + elements_chain_ids: { + get: () => { + cache.elements_chain_ids ??= getElementsChainIds(elementsChain) + return cache.elements_chain_ids + }, + }, + elements_chain_elements: { + get: () => { + cache.elements_chain_elements ??= getElementsChainElements(elementsChain) + return cache.elements_chain_elements + }, + }, + }) } + + return response } export const convertToCaptureEvent = (event: HogFunctionCapturedEvent, team: Team): any => { @@ -224,3 +284,13 @@ export function createInvocation( timings: [], } } + +export function serializeInvocation(invocation: HogFunctionInvocation): HogFunctionInvocationSerialized { + const serializedInvocation: HogFunctionInvocationSerialized = { + ...invocation, + hogFunctionId: invocation.hogFunction.id, + } + + delete (serializedInvocation as any).hogFunction + return serializedInvocation +} diff --git a/plugin-server/src/main/ingestion-queues/session-recording/utils.ts b/plugin-server/src/main/ingestion-queues/session-recording/utils.ts index 94b930625b41d..16b336d8f952c 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/utils.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/utils.ts @@ -324,21 +324,21 @@ export const parseKafkaBatch = async ( continue } - const session_key = `${parsedMessage.team_id}:${parsedMessage.session_id}` - const existingMessage = parsedSessions.get(session_key) + const sessionKey = `${parsedMessage.team_id}:${parsedMessage.session_id}` + const existingMessage = parsedSessions.get(sessionKey) + if (existingMessage === undefined) { // First message for this session key, store it and continue looping for more - parsedSessions.set(session_key, parsedMessage) + parsedSessions.set(sessionKey, parsedMessage) continue } for (const [windowId, events] of Object.entries(parsedMessage.eventsByWindowId)) { - if (existingMessage.eventsByWindowId[windowId]) { - existingMessage.eventsByWindowId[windowId].push(...events) - } else { - existingMessage.eventsByWindowId[windowId] = events - } + existingMessage.eventsByWindowId[windowId] = (existingMessage.eventsByWindowId[windowId] || []).concat( + events + ) } + existingMessage.metadata.rawSize += parsedMessage.metadata.rawSize // Update the events ranges diff --git a/plugin-server/tests/cdp/examples.ts index c662e31aae187..a3f696c32187c 100644 --- a/plugin-server/tests/cdp/examples.ts +++ b/plugin-server/tests/cdp/examples.ts @@ -417,6 +417,167 @@ export const HOG_FILTERS_EXAMPLES: Record> = { diff --git a/plugin-server/tests/cdp/hog-executor.test.ts index dc6350e0bb3d2..7740078fe6268 100644 --- a/plugin-server/tests/cdp/hog-executor.test.ts +++ b/plugin-server/tests/cdp/hog-executor.test.ts @@ -233,6 +233,156 @@ describe('Hog Executor', () => { expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) }) + + it('can use elements_chain_texts', () => { + const fn = createHogFunction({ + ...HOG_EXAMPLES.simple_fetch, + ...HOG_INPUTS_EXAMPLES.simple_fetch, + ...HOG_FILTERS_EXAMPLES.elements_text_filter, + }) + +
mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) + const elementsChain = (buttonText: string) => + `span.LemonButton__content:attr__class="LemonButton__content"nth-child="2"nth-of-type="2"text="${buttonText}";span.LemonButton__chrome:attr__class="LemonButton__chrome"nth-child="1"nth-of-type="1";button.LemonButton.LemonButton--has-icon.LemonButton--secondary.LemonButton--status-default:attr__class="LemonButton LemonButton--secondary LemonButton--status-default LemonButton--has-icon"attr__type="button"nth-child="1"nth-of-type="1"text="${buttonText}";div.flex.gap-4.items-center:attr__class="flex gap-4 items-center"nth-child="1"nth-of-type="1";div.flex.flex-wrap.gap-4.justify-between:attr__class="flex gap-4 justify-between flex-wrap"nth-child="3"nth-of-type="3";div.flex.flex-1.flex-col.gap-4.h-full.relative.w-full:attr__class="relative w-full flex flex-col gap-4 flex-1 h-full"nth-child="1"nth-of-type="1";div.LemonTabs__content:attr__class="LemonTabs__content"nth-child="2"nth-of-type="1";div.LemonTabs.LemonTabs--medium:attr__class="LemonTabs LemonTabs--medium"attr__style="--lemon-tabs-slider-width: 48px; --lemon-tabs-slider-offset: 0px;"nth-child="1"nth-of-type="1";div.Navigation3000__scene:attr__class="Navigation3000__scene"nth-child="2"nth-of-type="2";main:nth-child="2"nth-of-type="1";div.Navigation3000:attr__class="Navigation3000"nth-child="1"nth-of-type="1";div:attr__id="root"attr_id="root"nth-child="3"nth-of-type="1";body.overflow-hidden:attr__class="overflow-hidden"attr__theme="light"nth-child="2"nth-of-type="1"` + + const hogGlobals1 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('Not our text'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldntMatch = executor.findMatchingFunctions(hogGlobals1) + expect(resultsShouldntMatch.matchingFunctions).toHaveLength(0) + expect(resultsShouldntMatch.nonMatchingFunctions).toHaveLength(1) + + const hogGlobals2 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('Reload'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldMatch = executor.findMatchingFunctions(hogGlobals2) + expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) + expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) + }) + + it('can use elements_chain_href', () => { + const fn = createHogFunction({ + ...HOG_EXAMPLES.simple_fetch, + ...HOG_INPUTS_EXAMPLES.simple_fetch, + ...HOG_FILTERS_EXAMPLES.elements_href_filter, + }) + + mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) + const elementsChain = (link: string) => + `span.LemonButton__content:attr__class="LemonButton__content"attr__href="${link}"href="${link}"nth-child="2"nth-of-type="2"text="Activity";span.LemonButton__chrome:attr__class="LemonButton__chrome"nth-child="1"nth-of-type="1";a.LemonButton.LemonButton--full-width.LemonButton--has-icon.LemonButton--secondary.LemonButton--status-alt.Link.NavbarButton:attr__class="Link LemonButton LemonButton--secondary LemonButton--status-alt LemonButton--full-width LemonButton--has-icon 
NavbarButton"attr__data-attr="menu-item-activity"attr__href="${link}"href="${link}"nth-child="1"nth-of-type="1"text="Activity";li.w-full:attr__class="w-full"nth-child="6"nth-of-type="6";ul:nth-child="1"nth-of-type="1";div.Navbar3000__top.ScrollableShadows__inner:attr__class="ScrollableShadows__inner Navbar3000__top"nth-child="1"nth-of-type="1";div.ScrollableShadows.ScrollableShadows--vertical:attr__class="ScrollableShadows ScrollableShadows--vertical"nth-child="1"nth-of-type="1";div.Navbar3000__content:attr__class="Navbar3000__content"nth-child="1"nth-of-type="1";nav.Navbar3000:attr__class="Navbar3000"nth-child="1"nth-of-type="1";div.Navigation3000:attr__class="Navigation3000"nth-child="1"nth-of-type="1";div:attr__id="root"attr_id="root"nth-child="3"nth-of-type="1";body.overflow-hidden:attr__class="overflow-hidden"attr__theme="light"nth-child="2"nth-of-type="1"` + + const hogGlobals1 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('/project/1/not-a-link'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldntMatch = executor.findMatchingFunctions(hogGlobals1) + expect(resultsShouldntMatch.matchingFunctions).toHaveLength(0) + expect(resultsShouldntMatch.nonMatchingFunctions).toHaveLength(1) + + const hogGlobals2 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('/project/1/activity/explore'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldMatch = executor.findMatchingFunctions(hogGlobals2) + expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) + expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) + }) + + it('can use elements_chain_tags and _ids', () => { + const fn = createHogFunction({ + ...HOG_EXAMPLES.simple_fetch, + ...HOG_INPUTS_EXAMPLES.simple_fetch, + ...HOG_FILTERS_EXAMPLES.elements_tag_and_id_filter, + }) + + mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) + const elementsChain = (id: string) => + `a.Link.font-semibold.text-text-3000.text-xl:attr__class="Link font-semibold text-xl text-text-3000"attr__href="/project/1/dashboard/1"attr__id="${id}"attr_id="${id}"href="/project/1/dashboard/1"nth-child="1"nth-of-type="1"text="My App Dashboard";div.ProjectHomepage__dashboardheader__title:attr__class="ProjectHomepage__dashboardheader__title"nth-child="1"nth-of-type="1";div.ProjectHomepage__dashboardheader:attr__class="ProjectHomepage__dashboardheader"nth-child="2"nth-of-type="2";div.ProjectHomepage:attr__class="ProjectHomepage"nth-child="1"nth-of-type="1";div.Navigation3000__scene:attr__class="Navigation3000__scene"nth-child="2"nth-of-type="2";main:nth-child="2"nth-of-type="1";div.Navigation3000:attr__class="Navigation3000"nth-child="1"nth-of-type="1";div:attr__id="root"attr_id="root"nth-child="3"nth-of-type="1";body.overflow-hidden:attr__class="overflow-hidden"attr__theme="light"nth-child="2"nth-of-type="1"` + + const hogGlobals1 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('notfound'), + }, + timestamp: new Date().toISOString(), + 
}, + }) + + const resultsShouldntMatch = executor.findMatchingFunctions(hogGlobals1) + expect(resultsShouldntMatch.matchingFunctions).toHaveLength(0) + expect(resultsShouldntMatch.nonMatchingFunctions).toHaveLength(1) + + const hogGlobals2 = createHogExecutionGlobals({ + groups: {}, + event: { + uuid: 'uuid', + name: '$autocapture', + distinct_id: 'distinct_id', + url: 'http://localhost:8000/events/1', + properties: { + $lib_version: '1.2.3', + $elements_chain: elementsChain('homelink'), + }, + timestamp: new Date().toISOString(), + }, + }) + + const resultsShouldMatch = executor.findMatchingFunctions(hogGlobals2) + expect(resultsShouldMatch.matchingFunctions).toHaveLength(1) + expect(resultsShouldMatch.nonMatchingFunctions).toHaveLength(0) + }) }) describe('async functions', () => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 833dec3dea675..653bfb354e482 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -53,11 +53,11 @@ dependencies: specifier: 4.6.0 version: 4.6.0(monaco-editor@0.49.0)(react-dom@18.2.0)(react@18.2.0) '@posthog/hogvm': - specifier: ^1.0.44 - version: 1.0.44(luxon@3.5.0) + specifier: ^1.0.46 + version: 1.0.46(luxon@3.5.0) '@posthog/icons': - specifier: 0.7.3 - version: 0.7.3(react-dom@18.2.0)(react@18.2.0) + specifier: 0.8.1 + version: 0.8.1(react-dom@18.2.0)(react@18.2.0) '@posthog/plugin-scaffold': specifier: ^1.4.4 version: 1.4.4 @@ -5414,16 +5414,16 @@ packages: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} dev: false - /@posthog/hogvm@1.0.44(luxon@3.5.0): - resolution: {integrity: sha512-Ss7gTPyvPyviNipVQOqnsCa66IMmMf+DEg7iX/vQMcWDuFwvHNbdze1iwFVoXCjLci+h8SW2rOMPB0S5A2jJXg==} + /@posthog/hogvm@1.0.46(luxon@3.5.0): + resolution: {integrity: sha512-oTXytxHImxcAUYvK+QVI5nRc3YnVSbn01rFlOrpeTYjW5oqYKMR8nYsjhHGLt5p0pnfeBhmQjBM5mJtVs5bA2Q==} peerDependencies: luxon: ^3.4.4 dependencies: luxon: 3.5.0 dev: false - /@posthog/icons@0.7.3(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-dw8qLS6aSBGGIjo/d24/yuLOgkFAov4C7yOhomMfhce/RwS+u96XXghVolioRHppnAn48pgGnBQIXEELGVEvPA==} + /@posthog/icons@0.8.1(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-/ryXgFnWGzHmwijHE/0gQcEyAD/WkKuwf3NCMG4ibmGMpEqm/d12/+Ccuf3Zj2VZuc+0atGCHkHOiSNJ8dw97A==} peerDependencies: react: '>=16.14.0' react-dom: '>=16.14.0' @@ -15427,7 +15427,7 @@ packages: image-size: 0.5.5 make-dir: 2.1.0 mime: 1.6.0 - native-request: 1.1.2 + native-request: 1.1.0 source-map: 0.6.1 dev: true @@ -16163,8 +16163,8 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - /native-request@1.1.2: - resolution: {integrity: sha512-/etjwrK0J4Ebbcnt35VMWnfiUX/B04uwGJxyJInagxDqf2z5drSt/lsOvEMWGYunz1kaLZAFrV4NDAbOoDKvAQ==} + /native-request@1.1.0: + resolution: {integrity: sha512-uZ5rQaeRn15XmpgE0xoPL8YWqcX90VtCFglYwAgkvKM5e8fog+vePLAhHxuuv/gRkrQxIeh5U3q9sMNUrENqWw==} requiresBuild: true dev: true optional: true @@ -18298,7 +18298,7 @@ packages: react: '>=15' dependencies: react: 18.2.0 - unlayer-types: 1.65.0 + unlayer-types: 1.75.0 dev: false /react-error-boundary@3.1.4(react@18.2.0): @@ -20832,8 +20832,8 @@ packages: resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} engines: {node: '>= 10.0.0'} - /unlayer-types@1.65.0: - resolution: {integrity: sha512-fIeh/TtUhQ16A0oW3mHkcDekvhIbZbN+h0qVgBuVxjGnYME/Ma3saFRO4eKJll0YNyalvb9MdmSz0nyTgr/1/w==} + /unlayer-types@1.75.0: + resolution: {integrity: 
sha512-yB4TeimlZPKkAGIIE3Acs9e5YHjCpeGcoybq/E8w4lzNTI5EybI9Z9rwIBXhNK0miNarxEIO7rJUWB+Yeve80g==} dev: false /unpipe@1.0.0: diff --git a/posthog/api/email_verification.py b/posthog/api/email_verification.py index e22a298fe44a0..83c12d1dfe1e9 100644 --- a/posthog/api/email_verification.py +++ b/posthog/api/email_verification.py @@ -14,7 +14,7 @@ def is_email_verification_disabled(user: User) -> bool: # using disabled here so that the default state (if no flag exists) is that verification defaults to ON. return user.organization is not None and posthoganalytics.feature_enabled( VERIFICATION_DISABLED_FLAG, - user.organization.id, + str(user.organization.id), groups={"organization": str(user.organization.id)}, group_properties={"organization": {"id": str(user.organization.id)}}, ) diff --git a/posthog/api/query.py b/posthog/api/query.py index 8c71b1465017a..7e6e145f8b5e3 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -1,34 +1,37 @@ +import json import re import uuid -from django.http import JsonResponse +from django.http import JsonResponse, StreamingHttpResponse from drf_spectacular.utils import OpenApiResponse from pydantic import BaseModel -from rest_framework import status -from rest_framework import viewsets -from posthog.api.utils import action -from rest_framework.exceptions import ValidationError, NotAuthenticated +from rest_framework import status, viewsets +from rest_framework.exceptions import NotAuthenticated, ValidationError +from rest_framework.renderers import BaseRenderer from rest_framework.request import Request from rest_framework.response import Response from sentry_sdk import capture_exception, set_tag +from ee.hogai.generate_trends_agent import Conversation, GenerateTrendsAgent from posthog.api.documentation import extend_schema from posthog.api.mixins import PydanticModelMixin +from posthog.api.monitoring import Feature, monitor from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.services.query import process_query_model +from posthog.api.utils import action from posthog.clickhouse.client.execute_async import ( cancel_query, get_query_status, ) from posthog.clickhouse.query_tagging import tag_queries from posthog.errors import ExposedCHQueryError +from posthog.event_usage import report_user_action from posthog.hogql.ai import PromptUnclear, write_sql_from_prompt from posthog.hogql.errors import ExposedHogQLError from posthog.hogql_queries.query_runner import ExecutionMode, execution_mode_from_refresh from posthog.models.user import User from posthog.rate_limit import AIBurstRateThrottle, AISustainedRateThrottle, PersonalApiKeyRateThrottle from posthog.schema import QueryRequest, QueryResponseAlternative, QueryStatusResponse -from posthog.api.monitoring import monitor, Feature class QueryThrottle(PersonalApiKeyRateThrottle): @@ -36,6 +39,14 @@ class QueryThrottle(PersonalApiKeyRateThrottle): rate = "120/hour" +class ServerSentEventRenderer(BaseRenderer): + media_type = "text/event-stream" + format = "txt" + + def render(self, data, accepted_media_type=None, renderer_context=None): + return data + + class QueryViewSet(TeamAndOrgViewSetMixin, PydanticModelMixin, viewsets.ViewSet): # NOTE: Do we need to override the scopes for the "create" scope_object = "query" @@ -45,7 +56,7 @@ class QueryViewSet(TeamAndOrgViewSetMixin, PydanticModelMixin, viewsets.ViewSet) sharing_enabled_actions = ["retrieve"] def get_throttles(self): - if self.action == "draft_sql": + if self.action in ("draft_sql", "chat"): return [AIBurstRateThrottle(), 
AISustainedRateThrottle()] else: return [QueryThrottle()] @@ -144,6 +155,30 @@ def draft_sql(self, request: Request, *args, **kwargs) -> Response: raise ValidationError({"prompt": [str(e)]}, code="unclear") return Response({"sql": result}) + @action(detail=False, methods=["POST"], renderer_classes=[ServerSentEventRenderer]) + def chat(self, request: Request, *args, **kwargs): + assert request.user is not None + validated_body = Conversation.model_validate(request.data) + chain = GenerateTrendsAgent(self.team).bootstrap(validated_body.messages) + + def generate(): + last_message = None + for message in chain.stream({"question": validated_body.messages[0].content}): + if message: + last_message = message[0].model_dump_json() + yield last_message + + if not last_message: + yield json.dumps({"reasoning_steps": ["Schema validation failed"]}) + + report_user_action( + request.user, # type: ignore + "chat with ai", + {"prompt": validated_body.messages[-1].content, "response": last_message}, + ) + + return StreamingHttpResponse(generate(), content_type=ServerSentEventRenderer.media_type) + def handle_column_ch_error(self, error): if getattr(error, "message", None): match = re.search(r"There's no column.*in table", error.message) diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr index 3703dc9ea6093..9b470bb936f43 100644 --- a/posthog/api/test/__snapshots__/test_api_docs.ambr +++ b/posthog/api/test/__snapshots__/test_api_docs.ambr @@ -80,6 +80,7 @@ '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.', '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "session_recording_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. 
Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording.SessionRecording" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', diff --git a/posthog/api/test/batch_exports/operations.py b/posthog/api/test/batch_exports/operations.py index 5ac814deab1e2..20f7d2761e2bf 100644 --- a/posthog/api/test/batch_exports/operations.py +++ b/posthog/api/test/batch_exports/operations.py @@ -1,5 +1,6 @@ from django.test.client import Client as TestClient from rest_framework import status +from posthog.models.utils import UUIDT def create_batch_export(client: TestClient, team_id: int, batch_export_data: dict | str): @@ -16,17 +17,17 @@ def create_batch_export_ok(client: TestClient, team_id: int, batch_export_data: return response.json() -def pause_batch_export(client: TestClient, team_id: int, batch_export_id: int): +def pause_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT): return client.post(f"/api/projects/{team_id}/batch_exports/{batch_export_id}/pause") -def pause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int): +def pause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT): response = pause_batch_export(client, team_id, batch_export_id) assert response.status_code == status.HTTP_200_OK, response.json() return response.json() -def unpause_batch_export(client: TestClient, team_id: int, batch_export_id: int, backfill: bool = False): +def unpause_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT, backfill: bool = False): return client.post( f"/api/projects/{team_id}/batch_exports/{batch_export_id}/unpause", {"backfill": backfill}, @@ -34,17 +35,17 @@ def unpause_batch_export(client: TestClient, team_id: int, batch_export_id: int, ) -def unpause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int, backfill: bool = False): +def unpause_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT, backfill: bool = False): response = unpause_batch_export(client, team_id, batch_export_id, backfill) assert response.status_code == status.HTTP_200_OK, response.json() return response.json() -def get_batch_export(client: TestClient, team_id: int, batch_export_id: int): +def get_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT): return client.get(f"/api/projects/{team_id}/batch_exports/{batch_export_id}") -def get_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int): +def get_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT): response = get_batch_export(client, team_id, batch_export_id) assert response.status_code == status.HTTP_200_OK, response.json() return response.json() @@ -63,11 +64,11 @@ def get_batch_export_runs_ok(client: TestClient, team_id: int, batch_export_id: return response.json() -def delete_batch_export(client: TestClient, team_id: int, batch_export_id: int): +def delete_batch_export(client: TestClient, team_id: int, batch_export_id: UUIDT): return client.delete(f"/api/projects/{team_id}/batch_exports/{batch_export_id}") -def delete_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int): +def delete_batch_export_ok(client: TestClient, team_id: int, batch_export_id: UUIDT): response = 
delete_batch_export(client, team_id, batch_export_id) assert response.status_code == status.HTTP_204_NO_CONTENT, response return response diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index 78339bd3f30c2..e5a85099efd08 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -11,8 +11,10 @@ from posthog.models.utils import UUIDT from posthog.schema import ( CachedEventsQueryResponse, + DataWarehouseNode, EventPropertyFilter, EventsQuery, + FunnelsQuery, HogQLPropertyFilter, HogQLQuery, PersonPropertyFilter, @@ -731,6 +733,39 @@ def test_invalid_query_kind(self): api_response.content, ) + def test_funnel_query_with_data_warehouse_node_temporarily_raises(self): + # As of September 2024, funnels don't support data warehouse tables YET, so we want a helpful error message + api_response = self.client.post( + f"/api/projects/{self.team.id}/query/", + { + "query": FunnelsQuery( + series=[ + DataWarehouseNode( + id="xyz", + table_name="xyz", + id_field="id", + distinct_id_field="customer_email", + timestamp_field="created", + ), + DataWarehouseNode( + id="abc", + table_name="abc", + id_field="id", + distinct_id_field="customer_email", + timestamp_field="timestamp", + ), + ], + ).model_dump() + }, + ) + self.assertEqual(api_response.status_code, 400) + self.assertDictEqual( + api_response.json(), + self.validation_error_response( + "Data warehouse tables are not supported in funnels just yet. For now, please try this funnel without the data warehouse-based step." + ), + ) + def test_missing_query(self): api_response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": {}}) self.assertEqual(api_response.status_code, 400) diff --git a/posthog/cdp/templates/__init__.py b/posthog/cdp/templates/__init__.py index ca1b3fcea137c..f1f0243c08cd5 100644 --- a/posthog/cdp/templates/__init__.py +++ b/posthog/cdp/templates/__init__.py @@ -2,7 +2,7 @@ from .slack.template_slack import template as slack from .hubspot.template_hubspot import template as hubspot from .customerio.template_customerio import template as customerio, TemplateCustomerioMigrator -from .intercom.template_intercom import template as intercom +from .intercom.template_intercom import template as intercom, TemplateIntercomMigrator from .sendgrid.template_sendgrid import template as sendgrid from .clearbit.template_clearbit import template as clearbit from .posthog.template_posthog import template as posthog @@ -14,6 +14,8 @@ ) from .zapier.template_zapier import template as zapier from .mailgun.template_mailgun import template_mailgun_send_email as mailgun +from .loops.template_loops import template as loops +from .rudderstack.template_rudderstack import template as rudderstack HOG_FUNCTION_TEMPLATES = [ @@ -32,6 +34,8 @@ mailjet_update_contact_list, clearbit, mailgun, + loops, + rudderstack, ] @@ -39,6 +43,7 @@ HOG_FUNCTION_MIGRATORS = { TemplateCustomerioMigrator.plugin_url: TemplateCustomerioMigrator, + TemplateIntercomMigrator.plugin_url: TemplateIntercomMigrator, } __all__ = ["HOG_FUNCTION_TEMPLATES", "HOG_FUNCTION_TEMPLATES_BY_ID"] diff --git a/posthog/cdp/templates/intercom/template_intercom.py b/posthog/cdp/templates/intercom/template_intercom.py index a8b1bf3d1aed9..fd97f2605759f 100644 --- a/posthog/cdp/templates/intercom/template_intercom.py +++ b/posthog/cdp/templates/intercom/template_intercom.py @@ -1,4 +1,6 @@ -from posthog.cdp.templates.hog_function_template import HogFunctionTemplate +from copy import deepcopy +import dataclasses +from 
posthog.cdp.templates.hog_function_template import HogFunctionTemplate, HogFunctionTemplateMigrator template: HogFunctionTemplate = HogFunctionTemplate( @@ -8,19 +10,15 @@ description="Send events and contact information to Intercom", icon_url="/static/services/intercom.png", hog=""" -let accessToken := inputs.access_token -let host := inputs.host -let email := inputs.email - -if (empty(email)) { +if (empty(inputs.email)) { print('`email` input is empty. Skipping.') return } -let res := fetch(f'https://{host}/events', { +let res := fetch(f'https://{inputs.host}/events', { 'method': 'POST', 'headers': { - 'Authorization': f'Bearer {accessToken}', + 'Authorization': f'Bearer {inputs.access_token}', 'Content-Type': 'application/json', 'Accept': 'application/json' }, @@ -89,3 +87,47 @@ "filter_test_accounts": True, }, ) + + +class TemplateIntercomMigrator(HogFunctionTemplateMigrator): + plugin_url = "https://github.com/PostHog/posthog-intercom-plugin" + + @classmethod + def migrate(cls, obj): + hf = deepcopy(dataclasses.asdict(template)) + + useEuropeanDataStorage = obj.config.get("useEuropeanDataStorage", "No") + intercomApiKey = obj.config.get("intercomApiKey", "") + triggeringEvents = obj.config.get("triggeringEvents", "$identify") + ignoredEmailDomains = obj.config.get("ignoredEmailDomains", "") + + hf["filters"] = {} + + events_to_filter = [event.strip() for event in triggeringEvents.split(",") if event.strip()] + domains_to_filter = [domain.strip() for domain in ignoredEmailDomains.split(",") if domain.strip()] + + if domains_to_filter: + hf["filters"]["properties"] = [ + { + "key": "email", + "value": domain, + "operator": "not_icontains", + "type": "person", + } + for domain in domains_to_filter + ] + + if events_to_filter: + hf["filters"]["events"] = [ + {"id": event, "name": event, "type": "events", "order": 0} for event in events_to_filter + ] + + hf["inputs"] = { + "access_token": {"value": intercomApiKey}, + "host": {"value": "api.eu.intercom.com"} + if useEuropeanDataStorage == "Yes" + else {"value": "api.intercom.io"}, + "email": {"value": "{person.properties.email}"}, + } + + return hf diff --git a/posthog/cdp/templates/intercom/test_template_intercom.py b/posthog/cdp/templates/intercom/test_template_intercom.py index 9d9ec4aedb39a..f76f48f1d5485 100644 --- a/posthog/cdp/templates/intercom/test_template_intercom.py +++ b/posthog/cdp/templates/intercom/test_template_intercom.py @@ -1,5 +1,8 @@ +from inline_snapshot import snapshot from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest -from posthog.cdp.templates.intercom.template_intercom import template as template_intercom +from posthog.cdp.templates.intercom.template_intercom import template as template_intercom, TemplateIntercomMigrator +from posthog.models.plugin import PluginConfig +from posthog.test.base import BaseTest class TestTemplateIntercom(BaseHogFunctionTemplateTest): @@ -77,3 +80,83 @@ def test_logs_other_errors(self): }, ) ] + + +class TestTemplateMigration(BaseTest): + def get_plugin_config(self, config: dict): + _config = { + "intercomApiKey": "INTERCOM_API_KEY", + "triggeringEvents": "$identify", + "ignoredEmailDomains": "", + "useEuropeanDataStorage": "No", + } + + _config.update(config) + return PluginConfig(enabled=True, order=0, config=_config) + + def test_full_function(self): + obj = self.get_plugin_config({}) + + template = TemplateIntercomMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "access_token": {"value": "INTERCOM_API_KEY"}, + "host": {"value": 
"api.intercom.io"}, + "email": {"value": "{person.properties.email}"}, + } + ) + assert template["filters"] == snapshot( + {"events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}]} + ) + + def test_eu_host(self): + obj = self.get_plugin_config( + { + "useEuropeanDataStorage": "Yes", + } + ) + + template = TemplateIntercomMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "access_token": {"value": "INTERCOM_API_KEY"}, + "host": {"value": "api.eu.intercom.com"}, + "email": {"value": "{person.properties.email}"}, + } + ) + + def test_triggering_events(self): + obj = self.get_plugin_config( + { + "triggeringEvents": "$identify,$pageview, custom event, ", + } + ) + + template = TemplateIntercomMigrator.migrate(obj) + assert template["filters"] == snapshot( + { + "events": [ + {"id": "$identify", "name": "$identify", "type": "events", "order": 0}, + {"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}, + {"id": "custom event", "name": "custom event", "type": "events", "order": 0}, + ] + } + ) + + def test_ignore_domains(self): + obj = self.get_plugin_config( + { + "ignoredEmailDomains": "test.com, other.com, ", + } + ) + + template = TemplateIntercomMigrator.migrate(obj) + assert template["filters"] == snapshot( + { + "properties": [ + {"key": "email", "value": "test.com", "operator": "not_icontains", "type": "person"}, + {"key": "email", "value": "other.com", "operator": "not_icontains", "type": "person"}, + ], + "events": [{"id": "$identify", "name": "$identify", "type": "events", "order": 0}], + } + ) diff --git a/posthog/cdp/templates/loops/template_loops.py b/posthog/cdp/templates/loops/template_loops.py new file mode 100644 index 0000000000000..01230f7727328 --- /dev/null +++ b/posthog/cdp/templates/loops/template_loops.py @@ -0,0 +1,49 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + + +template: HogFunctionTemplate = HogFunctionTemplate( + status="beta", + id="template-loops", + name="Send events to Loops", + description="Passes PostHog events to Loops.so", + icon_url="/static/services/loops.png", + hog=""" +let apiKey := inputs.apiKey + +let payload := { + 'userId': event.distinct_id, + 'eventName': event.name == '$set' ? 
'$identify' : event.name, + 'email': person.properties.email +} +for (let key, value in person.properties) { + payload[key] := value +} +fetch('https://app.loops.so/api/v1/events/send', { + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {apiKey}', + }, + 'body': payload +}) +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Loops API Key", + "description": "Loops API Key", + "default": "", + "secret": True, + "required": True, + } + ], + filters={ + "events": [ + {"id": "$identify", "name": "$identify", "type": "events", "order": 0}, + {"id": "$set", "name": "$set", "type": "events", "order": 1}, + ], + "actions": [], + "filter_test_accounts": True, + }, +) diff --git a/posthog/cdp/templates/loops/test_template_loops.py b/posthog/cdp/templates/loops/test_template_loops.py new file mode 100644 index 0000000000000..c6d48b5228b14 --- /dev/null +++ b/posthog/cdp/templates/loops/test_template_loops.py @@ -0,0 +1,61 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.loops.template_loops import template as template_loops + + +class TestTemplateLoops(BaseHogFunctionTemplateTest): + template = template_loops + + def _inputs(self, **kwargs): + inputs = {"apiKey": "1cac089e00a708680bdb1ed9f082d5bf"} + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.run_function( + inputs=self._inputs(), + globals={ + "event": {"distinct_id": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", "name": "$pageview"}, + "person": { + "properties": {"email": "max@posthog.com", "name": "Max", "company": "PostHog"}, + }, + }, + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://app.loops.so/api/v1/events/send", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + "Authorization": "Bearer 1cac089e00a708680bdb1ed9f082d5bf", + }, + "body": { + "userId": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", + "eventName": "$pageview", + "email": "max@posthog.com", + "name": "Max", + "company": "PostHog", + }, + }, + ) + ) + + def test_automatic_action_mapping(self): + for event_name, expected_action in [ + ("$identify", "$identify"), + ("$set", "$identify"), + ("$pageview", "$pageview"), + ("$create_alias", "$create_alias"), + ("$autocapture", "$autocapture"), + ("custom", "custom"), + ]: + self.run_function( + inputs=self._inputs(), + globals={ + "event": {"name": event_name, "properties": {"url": "https://example.com", "$browser": "Chrome"}}, + }, + ) + + assert self.get_mock_fetch_calls()[0][1]["body"]["eventName"] == expected_action diff --git a/posthog/cdp/templates/rudderstack/template_rudderstack.py b/posthog/cdp/templates/rudderstack/template_rudderstack.py new file mode 100644 index 0000000000000..efbfafbaeaa9f --- /dev/null +++ b/posthog/cdp/templates/rudderstack/template_rudderstack.py @@ -0,0 +1,126 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + + +template: HogFunctionTemplate = HogFunctionTemplate( + status="alpha", + id="template-rudderstack", + name="Send data to RudderStack", + description="Send data to RudderStack", + icon_url="/static/services/rudderstack.png", + hog=""" +fn getPayload() { + let rudderPayload := { + 'context': { + 'app': { + 'name': 'PostHogPlugin', + }, + 'os': { + 'name': event.properties.$os + }, + 'browser': event.properties.$browser, + 'browser_version': event.properties.$browser_version, + 'page': { + 'host': 
event.properties.$host, + 'url': event.properties.$current_url, + 'path': event.properties.$pathname, + 'referrer': event.properties.$referrer, + 'initial_referrer': event.properties.$initial_referrer, + 'referring_domain': event.properties.$referring_domain, + 'initial_referring_domain': event.properties.$initial_referring_domain, + }, + 'screen': { + 'height': event.properties.$screen_height, + 'width': event.properties.$screen_width, + }, + 'library': { + 'name': event.properties.$lib, + 'version': event.properties.$lib_version, + }, + 'ip': event.$ip, + 'active_feature_flags': event.properties.$active_feature_flags, + 'token': event.properties.token + }, + 'channel': 's2s', + 'messageId': event.uuid, + 'originalTimestamp': event.timestamp, + 'userId': inputs.identifier, + 'anonymousId': event.properties.$anon_distinct_id ?? event.properties.$device_id ?? event.properties.distinct_id, + 'type': 'track', + 'properties': {}, + } + + if (event.name in ('$identify', '$set')) { + rudderPayload.type := 'identify' + rudderPayload.context.trait := event.properties.$set + rudderPayload.traits := event.properties.$set + } else if (event.name == '$create_alias') { + rudderPayload.type := 'alias' + rudderPayload.userId := event.properties.alias + rudderPayload.previousId := event.distinct_id + } else if (event.name == '$pageview') { + rudderPayload.type := 'page' + rudderPayload.name := event.properties.name + rudderPayload.properties.host := event.properties.$host + rudderPayload.properties.url := event.properties.$current_url + rudderPayload.properties.path := event.properties.$pathname + rudderPayload.properties.referrer := event.properties.$referrer + rudderPayload.properties.initial_referrer := event.properties.$initial_referrer + rudderPayload.properties.referring_domain := event.properties.$referring_domain + rudderPayload.properties.initial_referring_domain := event.properties.$initial_referring_domain + } else if (event.name == '$autocapture') { + rudderPayload.type := 'track' + rudderPayload.event := event.properties.$event_type + } else { + rudderPayload.type := 'track' + rudderPayload.event := event.name + } + + for (let key, value in event.properties) { + if (value != null and not key like '$%') { + rudderPayload.properties[key] := value + } + } + + return { + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'Authorization': f'Basic {base64Encode(f'{inputs.token}:')}', + }, + 'body': { + 'batch': [rudderPayload], + 'sentAt': now() + } + } +} + +fetch(f'{inputs.host}/v1/batch', getPayload()) +""".strip(), + inputs_schema=[ + { + "key": "host", + "type": "string", + "label": "Rudderstack host", + "description": "The destination of the Rudderstack instance", + "default": "https://hosted.rudderlabs.com", + "secret": False, + "required": True, + }, + { + "key": "token", + "type": "string", + "label": "Write API key", + "description": "RudderStack Source Writekey", + "secret": False, + "required": True, + }, + { + "key": "identifier", + "type": "string", + "label": "Identifier", + "default": "{person.uuid}", + "secret": False, + "required": True, + }, + ], +) diff --git a/posthog/cdp/templates/rudderstack/test_template_rudderstack.py b/posthog/cdp/templates/rudderstack/test_template_rudderstack.py new file mode 100644 index 0000000000000..61469f8d3f79d --- /dev/null +++ b/posthog/cdp/templates/rudderstack/test_template_rudderstack.py @@ -0,0 +1,107 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from 
posthog.cdp.templates.rudderstack.template_rudderstack import template as template_rudderstack + + +class TestTemplateRudderstack(BaseHogFunctionTemplateTest): + template = template_rudderstack + + def _inputs(self, **kwargs): + inputs = { + "host": "https://hosted.rudderlabs.com", + "token": "asjdkfasdkjfaskfkjfhdsf", + "identifier": "a08ff8e1-a5ee-49cc-99e9-564e455c33f0", + } + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.run_function( + inputs=self._inputs(), + globals={ + "event": { + "uuid": "96a04bdc-6021-4120-a3e3-f1988f59ba5f", + "timestamp": "2024-08-29T13:40:22.713Z", + "distinct_id": "85bcd2e4-d10d-4a99-9dc8-43789b7226a1", + "name": "$pageview", + "properties": {"$current_url": "https://example.com", "$browser": "Chrome"}, + }, + "person": {"uuid": "a08ff8e1-a5ee-49cc-99e9-564e455c33f0"}, + }, + ) + + res = self.get_mock_fetch_calls()[0] + res[1]["body"]["sentAt"]["dt"] = 1724946899.775266 + assert res == snapshot( + ( + "https://hosted.rudderlabs.com/v1/batch", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + "Authorization": "Basic YXNqZGtmYXNka2pmYXNrZmtqZmhkc2Y6", + }, + "body": { + "batch": [ + { + "context": { + "app": {"name": "PostHogPlugin"}, + "os": {"name": None}, + "browser": "Chrome", + "browser_version": None, + "page": { + "host": None, + "url": "https://example.com", + "path": None, + "referrer": None, + "initial_referrer": None, + "referring_domain": None, + "initial_referring_domain": None, + }, + "screen": {"height": None, "width": None}, + "library": {"name": None, "version": None}, + "ip": None, + "active_feature_flags": None, + "token": None, + }, + "channel": "s2s", + "messageId": "96a04bdc-6021-4120-a3e3-f1988f59ba5f", + "originalTimestamp": "2024-08-29T13:40:22.713Z", + "userId": "a08ff8e1-a5ee-49cc-99e9-564e455c33f0", + "anonymousId": None, + "type": "page", + "properties": { + "host": None, + "url": "https://example.com", + "path": None, + "referrer": None, + "initial_referrer": None, + "referring_domain": None, + "initial_referring_domain": None, + }, + "name": None, + } + ], + "sentAt": {"__hogDateTime__": True, "dt": 1724946899.775266, "zone": "UTC"}, + }, + }, + ) + ) + + def test_automatic_action_mapping(self): + for event_name, expected_action in [ + ("$identify", "identify"), + ("$set", "identify"), + ("$pageview", "page"), + ("$create_alias", "alias"), + ("$autocapture", "track"), + ("custom", "track"), + ]: + self.run_function( + inputs=self._inputs(), + globals={ + "event": {"name": event_name, "properties": {"url": "https://example.com", "$browser": "Chrome"}}, + }, + ) + + assert self.get_mock_fetch_calls()[0][1]["body"]["batch"][0]["type"] == expected_action diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 557cbfd05c01e..5e64111632997 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -92,6 +92,8 @@ def get_breakdown_limit_for_context(limit_context: LimitContext) -> int: class HogQLQuerySettings(BaseModel): model_config = ConfigDict(extra="forbid") optimize_aggregation_in_order: Optional[bool] = None + date_time_output_format: Optional[str] = None + date_time_input_format: Optional[str] = None # Settings applied on top of all HogQL queries. 
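For context on how the two new `HogQLQuerySettings` fields surface in generated SQL: ClickHouse accepts them in a trailing SETTINGS clause (valid values for date_time_output_format include 'simple', 'iso' and 'unix_timestamp'). A minimal standalone sketch — the model mirrors the diff above, but `to_settings_clause` is a hypothetical stand-in for the real HogQL printer:

from typing import Optional

from pydantic import BaseModel, ConfigDict


class HogQLQuerySettings(BaseModel):
    # Mirrors the model in constants.py: unknown settings are rejected outright
    model_config = ConfigDict(extra="forbid")
    optimize_aggregation_in_order: Optional[bool] = None
    date_time_output_format: Optional[str] = None
    date_time_input_format: Optional[str] = None


def to_settings_clause(settings: HogQLQuerySettings) -> str:
    # Hypothetical helper: render only the settings that were explicitly set
    pairs = {k: v for k, v in settings.model_dump().items() if v is not None}
    return ("SETTINGS " + ", ".join(f"{k}='{v}'" for k, v in pairs.items())) if pairs else ""


print(to_settings_clause(HogQLQuerySettings(date_time_output_format="iso")))
# -> SETTINGS date_time_output_format='iso'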
diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index 232e99e7a064e..586eed0c4a274 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -470,6 +470,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "timeStampSub": HogQLFunctionMeta("timeStampSub", 2, 2), "now": HogQLFunctionMeta("now64", 0, 1, tz_aware=True, case_sensitive=False), "nowInBlock": HogQLFunctionMeta("nowInBlock", 1, 1), + "rowNumberInBlock": HogQLFunctionMeta("rowNumberInBlock", 0, 0), "rowNumberInAllBlocks": HogQLFunctionMeta("rowNumberInAllBlocks", 0, 0), "today": HogQLFunctionMeta("today"), "yesterday": HogQLFunctionMeta("yesterday"), @@ -833,6 +834,14 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "leadInFrame": HogQLFunctionMeta("leadInFrame", 1, 1), # table functions "generateSeries": HogQLFunctionMeta("generate_series", 3, 3), + ## UDFS + "aggregate_funnel": HogQLFunctionMeta("aggregate_funnel", 6, 6, aggregate=False), + "aggregate_funnel_array": HogQLFunctionMeta("aggregate_funnel_array", 6, 6, aggregate=False), + "aggregate_funnel_cohort": HogQLFunctionMeta("aggregate_funnel_cohort", 6, 6, aggregate=False), + "aggregate_funnel_trends": HogQLFunctionMeta("aggregate_funnel_trends", 7, 7, aggregate=False), + "aggregate_funnel_array_trends": HogQLFunctionMeta("aggregate_funnel_array_trends", 7, 7, aggregate=False), + "aggregate_funnel_cohort_trends": HogQLFunctionMeta("aggregate_funnel_cohort_trends", 7, 7, aggregate=False), + "aggregate_funnel_test": HogQLFunctionMeta("aggregate_funnel_test", 6, 6, aggregate=False), } # Permitted HogQL aggregations HOGQL_AGGREGATIONS: dict[str, HogQLFunctionMeta] = { @@ -884,7 +893,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy # "topKWeighted": HogQLFunctionMeta("topKWeighted", 1, 1, aggregate=True), # "topKWeightedIf": HogQLFunctionMeta("topKWeightedIf", 2, 2, aggregate=True), "groupArray": HogQLFunctionMeta("groupArray", 1, 1, aggregate=True), - # "groupArrayIf": HogQLFunctionMeta("groupArrayIf", 2, 2, aggregate=True), + "groupArrayIf": HogQLFunctionMeta("groupArrayIf", 2, 2, aggregate=True), # "groupArrayLast": HogQLFunctionMeta("groupArrayLast", 1, 1, aggregate=True), # "groupArrayLastIf": HogQLFunctionMeta("groupArrayLastIf", 2, 2, aggregate=True), "groupUniqArray": HogQLFunctionMeta("groupUniqArray", 1, 1, aggregate=True), diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index bce1453e561f8..65db42700c958 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -550,23 +550,26 @@ def action_to_expr(action: Action) -> ast.Expr: if step.text is not None: value = step.text if step.text_matching == "regex": - match = ast.CompareOperationOp.Regex + exprs.append( + parse_expr( + "arrayExists(x -> x =~ {value}, elements_chain_texts)", + {"value": ast.Constant(value=value)}, + ) + ) elif step.text_matching == "contains": - match = ast.CompareOperationOp.ILike - value = f"%{value}%" + exprs.append( + parse_expr( + "arrayExists(x -> x ilike {value}, elements_chain_texts)", + {"value": ast.Constant(value=f"%{value}%")}, + ) + ) else: - match = ast.CompareOperationOp.Eq - - exprs.append( - parse_expr( - "arrayExists(x -> {match}, elements_chain_texts)", - { - "match": ast.CompareOperation( - op=match, left=ast.Field(chain=["x"]), right=ast.Constant(value=value) - ) - }, + exprs.append( + parse_expr( + "arrayExists(x -> x = {value}, elements_chain_texts)", + {"value": 
ast.Constant(value=value)}, + ) ) - ) if step.url: if step.url_matching == "exact": expr = parse_expr( diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py index 02aa63b9cb856..fb2f2f230973d 100644 --- a/posthog/hogql/test/test_property.py +++ b/posthog/hogql/test/test_property.py @@ -649,6 +649,24 @@ def test_action_to_expr(self): self._parse_expr("event = '$autocapture' and arrayExists(x -> x =~ 'blabla', elements_chain_texts)"), ) + action7 = Action.objects.create( + team=self.team, + steps_json=[{"event": "$autocapture", "text": "blabla", "text_matching": "contains"}], + ) + self.assertEqual( + clear_locations(action_to_expr(action7)), + self._parse_expr("event = '$autocapture' and arrayExists(x -> x ilike '%blabla%', elements_chain_texts)"), + ) + + action8 = Action.objects.create( + team=self.team, + steps_json=[{"event": "$autocapture", "text": "blabla", "text_matching": "exact"}], + ) + self.assertEqual( + clear_locations(action_to_expr(action8)), + self._parse_expr("event = '$autocapture' and arrayExists(x -> x = 'blabla', elements_chain_texts)"), + ) + def test_cohort_filter_static(self): cohort = Cohort.objects.create( team=self.team, diff --git a/posthog/hogql_queries/insights/funnels/__init__.py b/posthog/hogql_queries/insights/funnels/__init__.py index 8a20d9784df8b..787cd01ec887d 100644 --- a/posthog/hogql_queries/insights/funnels/__init__.py +++ b/posthog/hogql_queries/insights/funnels/__init__.py @@ -1,5 +1,6 @@ from .base import FunnelBase from .funnel import Funnel +from .funnel_udf import FunnelUDF from .funnel_strict import FunnelStrict from .funnel_unordered import FunnelUnordered from .funnel_time_to_convert import FunnelTimeToConvert diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index 477c205dd968c..d5757225246f4 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -33,6 +33,7 @@ FunnelExclusionActionsNode, FunnelTimeToConvertResults, FunnelVizType, + FunnelExclusionEventsNode, ) from posthog.types import EntityNode, ExclusionEntityNode @@ -299,7 +300,9 @@ def _serialize_step( action_id = step.event type = "events" elif isinstance(step, DataWarehouseNode): - raise NotImplementedError("DataWarehouseNode is not supported in funnels") + raise ValidationError( + "Data warehouse tables are not supported in funnels just yet. For now, please try this funnel without the data warehouse-based step." + ) else: action = Action.objects.get(pk=step.id) name = action.name @@ -408,6 +411,92 @@ def _get_inner_event_query( return funnel_events_query + # This version of the inner event query modifies how exclusions are returned to + # make them behave more like steps. 
It returns a boolean "exclusion_{0..n}" for each event + def _get_inner_event_query_for_udf( + self, + entities: list[EntityNode] | None = None, + entity_name="events", + skip_entity_filter=False, + skip_step_filter=False, + ) -> ast.SelectQuery: + query, funnelsFilter, breakdown, breakdownType, breakdownAttributionType = ( + self.context.query, + self.context.funnelsFilter, + self.context.breakdown, + self.context.breakdownType, + self.context.breakdownAttributionType, + ) + entities_to_use = entities or query.series + + extra_fields: list[str] = [] + + for prop in self.context.includeProperties: + extra_fields.append(prop) + + funnel_events_query = FunnelEventQuery( + context=self.context, + extra_fields=[*self._extra_event_fields, *extra_fields], + extra_event_properties=self._extra_event_properties, + ).to_query( + skip_entity_filter=skip_entity_filter, + ) + # funnel_events_query, params = FunnelEventQuery( + # extra_fields=[*self._extra_event_fields, *extra_fields], + # extra_event_properties=self._extra_event_properties, + # ).get_query(entities_to_use, entity_name, skip_entity_filter=skip_entity_filter) + + all_step_cols: list[ast.Expr] = [] + all_exclusions: list[list[FunnelExclusionEventsNode | FunnelExclusionActionsNode]] = [] + for index, entity in enumerate(entities_to_use): + step_cols = self._get_step_col(entity, index, entity_name) + all_step_cols.extend(step_cols) + all_exclusions.append([]) + + for excluded_entity in funnelsFilter.exclusions or []: + for i in range(excluded_entity.funnelFromStep + 1, excluded_entity.funnelToStep + 1): + all_exclusions[i].append(excluded_entity) + + for index, exclusions in enumerate(all_exclusions): + exclusion_col_expr = self._get_exclusions_col(exclusions, index, entity_name) + all_step_cols.append(exclusion_col_expr) + + breakdown_select_prop = self._get_breakdown_select_prop() + + if breakdown_select_prop: + all_step_cols.extend(breakdown_select_prop) + + funnel_events_query.select = [*funnel_events_query.select, *all_step_cols] + + if breakdown and breakdownType == BreakdownType.COHORT: + assert funnel_events_query.select_from is not None + funnel_events_query.select_from.next_join = self._get_cohort_breakdown_join() + + if not skip_step_filter: + assert isinstance(funnel_events_query.where, ast.Expr) + steps_conditions = self._get_steps_conditions_for_udf(all_exclusions, length=len(entities_to_use)) + funnel_events_query.where = ast.And(exprs=[funnel_events_query.where, steps_conditions]) + + if breakdown and breakdownAttributionType != BreakdownAttributionType.ALL_EVENTS: + # ALL_EVENTS attribution is the old default, which doesn't need the subquery + return self._add_breakdown_attribution_subquery(funnel_events_query) + + return funnel_events_query + + def _get_exclusions_col( + self, + exclusions: list[ExclusionEntityNode], + index: int, + entity_name: str, + ) -> ast.Expr: + if not exclusions: + return parse_expr(f"0 as exclusion_{index}") + + conditions = [self._build_step_query(exclusion, index, entity_name, "") for exclusion in exclusions] + return parse_expr( + f"if({{condition}}, 1, 0) as exclusion_{index}", placeholders={"condition": ast.Or(exprs=conditions)} + ) + def _get_cohort_breakdown_join(self) -> ast.JoinExpr: breakdown = self.context.breakdown @@ -545,12 +634,23 @@ def _get_steps_conditions(self, length: int) -> ast.Expr: return ast.Or(exprs=step_conditions) + def _get_steps_conditions_for_udf(self, exclusions, length: int) -> ast.Expr: + step_conditions: list[ast.Expr] = [] + + for index in range(length): + 
step_conditions.append(parse_expr(f"step_{index} = 1")) + if exclusions[index]: + step_conditions.append(parse_expr(f"exclusion_{index} = 1")) + + return ast.Or(exprs=step_conditions) + def _get_step_col( self, entity: EntityNode | ExclusionEntityNode, index: int, entity_name: str, step_prefix: str = "", + for_udf: bool = False, ) -> list[ast.Expr]: # step prefix is used to distinguish actual steps, and exclusion steps # without the prefix, we get the same parameter binding for both, which borks things up @@ -559,9 +659,10 @@ def _get_step_col( step_cols.append( parse_expr(f"if({{condition}}, 1, 0) as {step_prefix}step_{index}", placeholders={"condition": condition}) ) - step_cols.append( - parse_expr(f"if({step_prefix}step_{index} = 1, timestamp, null) as {step_prefix}latest_{index}") - ) + if not for_udf: + step_cols.append( + parse_expr(f"if({step_prefix}step_{index} = 1, timestamp, null) as {step_prefix}latest_{index}") + ) for field in self.extra_event_fields_and_properties: step_cols.append( @@ -584,7 +685,9 @@ def _build_step_query( action = Action.objects.get(pk=int(entity.id), team=self.context.team) event_expr = action_to_expr(action) elif isinstance(entity, DataWarehouseNode): - raise NotImplementedError("DataWarehouseNode is not supported in funnels") + raise ValidationError( + "Data warehouse tables are not supported in funnels just yet. For now, please try this funnel without the data warehouse-based step." + ) elif entity.event is None: # all events event_expr = ast.Constant(value=1) diff --git a/posthog/hogql_queries/insights/funnels/funnel_event_query.py b/posthog/hogql_queries/insights/funnels/funnel_event_query.py index 8acb0f7dea87b..c4cb9507534ef 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_event_query.py +++ b/posthog/hogql_queries/insights/funnels/funnel_event_query.py @@ -7,7 +7,13 @@ from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.action.action import Action from posthog.models.property.property import PropertyName -from posthog.schema import ActionsNode, EventsNode, FunnelExclusionActionsNode, FunnelExclusionEventsNode +from posthog.schema import ( + ActionsNode, + DataWarehouseNode, + EventsNode, + FunnelExclusionActionsNode, + FunnelExclusionEventsNode, +) from rest_framework.exceptions import ValidationError @@ -143,6 +149,8 @@ def _entity_expr(self, skip_entity_filter: bool) -> ast.Expr | None: events.update(action.get_step_events()) except Action.DoesNotExist: raise ValidationError(f"Action ID {node.id} does not exist!") + elif isinstance(node, DataWarehouseNode): + continue # Data warehouse nodes aren't based on events else: raise ValidationError("Series and exclusions must be compose of action and event nodes") diff --git a/posthog/hogql_queries/insights/funnels/funnel_query_context.py b/posthog/hogql_queries/insights/funnels/funnel_query_context.py index 8c280e272dbe3..ef6cf57af9563 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_query_context.py +++ b/posthog/hogql_queries/insights/funnels/funnel_query_context.py @@ -2,7 +2,6 @@ from posthog.hogql.constants import LimitContext from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.query_context import QueryContext -from posthog.models.filters.mixins.utils import cached_property from posthog.models.property.util import box_value from posthog.models.team.team import Team from posthog.schema import ( @@ -39,6 +38,8 @@ class FunnelQueryContext(QueryContext): includeProperties: list[str] includeFinalMatchingEvents: 
Optional[bool] + max_steps_override: int | None = None + def __init__( self, query: FunnelsQuery, @@ -105,6 +106,8 @@ def __init__( self.actorsQuery = None - @cached_property + @property def max_steps(self) -> int: + if self.max_steps_override is not None: + return self.max_steps_override return len(self.query.series) diff --git a/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py b/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py index 38600dcc1fec6..5c28697b6edab 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py +++ b/posthog/hogql_queries/insights/funnels/funnel_time_to_convert.py @@ -16,7 +16,8 @@ def __init__( ): super().__init__(context) - self.funnel_order = get_funnel_order_class(self.context.funnelsFilter)(context=self.context) + # Haven't implemented calls for time_to_convert in UDF yet + self.funnel_order = get_funnel_order_class(self.context.funnelsFilter, use_udf=False)(context=self.context) def _format_results(self, results: list) -> FunnelTimeToConvertResults: return FunnelTimeToConvertResults( diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends.py b/posthog/hogql_queries/insights/funnels/funnel_trends.py index ad7a67d1207d6..8bdab281147a9 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends.py @@ -120,16 +120,15 @@ def _format_single_summary(self, summary): labels.append(timestamp.strftime(HUMAN_READABLE_TIMESTAMP_FORMAT)) return {"count": count, "data": data, "days": days, "labels": labels} - def get_query(self) -> ast.SelectQuery: - team, interval, query, now = self.context.team, self.context.interval, self.context.query, self.context.now - - date_range = QueryDateRange( - date_range=query.dateRange, - team=team, - interval=query.interval, - now=now, + def _date_range(self): + return QueryDateRange( + date_range=self.context.query.dateRange, + team=self.context.team, + interval=self.context.query.interval, + now=self.context.now, ) + def get_query(self) -> ast.SelectQuery: step_counts = self.get_step_counts_without_aggregation_query() # Expects multiple rows for same person, first event time, steps taken. 
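The funnel_time_to_convert.py hunk above pins use_udf=False when resolving the order class. For orientation, a hedged sketch of how get_funnel_order_class presumably dispatches, inferred only from the call sites in this PR (the real helper lives in posthog/hogql_queries/insights/funnels/utils.py and may differ):

from posthog.hogql_queries.insights.funnels import Funnel, FunnelStrict, FunnelUDF, FunnelUnordered
from posthog.schema import FunnelsFilter, StepOrderValue

def get_funnel_order_class(funnels_filter: FunnelsFilter, use_udf: bool = False):
    # Assumed dispatch: FunnelUDF covers ordered and strict funnels (it branches on
    # funnelOrderType == "strict" internally); unordered keeps its dedicated class.
    if use_udf and funnels_filter.funnelOrderType != StepOrderValue.UNORDERED:
        return FunnelUDF
    if funnels_filter.funnelOrderType == StepOrderValue.STRICT:
        return FunnelStrict
    if funnels_filter.funnelOrderType == StepOrderValue.UNORDERED:
        return FunnelUnordered
    return Funnel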
@@ -138,12 +137,6 @@ def get_query(self) -> ast.SelectQuery: reached_to_step_count_condition, _, ) = self.get_steps_reached_conditions() - interval_func = get_interval_func_ch(interval.value) - - if date_range.date_from() is None: - _date_from = get_earliest_timestamp(team.pk) - else: - _date_from = date_range.date_from() breakdown_clause = self._get_breakdown_prop_expr() @@ -154,52 +147,12 @@ def get_query(self) -> ast.SelectQuery: *breakdown_clause, ] - formatted_date_from = (_date_from.strftime("%Y-%m-%d %H:%M:%S"),) - formatted_date_to = (date_range.date_to().strftime("%Y-%m-%d %H:%M:%S"),) - date_from_as_hogql = ast.Call( - name="assumeNotNull", - args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_from))])], - ) - date_to_as_hogql = ast.Call( - name="assumeNotNull", - args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_to))])], - ) data_select_from = ast.JoinExpr(table=step_counts) data_group_by: list[ast.Expr] = [ast.Field(chain=["entrance_period_start"]), *breakdown_clause] data_query = ast.SelectQuery(select=data_select, select_from=data_select_from, group_by=data_group_by) - fill_select: list[ast.Expr] = [ - ast.Alias( - alias="entrance_period_start", - expr=ast.ArithmeticOperation( - left=get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), - right=ast.Call(name=interval_func, args=[ast.Field(chain=["number"])]), - op=ast.ArithmeticOperationOp.Add, - ), - ), - ] - fill_select_from = ast.JoinExpr( - table=ast.Field(chain=["numbers"]), - table_args=[ - ast.ArithmeticOperation( - left=ast.Call( - name="dateDiff", - args=[ - ast.Constant(value=interval.value), - get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), - get_start_of_interval_hogql(interval.value, team=team, source=date_to_as_hogql), - ], - ), - right=ast.Constant(value=1), - op=ast.ArithmeticOperationOp.Add, - ) - ], - alias="period_offsets", - ) - fill_query = ast.SelectQuery( - select=fill_select, - select_from=fill_select_from, - ) + fill_query = self._get_fill_query() + fill_join = ast.JoinExpr( table=data_query, alias="data", @@ -254,7 +207,7 @@ def get_query(self) -> ast.SelectQuery: ) breakdown_limit = self.get_breakdown_limit() if breakdown_limit: - limit = min(breakdown_limit * len(date_range.all_values()), limit) + limit = min(breakdown_limit * len(self._date_range().all_values()), limit) else: select = [ ast.Field(chain=["fill", "entrance_period_start"]), @@ -276,6 +229,63 @@ def get_query(self) -> ast.SelectQuery: limit=ast.Constant(value=limit), # increased limit (default 100) for hourly breakdown ) + # The fill query returns all the start_interval dates in the response + def _get_fill_query(self) -> ast.SelectQuery: + team, interval = self.context.team, self.context.interval + + date_range = self._date_range() + + if date_range.date_from() is None: + _date_from = get_earliest_timestamp(team.pk) + else: + _date_from = date_range.date_from() + + formatted_date_from = (_date_from.strftime("%Y-%m-%d %H:%M:%S"),) + formatted_date_to = (date_range.date_to().strftime("%Y-%m-%d %H:%M:%S"),) + date_from_as_hogql = ast.Call( + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_from))])], + ) + date_to_as_hogql = ast.Call( + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_to))])], + ) + interval_func = get_interval_func_ch(interval.value) + + fill_select: list[ast.Expr] = [ + ast.Alias( + 
alias="entrance_period_start", + expr=ast.ArithmeticOperation( + left=get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), + right=ast.Call(name=interval_func, args=[ast.Field(chain=["number"])]), + op=ast.ArithmeticOperationOp.Add, + ), + ), + ] + fill_select_from = ast.JoinExpr( + table=ast.Field(chain=["numbers"]), + table_args=[ + ast.ArithmeticOperation( + left=ast.Call( + name="dateDiff", + args=[ + ast.Constant(value=interval.value), + get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), + get_start_of_interval_hogql(interval.value, team=team, source=date_to_as_hogql), + ], + ), + right=ast.Constant(value=1), + op=ast.ArithmeticOperationOp.Add, + ) + ], + alias="period_offsets", + ) + fill_query = ast.SelectQuery( + select=fill_select, + select_from=fill_select_from, + ) + return fill_query + def get_step_counts_without_aggregation_query( self, *, specific_entrance_period_start: Optional[datetime] = None ) -> ast.SelectQuery: diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py new file mode 100644 index 0000000000000..2f928e014daf4 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py @@ -0,0 +1,163 @@ +from typing import cast + +from posthog.hogql import ast +from posthog.hogql.constants import HogQLQuerySettings +from posthog.hogql.parser import parse_select +from posthog.hogql_queries.insights.funnels import FunnelTrends +from posthog.hogql_queries.insights.utils.utils import get_start_of_interval_hogql_str +from posthog.schema import BreakdownType, BreakdownAttributionType +from posthog.utils import DATERANGE_MAP + +TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" +HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" + + +class FunnelTrendsUDF(FunnelTrends): + def get_step_counts_query(self): + max_steps = self.context.max_steps + return self._get_step_counts_query( + outer_select=[ + *self._get_matching_event_arrays(max_steps), + ], + inner_select=[ + *self._get_matching_events(max_steps), + ], + ) + + def conversion_window_limit(self) -> int: + return int( + self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds() + ) + + def get_query(self) -> ast.SelectQuery: + # If they're asking for a "to_step" just truncate the funnel + funnelsFilter = self.context.funnelsFilter + max_steps = self.context.max_steps if funnelsFilter.funnelToStep is None else funnelsFilter.funnelToStep + 1 + self.context.max_steps_override = max_steps + + if self.context.funnelsFilter.funnelOrderType == "strict": + inner_event_query = self._get_inner_event_query_for_udf( + entity_name="events", skip_step_filter=True, skip_entity_filter=True + ) + else: + inner_event_query = self._get_inner_event_query_for_udf(entity_name="events") + + default_breakdown_selector = "[]" if self._query_has_array_breakdown() else "''" + + # stores the steps as an array of integers from 1 to max_steps + # so if the event could be step_0, step_1 or step_4, it looks like [1,2,0,0,5] + + # Each event is going to be a set of steps or it's going to be a set of exclusions. It can't be both. 
+ steps = ",".join([f"{i + 1} * step_{i}" for i in range(self.context.max_steps)]) + + # this will error if they put in a bad exclusion + exclusions = "" + if getattr(self.context.funnelsFilter, "exclusions", None): + exclusions = "".join([f",-{i + 1} * exclusion_{i}" for i in range(1, self.context.max_steps)]) + + # Todo: Make this work for breakdowns + if self.context.breakdownType == BreakdownType.COHORT: + fn = "aggregate_funnel_cohort_trends" + breakdown_prop = ", prop" + elif self._query_has_array_breakdown(): + fn = "aggregate_funnel_array_trends" + breakdown_prop = "" + else: + fn = "aggregate_funnel_trends" + breakdown_prop = "" + + prop_selector = "prop" if self.context.breakdown else default_breakdown_selector + prop_vals = "groupUniqArray(prop)" if self.context.breakdown else f"[{default_breakdown_selector}]" + + breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" + + from_step = funnelsFilter.funnelFromStep or 0 + + inner_select = cast( + ast.SelectQuery, + parse_select( + f""" + SELECT + arrayJoin({fn}( + {from_step}, + {max_steps}, + {self.conversion_window_limit()}, + '{breakdown_attribution_string}', + '{self.context.funnelsFilter.funnelOrderType}', + {prop_vals}, + arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {get_start_of_interval_hogql_str(self.context.interval.value, team=self.context.team, source='timestamp')}, {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) + )) as af_tuple, + toTimeZone(af_tuple.1, '{self.context.team.timezone}') as entrance_period_start, + af_tuple.2 as success_bool, + af_tuple.3 as breakdown + FROM {{inner_event_query}} + GROUP BY aggregation_target{breakdown_prop} + """, + {"inner_event_query": inner_event_query}, + ), + ) + # This is necessary so clickhouse doesn't truncate timezone information when passing datetimes to and from python + inner_select.settings = HogQLQuerySettings(date_time_output_format="iso", date_time_input_format="best_effort") + + conversion_rate_expr = ( + "if(reached_from_step_count > 0, round(reached_to_step_count / reached_from_step_count * 100, 2), 0)" + ) + + fill_query = self._get_fill_query() + + limit = 1_000 + if self.context.breakdown: + breakdown_limit = self.get_breakdown_limit() + if breakdown_limit: + limit = min(breakdown_limit * len(self._date_range().all_values()), limit) + + s = parse_select( + f""" + SELECT + fill.entrance_period_start AS entrance_period_start, + sumIf(data.reached_from_step_count, ifNull(equals(fill.entrance_period_start, data.entrance_period_start), isNull(fill.entrance_period_start) and isNull(data.entrance_period_start))) AS reached_from_step_count, + sumIf(data.reached_to_step_count, ifNull(equals(fill.entrance_period_start, data.entrance_period_start), isNull(fill.entrance_period_start) and isNull(data.entrance_period_start))) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.prop AS prop + FROM + ({{fill_query}}) as fill + CROSS JOIN (SELECT + entrance_period_start as entrance_period_start, + countIf(success_bool != 0) as reached_from_step_count, + countIf(success_bool = 1) as reached_to_step_count, + breakdown as prop + FROM + ({{inner_select}}) + GROUP BY entrance_period_start, breakdown) as data + GROUP BY + fill.entrance_period_start, + data.prop 
+ ORDER BY + sum(reached_from_step_count) OVER (PARTITION BY data.prop) DESC, + data.prop DESC, + fill.entrance_period_start ASC + LIMIT {limit} + """, + {"fill_query": fill_query, "inner_select": inner_select}, + ) + else: + s = parse_select( + f""" + SELECT + fill.entrance_period_start as entrance_period_start, + countIf(success_bool != 0) as reached_from_step_count, + countIf(success_bool = 1) as reached_to_step_count, + {conversion_rate_expr} as conversion_rate, + breakdown as prop + FROM + ({{inner_select}}) as data + RIGHT OUTER JOIN + ({{fill_query}}) as fill + ON data.entrance_period_start = fill.entrance_period_start + GROUP BY entrance_period_start, data.breakdown + ORDER BY entrance_period_start + LIMIT {limit} + """, + {"fill_query": fill_query, "inner_select": inner_select}, + ) + return cast(ast.SelectQuery, s) diff --git a/posthog/hogql_queries/insights/funnels/funnel_udf.py b/posthog/hogql_queries/insights/funnels/funnel_udf.py new file mode 100644 index 0000000000000..7ec91374dcdee --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/funnel_udf.py @@ -0,0 +1,184 @@ +from typing import cast + +from posthog.hogql import ast +from posthog.hogql.parser import parse_select +from posthog.hogql_queries.insights.funnels.base import FunnelBase +from posthog.schema import BreakdownType, BreakdownAttributionType +from posthog.utils import DATERANGE_MAP + +TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" +HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" + + +class FunnelUDF(FunnelBase): + def get_step_counts_query(self): + max_steps = self.context.max_steps + return self._get_step_counts_query( + outer_select=[ + *self._get_matching_event_arrays(max_steps), + ], + inner_select=[ + *self._get_matching_events(max_steps), + ], + ) + + def conversion_window_limit(self) -> int: + return int( + self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds() + ) + + def get_query(self) -> ast.SelectQuery: + if self.context.funnelsFilter.funnelOrderType == "strict": + inner_event_query = self._get_inner_event_query_for_udf( + entity_name="events", skip_step_filter=True, skip_entity_filter=True + ) + else: + inner_event_query = self._get_inner_event_query_for_udf(entity_name="events") + + default_breakdown_selector = "[]" if self._query_has_array_breakdown() else "''" + + # stores the steps as an array of integers from 1 to max_steps + # so if the event could be step_0, step_1 or step_4, it looks like [1,2,0,0,5] + + # Each event is going to be a set of steps or it's going to be a set of exclusions. It can't be both. 
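Before the step selector below, a quick sanity check on conversion_window_limit defined earlier in this file; the exact shape of posthog.utils.DATERANGE_MAP is assumed here (the diff only shows that it maps interval units to objects with total_seconds()):

from datetime import timedelta

DATERANGE_MAP = {"hour": timedelta(hours=1), "day": timedelta(days=1)}  # assumed stand-in

funnel_window_interval, funnel_window_interval_unit = 14, "day"
window_seconds = int(funnel_window_interval * DATERANGE_MAP[funnel_window_interval_unit].total_seconds())
assert window_seconds == 1_209_600  # a 14-day window in seconds, the unit the UDF compares against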
+ steps = ",".join([f"{i + 1} * step_{i}" for i in range(self.context.max_steps)]) + + # this will error if they put in a bad exclusion + exclusions = "" + if getattr(self.context.funnelsFilter, "exclusions", None): + exclusions = "".join([f",-{i + 1} * exclusion_{i}" for i in range(1, self.context.max_steps)]) + + if self.context.breakdownType == BreakdownType.COHORT: + fn = "aggregate_funnel_cohort" + breakdown_prop = ", prop" + elif self._query_has_array_breakdown(): + fn = "aggregate_funnel_array" + breakdown_prop = "" + else: + fn = "aggregate_funnel" + breakdown_prop = "" + + prop_selector = "prop" if self.context.breakdown else default_breakdown_selector + prop_vals = "groupUniqArray(prop)" if self.context.breakdown else f"[{default_breakdown_selector}]" + + breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" + + # test + ''' + inner_select = parse_select( + f""" + SELECT + arrayJoin({fn}( + {self.context.max_steps}, + {self.conversion_window_limit()}, + '{breakdown_attribution_string}', + '{self.context.funnelsFilter.funnelOrderType}', + {prop_vals}, + arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) + )) as af_tuple, + af_tuple.1 as af, + af_tuple.2 as breakdown, + af_tuple.3 as timings + FROM {{inner_event_query}} + GROUP BY aggregation_target{breakdown_prop} + HAVING af >= 0 + """, + {"inner_event_query": inner_event_query}, + ) + return inner_select + ''' + + inner_select = parse_select( + f""" + SELECT + arrayJoin({fn}( + {self.context.max_steps}, + {self.conversion_window_limit()}, + '{breakdown_attribution_string}', + '{self.context.funnelsFilter.funnelOrderType}', + {prop_vals}, + arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) + )) as af_tuple, + af_tuple.1 as af, + af_tuple.2 as breakdown, + af_tuple.3 as timings + FROM {{inner_event_query}} + GROUP BY aggregation_target{breakdown_prop} + HAVING af >= 0 + """, + {"inner_event_query": inner_event_query}, + ) + + step_results = ",".join( + [f"countIf(ifNull(equals(af, {i}), 0)) AS step_{i+1}" for i in range(self.context.max_steps)] + ) + step_results2 = ",".join([f"sum(step_{i+1}) AS step_{i+1}" for i in range(self.context.max_steps)]) + + conversion_time_arrays = ",".join( + [ + f"groupArrayIf(timings[{i}], timings[{i}] > 0) AS step_{i}_conversion_times" + for i in range(1, self.context.max_steps) + ] + ) + + order_by = ",".join([f"step_{i+1} DESC" for i in reversed(range(self.context.max_steps))]) + + other_aggregation = "['Other']" if self._query_has_array_breakdown() else "'Other'" + + use_breakdown_limit = self.context.breakdown and self.context.breakdownType in [ + BreakdownType.PERSON, + BreakdownType.EVENT, + BreakdownType.GROUP, + ] + + final_prop = ( + f"if(row_number < {self.get_breakdown_limit()}, breakdown, {other_aggregation})" + if use_breakdown_limit + else "breakdown" + ) + + s = parse_select( + f""" + SELECT + {step_results}, + {conversion_time_arrays}, + rowNumberInBlock() as row_number, + {final_prop} as final_prop + FROM + {{inner_select}} + GROUP BY breakdown + ORDER BY {order_by} + """, + {"inner_select": inner_select}, + ) + + mean_conversion_times = ",".join( + [ + f"arrayMap(x -> if(isNaN(x), NULL, x), [avgArray(step_{i}_conversion_times)])[1] AS step_{i}_average_conversion_time" + 
for i in range(1, self.context.max_steps) + ] + ) + median_conversion_times = ",".join( + [ + f"arrayMap(x -> if(isNaN(x), NULL, x), [medianArray(step_{i}_conversion_times)])[1] AS step_{i}_median_conversion_time" + for i in range(1, self.context.max_steps) + ] + ) + + # Weird: unless you reference row_number in this outer block, it doesn't work correctly + s = parse_select( + f""" + SELECT + {step_results2}, + {mean_conversion_times}, + {median_conversion_times}, + groupArray(row_number) as row_number, + final_prop + FROM + {{s}} + GROUP BY final_prop + """, + {"s": s}, + ) + + return cast(ast.SelectQuery, s) diff --git a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py index 08c64720a4f9d..ca10680f9f6a1 100644 --- a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py @@ -17,7 +17,9 @@ from posthog.hogql_queries.insights.funnels.funnel_query_context import FunnelQueryContext from posthog.hogql_queries.insights.funnels.funnel_time_to_convert import FunnelTimeToConvert from posthog.hogql_queries.insights.funnels.funnel_trends import FunnelTrends +from posthog.hogql_queries.insights.funnels.funnel_trends_udf import FunnelTrendsUDF from posthog.hogql_queries.insights.funnels.utils import get_funnel_actor_class, get_funnel_order_class +from posthog.hogql_queries.legacy_compatibility.feature_flag import insight_funnels_use_udf from posthog.hogql_queries.query_runner import QueryRunner from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models import Team @@ -28,6 +30,7 @@ FunnelsQuery, FunnelsQueryResponse, HogQLQueryModifiers, + StepOrderValue, ) @@ -102,18 +105,28 @@ def calculate(self): if response.timings is not None: timings.extend(response.timings) - return FunnelsQueryResponse(results=results, timings=timings, hogql=hogql, modifiers=self.modifiers) + return FunnelsQueryResponse( + isUdf=self._use_udf, results=results, timings=timings, hogql=hogql, modifiers=self.modifiers + ) + + @cached_property + def _use_udf(self): + return self.context.funnelsFilter.useUdf or insight_funnels_use_udf(self.team) @cached_property def funnel_order_class(self): - return get_funnel_order_class(self.context.funnelsFilter)(context=self.context) + return get_funnel_order_class(self.context.funnelsFilter, use_udf=self._use_udf)(context=self.context) @cached_property def funnel_class(self): funnelVizType = self.context.funnelsFilter.funnelVizType if funnelVizType == FunnelVizType.TRENDS: - return FunnelTrends(context=self.context, **self.kwargs) + return ( + FunnelTrendsUDF(context=self.context, **self.kwargs) + if self._use_udf and self.context.funnelsFilter.funnelOrderType != StepOrderValue.UNORDERED + else FunnelTrends(context=self.context, **self.kwargs) + ) elif funnelVizType == FunnelVizType.TIME_TO_CONVERT: return FunnelTimeToConvert(context=self.context) else: diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_breakdowns_by_current_url.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_breakdowns_by_current_url.ambr deleted file mode 100644 index 142118fb7056f..0000000000000 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_breakdowns_by_current_url.ambr +++ /dev/null @@ -1,213 +0,0 @@ -# serializer version: 1 -# name: TestBreakdownsByCurrentURL.test_breakdown_by_current_url - ''' - SELECT 
[if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS value, - count(*) AS count - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), in(e.event, tuple('terminate funnel', 'watched movie')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id)) - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# --- -# name: TestBreakdownsByCurrentURL.test_breakdown_by_current_url.1 - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - if(has([['https://example.com/home'], ['https://example.com'], ['/']], prop), prop, ['Other']) AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'watched movie'), 1, 0) AS step_0, - 
if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'terminate funnel'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('terminate funnel', 'watched movie'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) - GROUP BY prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# --- -# name: TestBreakdownsByCurrentURL.test_breakdown_by_pathname - ''' - SELECT [if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS value, - count(*) AS count - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), in(e.event, tuple('terminate funnel', 'watched movie')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id)) - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# --- -# name: TestBreakdownsByCurrentURL.test_breakdown_by_pathname.1 - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - 
steps AS steps, - prop AS prop, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - if(has([['/'], ['/home']], prop), prop, ['Other']) AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'watched movie'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'terminate funnel'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [if(empty(replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', ''), ''), '[\\/?#]*$', ''))] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('terminate funnel', 'watched movie'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) - GROUP BY prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 - ''' -# 
--- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr index 4df40edbda0d5..053aabd0a64e4 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr @@ -1,4 +1,1131 @@ # serializer version: 1 +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + 
INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT 
toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([])))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), 
ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + 
if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + 
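+ -- note: prop_vals implements first-touch breakdown attribution. argMinIf takes the
+ -- actor's earliest non-null `industry` value (minimising by timestamp), and the
+ -- OVER (PARTITION BY aggregation_target) window broadcasts that single value to every
+ -- row of the actor, so each actor lands in exactly one breakdown bucket. A minimal
+ -- sketch of the same pattern, over a hypothetical table t(actor, ts, v):
+ --   SELECT actor, ts, argMinIf(v, ts, isNotNull(v)) OVER (PARTITION BY actor) AS first_v
+ --   FROM t;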
FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, 
plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + 
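+ -- note: the isNaN guards turn avg/median over an empty conversion-time array into
+ -- NULL, so a breakdown bucket with no converters reports no time rather than NaN; and
+ -- final_prop keeps a value's own label only while row_number < 26 (the top 25 values,
+ -- ranked by final-step completions), folding the long tail into 'Other' before the
+ -- outer GROUP BY final_prop re-aggregates the counts.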
if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), 
'') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', 
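+ -- note: step_N_conversion_time is only materialised when the next step completed
+ -- inside the conversion window (INTERVAL 7 DAY in these legacy actor queries; the
+ -- HogQL snapshots use a 14-day window); it is the gap in seconds between consecutive
+ -- completed steps, later aggregated by avg() and median().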
toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) 
step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + 
WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= 
toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND 
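+ -- note: this person_distinct_id2 subquery resolves each distinct_id to its latest
+ -- person: argMax(person_id, version) picks the highest-version mapping,
+ -- HAVING argMax(is_deleted, version) = 0 drops mappings whose latest state is deleted,
+ -- and the IN (...) prefilter only resolves distinct_ids actually seen in the query's
+ -- date range to keep the join small.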
distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- # name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen ''' SELECT sum(step_1) AS step_1, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr new file mode 100644 index 0000000000000..837c953bc93f5 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr @@ -0,0 +1,2066 @@ +# serializer version: 1 +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS 
BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + 
FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([])))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS 
step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE 
ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, 
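+ -- note: the ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING frame is what makes this funnel
+ -- strict: with the partition ordered by timestamp DESC, the frame holds exactly the
+ -- one event immediately after the current row in time, so latest_1 is populated only
+ -- when the very next event is the step-1 event -- any intervening event leaves it
+ -- NULL and breaks the sequence (latest_2 uses the matching 2 PRECEDING frame in the
+ -- three-step snapshots).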
+ step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + 
(SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 
00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY 
aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= 
toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS 
max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON 
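The `ROWS BETWEEN n PRECEDING AND n PRECEDING` windows over `ORDER BY timestamp DESC` in these actor queries are what makes the funnel strict: with descending order, the row n positions "preceding" is the n-th next event in time, so `latest_n` is only populated when step n+1 is exactly n events after the entry event. A hedged Python equivalent — the 14-day window mirrors the HogQL `toIntervalDay(14)`; names are illustrative:

```python
# Illustrative strict-funnel evaluation: step k+1 must be the event immediately
# after step k (no unrelated events in between), and every step must land
# within the conversion window of the entry event.
from datetime import timedelta

def strict_max_step(events, steps, window=timedelta(days=14)):
    """events: [(timestamp, event_name), ...] sorted ascending.
    steps: ordered step events, e.g. ['sign up', 'play movie', 'buy'].
    Returns the deepest step (1-based) reached by any strict attempt."""
    best = 0
    for i, (t0, name) in enumerate(events):
        if name != steps[0]:
            continue
        reached = 1
        for offset, expected in enumerate(steps[1:], start=1):
            if i + offset >= len(events):
                break
            t, actual = events[i + offset]
            if actual != expected or t - t0 > window:
                break
            reached += 1
        best = max(best, reached)
    return best
```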
e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, 
timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by 
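Throughout these snapshots, `step_n_conversion_time` is simply `dateDiff('second', latest_{n-1}, latest_n)`, computed only when step n fired inside the conversion window (`INTERVAL 7 DAY` in the legacy queries here, 14 days in the HogQL variants). A small sketch of the same guard, with illustrative names:

```python
# Illustrative: seconds between consecutive step timestamps, or None when the
# later step is missing or fell outside the conversion window.
def conversion_seconds(latest_prev, latest_cur, window):
    """latest_prev/latest_cur: datetimes (or None); window: a timedelta."""
    if latest_prev is None or latest_cur is None or latest_cur > latest_prev + window:
        return None
    return int((latest_cur - latest_prev).total_seconds())
```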
aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + 
exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + 
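The `aggregate_funnel_array(...)` call above hands the entire per-user event stream to a user-defined function: its arguments, as visible in the snapshot, are the step count, the conversion window in seconds (1209600 s is 14 days), the attribution mode, the funnel order, the candidate breakdown values, and a time-sorted array of `(timestamp, prop, matched_steps)` tuples; the result unpacks as `af` (deepest zero-based step reached), `breakdown`, and `timings`. The following is a deliberately simplified sketch of such a function — ordering and attribution modes are omitted, so this is not the real UDF:

```python
# Simplified sketch (not PostHog's actual UDF): walk the user's events in time
# order and advance the funnel whenever the next step fires within the window.
def aggregate_funnel_sketch(num_steps, window_s, rows):
    """rows: [(unix_ts, breakdown, [step_numbers]), ...], possibly unsorted.
    Returns the highest zero-based step reached (-1 if step 1 never fired).
    'strict' ordering and breakdown attribution are omitted for brevity."""
    rows = sorted(rows, key=lambda r: r[0])        # arraySort(t -> t.1, ...)
    af, entered_at = -1, None
    for ts, _breakdown, step_nums in rows:
        if 1 in step_nums and af < 0:
            af, entered_at = 0, ts                  # user enters the funnel
        elif entered_at is not None and (af + 2) in step_nums \
                and ts - entered_at <= window_s:
            af += 1                                 # advance to the next step
    return af
```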
prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + 
latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, 
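The `'first_touch'` attribution mode in these queries is implemented by the `argMinIf(prop, timestamp, ...) OVER (PARTITION BY aggregation_target)` window: every event a user emits is stamped with that user's earliest non-empty breakdown value. In Python terms (illustrative shapes):

```python
# Illustrative first-touch attribution: pick the breakdown value of the
# earliest event that actually carried one, and apply it to the whole user.
def first_touch_prop(events):
    """events: [(timestamp, prop_or_None), ...]; returns the attributed value."""
    tagged = [(ts, prop) for ts, prop in events if prop]
    return min(tagged, key=lambda e: e[0])[1] if tagged else None
```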
groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS 
step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + 
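Each event's matched steps are packed into a compact array before reaching the UDF: `multiply(n, step_{n-1})` turns the 0/1 indicator for step n into n-or-0, and the surrounding `arrayFilter` drops the zeros, so an event matching steps 1 and 3 arrives as `[1, 3]`. An equivalent sketch:

```python
# Illustrative step packing: 0/1 step indicators -> 1-based step numbers.
def encode_steps(flags):
    """flags like [1, 0, 1] (step_0..step_2 indicators) -> [1, 3]."""
    return [i + 1 for i, flag in enumerate(flags) if flag]
```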
final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + 
max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS 
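The `person_distinct_id2` subquery repeated in every person-aggregated snapshot is the standard latest-version identity resolution: `argMax(person_id, version)` picks the newest mapping per `distinct_id`, and `HAVING argMax(is_deleted, version) = 0` discards ids whose newest state is deleted. A sketch with illustrative shapes:

```python
# Illustrative identity resolution over person_distinct_id2-like rows: keep the
# highest-version mapping per distinct_id, then drop ids deleted at that version.
def resolve_person_ids(rows):
    """rows: [(distinct_id, person_id, version, is_deleted), ...]"""
    latest = {}
    for distinct_id, person_id, version, deleted in rows:
        if distinct_id not in latest or version > latest[distinct_id][0]:
            latest[distinct_id] = (version, person_id, deleted)
    return {d: p for d, (_v, p, deleted) in latest.items() if not deleted}
```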
pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, 
timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + 
prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + 
if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr index 3e8b20aa03221..db3f45444b247 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr @@ -1,4 +1,366 @@ # serializer version: 1 +# name: BaseTestFunnelTrends.test_timezones_trends + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + 
if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM 
events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_timezones_trends.1 + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + 
if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate 
+ FROM + (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfWeek(timestamp, 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id 
AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval.1 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'step one', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'step two', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'step three', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2 + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') 
>= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) )))) + WHERE step_0 = 1 ) + WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00' + GROUP BY aggregation_target, + entrance_period_start) + WHERE steps_completed >= 3 + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- # name: TestFunnelTrends.test_timezones_trends ''' SELECT fill.entrance_period_start AS entrance_period_start, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr new file mode 100644 index 0000000000000..0c52cf349a36b --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr @@ -0,0 +1,602 @@ +# serializer version: 1 +# name: BaseTestFunnelTrends.test_timezones_trends + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), 
dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_timezones_trends.1 + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), 
toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY 
person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfWeek(timestamp, 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + 
timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval.1 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + 
if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'step one', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'step two', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'step three', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2 + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) )))) + WHERE step_0 = 1 ) + WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00' + GROUP BY aggregation_target, + entrance_period_start) + WHERE steps_completed >= 3 + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelTrendsUDF.test_timezones_trends + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count, + countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.breakdown AS prop + FROM + (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, 
NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') AS data + RIGHT OUTER JOIN + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start) + GROUP BY entrance_period_start, + data.breakdown + ORDER BY entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsUDF.test_timezones_trends.1 + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count, + countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.breakdown AS prop + FROM + (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + toTimeZone(af_tuple.1, 'US/Pacific') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + 
if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') AS data + RIGHT OUTER JOIN + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start) + GROUP BY entrance_period_start, + data.breakdown + ORDER BY entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsUDF.test_week_interval + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count, + countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.breakdown AS prop + FROM + (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfWeek(timestamp, 0), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT 
argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') AS data + RIGHT OUTER JOIN + (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start) + GROUP BY entrance_period_start, + data.breakdown + ORDER BY entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsUDF.test_week_interval.1 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as 
aggregation_target, + pdi.person_id as person_id, + if(event = 'step one', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'step two', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'step three', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2 + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) )))) + WHERE step_0 = 1 ) + WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00' + GROUP BY aggregation_target, + entrance_period_start) + WHERE steps_completed >= 3 + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr new file mode 100644 index 0000000000000..e8487d4a42a9e --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr @@ -0,0 +1,1887 @@ +# serializer version: 1 +# name: TestFOSSFunnelUDF.test_funnel_conversion_window_seconds + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(3, 15, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + 
if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_conversion_window_seconds.1 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS 
aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_events_with_person_on_events_v2 + ''' + + SELECT DISTINCT person_id + FROM events + WHERE team_id = 2 + AND 
distinct_id = 'stopped_after_pay' + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_events_with_person_on_events_v2.1 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)button(\\.|$|;|:)'), arrayExists(x -> ifNull(equals(x, 'Pay $10'), 0), e.elements_chain_texts)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)a(\\.|$|;|:)'), equals(e.elements_chain_href, '/movie')), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2011-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2012-01-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$autocapture', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + 
max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_precalculated_cohort_step_filter + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = NULL + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_precalculated_cohort_step_filter.1 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = 0 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_precalculated_cohort_step_filter.2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0)), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'paid'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + 
max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING 
and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups.1 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 
UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), 
ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [1, 2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups.2 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + 
(SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT 
argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_property_groups.3 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) 
OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + INNER JOIN + (SELECT person.id AS id, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age + FROM person + WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), 
toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_static_cohort_step_filter + ''' + + SELECT count(DISTINCT person_id) + FROM person_static_cohort + WHERE team_id = 2 + AND cohort_id = 2 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_with_static_cohort_step_filter.1 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 2), equals(person_static_cohort.cohort_id, 2)))), 0)), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'paid'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, 
+ allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_timezones + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'paid'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), 
breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + 
rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'step_1', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT 
countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS 
step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + 
max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 
'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING 
ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + 
min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: 
TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 
'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, 
null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) 
step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel.py b/posthog/hogql_queries/insights/funnels/test/test_funnel.py index 77033100a5009..575f76992ebf3 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel.py @@ -1,6 +1,7 @@ import uuid from datetime import datetime from typing import cast +from unittest.mock import patch, Mock from django.test import override_settings from freezegun import 
freeze_time @@ -68,6 +69,7 @@ def _create_action(**kwargs): return action +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFunnelBreakdown( ClickhouseTestMixin, funnel_breakdown_test_factory( # type: ignore @@ -81,6 +83,7 @@ class TestFunnelBreakdown( pass +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFunnelGroupBreakdown( ClickhouseTestMixin, funnel_breakdown_group_test_factory( # type: ignore @@ -91,6 +94,7 @@ class TestFunnelGroupBreakdown( pass +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFunnelConversionTime( ClickhouseTestMixin, funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelActors), # type: ignore @@ -2460,10 +2464,14 @@ def test_advanced_funnel_exclusions_between_steps(self): query = cast(FunnelsQuery, filter_to_query(filters)) results = FunnelsQueryRunner(query=query, team=self.team).calculate().results - self.assertEqual(results[0]["name"], "user signed up") - self.assertEqual(results[0]["count"], 0) - - self.assertEqual(results[4]["count"], 0) + # There should be no events. UDF funnels return an empty array and say "no events"; + # old-style funnels return a count of 0 + try: + self.assertEqual([], results) + except AssertionError: + self.assertEqual(results[0]["name"], "user signed up") + self.assertEqual(results[0]["count"], 0) + self.assertEqual(results[4]["count"], 0) self.assertCountEqual(self._get_actor_ids_at_step(filters, 1), []) @@ -3290,7 +3298,7 @@ def test_timezones(self): # event _create_person(distinct_ids=["user_1"], team_id=self.team.pk) - #  this event shouldn't appear as in US/Pacific this would be the previous day + # this event shouldn't appear as in US/Pacific this would be the previous day _create_event( team=self.team, event="user signed up", @@ -3300,9 +3308,13 @@ def test_timezones(self): query = cast(FunnelsQuery, filter_to_query(filters)) results = FunnelsQueryRunner(query=query, team=self.team).calculate().results - - self.assertEqual(results[0]["name"], "user signed up") - self.assertEqual(results[0]["count"], 0) + # There should be no events.
UDF funnels return an empty array and say "no events"; + # old-style funnels return a count of 0 + try: + self.assertEqual([], results) + except AssertionError: + self.assertEqual(results[0]["name"], "user signed up") + self.assertEqual(results[0]["count"], 0) def test_funnel_with_sampling(self): action_play_movie = Action.objects.create( @@ -4095,6 +4107,7 @@ def test_first_time_for_user_funnel_multiple_ids(self): return TestGetFunnel +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFOSSFunnel(funnel_test_factory(Funnel, _create_event, _create_person)): # type: ignore maxDiff = None diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py index e146547285fae..7be35d81324d1 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py @@ -1,5 +1,6 @@ from datetime import datetime from typing import cast +from unittest.mock import Mock, patch from posthog.constants import INSIGHT_FUNNELS, FunnelOrderType from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner @@ -38,7 +39,7 @@ def _create_action(**kwargs): return action -class TestFunnelStrictStepsBreakdown( +class BaseTestFunnelStrictStepsBreakdown( ClickhouseTestMixin, funnel_breakdown_test_factory( # type: ignore FunnelOrderType.STRICT, @@ -178,7 +179,7 @@ def test_strict_breakdown_events_with_multiple_properties(self): self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Safari"]), [people["person2"].uuid]) -class TestStrictFunnelGroupBreakdown( +class BaseTestStrictFunnelGroupBreakdown( ClickhouseTestMixin, funnel_breakdown_group_test_factory( # type: ignore FunnelOrderType.STRICT, @@ -188,7 +189,7 @@ class TestStrictFunnelGroupBreakdown( pass -class TestFunnelStrictStepsConversionTime( +class BaseTestFunnelStrictStepsConversionTime( ClickhouseTestMixin, funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelStrictActors), # type: ignore ): @@ -196,7 +197,7 @@ class TestFunnelStrictStepsConversionTime( pass -class TestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest): +class BaseTestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest): maxDiff = None def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None): @@ -624,3 +625,23 @@ def test_basic_strict_funnel_conversion_times(self): self._get_actor_ids_at_step(filters, 3), [person3_stopped_after_insight_view.uuid], ) + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictStepsBreakdown(BaseTestFunnelStrictStepsBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictSteps(BaseTestFunnelStrictSteps): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestStrictFunnelGroupBreakdown(BaseTestStrictFunnelGroupBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictStepsConversionTime(BaseTestFunnelStrictStepsConversionTime): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py new file mode 100644 index 0000000000000..178e329d3748e --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py @@ -0,0 +1,28 @@ +from unittest.mock import Mock, patch +
+from posthog.hogql_queries.insights.funnels.test.test_funnel_strict import ( + BaseTestFunnelStrictStepsBreakdown, + BaseTestFunnelStrictSteps, + BaseTestStrictFunnelGroupBreakdown, + BaseTestFunnelStrictStepsConversionTime, +) + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictStepsBreakdown(BaseTestFunnelStrictStepsBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictSteps(BaseTestFunnelStrictSteps): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestStrictFunnelGroupBreakdown(BaseTestStrictFunnelGroupBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictStepsConversionTime(BaseTestFunnelStrictStepsConversionTime): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py index 7b86e305ca89e..55a1f8660d0ca 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py @@ -1,5 +1,6 @@ from datetime import date, datetime, timedelta from typing import cast +from unittest.mock import patch, Mock from zoneinfo import ZoneInfo from freezegun.api import freeze_time @@ -23,7 +24,7 @@ FORMAT_TIME_DAY_END = "%Y-%m-%d 23:59:59" -class TestFunnelTrends(ClickhouseTestMixin, APIBaseTest): +class BaseTestFunnelTrends(ClickhouseTestMixin, APIBaseTest): maxDiff = None def _get_actors_at_step(self, filter, entrance_period_start, drop_off): @@ -130,43 +131,43 @@ def test_only_one_user_reached_one_step(self): [ { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 1, "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, @@ -1611,3 +1612,8 @@ def test_parses_breakdown_correctly(self): results = FunnelsQueryRunner(query=query, team=self.team).calculate().results self.assertEqual(len(results), 1) + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelTrends(BaseTestFunnelTrends): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py new file mode 100644 index 0000000000000..6965222b749f5 --- 
/dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py @@ -0,0 +1,8 @@ +from unittest.mock import patch, Mock + +from posthog.hogql_queries.insights.funnels.test.test_funnel_trends import BaseTestFunnelTrends + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelTrendsUDF(BaseTestFunnelTrends): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_udf.py new file mode 100644 index 0000000000000..2844d4b7792d4 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_udf.py @@ -0,0 +1,66 @@ +from unittest.mock import patch, Mock + +from posthog.constants import FunnelOrderType +from posthog.hogql_queries.insights.funnels import Funnel +from posthog.hogql_queries.insights.funnels.test.breakdown_cases import ( + funnel_breakdown_test_factory, + funnel_breakdown_group_test_factory, +) +from posthog.models import Action +from posthog.queries.funnels import ClickhouseFunnelActors +from posthog.test.base import ( + ClickhouseTestMixin, + _create_event, + _create_person, +) +from test_funnel import funnel_test_factory +from posthog.hogql_queries.insights.funnels.test.conversion_time_cases import ( + funnel_conversion_time_test_factory, +) + + +def _create_action(**kwargs): + team = kwargs.pop("team") + name = kwargs.pop("name") + properties = kwargs.pop("properties", {}) + action = Action.objects.create(team=team, name=name, steps_json=[{"event": name, "properties": properties}]) + return action + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelBreakdownUDF( + ClickhouseTestMixin, + funnel_breakdown_test_factory( # type: ignore + FunnelOrderType.ORDERED, + ClickhouseFunnelActors, + _create_action, + _create_person, + ), +): + maxDiff = None + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelGroupBreakdownUDF( + ClickhouseTestMixin, + funnel_breakdown_group_test_factory( # type: ignore + FunnelOrderType.ORDERED, + ClickhouseFunnelActors, + ), +): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFOSSFunnelUDF(funnel_test_factory(Funnel, _create_event, _create_person)): # type: ignore + maxDiff = None + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelConversionTimeUDF( + ClickhouseTestMixin, + funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelActors), # type: ignore +): + maxDiff = None + pass diff --git a/posthog/hogql_queries/insights/funnels/utils.py b/posthog/hogql_queries/insights/funnels/utils.py index d5c968a913494..31c31fa9b361d 100644 --- a/posthog/hogql_queries/insights/funnels/utils.py +++ b/posthog/hogql_queries/insights/funnels/utils.py @@ -5,15 +5,18 @@ from rest_framework.exceptions import ValidationError -def get_funnel_order_class(funnelsFilter: FunnelsFilter): +def get_funnel_order_class(funnelsFilter: FunnelsFilter, use_udf=False): from posthog.hogql_queries.insights.funnels import ( Funnel, + FunnelUDF, FunnelStrict, FunnelUnordered, ) if funnelsFilter.funnelOrderType == StepOrderValue.UNORDERED: return FunnelUnordered + elif use_udf: + return FunnelUDF elif funnelsFilter.funnelOrderType == StepOrderValue.STRICT: return FunnelStrict return Funnel diff --git a/posthog/hogql_queries/insights/utils/entities.py b/posthog/hogql_queries/insights/utils/entities.py index 794ce6170da11..b14653b338035 100644 
--- a/posthog/hogql_queries/insights/utils/entities.py +++ b/posthog/hogql_queries/insights/utils/entities.py @@ -1,6 +1,7 @@ from posthog.schema import ( ActionsNode, CohortPropertyFilter, + DataWarehouseNode, EmptyPropertyFilter, EventsNode, FunnelExclusionActionsNode, @@ -9,16 +10,16 @@ ) from posthog.types import AnyPropertyFilter, EntityNode, ExclusionEntityNode from collections import Counter -from rest_framework.exceptions import ValidationError def is_equal_type(a: EntityNode, b: EntityNode | ExclusionEntityNode) -> bool: if isinstance(a, EventsNode): return isinstance(b, EventsNode) or isinstance(b, FunnelExclusionEventsNode) - elif isinstance(a, ActionsNode): + if isinstance(a, ActionsNode): return isinstance(b, ActionsNode) or isinstance(b, FunnelExclusionActionsNode) - else: - raise ValidationError(detail=f"Type comparision for {type(a)} and {type(b)} not implemented.") + if isinstance(a, DataWarehouseNode): + return isinstance(b, DataWarehouseNode) + raise ValueError(f"Type comparison for {type(a)} and {type(b)} not implemented.") def is_equal(a: EntityNode, b: EntityNode | ExclusionEntityNode, compare_properties=True) -> bool: @@ -44,6 +45,14 @@ ): return False + # different data source + if ( + isinstance(a, DataWarehouseNode) + and isinstance(b, DataWarehouseNode) + and (a.id != b.id or a.id_field != b.id_field) + ): + return False + # different properties if compare_properties and _sorted_property_reprs(a.properties) != _sorted_property_reprs(b.properties): return False diff --git a/posthog/hogql_queries/insights/utils/utils.py b/posthog/hogql_queries/insights/utils/utils.py index 747d7e2b6ca5a..15689aba7927e 100644 --- a/posthog/hogql_queries/insights/utils/utils.py +++ b/posthog/hogql_queries/insights/utils/utils.py @@ -10,3 +10,8 @@ def get_start_of_interval_hogql(interval: str, *, team: Team, source: Optional[a if trunc_func == "toStartOfWeek": trunc_func_args.append(ast.Constant(value=int((WeekStartDay(team.week_start_day or 0)).clickhouse_mode))) return ast.Call(name=trunc_func, args=trunc_func_args) + + +def get_start_of_interval_hogql_str(interval: str, *, team: Team, source: str) -> str: + trunc_func = get_trunc_func_ch(interval) + return f"{trunc_func}({source}{f', {int((WeekStartDay(team.week_start_day or 0)).clickhouse_mode)}' if trunc_func == 'toStartOfWeek' else ''})" diff --git a/posthog/hogql_queries/legacy_compatibility/feature_flag.py b/posthog/hogql_queries/legacy_compatibility/feature_flag.py index 6b4a6e0e22a0b..697e479f6031f 100644 --- a/posthog/hogql_queries/legacy_compatibility/feature_flag.py +++ b/posthog/hogql_queries/legacy_compatibility/feature_flag.py @@ -21,3 +21,24 @@ def hogql_insights_replace_filters(team: Team) -> bool: only_evaluate_locally=True, send_feature_flag_events=False, ) + + +def insight_funnels_use_udf(team: Team) -> bool: + return posthoganalytics.feature_enabled( + "insight-funnels-use-udf", + str(team.uuid), + groups={ + "organization": str(team.organization_id), + "project": str(team.id), + }, + group_properties={ + "organization": { + "id": str(team.organization_id), + }, + "project": { + "id": str(team.id), + }, + }, + only_evaluate_locally=False, + send_feature_flag_events=False, + ) diff --git a/posthog/migrations/0466_alter_externaldatasource_source_type.py b/posthog/migrations/0466_alter_externaldatasource_source_type.py new file mode 100644 index 0000000000000..4a4b2f522f68b --- /dev/null +++ 
b/posthog/migrations/0466_alter_externaldatasource_source_type.py @@ -0,0 +1,30 @@ +# Generated by Django 4.2.15 on 2024-09-05 10:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0465_datawarehouse_stripe_account"), + ] + + operations = [ + migrations.AlterField( + model_name="externaldatasource", + name="source_type", + field=models.CharField( + choices=[ + ("Stripe", "Stripe"), + ("Hubspot", "Hubspot"), + ("Postgres", "Postgres"), + ("Zendesk", "Zendesk"), + ("Snowflake", "Snowflake"), + ("Salesforce", "Salesforce"), + ("MySQL", "MySQL"), + ("MSSQL", "MSSQL"), + ("Vitally", "Vitally"), + ], + max_length=128, + ), + ), + ] diff --git a/posthog/models/utils.py b/posthog/models/utils.py index e9498ce32990e..3dc7e83940b9a 100644 --- a/posthog/models/utils.py +++ b/posthog/models/utils.py @@ -164,7 +164,7 @@ class Meta: class UUIDModel(models.Model): """Base Django Model with default autoincremented ID field replaced with UUIDT.""" - id = models.UUIDField(primary_key=True, default=UUIDT, editable=False) + id: models.UUIDField = models.UUIDField(primary_key=True, default=UUIDT, editable=False) class Meta: abstract = True diff --git a/posthog/plugins/plugin_server_api.py b/posthog/plugins/plugin_server_api.py index fd18e49d16ed7..ef6b312ba874c 100644 --- a/posthog/plugins/plugin_server_api.py +++ b/posthog/plugins/plugin_server_api.py @@ -4,6 +4,7 @@ import structlog from posthog.redis import get_client from posthog.settings import CDP_FUNCTION_EXECUTOR_API_URL, PLUGINS_RELOAD_PUBSUB_CHANNEL, PLUGINS_RELOAD_REDIS_URL +from posthog.models.utils import UUIDT logger = structlog.get_logger(__name__) @@ -62,7 +63,7 @@ def populate_plugin_capabilities_on_workers(plugin_id: str): def create_hog_invocation_test( team_id: int, - hog_function_id: str, + hog_function_id: UUIDT, globals: dict, configuration: dict, mock_async_functions: bool, @@ -78,13 +79,13 @@ def create_hog_invocation_test( ) -def get_hog_function_status(team_id: int, hog_function_id: str) -> requests.Response: +def get_hog_function_status(team_id: int, hog_function_id: UUIDT) -> requests.Response: return requests.get( CDP_FUNCTION_EXECUTOR_API_URL + f"/api/projects/{team_id}/hog_functions/{hog_function_id}/status" ) -def patch_hog_function_status(team_id: int, hog_function_id: str, state: int) -> requests.Response: +def patch_hog_function_status(team_id: int, hog_function_id: UUIDT, state: int) -> requests.Response: return requests.patch( CDP_FUNCTION_EXECUTOR_API_URL + f"/api/projects/{team_id}/hog_functions/{hog_function_id}/status", json={"state": state}, diff --git a/posthog/schema.py b/posthog/schema.py index f99badf4c1c84..aaf418fae271b 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -1768,6 +1768,7 @@ class CachedFunnelsQueryResponse(BaseModel): description="Query error. Returned only if 'explain' or `modifiers.debug` is true. 
Throws an error otherwise.", ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + isUdf: Optional[bool] = None is_cached: bool last_refresh: AwareDatetime modifiers: Optional[HogQLQueryModifiers] = Field( @@ -2474,6 +2475,16 @@ class EventsQueryResponse(BaseModel): types: list[str] +class BreakdownFilter1(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + breakdown_hide_other_aggregation: Optional[bool] = None + breakdown_histogram_bin_count: Optional[int] = None + breakdown_limit: Optional[int] = None + breakdowns: Optional[list[Breakdown]] = Field(default=None, max_length=3) + + class FeaturePropertyFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -2540,6 +2551,7 @@ class FunnelsQueryResponse(BaseModel): description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + isUdf: Optional[bool] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) @@ -3319,6 +3331,7 @@ class QueryResponseAlternative25(BaseModel): description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", ) hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") + isUdf: Optional[bool] = None modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) @@ -4419,6 +4432,88 @@ class SessionsTimelineQuery(BaseModel): response: Optional[SessionsTimelineQueryResponse] = None +class AIActionsNode(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + custom_name: Optional[str] = None + event: Optional[str] = Field(default=None, description="The event or `null` for all events.") + fixedProperties: Optional[ + list[ + Union[ + EventPropertyFilter, + PersonPropertyFilter, + SessionPropertyFilter, + CohortPropertyFilter, + GroupPropertyFilter, + FeaturePropertyFilter, + ] + ] + ] = None + kind: Literal["EventsNode"] = "EventsNode" + math: Optional[ + Union[BaseMathType, PropertyMathType, CountPerActorMathType, Literal["unique_group"], Literal["hogql"]] + ] = None + math_group_type_index: Optional[MathGroupTypeIndex] = None + math_property: Optional[str] = None + name: Optional[str] = None + orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by") + properties: Optional[ + list[ + Union[ + EventPropertyFilter, + PersonPropertyFilter, + SessionPropertyFilter, + CohortPropertyFilter, + GroupPropertyFilter, + FeaturePropertyFilter, + ] + ] + ] = None + response: Optional[dict[str, Any]] = None + + +class AIEventsNode(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + custom_name: Optional[str] = None + event: Optional[str] = Field(default=None, description="The event or `null` for all events.") + fixedProperties: Optional[ + list[ + Union[ + EventPropertyFilter, + PersonPropertyFilter, + SessionPropertyFilter, + CohortPropertyFilter, + GroupPropertyFilter, + FeaturePropertyFilter, + ] + ] + ] = None + kind: Literal["EventsNode"] = "EventsNode" + math: Optional[ + Union[BaseMathType, PropertyMathType, CountPerActorMathType, Literal["unique_group"], Literal["hogql"]] + ] = None + math_group_type_index: Optional[MathGroupTypeIndex] = None + math_property: Optional[str] = None + name: Optional[str] = None + orderBy: Optional[list[str]] = Field(default=None, description="Columns to order by") + 
properties: Optional[ + list[ + Union[ + EventPropertyFilter, + PersonPropertyFilter, + SessionPropertyFilter, + CohortPropertyFilter, + GroupPropertyFilter, + FeaturePropertyFilter, + ] + ] + ] = None + response: Optional[dict[str, Any]] = None + + class ActionsNode(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4499,6 +4594,39 @@ class DatabaseSchemaViewTable(BaseModel): type: Literal["view"] = "view" +class ExperimentalAITrendsQuery(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation") + breakdownFilter: Optional[BreakdownFilter1] = Field(default=None, description="Breakdown of the events and actions") + compareFilter: Optional[CompareFilter] = Field(default=None, description="Compare to date range") + dateRange: Optional[InsightDateRange] = Field(default=None, description="Date range for the query") + filterTestAccounts: Optional[bool] = Field( + default=False, description="Exclude internal and test users by applying the respective filters" + ) + interval: Optional[IntervalType] = Field( + default=IntervalType.DAY, + description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`", + ) + kind: Literal["TrendsQuery"] = "TrendsQuery" + properties: Optional[ + list[ + Union[ + EventPropertyFilter, + PersonPropertyFilter, + SessionPropertyFilter, + CohortPropertyFilter, + GroupPropertyFilter, + FeaturePropertyFilter, + ] + ] + ] = Field(default=[], description="Property filters for all series") + samplingFactor: Optional[float] = Field(default=None, description="Sampling rate") + series: list[Union[AIEventsNode, AIActionsNode]] = Field(..., description="Events and actions to include") + trendsFilter: Optional[TrendsFilter] = Field(default=None, description="Properties specific to the trends insight") + + class FunnelsFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4517,6 +4645,7 @@ class FunnelsFilter(BaseModel): funnelWindowIntervalUnit: Optional[FunnelConversionWindowTimeUnit] = FunnelConversionWindowTimeUnit.DAY hiddenLegendBreakdowns: Optional[list[str]] = None layout: Optional[FunnelLayout] = FunnelLayout.VERTICAL + useUdf: Optional[bool] = None class HasPropertiesNode(RootModel[Union[EventsNode, EventsQuery, PersonsNode]]): diff --git a/posthog/tasks/email.py b/posthog/tasks/email.py index 925dce44493d1..ee9229109b832 100644 --- a/posthog/tasks/email.py +++ b/posthog/tasks/email.py @@ -20,6 +20,7 @@ Team, User, ) +from posthog.models.utils import UUIDT from posthog.user_permissions import UserPermissions logger = structlog.get_logger(__name__) @@ -159,7 +160,7 @@ def send_fatal_plugin_error( def send_batch_export_run_failure( - batch_export_run_id: str, + batch_export_run_id: UUIDT, ) -> None: logger = structlog.get_logger(__name__) diff --git a/posthog/tasks/scheduled.py b/posthog/tasks/scheduled.py index df765689fb362..149a00eb11831 100644 --- a/posthog/tasks/scheduled.py +++ b/posthog/tasks/scheduled.py @@ -15,7 +15,6 @@ calculate_decide_usage, calculate_replay_embeddings, check_async_migration_health, - check_data_import_row_limits, check_flags_to_rollback, clean_stale_partials, clear_clickhouse_deleted_person, @@ -315,13 +314,7 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None: name="delete expired exported assets", ) - sender.add_periodic_task( - crontab(minute="*/20"), - check_data_import_row_limits.s(), - name="check external data rows synced", - ) # Every 20 minutes try to retrieve and calculate 
total rows synced in period - sender.add_periodic_task( crontab(minute="*/20"), calculate_external_data_rows_synced.s(), diff --git a/posthog/tasks/stop_surveys_reached_target.py b/posthog/tasks/stop_surveys_reached_target.py index 5432a45d84b19..dc8f99ee3cff0 100644 --- a/posthog/tasks/stop_surveys_reached_target.py +++ b/posthog/tasks/stop_surveys_reached_target.py @@ -1,16 +1,16 @@ from itertools import groupby from django.db.models import Q from django.utils import timezone -from uuid import UUID from datetime import datetime from posthog.clickhouse.client.connection import Workload from posthog.client import sync_execute from posthog.models import Survey +from posthog.models.utils import UUIDT def _get_surveys_response_counts( - surveys_ids: list[UUID], team_id: int, earliest_survey_creation_date: datetime + surveys_ids: list[UUIDT], team_id: int, earliest_survey_creation_date: datetime ) -> dict[str, int]: data = sync_execute( """ diff --git a/posthog/tasks/tasks.py b/posthog/tasks/tasks.py index 5c4b085072287..68cad94b93d81 100644 --- a/posthog/tasks/tasks.py +++ b/posthog/tasks/tasks.py @@ -8,6 +8,7 @@ from django.utils import timezone from prometheus_client import Gauge from redis import Redis +import requests from structlog import get_logger from posthog.clickhouse.client.limit import limit_concurrency, CeleryConcurrencyLimitExceeded @@ -180,11 +181,7 @@ def pg_row_count() -> None: "log_entries", ] -HEARTBEAT_EVENT_TO_INGESTION_LAG_METRIC = { - "heartbeat": "ingestion", - "heartbeat_buffer": "ingestion_buffer", - "heartbeat_api": "ingestion_api", -} +HEARTBEAT_EVENT_TO_INGESTION_LAG_METRIC = {"heartbeat": "ingestion", "$heartbeat": "ingestion_api"} @shared_task(ignore_result=True) @@ -192,9 +189,8 @@ def ingestion_lag() -> None: from statshog.defaults.django import statsd from posthog.client import sync_execute + from posthog.models.team.team import Team - # Requires https://github.com/PostHog/posthog-heartbeat-plugin to be enabled on team 2 - # Note that it runs every minute, and we compare it with now(), so there's up to 60s delay query = """ SELECT event, date_diff('second', max(timestamp), now()) FROM events @@ -204,11 +200,13 @@ def ingestion_lag() -> None: GROUP BY event """ + team_ids = settings.INGESTION_LAG_METRIC_TEAM_IDS + try: results = sync_execute( query, { - "team_ids": settings.INGESTION_LAG_METRIC_TEAM_IDS, + "team_ids": team_ids, "events": list(HEARTBEAT_EVENT_TO_INGESTION_LAG_METRIC.keys()), }, ) @@ -226,6 +224,17 @@ def ingestion_lag() -> None: except: pass + for team in Team.objects.filter(pk__in=team_ids): + requests.post( + settings.SITE_URL + "/e", + json={ + "event": "$heartbeat", + "distinct_id": "posthog-celery-heartbeat", + "token": team.api_token, + "properties": {"$timestamp": timezone.now().isoformat()}, + }, + ) + @shared_task(ignore_result=True, queue=CeleryQueue.SESSION_REPLAY_GENERAL.value) def replay_count_metrics() -> None: @@ -890,16 +899,6 @@ def ee_persist_finished_recordings() -> None: persist_finished_recordings() -@shared_task(ignore_result=True) -def check_data_import_row_limits() -> None: - try: - from posthog.tasks.warehouse import check_synced_row_limits - except ImportError: - pass - else: - check_synced_row_limits() - - # this task runs a CH query and triggers other tasks # it can run on the default queue @shared_task(ignore_result=True) diff --git a/posthog/tasks/test/test_warehouse.py b/posthog/tasks/test/test_warehouse.py index 8a83cad234d88..c6150ef565336 100644 --- a/posthog/tasks/test/test_warehouse.py +++ 
b/posthog/tasks/test/test_warehouse.py @@ -1,12 +1,11 @@ from posthog.test.base import APIBaseTest from unittest.mock import patch, MagicMock from posthog.tasks.warehouse import ( - check_synced_row_limits_of_team, capture_workspace_rows_synced_by_team, validate_data_warehouse_table_columns, capture_external_data_rows_synced, ) -from posthog.warehouse.models import ExternalDataSource, ExternalDataJob, ExternalDataSchema +from posthog.warehouse.models import ExternalDataSource, ExternalDataJob from freezegun import freeze_time import datetime @@ -14,107 +13,6 @@ class TestWarehouse(APIBaseTest): - @patch("posthog.tasks.warehouse.MONTHLY_LIMIT", 100) - @patch("posthog.tasks.warehouse.cancel_external_data_workflow") - @patch("posthog.tasks.warehouse.pause_external_data_schedule") - @patch("ee.billing.quota_limiting.list_limited_team_attributes") - def test_check_synced_row_limits_of_team_monthly_limit( - self, - list_limited_team_attributes_mock: MagicMock, - pause_schedule_mock: MagicMock, - cancel_workflow_mock: MagicMock, - ) -> None: - list_limited_team_attributes_mock.return_value = [] - - source = ExternalDataSource.objects.create( - source_id="test_id", - connection_id="fake connectino_id", - destination_id="fake destination_id", - team=self.team, - status="Running", - source_type="Stripe", - ) - - schema = ExternalDataSchema.objects.create( - source=source, - name="test_schema", - team=self.team, - status="Running", - ) - - job = ExternalDataJob.objects.create( - pipeline=source, - workflow_id="fake_workflow_id", - team=self.team, - status="Running", - rows_synced=100000, - schema=schema, - ) - - check_synced_row_limits_of_team(self.team.pk) - - source.refresh_from_db() - self.assertEqual(source.status, ExternalDataSource.Status.PAUSED) - - schema.refresh_from_db() - self.assertEqual(schema.status, ExternalDataSchema.Status.PAUSED) - - job.refresh_from_db() - self.assertEqual(job.status, ExternalDataJob.Status.CANCELLED) - - self.assertEqual(pause_schedule_mock.call_count, 1) - self.assertEqual(cancel_workflow_mock.call_count, 1) - - @patch("posthog.tasks.warehouse.cancel_external_data_workflow") - @patch("posthog.tasks.warehouse.pause_external_data_schedule") - @patch("ee.billing.quota_limiting.list_limited_team_attributes") - def test_check_synced_row_limits_of_team( - self, - list_limited_team_attributes_mock: MagicMock, - pause_schedule_mock: MagicMock, - cancel_workflow_mock: MagicMock, - ) -> None: - list_limited_team_attributes_mock.return_value = [self.team.api_token] - - source = ExternalDataSource.objects.create( - source_id="test_id", - connection_id="fake connectino_id", - destination_id="fake destination_id", - team=self.team, - status="Running", - source_type="Stripe", - ) - - schema = ExternalDataSchema.objects.create( - source=source, - name="test_schema", - team=self.team, - status="Running", - ) - - job = ExternalDataJob.objects.create( - pipeline=source, - workflow_id="fake_workflow_id", - team=self.team, - status="Running", - rows_synced=100000, - schema=schema, - ) - - check_synced_row_limits_of_team(self.team.pk) - - source.refresh_from_db() - self.assertEqual(source.status, ExternalDataSource.Status.PAUSED) - - schema.refresh_from_db() - self.assertEqual(schema.status, ExternalDataSchema.Status.PAUSED) - - job.refresh_from_db() - self.assertEqual(job.status, ExternalDataJob.Status.CANCELLED) - - self.assertEqual(pause_schedule_mock.call_count, 1) - self.assertEqual(cancel_workflow_mock.call_count, 1) - @patch("posthog.tasks.warehouse.get_ph_client") @patch( 
"posthog.tasks.warehouse.DEFAULT_DATE_TIME", diff --git a/posthog/tasks/update_survey_iteration.py b/posthog/tasks/update_survey_iteration.py index 5218a99010252..2c6096b19261b 100644 --- a/posthog/tasks/update_survey_iteration.py +++ b/posthog/tasks/update_survey_iteration.py @@ -58,7 +58,7 @@ def _get_targeting_flag(survey: Survey) -> ForeignKey | ForeignKey | Any: team=survey.team, created_by=survey.created_by, active=True, - key=survey.id, + key=str(survey.id), filters=user_submitted_dismissed_filter, ) new_flag.save() diff --git a/posthog/tasks/warehouse.py b/posthog/tasks/warehouse.py index 518d4fc3027c4..3e75017ca81b4 100644 --- a/posthog/tasks/warehouse.py +++ b/posthog/tasks/warehouse.py @@ -3,20 +3,13 @@ import structlog from celery import shared_task -from posthog.warehouse.data_load.service import ( - cancel_external_data_workflow, - pause_external_data_schedule, - unpause_external_data_schedule, -) -from posthog.warehouse.models import ExternalDataJob, ExternalDataSource, ExternalDataSchema +from posthog.warehouse.models import ExternalDataJob, ExternalDataSource from posthog.ph_client import get_ph_client from posthog.models import Team from django.db.models import Q logger = structlog.get_logger(__name__) -MONTHLY_LIMIT = 500_000_000 - # TODO: adjust to whenever billing officially starts DEFAULT_DATE_TIME = datetime.datetime(2024, 6, 1, tzinfo=datetime.UTC) @@ -32,89 +25,6 @@ def capture_external_data_rows_synced() -> None: capture_workspace_rows_synced_by_team.delay(team_id) -def check_synced_row_limits() -> None: - team_ids = ExternalDataSource.objects.values_list("team", flat=True) - for team_id in team_ids: - check_synced_row_limits_of_team.delay(team_id) - - -@shared_task(ignore_result=True) -def check_synced_row_limits_of_team(team_id: int) -> None: - logger.info("Checking synced row limits of team", team_id=team_id) - team_model = Team.objects.get(pk=team_id) - - from ee.billing.quota_limiting import list_limited_team_attributes, QuotaResource, QuotaLimitingCaches - - # TODO: temp workaround. 
Should use team ids directly instead of tokens - limited_team_tokens_rows_synced = list_limited_team_attributes( - QuotaResource.ROWS_SYNCED, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY - ) - - # TODO: Remove once billing logic is fully through - start_of_month = datetime.datetime.now().replace(day=1, hour=0, minute=0, second=0, microsecond=0) - rows_synced_list = [ - x - for x in ExternalDataJob.objects.filter(team_id=team_id, created_at__gte=start_of_month).values_list( - "rows_synced", flat=True - ) - if x - ] - total_rows_synced = sum(rows_synced_list) - - if team_model.api_token in limited_team_tokens_rows_synced or total_rows_synced > MONTHLY_LIMIT: - # stop active jobs - running_jobs = ExternalDataJob.objects.filter(team_id=team_id, status=ExternalDataJob.Status.RUNNING) - for job in running_jobs: - try: - cancel_external_data_workflow(job.workflow_id) - except Exception as e: - logger.exception("Could not cancel external data workflow", exc_info=e) - - try: - pause_external_data_schedule(str(job.pipeline.id)) - except Exception as e: - logger.exception("Could not pause external data schedule", exc_info=e) - - job.status = ExternalDataJob.Status.CANCELLED - job.save() - - job.pipeline.status = ExternalDataSource.Status.PAUSED - job.pipeline.save() - - if job.schema: - job.schema.status = ExternalDataSchema.Status.PAUSED - job.schema.save() - - # pause active schemas - all_schemas = ExternalDataSchema.objects.filter( - team_id=team_id, status__in=[ExternalDataSchema.Status.COMPLETED, ExternalDataSchema.Status.RUNNING] - ) - for schema in all_schemas: - try: - pause_external_data_schedule(str(schema.id)) - except Exception as e: - logger.exception("Could not pause external data schedule", exc_info=e) - - schema.status = ExternalDataSchema.Status.PAUSED - schema.save() - - schema.source.status = ExternalDataSource.Status.PAUSED - schema.source.save() - else: - all_schemas = ExternalDataSchema.objects.filter(team_id=team_id, status=ExternalDataSchema.Status.PAUSED) - for schema in all_schemas: - try: - unpause_external_data_schedule(str(schema.id)) - except Exception as e: - logger.exception("Could not unpause external data schedule", exc_info=e) - - schema.status = ExternalDataSchema.Status.COMPLETED - schema.save() - - schema.source.status = ExternalDataSource.Status.RUNNING - schema.source.save() - - @shared_task(ignore_result=True) def capture_workspace_rows_synced_by_team(team_id: int) -> None: ph_client = get_ph_client() diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py index 75cde8a9b651b..079cce001d235 100644 --- a/posthog/temporal/batch_exports/temporary_file.py +++ b/posthog/temporal/batch_exports/temporary_file.py @@ -1,6 +1,7 @@ """This module contains a temporary file to stage data in batch exports.""" import abc +import asyncio import collections.abc import contextlib import csv @@ -390,7 +391,7 @@ async def write_record_batch(self, record_batch: pa.RecordBatch) -> None: column_names = record_batch.column_names column_names.pop(column_names.index("_inserted_at")) - self._write_record_batch(record_batch.select(column_names)) + await asyncio.to_thread(self._write_record_batch, record_batch.select(column_names)) self.last_inserted_at = last_inserted_at self.track_records_written(record_batch) diff --git a/posthog/temporal/data_imports/__init__.py b/posthog/temporal/data_imports/__init__.py index 2b162efa4c538..e945b483d8e95 100644 --- a/posthog/temporal/data_imports/__init__.py +++ 
b/posthog/temporal/data_imports/__init__.py @@ -5,6 +5,7 @@ import_data_activity, update_external_data_job_model, check_schedule_activity, + check_billing_limits_activity, ) WORKFLOWS = [ExternalDataJobWorkflow] @@ -15,4 +16,5 @@ import_data_activity, create_source_templates, check_schedule_activity, + check_billing_limits_activity, ] diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index 76ca85db9be5f..15b0e6a08db1c 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -1,14 +1,16 @@ import dataclasses import datetime as dt import json -import uuid -from asgiref.sync import sync_to_async from temporalio import activity, exceptions, workflow from temporalio.common import RetryPolicy # TODO: remove dependency from posthog.temporal.batch_exports.base import PostHogWorkflow +from posthog.temporal.data_imports.workflow_activities.check_billing_limits import ( + CheckBillingLimitsActivityInputs, + check_billing_limits_activity, +) from posthog.temporal.utils import ExternalDataWorkflowInputs from posthog.temporal.data_imports.workflow_activities.create_job_model import ( CreateExternalDataJobModelActivityInputs, @@ -24,7 +26,7 @@ from posthog.warehouse.data_load.source_templates import create_warehouse_templates_for_source from posthog.warehouse.external_data_source.jobs import ( - update_external_job_status, + aupdate_external_job_status, ) from posthog.warehouse.models import ( ExternalDataJob, @@ -67,8 +69,8 @@ async def update_external_data_job_model(inputs: UpdateExternalDataJobStatusInpu logger.info("Schema has a non-retryable error - turning off syncing") await aupdate_should_sync(schema_id=inputs.schema_id, team_id=inputs.team_id, should_sync=False) - await sync_to_async(update_external_job_status)( - run_id=uuid.UUID(inputs.id), + await aupdate_external_job_status( + job_id=inputs.id, status=inputs.status, latest_error=inputs.latest_error, team_id=inputs.team_id, @@ -151,7 +153,7 @@ async def run(self, inputs: ExternalDataWorkflowInputs): ) # TODO: split out the creation of the external data job model from schema getting to separate out exception handling - run_id, incremental = await workflow.execute_activity( + job_id, incremental = await workflow.execute_activity( create_external_data_job_model_activity, create_external_data_job_inputs, start_to_close_timeout=dt.timedelta(minutes=1), @@ -163,9 +165,24 @@ ), ) + # Check billing limits + hit_billing_limit = await workflow.execute_activity( + check_billing_limits_activity, + CheckBillingLimitsActivityInputs(job_id=job_id, team_id=inputs.team_id), + start_to_close_timeout=dt.timedelta(minutes=1), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=3, + ), + ) + + if hit_billing_limit: + return + update_inputs = UpdateExternalDataJobStatusInputs( - id=run_id, - run_id=run_id, + id=job_id, + run_id=job_id, status=ExternalDataJob.Status.COMPLETED, latest_error=None, internal_error=None, @@ -176,7 +193,7 @@ async def run(self, inputs: ExternalDataWorkflowInputs): try: job_inputs = ImportDataActivityInputs( team_id=inputs.team_id, - run_id=run_id, + run_id=job_id, schema_id=inputs.external_data_schema_id, source_id=inputs.external_data_source_id, ) @@ -197,17 +214,13 @@ async def run(self, inputs: ExternalDataWorkflowInputs): # Create source templates await 
workflow.execute_activity( create_source_templates, - CreateSourceTemplateInputs(team_id=inputs.team_id, run_id=run_id), + CreateSourceTemplateInputs(team_id=inputs.team_id, run_id=job_id), start_to_close_timeout=dt.timedelta(minutes=10), retry_policy=RetryPolicy(maximum_attempts=2), ) except exceptions.ActivityError as e: - if isinstance(e.cause, exceptions.CancelledError): - update_inputs.status = ExternalDataJob.Status.CANCELLED - else: - update_inputs.status = ExternalDataJob.Status.FAILED - + update_inputs.status = ExternalDataJob.Status.FAILED update_inputs.internal_error = str(e.cause) update_inputs.latest_error = str(e.cause) raise diff --git a/posthog/temporal/data_imports/pipelines/helpers.py b/posthog/temporal/data_imports/pipelines/helpers.py index 776b7f8dd0582..d0cc153f4e11d 100644 --- a/posthog/temporal/data_imports/pipelines/helpers.py +++ b/posthog/temporal/data_imports/pipelines/helpers.py @@ -5,15 +5,6 @@ from posthog.warehouse.util import database_sync_to_async -async def is_job_cancelled( - team_id: int, - job_id: str, -) -> bool: - model = await aget_external_data_job(team_id, job_id) - - return model.status == ExternalDataJob.Status.CANCELLED - - @database_sync_to_async def aget_external_data_job(team_id, job_id): return ExternalDataJob.objects.get(id=job_id, team_id=team_id) diff --git a/posthog/temporal/data_imports/pipelines/rest_source/__init__.py b/posthog/temporal/data_imports/pipelines/rest_source/__init__.py index 5a5d8e8d09ab0..5dceafd1d2aec 100644 --- a/posthog/temporal/data_imports/pipelines/rest_source/__init__.py +++ b/posthog/temporal/data_imports/pipelines/rest_source/__init__.py @@ -23,7 +23,6 @@ from dlt.sources.helpers.rest_client.paginators import BasePaginator from dlt.sources.helpers.rest_client.typing import HTTPMethodBasic -from posthog.temporal.data_imports.pipelines.helpers import is_job_cancelled from .typing import ( ClientConfig, ResolvedParam, @@ -259,9 +258,6 @@ async def paginate_resource( ) -> AsyncGenerator[Iterator[Any], Any]: yield dlt.mark.materialize_table_schema() # type: ignore - if await is_job_cancelled(team_id=team_id, job_id=job_id): - return - if incremental_object: params = _set_incremental_params( params, @@ -315,9 +311,6 @@ async def paginate_dependent_resource( ) -> AsyncGenerator[Any, Any]: yield dlt.mark.materialize_table_schema() # type: ignore - if await is_job_cancelled(team_id=team_id, job_id=job_id): - return - if incremental_object: params = _set_incremental_params( params, diff --git a/posthog/temporal/data_imports/pipelines/schemas.py b/posthog/temporal/data_imports/pipelines/schemas.py index 0acd00e8bd6f3..15214f939b78a 100644 --- a/posthog/temporal/data_imports/pipelines/schemas.py +++ b/posthog/temporal/data_imports/pipelines/schemas.py @@ -17,6 +17,11 @@ INCREMENTAL_ENDPOINTS as SALESFORCE_INCREMENTAL_ENDPOINTS, INCREMENTAL_FIELDS as SALESFORCE_INCREMENTAL_FIELDS, ) +from posthog.temporal.data_imports.pipelines.vitally.settings import ( + ENDPOINTS as VITALLY_ENDPOINTS, + INCREMENTAL_ENDPOINTS as VITALLY_INCREMENTAL_ENDPOINTS, + INCREMENTAL_FIELDS as VITALLY_INCREMENTAL_FIELDS, +) PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING = { ExternalDataSource.Type.STRIPE: STRIPE_ENDPOINTS, @@ -29,6 +34,7 @@ ExternalDataSource.Type.SALESFORCE: SALESFORCE_ENDPOINTS, ExternalDataSource.Type.MYSQL: (), ExternalDataSource.Type.MSSQL: (), + ExternalDataSource.Type.VITALLY: VITALLY_ENDPOINTS, } PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING = { @@ -40,6 +46,7 @@ ExternalDataSource.Type.SALESFORCE: SALESFORCE_INCREMENTAL_ENDPOINTS, 
ExternalDataSource.Type.MYSQL: (), ExternalDataSource.Type.MSSQL: (), + ExternalDataSource.Type.VITALLY: VITALLY_INCREMENTAL_ENDPOINTS, } PIPELINE_TYPE_INCREMENTAL_FIELDS_MAPPING: dict[ExternalDataSource.Type, dict[str, list[IncrementalField]]] = { @@ -51,4 +58,5 @@ ExternalDataSource.Type.SALESFORCE: SALESFORCE_INCREMENTAL_FIELDS, ExternalDataSource.Type.MYSQL: {}, ExternalDataSource.Type.MSSQL: {}, + ExternalDataSource.Type.VITALLY: VITALLY_INCREMENTAL_FIELDS, } diff --git a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py index 0fc7f7394b6ad..900c70d3b56c6 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py +++ b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py @@ -173,24 +173,26 @@ def sql_database( # and pass them in here to get empty table materialization binary_columns_to_drop = get_binary_columns(engine, schema or "", table.name) - yield dlt.resource( - table_rows, - name=table.name, - primary_key=get_primary_key(table), - merge_key=get_primary_key(table), - write_disposition={ - "disposition": "merge", - "strategy": "upsert", - } - if incremental - else "replace", - spec=SqlDatabaseTableConfiguration, - table_format="delta", - columns=get_column_hints(engine, schema or "", table.name), - ).add_map(remove_columns(binary_columns_to_drop, team_id))( - engine=engine, - table=table, - incremental=incremental, + yield ( + dlt.resource( + table_rows, + name=table.name, + primary_key=get_primary_key(table), + merge_key=get_primary_key(table), + write_disposition={ + "disposition": "merge", + "strategy": "upsert", + } + if incremental + else "replace", + spec=SqlDatabaseTableConfiguration, + table_format="delta", + columns=get_column_hints(engine, schema or "", table.name), + ).add_map(remove_columns(binary_columns_to_drop, team_id))( + engine=engine, + table=table, + incremental=incremental, + ) ) diff --git a/posthog/temporal/data_imports/pipelines/sql_database/helpers.py b/posthog/temporal/data_imports/pipelines/sql_database/helpers.py index 894407beda8a0..d877effb3e374 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database/helpers.py +++ b/posthog/temporal/data_imports/pipelines/sql_database/helpers.py @@ -54,10 +54,16 @@ def make_query(self) -> Select[Any]: last_value_func = self.incremental.last_value_func if last_value_func is max: # Query ordered and filtered according to last_value function order_by = self.cursor_column.asc() # type: ignore - filter_op = operator.gt + if self.last_value == self.incremental.initial_value: + filter_op = operator.ge + else: + filter_op = operator.gt elif last_value_func is min: order_by = self.cursor_column.desc() # type: ignore - filter_op = operator.lt + if self.last_value == self.incremental.initial_value: + filter_op = operator.le + else: + filter_op = operator.lt else: # Custom last_value, load everything and let incremental handle filtering return query query = query.order_by(order_by) diff --git a/posthog/temporal/data_imports/pipelines/vitally/__init__.py b/posthog/temporal/data_imports/pipelines/vitally/__init__.py new file mode 100644 index 0000000000000..8c526017db38d --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/vitally/__init__.py @@ -0,0 +1,360 @@ +import base64 +from dateutil import parser +from typing import Any, Optional +import dlt +from dlt.sources.helpers.rest_client.paginators import BasePaginator +from dlt.sources.helpers.requests import Response, Request +import requests +from 
posthog.temporal.data_imports.pipelines.rest_source import RESTAPIConfig, rest_api_resources +from posthog.temporal.data_imports.pipelines.rest_source.typing import EndpointResource + + +def get_resource(name: str, is_incremental: bool) -> EndpointResource: + resources: dict[str, EndpointResource] = { + "Organizations": { + "name": "Organizations", + "table_name": "organizations", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/organizations", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Accounts": { + "name": "Accounts", + "table_name": "accounts", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/accounts", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Users": { + "name": "Users", + "table_name": "users", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/users", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Conversations": { + "name": "Conversations", + "table_name": "conversations", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/conversations", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Notes": { + "name": "Notes", + "table_name": "notes", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/notes", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Projects": { + "name": "Projects", + "table_name": "projects", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": 
"/resources/projects", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Tasks": { + "name": "Tasks", + "table_name": "tasks", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/tasks", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "NPS_Responses": { + "name": "NPS_Responses", + "table_name": "nps_responses", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/npsResponses", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + "Custom_Objects": { + "name": "Custom_Objects", + "table_name": "custom_objects", + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + "endpoint": { + "data_selector": "results", + "path": "/resources/customObjects", + "params": { + "limit": 100, + "sortBy": "updatedAt", + "updatedAt": { + "type": "incremental", + "cursor_path": "updatedAt", + "initial_value": "1970-01-01", # type: ignore + "convert": lambda x: parser.parse(x).timestamp(), + } + if is_incremental + else None, + }, + }, + "table_format": "delta", + }, + } + + return resources[name] + + +class VitallyPaginator(BasePaginator): + def __init__(self) -> None: + super().__init__() + + def update_state(self, response: Response, data: Optional[list[Any]] = None) -> None: + res = response.json() + + current_source = dlt.current.get_source() + resources = current_source.resources + current_resource = next(iter(resources.values())) + incremental = current_resource.incremental.incremental + + self._cursor = None + + if not res: + self._has_next_page = False + return + + if incremental: + updated_at_str = res["results"][0]["updatedAt"] + updated_at = parser.parse(updated_at_str).timestamp() + start_value = parser.parse(incremental.start_value).timestamp() + + if start_value >= updated_at: + self._has_next_page = False + return + + if res["next"]: + self._has_next_page = True + self._cursor = res["next"] + else: + self._has_next_page = False + + def update_request(self, request: Request) -> None: + if request.params is None: + request.params = {} + + request.params["from"] = self._cursor + + +def get_base_url(region: str, subdomain: Optional[str]) -> str: + if region == "US" and subdomain: + return f"https://{subdomain}.rest.vitally.io/" + + return "https://rest.vitally-eu.io/" + + +@dlt.source(max_table_nesting=0) +def vitally_source( + secret_token: str, + region: str, + subdomain: Optional[str], + endpoint: str, + team_id: int, + job_id: str, + 
is_incremental: bool = False, +): + config: RESTAPIConfig = { + "client": { + "base_url": get_base_url(region, subdomain), + "auth": { + "type": "http_basic", + "username": secret_token, + "password": "", + }, + "paginator": VitallyPaginator(), + }, + "resource_defaults": { + "primary_key": "id", + "write_disposition": { + "disposition": "merge", + "strategy": "upsert", + } + if is_incremental + else "replace", + }, + "resources": [get_resource(endpoint, is_incremental)], + } + + yield from rest_api_resources(config, team_id, job_id) + + +def validate_credentials(secret_token: str, region: str, subdomain: Optional[str]) -> bool: + basic_token = base64.b64encode(f"{secret_token}:".encode("ascii")).decode("ascii") + res = requests.get( + f"{get_base_url(region, subdomain)}resources/users?limit=1", + headers={"Authorization": f"Basic {basic_token}"}, + ) + + return res.status_code == 200 diff --git a/posthog/temporal/data_imports/pipelines/vitally/settings.py b/posthog/temporal/data_imports/pipelines/vitally/settings.py new file mode 100644 index 0000000000000..a16d9565f5d1c --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/vitally/settings.py @@ -0,0 +1,108 @@ +from posthog.warehouse.types import IncrementalField, IncrementalFieldType + +ENDPOINTS = ( + "Organizations", + "Accounts", + "Users", + "Conversations", + "Notes", + "Projects", + "Tasks", + "NPS_Responses", + "Custom_Objects", +) + +INCREMENTAL_ENDPOINTS = ( + "Organizations", + "Accounts", + "Users", + "Conversations", + "Notes", + "Projects", + "Tasks", + "NPS_Responses", + "Custom_Objects", +) + +INCREMENTAL_FIELDS: dict[str, list[IncrementalField]] = { + "Organizations": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Accounts": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Users": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Conversations": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Notes": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Projects": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Tasks": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "NPS_Responses": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Custom_Fields": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], + "Custom_Objects": [ + { + "label": "updated_at", + "type": IncrementalFieldType.DateTime, + "field": "updated_at", + "field_type": IncrementalFieldType.DateTime, + } + ], +} diff --git a/posthog/temporal/data_imports/workflow_activities/check_billing_limits.py b/posthog/temporal/data_imports/workflow_activities/check_billing_limits.py new file mode 100644 index 
0000000000000..3bce57ef74891 --- /dev/null +++ b/posthog/temporal/data_imports/workflow_activities/check_billing_limits.py @@ -0,0 +1,41 @@ +import dataclasses +from temporalio import activity + +from asgiref.sync import sync_to_async + +from ee.billing.quota_limiting import QuotaLimitingCaches, QuotaResource, list_limited_team_attributes +from posthog.models.team.team import Team +from posthog.temporal.common.logger import bind_temporal_worker_logger +from posthog.warehouse.external_data_source.jobs import aupdate_external_job_status +from posthog.warehouse.models.external_data_job import ExternalDataJob + + +@dataclasses.dataclass +class CheckBillingLimitsActivityInputs: + team_id: int + job_id: str + + +@activity.defn +async def check_billing_limits_activity(inputs: CheckBillingLimitsActivityInputs) -> bool: + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) + + team: Team = await sync_to_async(Team.objects.get)(id=inputs.team_id) + + limited_team_tokens_rows_synced = list_limited_team_attributes( + QuotaResource.ROWS_SYNCED, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY + ) + + if team.api_token in limited_team_tokens_rows_synced: + logger.info("Billing limits hit. Canceling sync") + + await aupdate_external_job_status( + job_id=inputs.job_id, + status=ExternalDataJob.Status.CANCELLED, + latest_error=None, + team_id=inputs.team_id, + ) + + return True + + return False diff --git a/posthog/temporal/data_imports/workflow_activities/create_job_model.py b/posthog/temporal/data_imports/workflow_activities/create_job_model.py index 21f5e046d1a28..5ddcf16160a2c 100644 --- a/posthog/temporal/data_imports/workflow_activities/create_job_model.py +++ b/posthog/temporal/data_imports/workflow_activities/create_job_model.py @@ -32,7 +32,7 @@ async def create_external_data_job_model_activity(inputs: CreateExternalDataJobM logger = await bind_temporal_worker_logger(team_id=inputs.team_id) try: - run = await sync_to_async(create_external_data_job)( + job = await sync_to_async(create_external_data_job)( team_id=inputs.team_id, external_data_source_id=inputs.source_id, external_data_schema_id=inputs.schema_id, @@ -108,7 +108,7 @@ async def create_external_data_job_model_activity(inputs: CreateExternalDataJobM if schema_model is None: raise ValueError(f"Schema with ID {inputs.schema_id} not found") - return str(run.id), schema_model.is_incremental + return str(job.id), schema_model.is_incremental except Exception as e: logger.exception( f"External data job failed on create_external_data_job_model_activity for {str(inputs.source_id)} with error: {e}" diff --git a/posthog/temporal/data_imports/workflow_activities/import_data.py b/posthog/temporal/data_imports/workflow_activities/import_data.py index 6ce4237f53711..73706e1191589 100644 --- a/posthog/temporal/data_imports/workflow_activities/import_data.py +++ b/posthog/temporal/data_imports/workflow_activities/import_data.py @@ -283,6 +283,27 @@ async def import_data_activity(inputs: ImportDataActivityInputs): is_incremental=schema.is_incremental, ) + return await _run( + job_inputs=job_inputs, + source=source, + logger=logger, + inputs=inputs, + schema=schema, + reset_pipeline=reset_pipeline, + ) + elif model.pipeline.source_type == ExternalDataSource.Type.VITALLY: + from posthog.temporal.data_imports.pipelines.vitally import vitally_source + + source = vitally_source( + secret_token=model.pipeline.job_inputs.get("secret_token"), + region=model.pipeline.job_inputs.get("region"), + subdomain=model.pipeline.job_inputs.get("subdomain"), + 
endpoint=schema.name, + team_id=inputs.team_id, + job_id=inputs.run_id, + is_incremental=schema.is_incremental, + ) + return await _run( job_inputs=job_inputs, source=source, diff --git a/posthog/temporal/tests/data_imports/test_end_to_end.py b/posthog/temporal/tests/data_imports/test_end_to_end.py index 0292fe2d83f52..8556ff7bf1c5c 100644 --- a/posthog/temporal/tests/data_imports/test_end_to_end.py +++ b/posthog/temporal/tests/data_imports/test_end_to_end.py @@ -685,3 +685,136 @@ async def test_postgres_schema_evolution(team, postgres_config, postgres_connect assert any(x == "new_col" for x in columns) assert any(x == "_dlt_id" for x in columns) assert any(x == "_dlt_load_id" for x in columns) + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.asyncio +async def test_sql_database_missing_incremental_values(team, postgres_config, postgres_connection): + await postgres_connection.execute( + "CREATE TABLE IF NOT EXISTS {schema}.test_table (id integer)".format(schema=postgres_config["schema"]) + ) + await postgres_connection.execute( + "INSERT INTO {schema}.test_table (id) VALUES (1)".format(schema=postgres_config["schema"]) + ) + await postgres_connection.execute( + "INSERT INTO {schema}.test_table (id) VALUES (null)".format(schema=postgres_config["schema"]) + ) + await postgres_connection.commit() + + await _run( + team=team, + schema_name="test_table", + table_name="postgres_test_table", + source_type="Postgres", + job_inputs={ + "host": postgres_config["host"], + "port": postgres_config["port"], + "database": postgres_config["database"], + "user": postgres_config["user"], + "password": postgres_config["password"], + "schema": postgres_config["schema"], + "ssh_tunnel_enabled": "False", + }, + mock_data_response=[], + sync_type=ExternalDataSchema.SyncType.INCREMENTAL, + sync_type_config={"incremental_field": "id", "incremental_field_type": "integer"}, + ) + + res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) + columns = res.columns + + assert columns is not None + assert len(columns) == 3 + assert any(x == "id" for x in columns) + assert any(x == "_dlt_id" for x in columns) + assert any(x == "_dlt_load_id" for x in columns) + + # Exclude rows that don't have the incremental cursor key set + assert len(res.results) == 1 + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.asyncio +async def test_sql_database_incremental_initial_value(team, postgres_config, postgres_connection): + await postgres_connection.execute( + "CREATE TABLE IF NOT EXISTS {schema}.test_table (id integer)".format(schema=postgres_config["schema"]) + ) + # Setting `id` to `0` - the same as an `integer` incremental initial value + await postgres_connection.execute( + "INSERT INTO {schema}.test_table (id) VALUES (0)".format(schema=postgres_config["schema"]) + ) + await postgres_connection.commit() + + await _run( + team=team, + schema_name="test_table", + table_name="postgres_test_table", + source_type="Postgres", + job_inputs={ + "host": postgres_config["host"], + "port": postgres_config["port"], + "database": postgres_config["database"], + "user": postgres_config["user"], + "password": postgres_config["password"], + "schema": postgres_config["schema"], + "ssh_tunnel_enabled": "False", + }, + mock_data_response=[], + sync_type=ExternalDataSchema.SyncType.INCREMENTAL, + sync_type_config={"incremental_field": "id", "incremental_field_type": "integer"}, + ) + + res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) + columns = 
res.columns + + assert columns is not None + assert len(columns) == 3 + assert any(x == "id" for x in columns) + assert any(x == "_dlt_id" for x in columns) + assert any(x == "_dlt_load_id" for x in columns) + + # Include rows that have the same incremental value as the `initial_value` + assert len(res.results) == 1 + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.asyncio +async def test_billing_limits(team, stripe_customer): + source = await sync_to_async(ExternalDataSource.objects.create)( + source_id=uuid.uuid4(), + connection_id=uuid.uuid4(), + destination_id=uuid.uuid4(), + team=team, + status="running", + source_type="Stripe", + job_inputs={"stripe_secret_key": "test-key", "stripe_account_id": "acct_id"}, + ) + + schema = await sync_to_async(ExternalDataSchema.objects.create)( + name="Customer", + team_id=team.pk, + source_id=source.pk, + sync_type=ExternalDataSchema.SyncType.FULL_REFRESH, + sync_type_config={}, + ) + + workflow_id = str(uuid.uuid4()) + inputs = ExternalDataWorkflowInputs( + team_id=team.id, + external_data_source_id=source.pk, + external_data_schema_id=schema.id, + ) + + with mock.patch( + "posthog.temporal.data_imports.workflow_activities.check_billing_limits.list_limited_team_attributes", + ) as mock_list_limited_team_attributes: + mock_list_limited_team_attributes.return_value = [team.api_token] + + await _execute_run(workflow_id, inputs, stripe_customer["data"]) + + job: ExternalDataJob = await sync_to_async(ExternalDataJob.objects.get)(team_id=team.id, schema_id=schema.pk) + + assert job.status == ExternalDataJob.Status.CANCELLED + + with pytest.raises(Exception): + await sync_to_async(execute_hogql_query)("SELECT * FROM stripe_customer", team) diff --git a/posthog/temporal/tests/external_data/test_external_data_job.py b/posthog/temporal/tests/external_data/test_external_data_job.py index 0c3e1b0001e57..93630571c3a7a 100644 --- a/posthog/temporal/tests/external_data/test_external_data_job.py +++ b/posthog/temporal/tests/external_data/test_external_data_job.py @@ -15,12 +15,12 @@ ExternalDataJobWorkflow, ExternalDataWorkflowInputs, ) +from posthog.temporal.data_imports.workflow_activities.check_billing_limits import check_billing_limits_activity from posthog.temporal.data_imports.workflow_activities.create_job_model import ( CreateExternalDataJobModelActivityInputs, create_external_data_job_model_activity, ) from posthog.temporal.data_imports.workflow_activities.import_data import ImportDataActivityInputs, import_data_activity -from posthog.temporal.tests.data_imports.conftest import stripe_customer from posthog.warehouse.external_data_source.jobs import create_external_data_job from posthog.warehouse.models import ( get_latest_run_if_exists, @@ -539,112 +539,6 @@ def mock_to_object_store_rs_credentials(class_self): assert len(job_2_charge_objects["Contents"]) == 2 -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_run_stripe_job_cancelled(activity_environment, team, minio_client, **kwargs): - async def setup_job_1(): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key", "stripe_account_id": "acct_id"}, - ) - - customer_schema = await _create_schema("Customer", new_source, team) - - # Already canceled so it should only run once - # This imitates if the job was canceled mid run - new_job: ExternalDataJob = await 
sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.CANCELLED, - rows_synced=0, - schema=customer_schema, - ) - - new_job = await sync_to_async( - ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").prefetch_related("schema").get - )() - - inputs = ImportDataActivityInputs( - team_id=team.id, - run_id=str(new_job.pk), - source_id=new_source.pk, - schema_id=customer_schema.id, - ) - - return new_job, inputs - - job_1, job_1_inputs = await setup_job_1() - - def mock_customers_paginate( - class_self, - path: str = "", - method: Any = "GET", - params: Optional[dict[str, Any]] = None, - json: Optional[dict[str, Any]] = None, - auth: Optional[Any] = None, - paginator: Optional[Any] = None, - data_selector: Optional[Any] = None, - hooks: Optional[Any] = None, - ): - return iter(stripe_customer()["data"]) - - def mock_to_session_credentials(class_self): - return { - "aws_access_key_id": settings.OBJECT_STORAGE_ACCESS_KEY_ID, - "aws_secret_access_key": settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - "endpoint_url": settings.OBJECT_STORAGE_ENDPOINT, - "aws_session_token": None, - "AWS_ALLOW_HTTP": "true", - "AWS_S3_ALLOW_UNSAFE_RENAME": "true", - } - - def mock_to_object_store_rs_credentials(class_self): - return { - "aws_access_key_id": settings.OBJECT_STORAGE_ACCESS_KEY_ID, - "aws_secret_access_key": settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - "endpoint_url": settings.OBJECT_STORAGE_ENDPOINT, - "region": "us-east-1", - "AWS_ALLOW_HTTP": "true", - "AWS_S3_ALLOW_UNSAFE_RENAME": "true", - } - - with ( - mock.patch.object(RESTClient, "paginate", mock_customers_paginate), - override_settings( - BUCKET_URL=f"s3://{BUCKET_NAME}", - AIRBYTE_BUCKET_KEY=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - AIRBYTE_BUCKET_SECRET=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - AIRBYTE_BUCKET_REGION="us-east-1", - BUCKET_NAME=BUCKET_NAME, - ), - mock.patch( - "posthog.warehouse.models.table.DataWarehouseTable.get_columns", - return_value={"clickhouse": {"id": "string", "name": "string"}}, - ), - mock.patch.object(AwsCredentials, "to_session_credentials", mock_to_session_credentials), - mock.patch.object(AwsCredentials, "to_object_store_rs_credentials", mock_to_object_store_rs_credentials), - ): - await asyncio.gather( - activity_environment.run(import_data_activity, job_1_inputs), - ) - - folder_path = await sync_to_async(job_1.folder_path)() - job_1_customer_objects = await minio_client.list_objects_v2( - Bucket=BUCKET_NAME, Prefix=f"{folder_path}/customer/" - ) - - # if job was not canceled, this job would run indefinitely - assert len(job_1_customer_objects.get("Contents", [])) == 1 - - await sync_to_async(job_1.refresh_from_db)() - assert job_1.rows_synced == 0 - - @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_run_stripe_job_row_count_update(activity_environment, team, minio_client, **kwargs): @@ -803,6 +697,7 @@ async def mock_async_func(inputs): update_external_data_job_model, import_data_activity, create_source_templates, + check_billing_limits_activity, ], workflow_runner=UnsandboxedWorkflowRunner(), ): diff --git a/posthog/test/user_scripts/__init__.py b/posthog/test/user_scripts/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/posthog/test/user_scripts/test_aggregate_funnel.py b/posthog/test/user_scripts/test_aggregate_funnel.py new file mode 100644 index 0000000000000..2b20929002f83 --- /dev/null +++ b/posthog/test/user_scripts/test_aggregate_funnel.py @@ 
-0,0 +1,4206 @@ +from typing import Any + +from posthog.user_scripts.aggregate_funnel import calculate_funnel_from_user_events + + +def test(): + y = [ + [(1577973600.0, "", [1]), (1577980800.0, "", [2]), (1577984400.0, "", [3])], + [(1577880000.0, "", [1]), (1577883600.0, "", [2]), (1577890800.0, "", [3])], + [(1577973600.0, "", [1]), (1577980800.0, "", [2])], + ] + + for x in y: + calculate_funnel_from_user_events(3, 1209600, "first_touch", "strict", [""], x) + + +def test2(): + a: list[Any] = [ + [(1720051532.484019, [], [1, 2, 3, 4, 5, 6])], + [(1720105713.331995, [], [1, 2, 3, 4, 5, 6])], + [(1720329565.847159, [], [1, 2, 3, 4, 5, 6])], + [ + (1720186008.567886, [], [1, 2, 3, 4, 5, 6]), + (1720326697.522923, [], [1, 2, 3, 4, 5, 6]), + (1720482974.426314, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720327526.250804, [], [1, 2, 3, 4, 5, 6]), (1720497558.23414, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979738.339271, [], [1, 2, 3, 4, 5, 6]), + (1720025384.961105, [], [1, 2, 3, 4, 5, 6]), + (1720504618.55439, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720136408.619257, [], [1, 2, 3, 4, 5, 6]), + (1720136458.666712, [], [1, 2, 3, 4, 5, 6]), + (1720136460.776795, [], [1, 2, 3, 4, 5, 6]), + (1720136463.761667, [], [1, 2, 3, 4, 5, 6]), + (1720136465.813823, [], [1, 2, 3, 4, 5, 6]), + (1720153490.167176, [], [1, 2, 3, 4, 5, 6]), + (1720153611.687424, [], [1, 2, 3, 4, 5, 6]), + (1720153613.813758, [], [1, 2, 3, 4, 5, 6]), + (1720221238.819741, [], [1, 2, 3, 4, 5, 6]), + (1720221389.412602, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720234125.717526, [], [1, 2, 3, 4, 5, 6])], + [ + (1720245095.229565, [], [1, 2, 3, 4, 5, 6]), + (1720396821.910578, [], [1, 2, 3, 4, 5, 6]), + (1720502554.801179, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720237286.585886, [], [1, 2, 3, 4, 5, 6]), (1720492842.0014, [], [1, 2, 3, 4, 5, 6])], + [(1720145259.463577, [], [1, 2, 3, 4, 5, 6])], + [(1720173037.951133, [], [1, 2, 3, 4, 5, 6]), (1720398629.834351, [], [1, 2, 3, 4, 5, 6])], + [(1720576515.470242, [], [1, 2, 3, 4, 5, 6])], + [(1720488634.248776, [], [1, 2, 3, 4, 5, 6])], + [ + (1719966672.507604, [], [1, 2, 3, 4, 5, 6]), + (1720379305.230415, [], [1, 2, 3, 4, 5, 6]), + (1720485725.30467, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720056848.984567, [], [1, 2, 3, 4, 5, 6]), + (1720234634.97164, [], [1, 2, 3, 4, 5, 6]), + (1720326372.083307, [], [1, 2, 3, 4, 5, 6]), + (1720487169.130815, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719979630.05111, [], [1, 2, 3, 4, 5, 6])], + [(1720067082.599895, [], [1, 2, 3, 4, 5, 6])], + [(1720065455.678956, [], [1, 2, 3, 4, 5, 6])], + [(1720141594.235645, [], [1, 2, 3, 4, 5, 6]), (1720479638.868071, [], [1, 2, 3, 4, 5, 6])], + [(1720172558.775714, [], [1, 2, 3, 4, 5, 6]), (1720589944.987293, [], [1, 2, 3, 4, 5, 6])], + [(1720240665.403432, [], [1, 2, 3, 4, 5, 6]), (1720403456.771406, [], [1, 2, 3, 4, 5, 6])], + [ + (1720151433.593775, [], [1, 2, 3, 4, 5, 6]), + (1720397705.729741, [], [1, 2, 3, 4, 5, 6]), + (1720407937.654196, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720063019.413544, [], [1, 2, 3, 4, 5, 6]), + (1720230670.007217, [], [1, 2, 3, 4, 5, 6]), + (1720572529.432945, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720062676.566511, [], [1, 2, 3, 4, 5, 6]), + (1720062768.411832, [], [1, 2, 3, 4, 5, 6]), + (1720062770.476807, [], [1, 2, 3, 4, 5, 6]), + (1720062771.394614, [], [1, 2, 3, 4, 5, 6]), + (1720156065.434007, [], [1, 2, 3, 4, 5, 6]), + (1720156180.339675, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720053274.311851, [], [1, 2, 3, 4, 5, 6]), (1720574916.370766, [], [1, 2, 3, 4, 5, 6])], + [(1720403600.103166, [], [1, 2, 3, 4, 5, 
6])], + [(1720070524.509752, [], [1, 2, 3, 4, 5, 6]), (1720330735.128105, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980823.099161, [], [1, 2, 3, 4, 5, 6]), + (1720109783.667678, [], [1, 2, 3, 4, 5, 6]), + (1720488536.75761, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720120539.020908, [], [1, 2, 3, 4, 5, 6]), + (1720235556.263511, [], [1, 2, 3, 4, 5, 6]), + (1720404531.8727, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720461710.602542, [], [1, 2, 3, 4, 5, 6])], + [(1720142147.27027, [], [1, 2, 3, 4, 5, 6]), (1720463509.177443, [], [1, 2, 3, 4, 5, 6])], + [(1720609249.094945, [], [1, 2, 3, 4, 5, 6])], + [ + (1720061653.09558, [], [1, 2, 3, 4, 5, 6]), + (1720331923.364924, [], [1, 2, 3, 4, 5, 6]), + (1720493879.336969, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719981455.944035, [], [1, 2, 3, 4, 5, 6]), + (1719981517.705732, [], [1, 2, 3, 4, 5, 6]), + (1719994503.81365, [], [1, 2, 3, 4, 5, 6]), + (1719994621.6397, [], [1, 2, 3, 4, 5, 6]), + (1719994623.698368, [], [1, 2, 3, 4, 5, 6]), + (1719994627.578717, [], [1, 2, 3, 4, 5, 6]), + (1719994629.663136, [], [1, 2, 3, 4, 5, 6]), + (1719994631.068061, [], [1, 2, 3, 4, 5, 6]), + (1719994633.142381, [], [1, 2, 3, 4, 5, 6]), + (1720027463.767433, [], [1, 2, 3, 4, 5, 6]), + (1720027502.563106, [], [1, 2, 3, 4, 5, 6]), + (1720027504.670674, [], [1, 2, 3, 4, 5, 6]), + (1720057341.723675, [], [1, 2, 3, 4, 5, 6]), + (1720057343.781939, [], [1, 2, 3, 4, 5, 6]), + (1720145087.601179, [], [1, 2, 3, 4, 5, 6]), + (1720145089.680587, [], [1, 2, 3, 4, 5, 6]), + (1720243008.749524, [], [1, 2, 3, 4, 5, 6]), + (1720243068.439551, [], [1, 2, 3, 4, 5, 6]), + (1720318425.097956, [], [1, 2, 3, 4, 5, 6]), + (1720318427.16319, [], [1, 2, 3, 4, 5, 6]), + (1720318432.221956, [], [1, 2, 3, 4, 5, 6]), + (1720318434.329525, [], [1, 2, 3, 4, 5, 6]), + (1720418148.778433, [], [1, 2, 3, 4, 5, 6]), + (1720418150.861104, [], [1, 2, 3, 4, 5, 6]), + (1720488202.399436, [], [1, 2, 3, 4, 5, 6]), + (1720488212.260625, [], [1, 2, 3, 4, 5, 6]), + (1720488214.365566, [], [1, 2, 3, 4, 5, 6]), + (1720572393.815712, [], [1, 2, 3, 4, 5, 6]), + (1720613041.916708, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720057097.342555, [], [1, 2, 3, 4, 5, 6]), + (1720317039.904735, [], [1, 2, 3, 4, 5, 6]), + (1720483178.967836, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720181661.187285, [], [1, 2, 3, 4, 5, 6])], + [(1720199552.174104, [], [1, 2, 3, 4, 5, 6]), (1720568803.062761, [], [1, 2, 3, 4, 5, 6])], + [ + (1720247391.136136, [], [1, 2, 3, 4, 5, 6]), + (1720410696.088339, [], [1, 2, 3, 4, 5, 6]), + (1720599399.171422, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720312357.61565, [], [1, 2, 3, 4, 5, 6])], + [(1720052008.103911, [], [1, 2, 3, 4, 5, 6]), (1720400141.042944, [], [1, 2, 3, 4, 5, 6])], + [(1720210751.331903, [], [1, 2, 3, 4, 5, 6]), (1720503558.839248, [], [1, 2, 3, 4, 5, 6])], + [ + (1720241352.747626, [], [1, 2, 3, 4, 5, 6]), + (1720321677.766712, [], [1, 2, 3, 4, 5, 6]), + (1720409706.122052, [], [1, 2, 3, 4, 5, 6]), + (1720519728.980875, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977467.931514, [], [1, 2, 3, 4, 5, 6]), (1720568695.132969, [], [1, 2, 3, 4, 5, 6])], + [(1720071302.148667, [], [1, 2, 3, 4, 5, 6]), (1720238096.092618, [], [1, 2, 3, 4, 5, 6])], + [(1720057437.769059, [], [1, 2, 3, 4, 5, 6])], + [ + (1720221473.506037, [], [1, 2, 3, 4, 5, 6]), + (1720348129.55283, [], [1, 2, 3, 4, 5, 6]), + (1720482938.000889, [], [1, 2, 3, 4, 5, 6]), + (1720576755.035308, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720235902.362301, [], [1, 2, 3, 4, 5, 6])], + [(1720024782.723245, [], [1, 2, 3, 4, 5, 6])], + [(1720070158.75827, [], [1, 2, 3, 4, 5, 
6])], + [ + (1720000651.858702, [], [1, 2, 3, 4, 5, 6]), + (1720244645.395695, [], [1, 2, 3, 4, 5, 6]), + (1720411107.259775, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720191076.938109, [], [1, 2, 3, 4, 5, 6]), + (1720322967.081356, [], [1, 2, 3, 4, 5, 6]), + (1720323158.146239, [], [1, 2, 3, 4, 5, 6]), + (1720323172.234517, [], [1, 2, 3, 4, 5, 6]), + (1720323206.302768, [], [1, 2, 3, 4, 5, 6]), + (1720323313.146535, [], [1, 2, 3, 4, 5, 6]), + (1720323364.511129, [], [1, 2, 3, 4, 5, 6]), + (1720323458.282407, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720065260.493236, [], [1, 2, 3, 4, 5, 6]), + (1720065334.608797, [], [1, 2, 3, 4, 5, 6]), + (1720141650.234015, [], [1, 2, 3, 4, 5, 6]), + (1720141749.547675, [], [1, 2, 3, 4, 5, 6]), + (1720141751.641012, [], [1, 2, 3, 4, 5, 6]), + (1720154278.705276, [], [1, 2, 3, 4, 5, 6]), + (1720154280.760532, [], [1, 2, 3, 4, 5, 6]), + (1720229499.077048, [], [1, 2, 3, 4, 5, 6]), + (1720229572.436301, [], [1, 2, 3, 4, 5, 6]), + (1720259010.216367, [], [1, 2, 3, 4, 5, 6]), + (1720259234.335094, [], [1, 2, 3, 4, 5, 6]), + (1720259236.42606, [], [1, 2, 3, 4, 5, 6]), + (1720318686.64822, [], [1, 2, 3, 4, 5, 6]), + (1720318843.45613, [], [1, 2, 3, 4, 5, 6]), + (1720318845.509738, [], [1, 2, 3, 4, 5, 6]), + (1720363113.918907, [], [1, 2, 3, 4, 5, 6]), + (1720363184.856665, [], [1, 2, 3, 4, 5, 6]), + (1720400947.604003, [], [1, 2, 3, 4, 5, 6]), + (1720400949.633637, [], [1, 2, 3, 4, 5, 6]), + (1720498232.720406, [], [1, 2, 3, 4, 5, 6]), + (1720498253.802808, [], [1, 2, 3, 4, 5, 6]), + (1720498255.908508, [], [1, 2, 3, 4, 5, 6]), + (1720586991.26782, [], [1, 2, 3, 4, 5, 6]), + (1720587059.251675, [], [1, 2, 3, 4, 5, 6]), + (1720587061.383312, [], [1, 2, 3, 4, 5, 6]), + (1720638042.876812, [], [1, 2, 3, 4, 5, 6]), + (1720638133.182092, [], [1, 2, 3, 4, 5, 6]), + (1720638135.286491, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978486.488845, [], [1, 2, 3, 4, 5, 6]), (1720406010.994165, [], [1, 2, 3, 4, 5, 6])], + [(1720062931.526777, [], [1, 2, 3, 4, 5, 6])], + [ + (1720142330.725196, [], [1, 2, 3, 4, 5, 6]), + (1720238332.287607, [], [1, 2, 3, 4, 5, 6]), + (1720404745.279674, [], [1, 2, 3, 4, 5, 6]), + (1720577388.350881, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719973721.653682, [], [1, 2, 3, 4, 5, 6]), + (1720045556.714061, [], [1, 2, 3, 4, 5, 6]), + (1720286335.062706, [], [1, 2, 3, 4, 5, 6]), + (1720408637.593505, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719980831.540691, [], [1, 2, 3, 4, 5, 6]), + (1719980890.3872, [], [1, 2, 3, 4, 5, 6]), + (1719980892.464391, [], [1, 2, 3, 4, 5, 6]), + (1720027957.165729, [], [1, 2, 3, 4, 5, 6]), + (1720027959.212697, [], [1, 2, 3, 4, 5, 6]), + (1720055928.682589, [], [1, 2, 3, 4, 5, 6]), + (1720055930.747743, [], [1, 2, 3, 4, 5, 6]), + (1720138782.333308, [], [1, 2, 3, 4, 5, 6]), + (1720138842.547168, [], [1, 2, 3, 4, 5, 6]), + (1720138844.667335, [], [1, 2, 3, 4, 5, 6]), + (1720138846.225705, [], [1, 2, 3, 4, 5, 6]), + (1720153595.409537, [], [1, 2, 3, 4, 5, 6]), + (1720153694.792152, [], [1, 2, 3, 4, 5, 6]), + (1720222583.234486, [], [1, 2, 3, 4, 5, 6]), + (1720222651.732326, [], [1, 2, 3, 4, 5, 6]), + (1720222653.840022, [], [1, 2, 3, 4, 5, 6]), + (1720231129.338916, [], [1, 2, 3, 4, 5, 6]), + (1720231262.508465, [], [1, 2, 3, 4, 5, 6]), + (1720315761.130281, [], [1, 2, 3, 4, 5, 6]), + (1720315844.746953, [], [1, 2, 3, 4, 5, 6]), + (1720315846.831435, [], [1, 2, 3, 4, 5, 6]), + (1720406873.849957, [], [1, 2, 3, 4, 5, 6]), + (1720406879.412626, [], [1, 2, 3, 4, 5, 6]), + (1720485467.197531, [], [1, 2, 3, 4, 5, 6]), + (1720485486.733099, [], 
[1, 2, 3, 4, 5, 6]), + (1720485488.847143, [], [1, 2, 3, 4, 5, 6]), + (1720485492.354688, [], [1, 2, 3, 4, 5, 6]), + (1720485494.434006, [], [1, 2, 3, 4, 5, 6]), + (1720581292.87898, [], [1, 2, 3, 4, 5, 6]), + (1720581372.990683, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965396.997192, [], [1, 2, 3, 4, 5, 6]), + (1720109840.37035, [], [1, 2, 3, 4, 5, 6]), + (1720224849.338664, [], [1, 2, 3, 4, 5, 6]), + (1720311680.960628, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720593841.069028, [], [1, 2, 3, 4, 5, 6])], + [(1720193711.631887, [], [1, 2, 3, 4, 5, 6]), (1720355386.424798, [], [1, 2, 3, 4, 5, 6])], + [(1720137394.637585, [], [1, 2, 3, 4, 5, 6]), (1720227526.549035, [], [1, 2, 3, 4, 5, 6])], + [(1720601724.604091, [], [1, 2, 3, 4, 5, 6])], + [(1720242114.286726, [], [1, 2, 3, 4, 5, 6]), (1720495287.866943, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984060.976083, [], [1, 2, 3, 4, 5, 6]), + (1720233353.478142, [], [1, 2, 3, 4, 5, 6]), + (1720331822.027661, [], [1, 2, 3, 4, 5, 6]), + (1720499420.953642, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720327908.649598, [], [1, 2, 3, 4, 5, 6]), + (1720327957.004146, [], [1, 2, 3, 4, 5, 6]), + (1720328002.921775, [], [1, 2, 3, 4, 5, 6]), + (1720328054.34555, [], [1, 2, 3, 4, 5, 6]), + (1720394578.210396, [], [1, 2, 3, 4, 5, 6]), + (1720394668.213374, [], [1, 2, 3, 4, 5, 6]), + (1720394670.323029, [], [1, 2, 3, 4, 5, 6]), + (1720410358.68385, [], [1, 2, 3, 4, 5, 6]), + (1720410430.047079, [], [1, 2, 3, 4, 5, 6]), + (1720410432.093006, [], [1, 2, 3, 4, 5, 6]), + (1720485479.982584, [], [1, 2, 3, 4, 5, 6]), + (1720485552.035405, [], [1, 2, 3, 4, 5, 6]), + (1720485554.099771, [], [1, 2, 3, 4, 5, 6]), + (1720576265.461408, [], [1, 2, 3, 4, 5, 6]), + (1720576267.553332, [], [1, 2, 3, 4, 5, 6]), + (1720580196.882833, [], [1, 2, 3, 4, 5, 6]), + (1720580198.938581, [], [1, 2, 3, 4, 5, 6]), + (1720580201.66793, [], [1, 2, 3, 4, 5, 6]), + (1720580203.765767, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720512918.56325, [], [1, 2, 3, 4, 5, 6])], + [(1720587573.354151, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059581.380866, [], [1, 2, 3, 4, 5, 6]), + (1720226059.821101, [], [1, 2, 3, 4, 5, 6]), + (1720569936.860231, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720483812.243251, [], [1, 2, 3, 4, 5, 6])], + [ + (1720047890.599986, [], [1, 2, 3, 4, 5, 6]), + (1720151933.610926, [], [1, 2, 3, 4, 5, 6]), + (1720426395.237753, [], [1, 2, 3, 4, 5, 6]), + (1720589584.479646, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720050613.849179, [], [1, 2, 3, 4, 5, 6]), + (1720183728.480776, [], [1, 2, 3, 4, 5, 6]), + (1720245305.222942, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974717.393306, [], [1, 2, 3, 4, 5, 6]), + (1720238913.058213, [], [1, 2, 3, 4, 5, 6]), + (1720403863.202175, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720066616.245506, [], [1, 2, 3, 4, 5, 6]), (1720329802.077257, [], [1, 2, 3, 4, 5, 6])], + [(1720058023.220462, [], [1, 2, 3, 4, 5, 6]), (1720273250.296181, [], [1, 2, 3, 4, 5, 6])], + [(1720400521.741834, [], [1, 2, 3, 4, 5, 6])], + [(1720590006.799829, [], [1, 2, 3, 4, 5, 6])], + [ + (1719977522.311193, [], [1, 2, 3, 4, 5, 6]), + (1720394307.490994, [], [1, 2, 3, 4, 5, 6]), + (1720541599.758133, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720059720.64277, [], [1, 2, 3, 4, 5, 6]), + (1720225557.277258, [], [1, 2, 3, 4, 5, 6]), + (1720318879.528985, [], [1, 2, 3, 4, 5, 6]), + (1720448939.738279, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720159019.782951, [], [1, 2, 3, 4, 5, 6]), + (1720232688.231366, [], [1, 2, 3, 4, 5, 6]), + (1720312031.934367, [], [1, 2, 3, 4, 5, 6]), + (1720420263.292336, [], [1, 2, 3, 4, 5, 6]), + ], 
+ [ + (1720051467.327131, [], [1, 2, 3, 4, 5, 6]), + (1720226107.259649, [], [1, 2, 3, 4, 5, 6]), + (1720410027.350582, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719966308.30787, [], [1, 2, 3, 4, 5, 6]), + (1720136775.382126, [], [1, 2, 3, 4, 5, 6]), + (1720453167.302523, [], [1, 2, 3, 4, 5, 6]), + (1720578911.142536, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965705.478301, [], [1, 2, 3, 4, 5, 6]), + (1720089492.274268, [], [1, 2, 3, 4, 5, 6]), + (1720458943.365803, [], [1, 2, 3, 4, 5, 6]), + (1720588170.374851, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720137202.259506, [], [1, 2, 3, 4, 5, 6]), (1720309242.730837, [], [1, 2, 3, 4, 5, 6])], + [(1720397209.557207, [], [1, 2, 3, 4, 5, 6]), (1720628958.303298, [], [1, 2, 3, 4, 5, 6])], + [(1719967303.936898, [], [1, 2, 3, 4, 5, 6]), (1720069496.922345, [], [1, 2, 3, 4, 5, 6])], + [ + (1719970996.586184, [], [1, 2, 3, 4, 5, 6]), + (1720168525.715398, [], [1, 2, 3, 4, 5, 6]), + (1720504098.515479, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720601826.09111, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971199.057468, [], [1, 2, 3, 4, 5, 6]), + (1720068258.0151, [], [1, 2, 3, 4, 5, 6]), + (1720137337.044491, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719971421.175757, [], [1, 2, 3, 4, 5, 6]), (1720134959.956933, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976305.13246, [], [1, 2, 3, 4, 5, 6]), + (1720059646.658845, [], [1, 2, 3, 4, 5, 6]), + (1720145964.773181, [], [1, 2, 3, 4, 5, 6]), + (1720233116.664838, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720436748.603567, [], [1, 2, 3, 4, 5, 6])], + [(1719969550.144929, [], [1, 2, 3, 4, 5, 6]), (1720315269.690666, [], [1, 2, 3, 4, 5, 6])], + [ + (1720274096.315691, [], [1, 2, 3, 4, 5, 6]), + (1720274154.981534, [], [1, 2, 3, 4, 5, 6]), + (1720274184.028094, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720268306.026574, [], [1, 2, 3, 4, 5, 6]), (1720323182.163554, [], [1, 2, 3, 4, 5, 6])], + [ + (1720032173.053995, [], [1, 2, 3, 4, 5, 6]), + (1720157155.365383, [], [1, 2, 3, 4, 5, 6]), + (1720314424.94755, [], [1, 2, 3, 4, 5, 6]), + (1720481047.114281, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720010572.095008, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968638.302149, [], [1, 2, 3, 4, 5, 6]), + (1719968762.98274, [], [1, 2, 3, 4, 5, 6]), + (1719968765.072701, [], [1, 2, 3, 4, 5, 6]), + (1720005012.137582, [], [1, 2, 3, 4, 5, 6]), + (1720005160.858454, [], [1, 2, 3, 4, 5, 6]), + (1720005162.913788, [], [1, 2, 3, 4, 5, 6]), + (1720175681.69546, [], [1, 2, 3, 4, 5, 6]), + (1720175774.033356, [], [1, 2, 3, 4, 5, 6]), + (1720175776.151125, [], [1, 2, 3, 4, 5, 6]), + (1720220252.732147, [], [1, 2, 3, 4, 5, 6]), + (1720220252.777516, [], [1, 2, 3, 4, 5, 6]), + (1720220256.747294, [], [1, 2, 3, 4, 5, 6]), + (1720492139.162569, [], [1, 2, 3, 4, 5, 6]), + (1720492141.256483, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719970987.569565, [], [1, 2, 3, 4, 5, 6])], + [(1720054508.915859, [], [1, 2, 3, 4, 5, 6]), (1720315224.08896, [], [1, 2, 3, 4, 5, 6])], + [(1720325854.156535, [], [1, 2, 3, 4, 5, 6]), (1720483854.080251, [], [1, 2, 3, 4, 5, 6])], + [(1720406510.418443, [], [1, 2, 3, 4, 5, 6])], + [ + (1720030600.544521, [], [1, 2, 3, 4, 5, 6]), + (1720069524.719771, [], [1, 2, 3, 4, 5, 6]), + (1720484112.369653, [], [1, 2, 3, 4, 5, 6]), + (1720568851.121099, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720304785.385271, [], [1, 2, 3, 4, 5, 6])], + [ + (1720313035.494802, [], [1, 2, 3, 4, 5, 6]), + (1720313090.718424, [], [1, 2, 3, 4, 5, 6]), + (1720313239.783577, [], [1, 2, 3, 4, 5, 6]), + (1720313343.042083, [], [1, 2, 3, 4, 5, 6]), + (1720404935.491204, [], [1, 2, 3, 4, 5, 6]), + (1720404979.552845, [], 
[1, 2, 3, 4, 5, 6]), + (1720404981.647182, [], [1, 2, 3, 4, 5, 6]), + (1720501716.284759, [], [1, 2, 3, 4, 5, 6]), + (1720501761.154088, [], [1, 2, 3, 4, 5, 6]), + (1720501763.234903, [], [1, 2, 3, 4, 5, 6]), + (1720584650.853158, [], [1, 2, 3, 4, 5, 6]), + (1720584688.899508, [], [1, 2, 3, 4, 5, 6]), + (1720584691.020015, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720229088.205557, [], [1, 2, 3, 4, 5, 6]), (1720487228.612214, [], [1, 2, 3, 4, 5, 6])], + [(1720241088.456982, [], [1, 2, 3, 4, 5, 6])], + [(1720042764.160666, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984325.782697, [], [1, 2, 3, 4, 5, 6]), + (1720047953.098659, [], [1, 2, 3, 4, 5, 6]), + (1720268267.887048, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720054622.362154, [], [1, 2, 3, 4, 5, 6]), + (1720134575.953204, [], [1, 2, 3, 4, 5, 6]), + (1720416355.096939, [], [1, 2, 3, 4, 5, 6]), + (1720500581.691615, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719972690.486054, [], [1, 2, 3, 4, 5, 6]), + (1719972877.103609, [], [1, 2, 3, 4, 5, 6]), + (1719972879.22778, [], [1, 2, 3, 4, 5, 6]), + (1719972881.797102, [], [1, 2, 3, 4, 5, 6]), + (1719972883.859612, [], [1, 2, 3, 4, 5, 6]), + (1720052338.317127, [], [1, 2, 3, 4, 5, 6]), + (1720052340.409592, [], [1, 2, 3, 4, 5, 6]), + (1720069426.554888, [], [1, 2, 3, 4, 5, 6]), + (1720069428.615973, [], [1, 2, 3, 4, 5, 6]), + (1720149027.365317, [], [1, 2, 3, 4, 5, 6]), + (1720149089.951754, [], [1, 2, 3, 4, 5, 6]), + (1720149092.012724, [], [1, 2, 3, 4, 5, 6]), + (1720234565.610403, [], [1, 2, 3, 4, 5, 6]), + (1720239125.82035, [], [1, 2, 3, 4, 5, 6]), + (1720239147.87241, [], [1, 2, 3, 4, 5, 6]), + (1720318243.573983, [], [1, 2, 3, 4, 5, 6]), + (1720318245.63705, [], [1, 2, 3, 4, 5, 6]), + (1720418009.877203, [], [1, 2, 3, 4, 5, 6]), + (1720418011.983148, [], [1, 2, 3, 4, 5, 6]), + (1720418014.464823, [], [1, 2, 3, 4, 5, 6]), + (1720485277.913378, [], [1, 2, 3, 4, 5, 6]), + (1720485280.026695, [], [1, 2, 3, 4, 5, 6]), + (1720574328.529507, [], [1, 2, 3, 4, 5, 6]), + (1720574330.633898, [], [1, 2, 3, 4, 5, 6]), + (1720581736.051228, [], [1, 2, 3, 4, 5, 6]), + (1720581766.135021, [], [1, 2, 3, 4, 5, 6]), + (1720581768.228326, [], [1, 2, 3, 4, 5, 6]), + (1720652888.715284, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720328167.133332, [], [1, 2, 3, 4, 5, 6]), (1720394291.998224, [], [1, 2, 3, 4, 5, 6])], + [(1720138278.025347, [], [1, 2, 3, 4, 5, 6])], + [(1720411684.615562, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979551.790599, [], [1, 2, 3, 4, 5, 6]), + (1720062548.79153, [], [1, 2, 3, 4, 5, 6]), + (1720152645.092565, [], [1, 2, 3, 4, 5, 6]), + (1720273648.542968, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720150050.836492, [], [1, 2, 3, 4, 5, 6])], + [(1720235665.517364, [], [1, 2, 3, 4, 5, 6]), (1720504709.666893, [], [1, 2, 3, 4, 5, 6])], + [(1720502409.011067, [], [1, 2, 3, 4, 5, 6]), (1720652305.691241, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983664.396995, [], [1, 2, 3, 4, 5, 6]), + (1720057964.860551, [], [1, 2, 3, 4, 5, 6]), + (1720058069.016671, [], [1, 2, 3, 4, 5, 6]), + (1720119055.986377, [], [1, 2, 3, 4, 5, 6]), + (1720119177.882926, [], [1, 2, 3, 4, 5, 6]), + (1720146988.862958, [], [1, 2, 3, 4, 5, 6]), + (1720146990.940118, [], [1, 2, 3, 4, 5, 6]), + (1720207091.824328, [], [1, 2, 3, 4, 5, 6]), + (1720207147.984162, [], [1, 2, 3, 4, 5, 6]), + (1720207150.045311, [], [1, 2, 3, 4, 5, 6]), + (1720221686.916464, [], [1, 2, 3, 4, 5, 6]), + (1720221731.792885, [], [1, 2, 3, 4, 5, 6]), + (1720221733.892091, [], [1, 2, 3, 4, 5, 6]), + (1720221736.114027, [], [1, 2, 3, 4, 5, 6]), + (1720221738.1731, [], [1, 2, 3, 4, 5, 6]), + 
(1720221740.137735, [], [1, 2, 3, 4, 5, 6]), + (1720221742.219472, [], [1, 2, 3, 4, 5, 6]), + (1720319188.083254, [], [1, 2, 3, 4, 5, 6]), + (1720319190.195166, [], [1, 2, 3, 4, 5, 6]), + (1720333160.336537, [], [1, 2, 3, 4, 5, 6]), + (1720333162.39224, [], [1, 2, 3, 4, 5, 6]), + (1720350382.882768, [], [1, 2, 3, 4, 5, 6]), + (1720350384.998862, [], [1, 2, 3, 4, 5, 6]), + (1720350385.400746, [], [1, 2, 3, 4, 5, 6]), + (1720350387.504804, [], [1, 2, 3, 4, 5, 6]), + (1720350390.868713, [], [1, 2, 3, 4, 5, 6]), + (1720350392.929211, [], [1, 2, 3, 4, 5, 6]), + (1720482928.905461, [], [1, 2, 3, 4, 5, 6]), + (1720482987.630373, [], [1, 2, 3, 4, 5, 6]), + (1720549510.694147, [], [1, 2, 3, 4, 5, 6]), + (1720549582.87966, [], [1, 2, 3, 4, 5, 6]), + (1720549584.95341, [], [1, 2, 3, 4, 5, 6]), + (1720581476.586746, [], [1, 2, 3, 4, 5, 6]), + (1720581478.656771, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977177.729628, [], [1, 2, 3, 4, 5, 6]), (1720393638.078415, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980378.113974, [], [1, 2, 3, 4, 5, 6]), + (1720443102.049493, [], [1, 2, 3, 4, 5, 6]), + (1720590770.939412, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719979883.308045, [], [1, 2, 3, 4, 5, 6]), + (1720230654.923495, [], [1, 2, 3, 4, 5, 6]), + (1720310908.910099, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719964829.898192, [], [1, 2, 3, 4, 5, 6]), + (1719964832.003811, [], [1, 2, 3, 4, 5, 6]), + (1719985240.876339, [], [1, 2, 3, 4, 5, 6]), + (1719985242.911416, [], [1, 2, 3, 4, 5, 6]), + (1719985243.633507, [], [1, 2, 3, 4, 5, 6]), + (1719985245.665729, [], [1, 2, 3, 4, 5, 6]), + (1720060650.829248, [], [1, 2, 3, 4, 5, 6]), + (1720060759.718692, [], [1, 2, 3, 4, 5, 6]), + (1720060761.830242, [], [1, 2, 3, 4, 5, 6]), + (1720070260.443094, [], [1, 2, 3, 4, 5, 6]), + (1720070280.911994, [], [1, 2, 3, 4, 5, 6]), + (1720070282.979246, [], [1, 2, 3, 4, 5, 6]), + (1720134645.425223, [], [1, 2, 3, 4, 5, 6]), + (1720134793.819981, [], [1, 2, 3, 4, 5, 6]), + (1720134795.932398, [], [1, 2, 3, 4, 5, 6]), + (1720155938.192604, [], [1, 2, 3, 4, 5, 6]), + (1720155940.320279, [], [1, 2, 3, 4, 5, 6]), + (1720155945.041101, [], [1, 2, 3, 4, 5, 6]), + (1720155947.088061, [], [1, 2, 3, 4, 5, 6]), + (1720236895.111761, [], [1, 2, 3, 4, 5, 6]), + (1720236912.473535, [], [1, 2, 3, 4, 5, 6]), + (1720236914.593968, [], [1, 2, 3, 4, 5, 6]), + (1720236917.655587, [], [1, 2, 3, 4, 5, 6]), + (1720318871.824625, [], [1, 2, 3, 4, 5, 6]), + (1720318935.358285, [], [1, 2, 3, 4, 5, 6]), + (1720318937.446561, [], [1, 2, 3, 4, 5, 6]), + (1720318940.05207, [], [1, 2, 3, 4, 5, 6]), + (1720318942.106239, [], [1, 2, 3, 4, 5, 6]), + (1720405217.370251, [], [1, 2, 3, 4, 5, 6]), + (1720405312.528519, [], [1, 2, 3, 4, 5, 6]), + (1720405314.627163, [], [1, 2, 3, 4, 5, 6]), + (1720413100.204244, [], [1, 2, 3, 4, 5, 6]), + (1720413102.291035, [], [1, 2, 3, 4, 5, 6]), + (1720496542.308228, [], [1, 2, 3, 4, 5, 6]), + (1720574260.260325, [], [1, 2, 3, 4, 5, 6]), + (1720574343.117651, [], [1, 2, 3, 4, 5, 6]), + (1720574345.20748, [], [1, 2, 3, 4, 5, 6]), + (1720578705.104516, [], [1, 2, 3, 4, 5, 6]), + (1720578717.159504, [], [1, 2, 3, 4, 5, 6]), + (1720578719.26077, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720071370.538676, [], [1, 2, 3, 4, 5, 6])], + [(1719972220.86175, [], [1, 2, 3, 4, 5, 6]), (1720227223.558904, [], [1, 2, 3, 4, 5, 6])], + [ + (1720047692.206123, [], [1, 2, 3, 4, 5, 6]), + (1720074479.043983, [], [1, 2, 3, 4, 5, 6]), + (1720221755.131247, [], [1, 2, 3, 4, 5, 6]), + (1720343377.429715, [], [1, 2, 3, 4, 5, 6]), + (1720581159.65796, [], [1, 2, 3, 4, 5, 6]), + 
], + [ + (1720054906.379171, [], [1, 2, 3, 4, 5, 6]), + (1720326827.193456, [], [1, 2, 3, 4, 5, 6]), + (1720395837.565662, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719972411.855532, [], [1, 2, 3, 4, 5, 6]), + (1719972538.863121, [], [1, 2, 3, 4, 5, 6]), + (1719972540.935712, [], [1, 2, 3, 4, 5, 6]), + (1720063717.900878, [], [1, 2, 3, 4, 5, 6]), + (1720063719.954111, [], [1, 2, 3, 4, 5, 6]), + (1720070114.807467, [], [1, 2, 3, 4, 5, 6]), + (1720070235.024434, [], [1, 2, 3, 4, 5, 6]), + (1720070237.14674, [], [1, 2, 3, 4, 5, 6]), + (1720097819.236115, [], [1, 2, 3, 4, 5, 6]), + (1720097978.260021, [], [1, 2, 3, 4, 5, 6]), + (1720097980.382821, [], [1, 2, 3, 4, 5, 6]), + (1720151026.716063, [], [1, 2, 3, 4, 5, 6]), + (1720151173.670938, [], [1, 2, 3, 4, 5, 6]), + (1720151175.717239, [], [1, 2, 3, 4, 5, 6]), + (1720166439.941955, [], [1, 2, 3, 4, 5, 6]), + (1720166583.693905, [], [1, 2, 3, 4, 5, 6]), + (1720166585.791065, [], [1, 2, 3, 4, 5, 6]), + (1720181553.630642, [], [1, 2, 3, 4, 5, 6]), + (1720181555.746202, [], [1, 2, 3, 4, 5, 6]), + (1720242210.300006, [], [1, 2, 3, 4, 5, 6]), + (1720242331.451228, [], [1, 2, 3, 4, 5, 6]), + (1720316730.127117, [], [1, 2, 3, 4, 5, 6]), + (1720316751.481651, [], [1, 2, 3, 4, 5, 6]), + (1720350332.517593, [], [1, 2, 3, 4, 5, 6]), + (1720350427.724851, [], [1, 2, 3, 4, 5, 6]), + (1720350429.836812, [], [1, 2, 3, 4, 5, 6]), + (1720396153.382808, [], [1, 2, 3, 4, 5, 6]), + (1720396199.106453, [], [1, 2, 3, 4, 5, 6]), + (1720396201.15929, [], [1, 2, 3, 4, 5, 6]), + (1720424092.525755, [], [1, 2, 3, 4, 5, 6]), + (1720424190.959176, [], [1, 2, 3, 4, 5, 6]), + (1720424193.037739, [], [1, 2, 3, 4, 5, 6]), + (1720492456.877253, [], [1, 2, 3, 4, 5, 6]), + (1720492529.103048, [], [1, 2, 3, 4, 5, 6]), + (1720492531.198928, [], [1, 2, 3, 4, 5, 6]), + (1720583806.008143, [], [1, 2, 3, 4, 5, 6]), + (1720583868.43082, [], [1, 2, 3, 4, 5, 6]), + (1720648763.855471, [], [1, 2, 3, 4, 5, 6]), + (1720648878.799852, [], [1, 2, 3, 4, 5, 6]), + (1720648880.882297, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720071793.774403, [], [1, 2, 3, 4, 5, 6]), + (1720309002.505766, [], [1, 2, 3, 4, 5, 6]), + (1720367384.379119, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978345.677095, [], [1, 2, 3, 4, 5, 6]), (1720134660.416426, [], [1, 2, 3, 4, 5, 6])], + [ + (1720056400.339178, [], [1, 2, 3, 4, 5, 6]), + (1720137451.906538, [], [1, 2, 3, 4, 5, 6]), + (1720581731.115191, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719974999.631834, [], [1, 2, 3, 4, 5, 6]), (1720503857.499785, [], [1, 2, 3, 4, 5, 6])], + [(1720325249.830373, [], [1, 2, 3, 4, 5, 6])], + [ + (1719970175.486046, [], [1, 2, 3, 4, 5, 6]), + (1720061532.244847, [], [1, 2, 3, 4, 5, 6]), + (1720387059.054565, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720157797.242967, [], [1, 2, 3, 4, 5, 6])], + [(1720490173.84352, [], [1, 2, 3, 4, 5, 6])], + [(1720568387.145132, [], [1, 2, 3, 4, 5, 6])], + [(1720027447.264569, [], [1, 2, 3, 4, 5, 6])], + [(1719979106.899872, [], [1, 2, 3, 4, 5, 6]), (1720417473.653713, [], [1, 2, 3, 4, 5, 6])], + [(1720153359.982848, [], [1, 2, 3, 4, 5, 6]), (1720468837.459019, [], [1, 2, 3, 4, 5, 6])], + [(1720047669.218866, [], [1, 2, 3, 4, 5, 6])], + [(1720230050.113895, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975411.228945, [], [1, 2, 3, 4, 5, 6]), + (1720235199.496284, [], [1, 2, 3, 4, 5, 6]), + (1720403154.17646, [], [1, 2, 3, 4, 5, 6]), + (1720626578.282517, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978136.275137, [], [1, 2, 3, 4, 5, 6]), (1720331670.572264, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975669.597909, [], [1, 2, 3, 4, 5, 6]), + 
(1719975763.25366, [], [1, 2, 3, 4, 5, 6]), + (1720055306.937976, [], [1, 2, 3, 4, 5, 6]), + (1720055449.351479, [], [1, 2, 3, 4, 5, 6]), + (1720067433.572041, [], [1, 2, 3, 4, 5, 6]), + (1720067452.84519, [], [1, 2, 3, 4, 5, 6]), + (1720067454.935816, [], [1, 2, 3, 4, 5, 6]), + (1720233716.974937, [], [1, 2, 3, 4, 5, 6]), + (1720233884.056907, [], [1, 2, 3, 4, 5, 6]), + (1720233886.107033, [], [1, 2, 3, 4, 5, 6]), + (1720238869.144339, [], [1, 2, 3, 4, 5, 6]), + (1720239054.729577, [], [1, 2, 3, 4, 5, 6]), + (1720239056.811577, [], [1, 2, 3, 4, 5, 6]), + (1720248048.594017, [], [1, 2, 3, 4, 5, 6]), + (1720248147.506317, [], [1, 2, 3, 4, 5, 6]), + (1720248149.540209, [], [1, 2, 3, 4, 5, 6]), + (1720323761.342714, [], [1, 2, 3, 4, 5, 6]), + (1720323763.381547, [], [1, 2, 3, 4, 5, 6]), + (1720400825.240853, [], [1, 2, 3, 4, 5, 6]), + (1720400844.815642, [], [1, 2, 3, 4, 5, 6]), + (1720410954.1329, [], [1, 2, 3, 4, 5, 6]), + (1720410956.230411, [], [1, 2, 3, 4, 5, 6]), + (1720410956.661655, [], [1, 2, 3, 4, 5, 6]), + (1720410958.800282, [], [1, 2, 3, 4, 5, 6]), + (1720414005.128157, [], [1, 2, 3, 4, 5, 6]), + (1720414035.742095, [], [1, 2, 3, 4, 5, 6]), + (1720414037.861356, [], [1, 2, 3, 4, 5, 6]), + (1720414039.522054, [], [1, 2, 3, 4, 5, 6]), + (1720414041.622559, [], [1, 2, 3, 4, 5, 6]), + (1720449836.553695, [], [1, 2, 3, 4, 5, 6]), + (1720449909.88067, [], [1, 2, 3, 4, 5, 6]), + (1720449912.006572, [], [1, 2, 3, 4, 5, 6]), + (1720504478.640048, [], [1, 2, 3, 4, 5, 6]), + (1720504584.183246, [], [1, 2, 3, 4, 5, 6]), + (1720504586.273448, [], [1, 2, 3, 4, 5, 6]), + (1720589586.941948, [], [1, 2, 3, 4, 5, 6]), + (1720589732.653657, [], [1, 2, 3, 4, 5, 6]), + (1720589734.757411, [], [1, 2, 3, 4, 5, 6]), + (1720589735.718174, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719987925.192586, [], [1, 2, 3, 4, 5, 6]), (1720319498.157106, [], [1, 2, 3, 4, 5, 6])], + [(1720140316.935341, [], [1, 2, 3, 4, 5, 6]), (1720581286.138288, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984942.453601, [], [1, 2, 3, 4, 5, 6]), + (1720334036.972544, [], [1, 2, 3, 4, 5, 6]), + (1720568302.136228, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720231990.896895, [], [1, 2, 3, 4, 5, 6]), (1720320392.727402, [], [1, 2, 3, 4, 5, 6])], + [(1720151072.246138, [], [1, 2, 3, 4, 5, 6]), (1720309428.675922, [], [1, 2, 3, 4, 5, 6])], + [(1720652752.302257, [], [1, 2, 3, 4, 5, 6])], + [ + (1719977332.758786, [], [1, 2, 3, 4, 5, 6]), + (1720135118.942837, [], [1, 2, 3, 4, 5, 6]), + (1720498977.766189, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720054300.10225, [], [1, 2, 3, 4, 5, 6]), (1720419250.119038, [], [1, 2, 3, 4, 5, 6])], + [ + (1720170242.586928, [], [1, 2, 3, 4, 5, 6]), + (1720322954.401713, [], [1, 2, 3, 4, 5, 6]), + (1720500416.057333, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719976447.157218, [], [1, 2, 3, 4, 5, 6])], + [(1720134454.623643, [], [1, 2, 3, 4, 5, 6]), (1720482790.529945, [], [1, 2, 3, 4, 5, 6])], + [(1720575291.374898, [], [1, 2, 3, 4, 5, 6])], + [(1720575147.912954, [], [1, 2, 3, 4, 5, 6])], + [ + (1719997197.65312, [], [1, 2, 3, 4, 5, 6]), + (1720137704.47896, [], [1, 2, 3, 4, 5, 6]), + (1720226085.527498, [], [1, 2, 3, 4, 5, 6]), + (1720306837.86921, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720314781.298338, [], [1, 2, 3, 4, 5, 6]), (1720443503.319112, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976515.23989, [], [1, 2, 3, 4, 5, 6]), + (1720225806.720086, [], [1, 2, 3, 4, 5, 6]), + (1720388901.256231, [], [1, 2, 3, 4, 5, 6]), + (1720490185.842396, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720568142.650151, [], [1, 2, 3, 4, 5, 6])], + [(1720587602.828532, [], 
[1, 2, 3, 4, 5, 6])], + [(1720228569.783763, [], [1, 2, 3, 4, 5, 6]), (1720577136.698764, [], [1, 2, 3, 4, 5, 6])], + [ + (1720058398.793045, [], [1, 2, 3, 4, 5, 6]), + (1720317616.711315, [], [1, 2, 3, 4, 5, 6]), + (1720498994.241943, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719988512.009335, [], [1, 2, 3, 4, 5, 6]), + (1720411879.880695, [], [1, 2, 3, 4, 5, 6]), + (1720575546.218164, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720245099.046699, [], [1, 2, 3, 4, 5, 6]), (1720652539.847041, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965518.303227, [], [1, 2, 3, 4, 5, 6]), + (1720241249.736668, [], [1, 2, 3, 4, 5, 6]), + (1720410560.906617, [], [1, 2, 3, 4, 5, 6]), + (1720566388.427971, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720033619.669265, [], [1, 2, 3, 4, 5, 6]), + (1720309514.690673, [], [1, 2, 3, 4, 5, 6]), + (1720584737.484501, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719981137.891986, [], [1, 2, 3, 4, 5, 6]), + (1719981255.525287, [], [1, 2, 3, 4, 5, 6]), + (1719981257.57542, [], [1, 2, 3, 4, 5, 6]), + (1720156254.934266, [], [1, 2, 3, 4, 5, 6]), + (1720156432.088183, [], [1, 2, 3, 4, 5, 6]), + (1720221245.352908, [], [1, 2, 3, 4, 5, 6]), + (1720221247.415618, [], [1, 2, 3, 4, 5, 6]), + (1720306695.430622, [], [1, 2, 3, 4, 5, 6]), + (1720306697.509606, [], [1, 2, 3, 4, 5, 6]), + (1720399726.625066, [], [1, 2, 3, 4, 5, 6]), + (1720399728.675873, [], [1, 2, 3, 4, 5, 6]), + (1720486842.405361, [], [1, 2, 3, 4, 5, 6]), + (1720486974.649877, [], [1, 2, 3, 4, 5, 6]), + (1720494605.919949, [], [1, 2, 3, 4, 5, 6]), + (1720494724.480053, [], [1, 2, 3, 4, 5, 6]), + (1720494726.541559, [], [1, 2, 3, 4, 5, 6]), + (1720572824.284783, [], [1, 2, 3, 4, 5, 6]), + (1720572826.355789, [], [1, 2, 3, 4, 5, 6]), + (1720652512.753893, [], [1, 2, 3, 4, 5, 6]), + (1720652514.833743, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720044224.653908, [], [1, 2, 3, 4, 5, 6]), (1720583176.852571, [], [1, 2, 3, 4, 5, 6])], + [(1720567262.122889, [], [1, 2, 3, 4, 5, 6])], + [(1720518049.925836, [], [1, 2, 3, 4, 5, 6])], + [(1720148280.678113, [], [1, 2, 3, 4, 5, 6]), (1720391739.484219, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983654.268169, [], [1, 2, 3, 4, 5, 6]), + (1719983761.844014, [], [1, 2, 3, 4, 5, 6]), + (1719983763.902973, [], [1, 2, 3, 4, 5, 6]), + (1720014262.846562, [], [1, 2, 3, 4, 5, 6]), + (1720014264.966534, [], [1, 2, 3, 4, 5, 6]), + (1720014268.064236, [], [1, 2, 3, 4, 5, 6]), + (1720014270.176366, [], [1, 2, 3, 4, 5, 6]), + (1720062164.847608, [], [1, 2, 3, 4, 5, 6]), + (1720062166.93557, [], [1, 2, 3, 4, 5, 6]), + (1720070760.286042, [], [1, 2, 3, 4, 5, 6]), + (1720070793.879274, [], [1, 2, 3, 4, 5, 6]), + (1720070795.974998, [], [1, 2, 3, 4, 5, 6]), + (1720136175.682667, [], [1, 2, 3, 4, 5, 6]), + (1720136177.782735, [], [1, 2, 3, 4, 5, 6]), + (1720150756.421019, [], [1, 2, 3, 4, 5, 6]), + (1720150758.537073, [], [1, 2, 3, 4, 5, 6]), + (1720226712.358545, [], [1, 2, 3, 4, 5, 6]), + (1720320316.371588, [], [1, 2, 3, 4, 5, 6]), + (1720396676.623722, [], [1, 2, 3, 4, 5, 6]), + (1720396759.330429, [], [1, 2, 3, 4, 5, 6]), + (1720482810.511366, [], [1, 2, 3, 4, 5, 6]), + (1720482891.609285, [], [1, 2, 3, 4, 5, 6]), + (1720482893.739553, [], [1, 2, 3, 4, 5, 6]), + (1720502988.652815, [], [1, 2, 3, 4, 5, 6]), + (1720503034.447086, [], [1, 2, 3, 4, 5, 6]), + (1720503036.52898, [], [1, 2, 3, 4, 5, 6]), + (1720503036.606516, [], [1, 2, 3, 4, 5, 6]), + (1720503038.712119, [], [1, 2, 3, 4, 5, 6]), + (1720566567.148583, [], [1, 2, 3, 4, 5, 6]), + (1720566710.618717, [], [1, 2, 3, 4, 5, 6]), + (1720624425.022175, [], [1, 2, 3, 4, 5, 6]), + 
(1720624567.571474, [], [1, 2, 3, 4, 5, 6]), + (1720624569.66289, [], [1, 2, 3, 4, 5, 6]), + (1720652508.525789, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978793.297818, [], [1, 2, 3, 4, 5, 6]), (1720312690.624643, [], [1, 2, 3, 4, 5, 6])], + [(1720408103.17786, [], [1, 2, 3, 4, 5, 6]), (1720496665.901316, [], [1, 2, 3, 4, 5, 6])], + [(1720397084.267673, [], [1, 2, 3, 4, 5, 6]), (1720499411.209847, [], [1, 2, 3, 4, 5, 6])], + [ + (1720111853.777887, [], [1, 2, 3, 4, 5, 6]), + (1720111923.412934, [], [1, 2, 3, 4, 5, 6]), + (1720139482.167685, [], [1, 2, 3, 4, 5, 6]), + (1720139533.842338, [], [1, 2, 3, 4, 5, 6]), + (1720139535.907287, [], [1, 2, 3, 4, 5, 6]), + (1720139540.267313, [], [1, 2, 3, 4, 5, 6]), + (1720139542.34773, [], [1, 2, 3, 4, 5, 6]), + (1720139547.051966, [], [1, 2, 3, 4, 5, 6]), + (1720139549.136732, [], [1, 2, 3, 4, 5, 6]), + (1720464344.794745, [], [1, 2, 3, 4, 5, 6]), + (1720464401.900918, [], [1, 2, 3, 4, 5, 6]), + (1720464404.029255, [], [1, 2, 3, 4, 5, 6]), + (1720498850.875209, [], [1, 2, 3, 4, 5, 6]), + (1720499103.608103, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719973523.458065, [], [1, 2, 3, 4, 5, 6]), (1720233566.787523, [], [1, 2, 3, 4, 5, 6])], + [(1720407928.090004, [], [1, 2, 3, 4, 5, 6])], + [(1720032729.148346, [], [1, 2, 3, 4, 5, 6]), (1720062532.225999, [], [1, 2, 3, 4, 5, 6])], + [(1720034095.862663, [], [1, 2, 3, 4, 5, 6]), (1720491285.681862, [], [1, 2, 3, 4, 5, 6])], + [(1720096472.997597, [], [1, 2, 3, 4, 5, 6]), (1720568056.766425, [], [1, 2, 3, 4, 5, 6])], + [(1720138718.911672, [], [1, 2, 3, 4, 5, 6])], + [ + (1720336998.597537, [], [1, 2, 3, 4, 5, 6]), + (1720489473.142035, [], [1, 2, 3, 4, 5, 6]), + (1720574315.596422, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720577328.151149, [], [1, 2, 3, 4, 5, 6])], + [(1720072713.69671, [], [1, 2, 3, 4, 5, 6]), (1720231002.690818, [], [1, 2, 3, 4, 5, 6])], + [(1719966317.997493, [], [1, 2, 3, 4, 5, 6]), (1720238108.647106, [], [1, 2, 3, 4, 5, 6])], + [(1720146847.656681, [], [1, 2, 3, 4, 5, 6])], + [(1720239981.42926, [], [1, 2, 3, 4, 5, 6])], + [(1720081339.444776, [], [1, 2, 3, 4, 5, 6]), (1720234051.371763, [], [1, 2, 3, 4, 5, 6])], + [(1720236937.844197, [], [1, 2, 3, 4, 5, 6]), (1720501314.981075, [], [1, 2, 3, 4, 5, 6])], + [(1720495611.198831, [], [1, 2, 3, 4, 5, 6])], + [(1720071452.84595, [], [1, 2, 3, 4, 5, 6])], + [(1720320971.754361, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982790.224924, [], [1, 2, 3, 4, 5, 6]), + (1719982861.078823, [], [1, 2, 3, 4, 5, 6]), + (1719982863.122702, [], [1, 2, 3, 4, 5, 6]), + (1720052453.241504, [], [1, 2, 3, 4, 5, 6]), + (1720052505.408448, [], [1, 2, 3, 4, 5, 6]), + (1720052507.485592, [], [1, 2, 3, 4, 5, 6]), + (1720078456.868981, [], [1, 2, 3, 4, 5, 6]), + (1720137047.584706, [], [1, 2, 3, 4, 5, 6]), + (1720137124.096958, [], [1, 2, 3, 4, 5, 6]), + (1720137126.192241, [], [1, 2, 3, 4, 5, 6]), + (1720155528.420602, [], [1, 2, 3, 4, 5, 6]), + (1720155596.835697, [], [1, 2, 3, 4, 5, 6]), + (1720155598.919376, [], [1, 2, 3, 4, 5, 6]), + (1720204090.330488, [], [1, 2, 3, 4, 5, 6]), + (1720204222.690243, [], [1, 2, 3, 4, 5, 6]), + (1720204224.805824, [], [1, 2, 3, 4, 5, 6]), + (1720232760.467367, [], [1, 2, 3, 4, 5, 6]), + (1720232859.977733, [], [1, 2, 3, 4, 5, 6]), + (1720242903.930897, [], [1, 2, 3, 4, 5, 6]), + (1720242906.021355, [], [1, 2, 3, 4, 5, 6]), + (1720309697.411345, [], [1, 2, 3, 4, 5, 6]), + (1720309699.483954, [], [1, 2, 3, 4, 5, 6]), + (1720406346.354509, [], [1, 2, 3, 4, 5, 6]), + (1720406523.466919, [], [1, 2, 3, 4, 5, 6]), + (1720406525.535072, [], [1, 2, 3, 4, 5, 
6]), + ], + [(1720065979.154591, [], [1, 2, 3, 4, 5, 6]), (1720325699.423285, [], [1, 2, 3, 4, 5, 6])], + [(1720244750.093352, [], [1, 2, 3, 4, 5, 6]), (1720394343.192185, [], [1, 2, 3, 4, 5, 6])], + [(1720193298.590097, [], [1, 2, 3, 4, 5, 6]), (1720315677.193089, [], [1, 2, 3, 4, 5, 6])], + [(1720501281.07252, [], [1, 2, 3, 4, 5, 6])], + [(1720055721.622214, [], [1, 2, 3, 4, 5, 6])], + [ + (1720137839.895581, [], [1, 2, 3, 4, 5, 6]), + (1720231251.548774, [], [1, 2, 3, 4, 5, 6]), + (1720350224.693877, [], [1, 2, 3, 4, 5, 6]), + (1720519304.741337, [], [1, 2, 3, 4, 5, 6]), + (1720586487.784295, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719969181.053709, [], [1, 2, 3, 4, 5, 6]), (1720434586.266895, [], [1, 2, 3, 4, 5, 6])], + [(1720070912.148493, [], [1, 2, 3, 4, 5, 6])], + [(1720244703.673132, [], [1, 2, 3, 4, 5, 6]), (1720494833.034907, [], [1, 2, 3, 4, 5, 6])], + [ + (1720053732.993216, [], [1, 2, 3, 4, 5, 6]), + (1720053735.086697, [], [1, 2, 3, 4, 5, 6]), + (1720081490.114819, [], [1, 2, 3, 4, 5, 6]), + (1720081492.188923, [], [1, 2, 3, 4, 5, 6]), + (1720108329.744396, [], [1, 2, 3, 4, 5, 6]), + (1720108420.858541, [], [1, 2, 3, 4, 5, 6]), + (1720108422.93886, [], [1, 2, 3, 4, 5, 6]), + (1720142971.786605, [], [1, 2, 3, 4, 5, 6]), + (1720143021.896153, [], [1, 2, 3, 4, 5, 6]), + (1720149975.921352, [], [1, 2, 3, 4, 5, 6]), + (1720150041.125351, [], [1, 2, 3, 4, 5, 6]), + (1720150043.15518, [], [1, 2, 3, 4, 5, 6]), + (1720200733.408027, [], [1, 2, 3, 4, 5, 6]), + (1720200744.366236, [], [1, 2, 3, 4, 5, 6]), + (1720200746.48024, [], [1, 2, 3, 4, 5, 6]), + (1720226248.428928, [], [1, 2, 3, 4, 5, 6]), + (1720226387.261389, [], [1, 2, 3, 4, 5, 6]), + (1720325189.273212, [], [1, 2, 3, 4, 5, 6]), + (1720367266.448359, [], [1, 2, 3, 4, 5, 6]), + (1720367268.528501, [], [1, 2, 3, 4, 5, 6]), + (1720397514.119584, [], [1, 2, 3, 4, 5, 6]), + (1720397583.541623, [], [1, 2, 3, 4, 5, 6]), + (1720397585.62972, [], [1, 2, 3, 4, 5, 6]), + (1720407649.068004, [], [1, 2, 3, 4, 5, 6]), + (1720407859.450723, [], [1, 2, 3, 4, 5, 6]), + (1720407861.5267, [], [1, 2, 3, 4, 5, 6]), + (1720418226.184583, [], [1, 2, 3, 4, 5, 6]), + (1720418312.907521, [], [1, 2, 3, 4, 5, 6]), + (1720418312.959891, [], [1, 2, 3, 4, 5, 6]), + (1720418314.508588, [], [1, 2, 3, 4, 5, 6]), + (1720429033.410454, [], [1, 2, 3, 4, 5, 6]), + (1720429217.5183, [], [1, 2, 3, 4, 5, 6]), + (1720429219.58254, [], [1, 2, 3, 4, 5, 6]), + (1720476196.299215, [], [1, 2, 3, 4, 5, 6]), + (1720476290.414317, [], [1, 2, 3, 4, 5, 6]), + (1720476292.497993, [], [1, 2, 3, 4, 5, 6]), + (1720496668.635514, [], [1, 2, 3, 4, 5, 6]), + (1720496670.762669, [], [1, 2, 3, 4, 5, 6]), + (1720566807.578929, [], [1, 2, 3, 4, 5, 6]), + (1720566881.524889, [], [1, 2, 3, 4, 5, 6]), + (1720566883.613068, [], [1, 2, 3, 4, 5, 6]), + (1720575742.398153, [], [1, 2, 3, 4, 5, 6]), + (1720575760.407369, [], [1, 2, 3, 4, 5, 6]), + (1720575762.530879, [], [1, 2, 3, 4, 5, 6]), + (1720623060.799492, [], [1, 2, 3, 4, 5, 6]), + (1720623163.775703, [], [1, 2, 3, 4, 5, 6]), + (1720623165.819144, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720490829.088908, [], [1, 2, 3, 4, 5, 6])], + [ + (1720222040.449568, [], [1, 2, 3, 4, 5, 6]), + (1720328183.580613, [], [1, 2, 3, 4, 5, 6]), + (1720581997.108309, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720631726.024509, [], [1, 2, 3, 4, 5, 6])], + [ + (1719969186.42388, [], [1, 2, 3, 4, 5, 6]), + (1720236467.453142, [], [1, 2, 3, 4, 5, 6]), + (1720460294.599805, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720328624.599959, [], [1, 2, 3, 4, 5, 6]), + (1720411036.02508, [], 
[1, 2, 3, 4, 5, 6]), + (1720470233.314202, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719966062.758672, [], [1, 2, 3, 4, 5, 6]), + (1720057471.705526, [], [1, 2, 3, 4, 5, 6]), + (1720325034.717518, [], [1, 2, 3, 4, 5, 6]), + (1720407309.902625, [], [1, 2, 3, 4, 5, 6]), + (1720573477.911506, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720063887.70449, [], [1, 2, 3, 4, 5, 6])], + [(1720343326.152899, [], [1, 2, 3, 4, 5, 6])], + [(1720411362.644921, [], [1, 2, 3, 4, 5, 6])], + [(1720395606.751317, [], [1, 2, 3, 4, 5, 6])], + [(1720155980.858558, [], [1, 2, 3, 4, 5, 6])], + [(1720450339.669296, [], [1, 2, 3, 4, 5, 6])], + [(1719981567.460091, [], [1, 2, 3, 4, 5, 6]), (1720587320.169523, [], [1, 2, 3, 4, 5, 6])], + [(1720245122.915738, [], [1, 2, 3, 4, 5, 6])], + [(1719970229.063219, [], [1, 2, 3, 4, 5, 6]), (1720488361.805483, [], [1, 2, 3, 4, 5, 6])], + [(1720320009.047059, [], [1, 2, 3, 4, 5, 6])], + [(1720139484.708505, [], [1, 2, 3, 4, 5, 6]), (1720396780.73649, [], [1, 2, 3, 4, 5, 6])], + [(1720238094.386701, [], [1, 2, 3, 4, 5, 6])], + [(1720627574.598265, [], [1, 2, 3, 4, 5, 6])], + [(1720136834.089355, [], [1, 2, 3, 4, 5, 6]), (1720396824.609765, [], [1, 2, 3, 4, 5, 6])], + [(1720225652.369657, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982133.012616, [], [1, 2, 3, 4, 5, 6]), + (1719982177.631804, [], [1, 2, 3, 4, 5, 6]), + (1719982179.720602, [], [1, 2, 3, 4, 5, 6]), + (1720006591.274361, [], [1, 2, 3, 4, 5, 6]), + (1720006644.930183, [], [1, 2, 3, 4, 5, 6]), + (1720006647.03435, [], [1, 2, 3, 4, 5, 6]), + (1720460548.964008, [], [1, 2, 3, 4, 5, 6]), + (1720460614.237345, [], [1, 2, 3, 4, 5, 6]), + (1720460616.332418, [], [1, 2, 3, 4, 5, 6]), + (1720585282.645498, [], [1, 2, 3, 4, 5, 6]), + (1720585293.462072, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720370572.550118, [], [1, 2, 3, 4, 5, 6])], + [ + (1719973160.879923, [], [1, 2, 3, 4, 5, 6]), + (1720329101.982409, [], [1, 2, 3, 4, 5, 6]), + (1720581501.430356, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720063998.039445, [], [1, 2, 3, 4, 5, 6]), + (1720232764.384684, [], [1, 2, 3, 4, 5, 6]), + (1720502473.633051, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720153647.635598, [], [1, 2, 3, 4, 5, 6]), + (1720225923.85076, [], [1, 2, 3, 4, 5, 6]), + (1720413430.570698, [], [1, 2, 3, 4, 5, 6]), + (1720584131.042756, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719966759.043742, [], [1, 2, 3, 4, 5, 6])], + [(1720405936.570297, [], [1, 2, 3, 4, 5, 6])], + [(1720144919.519677, [], [1, 2, 3, 4, 5, 6]), (1720402676.1685, [], [1, 2, 3, 4, 5, 6])], + [(1720402994.034134, [], [1, 2, 3, 4, 5, 6]), (1720497341.728864, [], [1, 2, 3, 4, 5, 6])], + [ + (1719973785.927392, [], [1, 2, 3, 4, 5, 6]), + (1720142506.754009, [], [1, 2, 3, 4, 5, 6]), + (1720312482.395361, [], [1, 2, 3, 4, 5, 6]), + (1720578049.42885, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720143203.796648, [], [1, 2, 3, 4, 5, 6]), (1720504600.034248, [], [1, 2, 3, 4, 5, 6])], + [(1720138317.024564, [], [1, 2, 3, 4, 5, 6]), (1720307922.860078, [], [1, 2, 3, 4, 5, 6])], + [(1720576710.045341, [], [1, 2, 3, 4, 5, 6])], + [ + (1720237948.24219, [], [1, 2, 3, 4, 5, 6]), + (1720322691.233406, [], [1, 2, 3, 4, 5, 6]), + (1720412663.957815, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720342019.617667, [], [1, 2, 3, 4, 5, 6]), + (1720342090.227667, [], [1, 2, 3, 4, 5, 6]), + (1720342263.731169, [], [1, 2, 3, 4, 5, 6]), + (1720342307.569989, [], [1, 2, 3, 4, 5, 6]), + (1720342413.538738, [], [1, 2, 3, 4, 5, 6]), + (1720342570.868506, [], [1, 2, 3, 4, 5, 6]), + (1720342664.423143, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720053725.982337, [], [1, 2, 3, 4, 5, 6]), 
+ (1720137089.95596, [], [1, 2, 3, 4, 5, 6]), + (1720250340.159455, [], [1, 2, 3, 4, 5, 6]), + (1720408080.82431, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720502215.54816, [], [1, 2, 3, 4, 5, 6])], + [(1720051018.757074, [], [1, 2, 3, 4, 5, 6]), (1720221304.68857, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055639.220711, [], [1, 2, 3, 4, 5, 6]), + (1720242136.136068, [], [1, 2, 3, 4, 5, 6]), + (1720501308.452889, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720210019.832882, [], [1, 2, 3, 4, 5, 6])], + [(1720222496.41532, [], [1, 2, 3, 4, 5, 6])], + [(1720221892.596089, [], [1, 2, 3, 4, 5, 6]), (1720488555.303827, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055240.779901, [], [1, 2, 3, 4, 5, 6]), + (1720485059.84637, [], [1, 2, 3, 4, 5, 6]), + (1720520102.630634, [], [1, 2, 3, 4, 5, 6]), + (1720591031.4354, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720570592.888394, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059956.606064, [], [1, 2, 3, 4, 5, 6]), + (1720232781.82764, [], [1, 2, 3, 4, 5, 6]), + (1720489307.963369, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720466563.789269, [], [1, 2, 3, 4, 5, 6])], + [(1720120332.505828, [], [1, 2, 3, 4, 5, 6]), (1720501386.247192, [], [1, 2, 3, 4, 5, 6])], + [ + (1720045443.968104, [], [1, 2, 3, 4, 5, 6]), + (1720337612.000658, [], [1, 2, 3, 4, 5, 6]), + (1720484793.823359, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720240516.409323, [], [1, 2, 3, 4, 5, 6]), (1720508486.303913, [], [1, 2, 3, 4, 5, 6])], + [ + (1720056682.445295, [], [1, 2, 3, 4, 5, 6]), + (1720239570.480365, [], [1, 2, 3, 4, 5, 6]), + (1720399243.691516, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720065346.577694, [], [1, 2, 3, 4, 5, 6]), + (1720111179.563476, [], [1, 2, 3, 4, 5, 6]), + (1720152182.18393, [], [1, 2, 3, 4, 5, 6]), + (1720456368.150945, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720037842.027886, [], [1, 2, 3, 4, 5, 6])], + [(1720051512.155726, [], [1, 2, 3, 4, 5, 6]), (1720316085.436368, [], [1, 2, 3, 4, 5, 6])], + [ + (1720153922.872643, [], [1, 2, 3, 4, 5, 6]), + (1720316484.292604, [], [1, 2, 3, 4, 5, 6]), + (1720481626.562697, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720304528.044157, [], [1, 2, 3, 4, 5, 6]), (1720587171.914424, [], [1, 2, 3, 4, 5, 6])], + [ + (1719969690.052003, [], [1, 2, 3, 4, 5, 6]), + (1720098093.259497, [], [1, 2, 3, 4, 5, 6]), + (1720589467.401983, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720182994.851728, [], [1, 2, 3, 4, 5, 6]), (1720490206.204252, [], [1, 2, 3, 4, 5, 6])], + [(1720305269.133214, [], [1, 2, 3, 4, 5, 6]), (1720580679.401674, [], [1, 2, 3, 4, 5, 6])], + [(1720582113.001824, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971867.373391, [], [1, 2, 3, 4, 5, 6]), + (1719971869.452767, [], [1, 2, 3, 4, 5, 6]), + (1719983561.418747, [], [1, 2, 3, 4, 5, 6]), + (1719983615.306689, [], [1, 2, 3, 4, 5, 6]), + (1719983617.371374, [], [1, 2, 3, 4, 5, 6]), + (1719983622.154397, [], [1, 2, 3, 4, 5, 6]), + (1719983624.239597, [], [1, 2, 3, 4, 5, 6]), + (1720057585.854293, [], [1, 2, 3, 4, 5, 6]), + (1720127843.991043, [], [1, 2, 3, 4, 5, 6]), + (1720127952.545227, [], [1, 2, 3, 4, 5, 6]), + (1720150451.197164, [], [1, 2, 3, 4, 5, 6]), + (1720150472.889245, [], [1, 2, 3, 4, 5, 6]), + (1720229579.372015, [], [1, 2, 3, 4, 5, 6]), + (1720229585.29839, [], [1, 2, 3, 4, 5, 6]), + (1720229587.33746, [], [1, 2, 3, 4, 5, 6]), + (1720272362.151724, [], [1, 2, 3, 4, 5, 6]), + (1720272395.494166, [], [1, 2, 3, 4, 5, 6]), + (1720272397.584197, [], [1, 2, 3, 4, 5, 6]), + (1720325287.360716, [], [1, 2, 3, 4, 5, 6]), + (1720325289.430457, [], [1, 2, 3, 4, 5, 6]), + (1720392144.674955, [], [1, 2, 3, 4, 5, 6]), + (1720392146.786158, [], [1, 2, 3, 4, 
5, 6]), + (1720406690.885685, [], [1, 2, 3, 4, 5, 6]), + (1720406692.950513, [], [1, 2, 3, 4, 5, 6]), + (1720486441.134231, [], [1, 2, 3, 4, 5, 6]), + (1720486443.192435, [], [1, 2, 3, 4, 5, 6]), + (1720648828.296221, [], [1, 2, 3, 4, 5, 6]), + (1720648830.340132, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977053.236432, [], [1, 2, 3, 4, 5, 6])], + [(1720146886.388756, [], [1, 2, 3, 4, 5, 6])], + [(1720147721.983335, [], [1, 2, 3, 4, 5, 6]), (1720577319.095652, [], [1, 2, 3, 4, 5, 6])], + [(1720187232.833461, [], [1, 2, 3, 4, 5, 6])], + [(1720309745.334443, [], [1, 2, 3, 4, 5, 6]), (1720525020.981442, [], [1, 2, 3, 4, 5, 6])], + [(1719985270.896874, [], [1, 2, 3, 4, 5, 6]), (1720147203.361104, [], [1, 2, 3, 4, 5, 6])], + [(1719975189.590595, [], [1, 2, 3, 4, 5, 6])], + [ + (1720153681.561666, [], [1, 2, 3, 4, 5, 6]), + (1720315141.854012, [], [1, 2, 3, 4, 5, 6]), + (1720483759.06017, [], [1, 2, 3, 4, 5, 6]), + (1720632532.362134, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719969377.021113, [], [1, 2, 3, 4, 5, 6]), + (1719969547.865829, [], [1, 2, 3, 4, 5, 6]), + (1720050670.589639, [], [1, 2, 3, 4, 5, 6]), + (1720050713.412665, [], [1, 2, 3, 4, 5, 6]), + (1720139076.150907, [], [1, 2, 3, 4, 5, 6]), + (1720139087.933212, [], [1, 2, 3, 4, 5, 6]), + (1720139090.022598, [], [1, 2, 3, 4, 5, 6]), + (1720139092.090332, [], [1, 2, 3, 4, 5, 6]), + (1720148904.698605, [], [1, 2, 3, 4, 5, 6]), + (1720197454.202625, [], [1, 2, 3, 4, 5, 6]), + (1720197456.301898, [], [1, 2, 3, 4, 5, 6]), + (1720221957.937687, [], [1, 2, 3, 4, 5, 6]), + (1720222151.210074, [], [1, 2, 3, 4, 5, 6]), + (1720222153.281944, [], [1, 2, 3, 4, 5, 6]), + (1720231319.785278, [], [1, 2, 3, 4, 5, 6]), + (1720314287.823226, [], [1, 2, 3, 4, 5, 6]), + (1720314375.707773, [], [1, 2, 3, 4, 5, 6]), + (1720314377.787834, [], [1, 2, 3, 4, 5, 6]), + (1720331369.745063, [], [1, 2, 3, 4, 5, 6]), + (1720331582.949466, [], [1, 2, 3, 4, 5, 6]), + (1720331585.058912, [], [1, 2, 3, 4, 5, 6]), + (1720399235.526545, [], [1, 2, 3, 4, 5, 6]), + (1720399237.6268, [], [1, 2, 3, 4, 5, 6]), + (1720410762.341061, [], [1, 2, 3, 4, 5, 6]), + (1720410808.990309, [], [1, 2, 3, 4, 5, 6]), + (1720410811.040448, [], [1, 2, 3, 4, 5, 6]), + (1720493330.828194, [], [1, 2, 3, 4, 5, 6]), + (1720493516.887173, [], [1, 2, 3, 4, 5, 6]), + (1720501442.580123, [], [1, 2, 3, 4, 5, 6]), + (1720501548.316894, [], [1, 2, 3, 4, 5, 6]), + (1720501550.379738, [], [1, 2, 3, 4, 5, 6]), + (1720573012.279738, [], [1, 2, 3, 4, 5, 6]), + (1720573204.24471, [], [1, 2, 3, 4, 5, 6]), + (1720573206.359087, [], [1, 2, 3, 4, 5, 6]), + (1720573210.996145, [], [1, 2, 3, 4, 5, 6]), + (1720573213.096745, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719984464.146305, [], [1, 2, 3, 4, 5, 6])], + [(1719969484.575186, [], [1, 2, 3, 4, 5, 6])], + [(1719967098.321792, [], [1, 2, 3, 4, 5, 6]), (1720140304.171738, [], [1, 2, 3, 4, 5, 6])], + [ + (1720067679.407113, [], [1, 2, 3, 4, 5, 6]), + (1720240007.297001, [], [1, 2, 3, 4, 5, 6]), + (1720499615.946055, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719984745.989291, [], [1, 2, 3, 4, 5, 6]), + (1720088802.060799, [], [1, 2, 3, 4, 5, 6]), + (1720226330.102201, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720057325.702019, [], [1, 2, 3, 4, 5, 6]), (1720499465.567145, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055478.668518, [], [1, 2, 3, 4, 5, 6]), + (1720055619.422527, [], [1, 2, 3, 4, 5, 6]), + (1720138025.392906, [], [1, 2, 3, 4, 5, 6]), + (1720138074.32289, [], [1, 2, 3, 4, 5, 6]), + (1720232045.90559, [], [1, 2, 3, 4, 5, 6]), + (1720232073.337701, [], [1, 2, 3, 4, 5, 6]), + 
(1720278094.793407, [], [1, 2, 3, 4, 5, 6]), + (1720278096.912409, [], [1, 2, 3, 4, 5, 6]), + (1720278099.569789, [], [1, 2, 3, 4, 5, 6]), + (1720278101.660519, [], [1, 2, 3, 4, 5, 6]), + (1720324663.973123, [], [1, 2, 3, 4, 5, 6]), + (1720324666.034118, [], [1, 2, 3, 4, 5, 6]), + (1720412864.0991, [], [1, 2, 3, 4, 5, 6]), + (1720412932.896312, [], [1, 2, 3, 4, 5, 6]), + (1720412934.95735, [], [1, 2, 3, 4, 5, 6]), + (1720493768.204791, [], [1, 2, 3, 4, 5, 6]), + (1720493848.668367, [], [1, 2, 3, 4, 5, 6]), + (1720493850.800293, [], [1, 2, 3, 4, 5, 6]), + (1720493853.855696, [], [1, 2, 3, 4, 5, 6]), + (1720578407.565863, [], [1, 2, 3, 4, 5, 6]), + (1720578455.012928, [], [1, 2, 3, 4, 5, 6]), + (1720578457.12311, [], [1, 2, 3, 4, 5, 6]), + (1720592507.954368, [], [1, 2, 3, 4, 5, 6]), + (1720592695.674207, [], [1, 2, 3, 4, 5, 6]), + (1720592697.763035, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720448820.538088, [], [1, 2, 3, 4, 5, 6]), + (1720448848.354821, [], [1, 2, 3, 4, 5, 6]), + (1720448968.980221, [], [1, 2, 3, 4, 5, 6]), + (1720449109.647373, [], [1, 2, 3, 4, 5, 6]), + (1720449132.605916, [], [1, 2, 3, 4, 5, 6]), + (1720449141.226924, [], [1, 2, 3, 4, 5, 6]), + (1720449174.132961, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720143295.563285, [], [1, 2, 3, 4, 5, 6])], + [ + (1720009875.184202, [], [1, 2, 3, 4, 5, 6]), + (1720064301.403426, [], [1, 2, 3, 4, 5, 6]), + (1720221459.433168, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720316122.630709, [], [1, 2, 3, 4, 5, 6]), (1720625396.811387, [], [1, 2, 3, 4, 5, 6])], + [(1720064525.079458, [], [1, 2, 3, 4, 5, 6])], + [(1720600790.059805, [], [1, 2, 3, 4, 5, 6])], + [(1720053513.239524, [], [1, 2, 3, 4, 5, 6]), (1720533559.490134, [], [1, 2, 3, 4, 5, 6])], + [(1720222657.803241, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971419.792625, [], [1, 2, 3, 4, 5, 6]), + (1720239049.653382, [], [1, 2, 3, 4, 5, 6]), + (1720497253.487835, [], [1, 2, 3, 4, 5, 6]), + (1720571009.60795, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719978213.57048, [], [1, 2, 3, 4, 5, 6]), + (1719978402.543586, [], [1, 2, 3, 4, 5, 6]), + (1720067921.564313, [], [1, 2, 3, 4, 5, 6]), + (1720068031.2973, [], [1, 2, 3, 4, 5, 6]), + (1720068033.364045, [], [1, 2, 3, 4, 5, 6]), + (1720076693.193638, [], [1, 2, 3, 4, 5, 6]), + (1720076695.234922, [], [1, 2, 3, 4, 5, 6]), + (1720088372.082518, [], [1, 2, 3, 4, 5, 6]), + (1720088448.747115, [], [1, 2, 3, 4, 5, 6]), + (1720222636.476764, [], [1, 2, 3, 4, 5, 6]), + (1720222701.214913, [], [1, 2, 3, 4, 5, 6]), + (1720311136.481341, [], [1, 2, 3, 4, 5, 6]), + (1720311279.356667, [], [1, 2, 3, 4, 5, 6]), + (1720311281.435353, [], [1, 2, 3, 4, 5, 6]), + (1720321937.516249, [], [1, 2, 3, 4, 5, 6]), + (1720321977.750869, [], [1, 2, 3, 4, 5, 6]), + (1720321979.826956, [], [1, 2, 3, 4, 5, 6]), + (1720321983.309368, [], [1, 2, 3, 4, 5, 6]), + (1720417820.177018, [], [1, 2, 3, 4, 5, 6]), + (1720417888.907443, [], [1, 2, 3, 4, 5, 6]), + (1720482544.485269, [], [1, 2, 3, 4, 5, 6]), + (1720482650.874077, [], [1, 2, 3, 4, 5, 6]), + (1720571012.586842, [], [1, 2, 3, 4, 5, 6]), + (1720571014.653099, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720067135.000485, [], [1, 2, 3, 4, 5, 6]), + (1720226886.323383, [], [1, 2, 3, 4, 5, 6]), + (1720626810.190995, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720021468.494681, [], [1, 2, 3, 4, 5, 6]), (1720244311.296556, [], [1, 2, 3, 4, 5, 6])], + [ + (1720054497.052088, [], [1, 2, 3, 4, 5, 6]), + (1720315797.04068, [], [1, 2, 3, 4, 5, 6]), + (1720396623.976121, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719970439.050635, [], [1, 2, 3, 4, 5, 6]), (1720411294.606462, 
[], [1, 2, 3, 4, 5, 6])], + [ + (1720047660.240807, [], [1, 2, 3, 4, 5, 6]), + (1720209425.126479, [], [1, 2, 3, 4, 5, 6]), + (1720417042.301423, [], [1, 2, 3, 4, 5, 6]), + (1720579466.836909, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720068666.058135, [], [1, 2, 3, 4, 5, 6]), + (1720224717.712974, [], [1, 2, 3, 4, 5, 6]), + (1720313644.184984, [], [1, 2, 3, 4, 5, 6]), + (1720417247.572309, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720217112.012918, [], [1, 2, 3, 4, 5, 6])], + [(1720228893.793094, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965114.583168, [], [1, 2, 3, 4, 5, 6]), + (1720221700.128257, [], [1, 2, 3, 4, 5, 6]), + (1720359492.65181, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720149938.452021, [], [1, 2, 3, 4, 5, 6]), + (1720150083.518978, [], [1, 2, 3, 4, 5, 6]), + (1720150100.711862, [], [1, 2, 3, 4, 5, 6]), + (1720403516.136956, [], [1, 2, 3, 4, 5, 6]), + (1720403602.399166, [], [1, 2, 3, 4, 5, 6]), + (1720403688.061721, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720016151.530651, [], [1, 2, 3, 4, 5, 6]), + (1720126052.51206, [], [1, 2, 3, 4, 5, 6]), + (1720243360.967974, [], [1, 2, 3, 4, 5, 6]), + (1720567481.805169, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720259903.388453, [], [1, 2, 3, 4, 5, 6]), (1720495071.607118, [], [1, 2, 3, 4, 5, 6])], + [ + (1719978731.351246, [], [1, 2, 3, 4, 5, 6]), + (1720142275.008236, [], [1, 2, 3, 4, 5, 6]), + (1720225627.748133, [], [1, 2, 3, 4, 5, 6]), + (1720599835.060544, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720308817.017884, [], [1, 2, 3, 4, 5, 6]), (1720500376.721695, [], [1, 2, 3, 4, 5, 6])], + [ + (1720062080.162523, [], [1, 2, 3, 4, 5, 6]), + (1720424051.051867, [], [1, 2, 3, 4, 5, 6]), + (1720577193.657241, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720226358.301934, [], [1, 2, 3, 4, 5, 6]), (1720611516.599998, [], [1, 2, 3, 4, 5, 6])], + [(1720142831.087971, [], [1, 2, 3, 4, 5, 6]), (1720568727.59182, [], [1, 2, 3, 4, 5, 6])], + [(1720045127.801767, [], [1, 2, 3, 4, 5, 6])], + [(1720598254.557545, [], [1, 2, 3, 4, 5, 6])], + [(1720230498.737196, [], [1, 2, 3, 4, 5, 6]), (1720502519.921733, [], [1, 2, 3, 4, 5, 6])], + [(1720149819.132452, [], [1, 2, 3, 4, 5, 6]), (1720317818.669453, [], [1, 2, 3, 4, 5, 6])], + [(1719965630.184525, [], [1, 2, 3, 4, 5, 6]), (1720566194.006106, [], [1, 2, 3, 4, 5, 6])], + [(1719996710.23806, [], [1, 2, 3, 4, 5, 6])], + [(1720053587.04154, [], [1, 2, 3, 4, 5, 6]), (1720476400.319672, [], [1, 2, 3, 4, 5, 6])], + [(1720238998.499612, [], [1, 2, 3, 4, 5, 6])], + [(1720049964.339669, [], [1, 2, 3, 4, 5, 6]), (1720503256.459045, [], [1, 2, 3, 4, 5, 6])], + [(1720629914.75266, [], [1, 2, 3, 4, 5, 6])], + [ + (1720067406.552276, [], [1, 2, 3, 4, 5, 6]), + (1720192823.078475, [], [1, 2, 3, 4, 5, 6]), + (1720615636.068682, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720050726.320669, [], [1, 2, 3, 4, 5, 6]), + (1720159164.117987, [], [1, 2, 3, 4, 5, 6]), + (1720583837.972687, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720470505.483407, [], [1, 2, 3, 4, 5, 6]), (1720589293.330858, [], [1, 2, 3, 4, 5, 6])], + [ + (1719990309.924021, [], [1, 2, 3, 4, 5, 6]), + (1720242385.881249, [], [1, 2, 3, 4, 5, 6]), + (1720648573.041044, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720059240.88106, [], [1, 2, 3, 4, 5, 6]), (1720486550.385795, [], [1, 2, 3, 4, 5, 6])], + [(1720232277.114726, [], [1, 2, 3, 4, 5, 6])], + [(1720156360.413945, [], [1, 2, 3, 4, 5, 6]), (1720415380.907597, [], [1, 2, 3, 4, 5, 6])], + [(1720143721.130937, [], [1, 2, 3, 4, 5, 6])], + [(1720093040.94431, [], [1, 2, 3, 4, 5, 6]), (1720230385.831757, [], [1, 2, 3, 4, 5, 6])], + [(1720313919.101562, [], [1, 2, 3, 4, 5, 
6]), (1720600894.542752, [], [1, 2, 3, 4, 5, 6])], + [(1720008883.059792, [], [1, 2, 3, 4, 5, 6]), (1720151981.800615, [], [1, 2, 3, 4, 5, 6])], + [(1720583883.771582, [], [1, 2, 3, 4, 5, 6])], + [(1720054595.476172, [], [1, 2, 3, 4, 5, 6]), (1720494101.96425, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975987.869421, [], [1, 2, 3, 4, 5, 6]), + (1720072012.445937, [], [1, 2, 3, 4, 5, 6]), + (1720141541.892965, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974700.775073, [], [1, 2, 3, 4, 5, 6]), + (1719974810.784479, [], [1, 2, 3, 4, 5, 6]), + (1720022010.687673, [], [1, 2, 3, 4, 5, 6]), + (1720022125.15125, [], [1, 2, 3, 4, 5, 6]), + (1720022127.183082, [], [1, 2, 3, 4, 5, 6]), + (1720022127.850327, [], [1, 2, 3, 4, 5, 6]), + (1720022129.972437, [], [1, 2, 3, 4, 5, 6]), + (1720149346.519881, [], [1, 2, 3, 4, 5, 6]), + (1720149423.39517, [], [1, 2, 3, 4, 5, 6]), + (1720232515.945385, [], [1, 2, 3, 4, 5, 6]), + (1720232642.031851, [], [1, 2, 3, 4, 5, 6]), + (1720232644.151326, [], [1, 2, 3, 4, 5, 6]), + (1720232649.102724, [], [1, 2, 3, 4, 5, 6]), + (1720232651.213687, [], [1, 2, 3, 4, 5, 6]), + (1720291284.378849, [], [1, 2, 3, 4, 5, 6]), + (1720291307.651917, [], [1, 2, 3, 4, 5, 6]), + (1720317724.65539, [], [1, 2, 3, 4, 5, 6]), + (1720317962.176994, [], [1, 2, 3, 4, 5, 6]), + (1720317962.221761, [], [1, 2, 3, 4, 5, 6]), + (1720317967.870483, [], [1, 2, 3, 4, 5, 6]), + (1720416284.403485, [], [1, 2, 3, 4, 5, 6]), + (1720416286.45094, [], [1, 2, 3, 4, 5, 6]), + (1720446964.44037, [], [1, 2, 3, 4, 5, 6]), + (1720447111.491786, [], [1, 2, 3, 4, 5, 6]), + (1720447113.551591, [], [1, 2, 3, 4, 5, 6]), + (1720500857.609857, [], [1, 2, 3, 4, 5, 6]), + (1720500933.241251, [], [1, 2, 3, 4, 5, 6]), + (1720500935.342693, [], [1, 2, 3, 4, 5, 6]), + (1720550391.631024, [], [1, 2, 3, 4, 5, 6]), + (1720550393.677097, [], [1, 2, 3, 4, 5, 6]), + (1720571962.115275, [], [1, 2, 3, 4, 5, 6]), + (1720571964.156322, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720300973.659698, [], [1, 2, 3, 4, 5, 6]), (1720502088.420309, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226060.114355, [], [1, 2, 3, 4, 5, 6]), + (1720367668.242413, [], [1, 2, 3, 4, 5, 6]), + (1720580879.469873, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720148122.993839, [], [1, 2, 3, 4, 5, 6]), + (1720283848.988921, [], [1, 2, 3, 4, 5, 6]), + (1720392902.670008, [], [1, 2, 3, 4, 5, 6]), + (1720547569.939146, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720164561.277691, [], [1, 2, 3, 4, 5, 6]), (1720322322.293618, [], [1, 2, 3, 4, 5, 6])], + [(1720394391.029382, [], [1, 2, 3, 4, 5, 6])], + [(1720578227.91725, [], [1, 2, 3, 4, 5, 6])], + [(1720427348.104988, [], [1, 2, 3, 4, 5, 6]), (1720586312.438776, [], [1, 2, 3, 4, 5, 6])], + [ + (1719967279.972433, [], [1, 2, 3, 4, 5, 6]), + (1719967282.055508, [], [1, 2, 3, 4, 5, 6]), + (1719986090.097845, [], [1, 2, 3, 4, 5, 6]), + (1719986173.00659, [], [1, 2, 3, 4, 5, 6]), + (1719986175.082864, [], [1, 2, 3, 4, 5, 6]), + (1720034526.875582, [], [1, 2, 3, 4, 5, 6]), + (1720061201.725715, [], [1, 2, 3, 4, 5, 6]), + (1720061294.240057, [], [1, 2, 3, 4, 5, 6]), + (1720061296.35589, [], [1, 2, 3, 4, 5, 6]), + (1720155141.396653, [], [1, 2, 3, 4, 5, 6]), + (1720155143.510508, [], [1, 2, 3, 4, 5, 6]), + (1720155145.301155, [], [1, 2, 3, 4, 5, 6]), + (1720155147.393972, [], [1, 2, 3, 4, 5, 6]), + (1720231098.024705, [], [1, 2, 3, 4, 5, 6]), + (1720231317.54759, [], [1, 2, 3, 4, 5, 6]), + (1720231319.611985, [], [1, 2, 3, 4, 5, 6]), + (1720271983.621164, [], [1, 2, 3, 4, 5, 6]), + (1720271985.710974, [], [1, 2, 3, 4, 5, 6]), + (1720316981.40392, [], [1, 2, 3, 
4, 5, 6]), + (1720317019.941522, [], [1, 2, 3, 4, 5, 6]), + (1720317022.040965, [], [1, 2, 3, 4, 5, 6]), + (1720411936.226228, [], [1, 2, 3, 4, 5, 6]), + (1720411963.208146, [], [1, 2, 3, 4, 5, 6]), + (1720479757.589657, [], [1, 2, 3, 4, 5, 6]), + (1720479839.302922, [], [1, 2, 3, 4, 5, 6]), + (1720582109.835415, [], [1, 2, 3, 4, 5, 6]), + (1720582111.914294, [], [1, 2, 3, 4, 5, 6]), + (1720652093.707438, [], [1, 2, 3, 4, 5, 6]), + (1720652211.598303, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720579951.356488, [], [1, 2, 3, 4, 5, 6])], + [(1720593973.655643, [], [1, 2, 3, 4, 5, 6])], + [ + (1720061475.003195, [], [1, 2, 3, 4, 5, 6]), + (1720270392.101123, [], [1, 2, 3, 4, 5, 6]), + (1720415797.057544, [], [1, 2, 3, 4, 5, 6]), + (1720574029.592383, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719987865.032004, [], [1, 2, 3, 4, 5, 6]), (1720212776.214811, [], [1, 2, 3, 4, 5, 6])], + [(1720315089.869542, [], [1, 2, 3, 4, 5, 6]), (1720578088.622431, [], [1, 2, 3, 4, 5, 6])], + [(1720575422.335555, [], [1, 2, 3, 4, 5, 6])], + [(1720329438.482756, [], [1, 2, 3, 4, 5, 6]), (1720443842.432414, [], [1, 2, 3, 4, 5, 6])], + [ + (1720135846.308239, [], [1, 2, 3, 4, 5, 6]), + (1720221161.535587, [], [1, 2, 3, 4, 5, 6]), + (1720326226.738859, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719977789.721113, [], [1, 2, 3, 4, 5, 6]), + (1719977899.031956, [], [1, 2, 3, 4, 5, 6]), + (1719977901.119465, [], [1, 2, 3, 4, 5, 6]), + (1719982587.985388, [], [1, 2, 3, 4, 5, 6]), + (1719982666.211377, [], [1, 2, 3, 4, 5, 6]), + (1719982668.29279, [], [1, 2, 3, 4, 5, 6]), + (1719982672.56956, [], [1, 2, 3, 4, 5, 6]), + (1720063592.708606, [], [1, 2, 3, 4, 5, 6]), + (1720063594.776009, [], [1, 2, 3, 4, 5, 6]), + (1720145103.906614, [], [1, 2, 3, 4, 5, 6]), + (1720145165.665926, [], [1, 2, 3, 4, 5, 6]), + (1720157026.459569, [], [1, 2, 3, 4, 5, 6]), + (1720223512.011646, [], [1, 2, 3, 4, 5, 6]), + (1720223586.453989, [], [1, 2, 3, 4, 5, 6]), + (1720223588.535794, [], [1, 2, 3, 4, 5, 6]), + (1720329405.565358, [], [1, 2, 3, 4, 5, 6]), + (1720398313.307695, [], [1, 2, 3, 4, 5, 6]), + (1720398429.724071, [], [1, 2, 3, 4, 5, 6]), + (1720414381.775047, [], [1, 2, 3, 4, 5, 6]), + (1720446240.471098, [], [1, 2, 3, 4, 5, 6]), + (1720481889.793923, [], [1, 2, 3, 4, 5, 6]), + (1720481891.93036, [], [1, 2, 3, 4, 5, 6]), + (1720489136.015971, [], [1, 2, 3, 4, 5, 6]), + (1720489247.728734, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720318482.752639, [], [1, 2, 3, 4, 5, 6])], + [ + (1720242162.48487, [], [1, 2, 3, 4, 5, 6]), + (1720503535.294123, [], [1, 2, 3, 4, 5, 6]), + (1720590538.582039, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720148407.104121, [], [1, 2, 3, 4, 5, 6])], + [(1720154168.367205, [], [1, 2, 3, 4, 5, 6]), (1720568213.544423, [], [1, 2, 3, 4, 5, 6])], + [ + (1720069001.717509, [], [1, 2, 3, 4, 5, 6]), + (1720346135.538471, [], [1, 2, 3, 4, 5, 6]), + (1720489854.284499, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719967260.706099, [], [1, 2, 3, 4, 5, 6]), + (1720082538.484733, [], [1, 2, 3, 4, 5, 6]), + (1720240732.567635, [], [1, 2, 3, 4, 5, 6]), + (1720395713.187024, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719968640.279026, [], [1, 2, 3, 4, 5, 6]), + (1720058387.048155, [], [1, 2, 3, 4, 5, 6]), + (1720240163.514327, [], [1, 2, 3, 4, 5, 6]), + (1720391336.792179, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720310285.653948, [], [1, 2, 3, 4, 5, 6]), (1720589147.207321, [], [1, 2, 3, 4, 5, 6])], + [(1719973140.021275, [], [1, 2, 3, 4, 5, 6]), (1720504055.006021, [], [1, 2, 3, 4, 5, 6])], + [(1720115792.85023, [], [1, 2, 3, 4, 5, 6])], + [ + (1720140689.444004, [], [1, 2, 
3, 4, 5, 6]), + (1720312169.980048, [], [1, 2, 3, 4, 5, 6]), + (1720399894.527727, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720316227.642169, [], [1, 2, 3, 4, 5, 6]), (1720484121.740556, [], [1, 2, 3, 4, 5, 6])], + [ + (1720150629.632571, [], [1, 2, 3, 4, 5, 6]), + (1720312593.72112, [], [1, 2, 3, 4, 5, 6]), + (1720584121.246833, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719966806.196729, [], [1, 2, 3, 4, 5, 6]), (1720492831.262792, [], [1, 2, 3, 4, 5, 6])], + [ + (1720069584.25825, [], [1, 2, 3, 4, 5, 6]), + (1720233172.76065, [], [1, 2, 3, 4, 5, 6]), + (1720317363.164219, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720227600.733956, [], [1, 2, 3, 4, 5, 6]), + (1720227600.784387, [], [1, 2, 3, 4, 5, 6]), + (1720227605.27419, [], [1, 2, 3, 4, 5, 6]), + (1720269710.791405, [], [1, 2, 3, 4, 5, 6]), + (1720269759.332462, [], [1, 2, 3, 4, 5, 6]), + (1720326344.424672, [], [1, 2, 3, 4, 5, 6]), + (1720369614.287387, [], [1, 2, 3, 4, 5, 6]), + (1720369719.27491, [], [1, 2, 3, 4, 5, 6]), + (1720369719.331218, [], [1, 2, 3, 4, 5, 6]), + (1720369721.899004, [], [1, 2, 3, 4, 5, 6]), + (1720385493.685201, [], [1, 2, 3, 4, 5, 6]), + (1720385551.219825, [], [1, 2, 3, 4, 5, 6]), + (1720385553.316418, [], [1, 2, 3, 4, 5, 6]), + (1720450115.39061, [], [1, 2, 3, 4, 5, 6]), + (1720450117.502598, [], [1, 2, 3, 4, 5, 6]), + (1720450118.78177, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719969800.978378, [], [1, 2, 3, 4, 5, 6]), + (1720222415.35262, [], [1, 2, 3, 4, 5, 6]), + (1720434706.74629, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720237451.24597, [], [1, 2, 3, 4, 5, 6]), (1720585495.150654, [], [1, 2, 3, 4, 5, 6])], + [(1719970937.04025, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983075.420902, [], [1, 2, 3, 4, 5, 6]), + (1720313367.078665, [], [1, 2, 3, 4, 5, 6]), + (1720413122.113225, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720155749.238687, [], [1, 2, 3, 4, 5, 6]), (1720491353.243799, [], [1, 2, 3, 4, 5, 6])], + [(1720060021.000595, [], [1, 2, 3, 4, 5, 6])], + [(1719988378.536367, [], [1, 2, 3, 4, 5, 6]), (1720228662.183092, [], [1, 2, 3, 4, 5, 6])], + [(1719981886.782157, [], [1, 2, 3, 4, 5, 6]), (1720156878.496962, [], [1, 2, 3, 4, 5, 6])], + [(1720582313.689559, [], [1, 2, 3, 4, 5, 6])], + [(1720232302.477057, [], [1, 2, 3, 4, 5, 6]), (1720493756.958556, [], [1, 2, 3, 4, 5, 6])], + [(1720295778.241704, [], [1, 2, 3, 4, 5, 6])], + [(1720021503.203052, [], [1, 2, 3, 4, 5, 6]), (1720325452.491454, [], [1, 2, 3, 4, 5, 6])], + [(1720527219.478404, [], [1, 2, 3, 4, 5, 6]), (1720567646.306507, [], [1, 2, 3, 4, 5, 6])], + [(1720223792.29193, [], [1, 2, 3, 4, 5, 6])], + [ + (1720006636.772706, [], [1, 2, 3, 4, 5, 6]), + (1720006795.60427, [], [1, 2, 3, 4, 5, 6]), + (1720006845.799981, [], [1, 2, 3, 4, 5, 6]), + (1720007022.741945, [], [1, 2, 3, 4, 5, 6]), + (1720007095.581047, [], [1, 2, 3, 4, 5, 6]), + (1720007134.850115, [], [1, 2, 3, 4, 5, 6]), + (1720025117.762503, [], [1, 2, 3, 4, 5, 6]), + (1720025308.512649, [], [1, 2, 3, 4, 5, 6]), + (1720025310.568037, [], [1, 2, 3, 4, 5, 6]), + (1720052547.163003, [], [1, 2, 3, 4, 5, 6]), + (1720052600.03312, [], [1, 2, 3, 4, 5, 6]), + (1720068046.902248, [], [1, 2, 3, 4, 5, 6]), + (1720068213.189912, [], [1, 2, 3, 4, 5, 6]), + (1720144711.311281, [], [1, 2, 3, 4, 5, 6]), + (1720144713.407177, [], [1, 2, 3, 4, 5, 6]), + (1720222638.332245, [], [1, 2, 3, 4, 5, 6]), + (1720222640.418838, [], [1, 2, 3, 4, 5, 6]), + (1720242141.813366, [], [1, 2, 3, 4, 5, 6]), + (1720242245.921587, [], [1, 2, 3, 4, 5, 6]), + (1720242248.011768, [], [1, 2, 3, 4, 5, 6]), + (1720333146.03005, [], [1, 2, 3, 4, 5, 6]), + 
(1720333287.562561, [], [1, 2, 3, 4, 5, 6]), + (1720333289.592652, [], [1, 2, 3, 4, 5, 6]), + (1720333292.319879, [], [1, 2, 3, 4, 5, 6]), + (1720333294.386109, [], [1, 2, 3, 4, 5, 6]), + (1720396984.211837, [], [1, 2, 3, 4, 5, 6]), + (1720397094.401782, [], [1, 2, 3, 4, 5, 6]), + (1720486134.144443, [], [1, 2, 3, 4, 5, 6]), + (1720486136.211044, [], [1, 2, 3, 4, 5, 6]), + (1720486140.873481, [], [1, 2, 3, 4, 5, 6]), + (1720486142.970428, [], [1, 2, 3, 4, 5, 6]), + (1720497754.706526, [], [1, 2, 3, 4, 5, 6]), + (1720497979.155047, [], [1, 2, 3, 4, 5, 6]), + (1720531991.462042, [], [1, 2, 3, 4, 5, 6]), + (1720532199.030662, [], [1, 2, 3, 4, 5, 6]), + (1720588796.771517, [], [1, 2, 3, 4, 5, 6]), + (1720588842.077879, [], [1, 2, 3, 4, 5, 6]), + (1720588844.116306, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720239926.764196, [], [1, 2, 3, 4, 5, 6])], + [(1720395045.1902, [], [1, 2, 3, 4, 5, 6])], + [(1720431147.297621, [], [1, 2, 3, 4, 5, 6])], + [(1720240748.713179, [], [1, 2, 3, 4, 5, 6])], + [(1719972432.742571, [], [1, 2, 3, 4, 5, 6])], + [(1720410198.607466, [], [1, 2, 3, 4, 5, 6]), (1720566548.549011, [], [1, 2, 3, 4, 5, 6])], + [(1720455428.865155, [], [1, 2, 3, 4, 5, 6])], + [(1720498325.755933, [], [1, 2, 3, 4, 5, 6])], + [ + (1719983684.033908, [], [1, 2, 3, 4, 5, 6]), + (1720319741.991515, [], [1, 2, 3, 4, 5, 6]), + (1720414800.645761, [], [1, 2, 3, 4, 5, 6]), + (1720484979.12583, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720134283.600008, [], [1, 2, 3, 4, 5, 6])], + [(1720409485.01654, [], [1, 2, 3, 4, 5, 6])], + [ + (1719981074.661088, [], [1, 2, 3, 4, 5, 6]), + (1720143880.41593, [], [1, 2, 3, 4, 5, 6]), + (1720229983.175788, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060903.203334, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968419.743912, [], [1, 2, 3, 4, 5, 6]), + (1719968490.438903, [], [1, 2, 3, 4, 5, 6]), + (1719968620.857174, [], [1, 2, 3, 4, 5, 6]), + (1720498112.351156, [], [1, 2, 3, 4, 5, 6]), + (1720498358.36836, [], [1, 2, 3, 4, 5, 6]), + (1720498468.250047, [], [1, 2, 3, 4, 5, 6]), + (1720574778.111823, [], [1, 2, 3, 4, 5, 6]), + (1720574806.5479, [], [1, 2, 3, 4, 5, 6]), + (1720574917.425735, [], [1, 2, 3, 4, 5, 6]), + (1720574933.603291, [], [1, 2, 3, 4, 5, 6]), + (1720575020.164914, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720406407.483318, [], [1, 2, 3, 4, 5, 6])], + [(1720412099.352018, [], [1, 2, 3, 4, 5, 6]), (1720498223.084881, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979024.598321, [], [1, 2, 3, 4, 5, 6]), + (1720114791.984992, [], [1, 2, 3, 4, 5, 6]), + (1720241390.157269, [], [1, 2, 3, 4, 5, 6]), + (1720500283.345509, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720569522.382331, [], [1, 2, 3, 4, 5, 6])], + [(1720146840.111051, [], [1, 2, 3, 4, 5, 6]), (1720287268.372643, [], [1, 2, 3, 4, 5, 6])], + [(1720583508.926048, [], [1, 2, 3, 4, 5, 6])], + [ + (1720229146.528014, [], [1, 2, 3, 4, 5, 6]), + (1720229340.131801, [], [1, 2, 3, 4, 5, 6]), + (1720229424.480475, [], [1, 2, 3, 4, 5, 6]), + (1720229565.859999, [], [1, 2, 3, 4, 5, 6]), + (1720229567.783491, [], [1, 2, 3, 4, 5, 6]), + (1720229693.297904, [], [1, 2, 3, 4, 5, 6]), + (1720229755.453165, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720098291.146186, [], [1, 2, 3, 4, 5, 6])], + [(1719983104.788269, [], [1, 2, 3, 4, 5, 6]), (1720070626.816099, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226820.995006, [], [1, 2, 3, 4, 5, 6]), + (1720316639.892049, [], [1, 2, 3, 4, 5, 6]), + (1720589368.875624, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720070877.576019, [], [1, 2, 3, 4, 5, 6]), (1720234703.959519, [], [1, 2, 3, 4, 5, 6])], + [ + (1719978870.060134, [], [1, 2, 3, 4, 5, 
6]), + (1720137971.413991, [], [1, 2, 3, 4, 5, 6]), + (1720491059.303159, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719995043.481796, [], [1, 2, 3, 4, 5, 6]), (1720148819.805573, [], [1, 2, 3, 4, 5, 6])], + [ + (1720052005.359573, [], [1, 2, 3, 4, 5, 6]), + (1720052127.565063, [], [1, 2, 3, 4, 5, 6]), + (1720052129.679258, [], [1, 2, 3, 4, 5, 6]), + (1720064915.795875, [], [1, 2, 3, 4, 5, 6]), + (1720064917.840723, [], [1, 2, 3, 4, 5, 6]), + (1720064919.250429, [], [1, 2, 3, 4, 5, 6]), + (1720140563.359858, [], [1, 2, 3, 4, 5, 6]), + (1720140686.221967, [], [1, 2, 3, 4, 5, 6]), + (1720147133.126896, [], [1, 2, 3, 4, 5, 6]), + (1720154606.237768, [], [1, 2, 3, 4, 5, 6]), + (1720208312.107821, [], [1, 2, 3, 4, 5, 6]), + (1720208397.77235, [], [1, 2, 3, 4, 5, 6]), + (1720208399.88578, [], [1, 2, 3, 4, 5, 6]), + (1720226692.740751, [], [1, 2, 3, 4, 5, 6]), + (1720226809.874422, [], [1, 2, 3, 4, 5, 6]), + (1720226811.929607, [], [1, 2, 3, 4, 5, 6]), + (1720320735.680282, [], [1, 2, 3, 4, 5, 6]), + (1720320737.781583, [], [1, 2, 3, 4, 5, 6]), + (1720394544.101953, [], [1, 2, 3, 4, 5, 6]), + (1720394546.228449, [], [1, 2, 3, 4, 5, 6]), + (1720411628.159882, [], [1, 2, 3, 4, 5, 6]), + (1720411765.678009, [], [1, 2, 3, 4, 5, 6]), + (1720411765.737071, [], [1, 2, 3, 4, 5, 6]), + (1720411771.063593, [], [1, 2, 3, 4, 5, 6]), + (1720493021.815332, [], [1, 2, 3, 4, 5, 6]), + (1720493023.89141, [], [1, 2, 3, 4, 5, 6]), + (1720547092.818141, [], [1, 2, 3, 4, 5, 6]), + (1720547133.337079, [], [1, 2, 3, 4, 5, 6]), + (1720566405.934125, [], [1, 2, 3, 4, 5, 6]), + (1720566407.979963, [], [1, 2, 3, 4, 5, 6]), + (1720592934.864349, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720407281.391395, [], [1, 2, 3, 4, 5, 6]), (1720578489.911262, [], [1, 2, 3, 4, 5, 6])], + [(1720393905.799101, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965431.440319, [], [1, 2, 3, 4, 5, 6]), + (1719965547.770505, [], [1, 2, 3, 4, 5, 6]), + (1719965549.880668, [], [1, 2, 3, 4, 5, 6]), + (1720010279.644796, [], [1, 2, 3, 4, 5, 6]), + (1720010481.117481, [], [1, 2, 3, 4, 5, 6]), + (1720060297.061777, [], [1, 2, 3, 4, 5, 6]), + (1720060299.106745, [], [1, 2, 3, 4, 5, 6]), + (1720072795.790373, [], [1, 2, 3, 4, 5, 6]), + (1720072933.175213, [], [1, 2, 3, 4, 5, 6]), + (1720138923.382269, [], [1, 2, 3, 4, 5, 6]), + (1720138952.892452, [], [1, 2, 3, 4, 5, 6]), + (1720138954.952138, [], [1, 2, 3, 4, 5, 6]), + (1720243737.055635, [], [1, 2, 3, 4, 5, 6]), + (1720243742.725476, [], [1, 2, 3, 4, 5, 6]), + (1720243744.812736, [], [1, 2, 3, 4, 5, 6]), + (1720278868.092914, [], [1, 2, 3, 4, 5, 6]), + (1720278981.120539, [], [1, 2, 3, 4, 5, 6]), + (1720278983.221413, [], [1, 2, 3, 4, 5, 6]), + (1720312851.319112, [], [1, 2, 3, 4, 5, 6]), + (1720312961.59678, [], [1, 2, 3, 4, 5, 6]), + (1720312963.701002, [], [1, 2, 3, 4, 5, 6]), + (1720401167.589016, [], [1, 2, 3, 4, 5, 6]), + (1720401192.232905, [], [1, 2, 3, 4, 5, 6]), + (1720488671.153932, [], [1, 2, 3, 4, 5, 6]), + (1720488673.262556, [], [1, 2, 3, 4, 5, 6]), + (1720498148.914747, [], [1, 2, 3, 4, 5, 6]), + (1720498151.01221, [], [1, 2, 3, 4, 5, 6]), + (1720585902.00157, [], [1, 2, 3, 4, 5, 6]), + (1720585904.068243, [], [1, 2, 3, 4, 5, 6]), + (1720627230.183177, [], [1, 2, 3, 4, 5, 6]), + (1720627251.343451, [], [1, 2, 3, 4, 5, 6]), + (1720627253.395817, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720049050.269443, [], [1, 2, 3, 4, 5, 6])], + [(1720406587.77676, [], [1, 2, 3, 4, 5, 6])], + [ + (1720201120.604139, [], [1, 2, 3, 4, 5, 6]), + (1720237348.670203, [], [1, 2, 3, 4, 5, 6]), + (1720503188.882528, [], [1, 2, 3, 4, 5, 
6]), + ], + [(1720137961.069782, [], [1, 2, 3, 4, 5, 6]), (1720233994.333193, [], [1, 2, 3, 4, 5, 6])], + [(1720148673.115174, [], [1, 2, 3, 4, 5, 6])], + [ + (1720017472.013793, [], [1, 2, 3, 4, 5, 6]), + (1720238395.438066, [], [1, 2, 3, 4, 5, 6]), + (1720481118.520931, [], [1, 2, 3, 4, 5, 6]), + (1720624077.141735, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720460985.277631, [], [1, 2, 3, 4, 5, 6])], + [(1720065960.10824, [], [1, 2, 3, 4, 5, 6]), (1720568092.250721, [], [1, 2, 3, 4, 5, 6])], + [(1719964803.220143, [], [1, 2, 3, 4, 5, 6]), (1720072690.78503, [], [1, 2, 3, 4, 5, 6])], + [(1719973118.028284, [], [1, 2, 3, 4, 5, 6])], + [(1720148963.270876, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055800.056897, [], [1, 2, 3, 4, 5, 6]), + (1720147428.656208, [], [1, 2, 3, 4, 5, 6]), + (1720237025.123131, [], [1, 2, 3, 4, 5, 6]), + (1720326902.440989, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719967443.988967, [], [1, 2, 3, 4, 5, 6]), + (1719967507.96168, [], [1, 2, 3, 4, 5, 6]), + (1719967510.01003, [], [1, 2, 3, 4, 5, 6]), + (1719967510.547419, [], [1, 2, 3, 4, 5, 6]), + (1719967512.630086, [], [1, 2, 3, 4, 5, 6]), + (1720138300.130825, [], [1, 2, 3, 4, 5, 6]), + (1720138300.228601, [], [1, 2, 3, 4, 5, 6]), + (1720138302.137449, [], [1, 2, 3, 4, 5, 6]), + (1720266243.760636, [], [1, 2, 3, 4, 5, 6]), + (1720266262.29505, [], [1, 2, 3, 4, 5, 6]), + (1720266262.382243, [], [1, 2, 3, 4, 5, 6]), + (1720266267.714044, [], [1, 2, 3, 4, 5, 6]), + (1720376066.44502, [], [1, 2, 3, 4, 5, 6]), + (1720376075.005446, [], [1, 2, 3, 4, 5, 6]), + (1720376075.055395, [], [1, 2, 3, 4, 5, 6]), + (1720376078.271297, [], [1, 2, 3, 4, 5, 6]), + (1720495615.317205, [], [1, 2, 3, 4, 5, 6]), + (1720495625.121167, [], [1, 2, 3, 4, 5, 6]), + (1720495627.190587, [], [1, 2, 3, 4, 5, 6]), + (1720495631.668389, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720094115.933922, [], [1, 2, 3, 4, 5, 6])], + [(1720327035.126978, [], [1, 2, 3, 4, 5, 6]), (1720652576.382878, [], [1, 2, 3, 4, 5, 6])], + [(1720494001.577927, [], [1, 2, 3, 4, 5, 6])], + [ + (1720102356.301353, [], [1, 2, 3, 4, 5, 6]), + (1720244955.2084, [], [1, 2, 3, 4, 5, 6]), + (1720393949.41044, [], [1, 2, 3, 4, 5, 6]), + (1720576986.579566, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720033048.609785, [], [1, 2, 3, 4, 5, 6])], + [(1720402968.773862, [], [1, 2, 3, 4, 5, 6])], + [(1720230211.716966, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059787.289306, [], [1, 2, 3, 4, 5, 6]), + (1720397378.208597, [], [1, 2, 3, 4, 5, 6]), + (1720481196.422422, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720003149.057462, [], [1, 2, 3, 4, 5, 6]), + (1720003372.883061, [], [1, 2, 3, 4, 5, 6]), + (1720003374.955179, [], [1, 2, 3, 4, 5, 6]), + (1720039541.861276, [], [1, 2, 3, 4, 5, 6]), + (1720039688.427965, [], [1, 2, 3, 4, 5, 6]), + (1720039690.485555, [], [1, 2, 3, 4, 5, 6]), + (1720048625.116329, [], [1, 2, 3, 4, 5, 6]), + (1720048725.117697, [], [1, 2, 3, 4, 5, 6]), + (1720141659.610639, [], [1, 2, 3, 4, 5, 6]), + (1720141661.665952, [], [1, 2, 3, 4, 5, 6]), + (1720196426.042225, [], [1, 2, 3, 4, 5, 6]), + (1720196487.09087, [], [1, 2, 3, 4, 5, 6]), + (1720196489.183893, [], [1, 2, 3, 4, 5, 6]), + (1720207066.952798, [], [1, 2, 3, 4, 5, 6]), + (1720207237.857105, [], [1, 2, 3, 4, 5, 6]), + (1720207239.919375, [], [1, 2, 3, 4, 5, 6]), + (1720271033.503072, [], [1, 2, 3, 4, 5, 6]), + (1720271035.587795, [], [1, 2, 3, 4, 5, 6]), + (1720583123.471438, [], [1, 2, 3, 4, 5, 6]), + (1720583125.576798, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720154856.626343, [], [1, 2, 3, 4, 5, 6]), + (1720226072.346309, [], [1, 2, 3, 4, 5, 6]), 
+ (1720310601.449016, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720242303.35601, [], [1, 2, 3, 4, 5, 6])], + [(1720078404.748142, [], [1, 2, 3, 4, 5, 6]), (1720147584.809447, [], [1, 2, 3, 4, 5, 6])], + [(1720178488.289574, [], [1, 2, 3, 4, 5, 6]), (1720306985.894457, [], [1, 2, 3, 4, 5, 6])], + [(1720146748.830901, [], [1, 2, 3, 4, 5, 6]), (1720406666.368212, [], [1, 2, 3, 4, 5, 6])], + [ + (1720023013.684634, [], [1, 2, 3, 4, 5, 6]), + (1720091577.184398, [], [1, 2, 3, 4, 5, 6]), + (1720415121.299085, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720056649.932043, [], [1, 2, 3, 4, 5, 6])], + [ + (1720323285.584787, [], [1, 2, 3, 4, 5, 6]), + (1720415154.592994, [], [1, 2, 3, 4, 5, 6]), + (1720437978.9498, [], [1, 2, 3, 4, 5, 6]), + (1720473849.744602, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719984649.0919, [], [1, 2, 3, 4, 5, 6]), + (1719984690.14033, [], [1, 2, 3, 4, 5, 6]), + (1720049900.611104, [], [1, 2, 3, 4, 5, 6]), + (1720049902.719326, [], [1, 2, 3, 4, 5, 6]), + (1720070202.827545, [], [1, 2, 3, 4, 5, 6]), + (1720070219.8164, [], [1, 2, 3, 4, 5, 6]), + (1720070221.892056, [], [1, 2, 3, 4, 5, 6]), + (1720137406.074377, [], [1, 2, 3, 4, 5, 6]), + (1720137495.69452, [], [1, 2, 3, 4, 5, 6]), + (1720137497.797134, [], [1, 2, 3, 4, 5, 6]), + (1720328661.139393, [], [1, 2, 3, 4, 5, 6]), + (1720328739.939669, [], [1, 2, 3, 4, 5, 6]), + (1720498901.295947, [], [1, 2, 3, 4, 5, 6]), + (1720570987.624349, [], [1, 2, 3, 4, 5, 6]), + (1720602646.235039, [], [1, 2, 3, 4, 5, 6]), + (1720602812.376711, [], [1, 2, 3, 4, 5, 6]), + (1720602814.446349, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974528.289353, [], [1, 2, 3, 4, 5, 6]), + (1720395723.591687, [], [1, 2, 3, 4, 5, 6]), + (1720617442.004095, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719975592.337919, [], [1, 2, 3, 4, 5, 6])], + [(1720057591.780745, [], [1, 2, 3, 4, 5, 6]), (1720488152.255523, [], [1, 2, 3, 4, 5, 6])], + [ + (1720148926.955422, [], [1, 2, 3, 4, 5, 6]), + (1720232410.538746, [], [1, 2, 3, 4, 5, 6]), + (1720408447.752538, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720242892.942203, [], [1, 2, 3, 4, 5, 6]), (1720322823.10199, [], [1, 2, 3, 4, 5, 6])], + [(1720400088.852099, [], [1, 2, 3, 4, 5, 6]), (1720652752.741385, [], [1, 2, 3, 4, 5, 6])], + [(1720412428.936544, [], [1, 2, 3, 4, 5, 6]), (1720574790.414977, [], [1, 2, 3, 4, 5, 6])], + [(1720069130.346249, [], [1, 2, 3, 4, 5, 6])], + [ + (1720157382.402748, [], [1, 2, 3, 4, 5, 6]), + (1720157476.064866, [], [1, 2, 3, 4, 5, 6]), + (1720157570.728611, [], [1, 2, 3, 4, 5, 6]), + (1720157662.381552, [], [1, 2, 3, 4, 5, 6]), + (1720168580.805279, [], [1, 2, 3, 4, 5, 6]), + (1720168760.793692, [], [1, 2, 3, 4, 5, 6]), + (1720230288.646191, [], [1, 2, 3, 4, 5, 6]), + (1720230430.103977, [], [1, 2, 3, 4, 5, 6]), + (1720243393.667459, [], [1, 2, 3, 4, 5, 6]), + (1720311604.919662, [], [1, 2, 3, 4, 5, 6]), + (1720311771.258364, [], [1, 2, 3, 4, 5, 6]), + (1720311773.310317, [], [1, 2, 3, 4, 5, 6]), + (1720324823.664232, [], [1, 2, 3, 4, 5, 6]), + (1720324825.721978, [], [1, 2, 3, 4, 5, 6]), + (1720393374.344255, [], [1, 2, 3, 4, 5, 6]), + (1720410680.226051, [], [1, 2, 3, 4, 5, 6]), + (1720410682.252163, [], [1, 2, 3, 4, 5, 6]), + (1720498394.961086, [], [1, 2, 3, 4, 5, 6]), + (1720498440.73496, [], [1, 2, 3, 4, 5, 6]), + (1720569716.948564, [], [1, 2, 3, 4, 5, 6]), + (1720569812.275586, [], [1, 2, 3, 4, 5, 6]), + (1720569814.333894, [], [1, 2, 3, 4, 5, 6]), + (1720569814.53133, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719978988.407662, [], [1, 2, 3, 4, 5, 6]), + (1719978990.511958, [], [1, 2, 3, 4, 5, 6]), + 
(1719984066.593696, [], [1, 2, 3, 4, 5, 6]), + (1719984323.803693, [], [1, 2, 3, 4, 5, 6]), + (1719984325.863773, [], [1, 2, 3, 4, 5, 6]), + (1720152078.595081, [], [1, 2, 3, 4, 5, 6]), + (1720225565.930941, [], [1, 2, 3, 4, 5, 6]), + (1720280378.036955, [], [1, 2, 3, 4, 5, 6]), + (1720326955.218979, [], [1, 2, 3, 4, 5, 6]), + (1720326957.292481, [], [1, 2, 3, 4, 5, 6]), + (1720331017.031137, [], [1, 2, 3, 4, 5, 6]), + (1720331019.121504, [], [1, 2, 3, 4, 5, 6]), + (1720410756.685202, [], [1, 2, 3, 4, 5, 6]), + (1720410758.796614, [], [1, 2, 3, 4, 5, 6]), + (1720417802.859919, [], [1, 2, 3, 4, 5, 6]), + (1720417804.989442, [], [1, 2, 3, 4, 5, 6]), + (1720487313.059882, [], [1, 2, 3, 4, 5, 6]), + (1720487498.533155, [], [1, 2, 3, 4, 5, 6]), + (1720487500.652063, [], [1, 2, 3, 4, 5, 6]), + (1720487503.708405, [], [1, 2, 3, 4, 5, 6]), + (1720487505.805861, [], [1, 2, 3, 4, 5, 6]), + (1720501546.266299, [], [1, 2, 3, 4, 5, 6]), + (1720501655.51812, [], [1, 2, 3, 4, 5, 6]), + (1720575741.153236, [], [1, 2, 3, 4, 5, 6]), + (1720575891.79104, [], [1, 2, 3, 4, 5, 6]), + (1720589520.786652, [], [1, 2, 3, 4, 5, 6]), + (1720589642.390304, [], [1, 2, 3, 4, 5, 6]), + (1720589644.452771, [], [1, 2, 3, 4, 5, 6]), + (1720615233.591986, [], [1, 2, 3, 4, 5, 6]), + (1720615337.890481, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719971542.018584, [], [1, 2, 3, 4, 5, 6]), + (1719971788.12398, [], [1, 2, 3, 4, 5, 6]), + (1720117268.913487, [], [1, 2, 3, 4, 5, 6]), + (1720117270.991206, [], [1, 2, 3, 4, 5, 6]), + (1720143198.612101, [], [1, 2, 3, 4, 5, 6]), + (1720143429.084839, [], [1, 2, 3, 4, 5, 6]), + (1720238625.308642, [], [1, 2, 3, 4, 5, 6]), + (1720238757.81922, [], [1, 2, 3, 4, 5, 6]), + (1720238759.894117, [], [1, 2, 3, 4, 5, 6]), + (1720330550.917977, [], [1, 2, 3, 4, 5, 6]), + (1720330626.459228, [], [1, 2, 3, 4, 5, 6]), + (1720378937.313156, [], [1, 2, 3, 4, 5, 6]), + (1720379038.375789, [], [1, 2, 3, 4, 5, 6]), + (1720386358.224787, [], [1, 2, 3, 4, 5, 6]), + (1720386360.275601, [], [1, 2, 3, 4, 5, 6]), + (1720416486.117358, [], [1, 2, 3, 4, 5, 6]), + (1720416608.109114, [], [1, 2, 3, 4, 5, 6]), + (1720493716.833205, [], [1, 2, 3, 4, 5, 6]), + (1720493844.641363, [], [1, 2, 3, 4, 5, 6]), + (1720493846.67691, [], [1, 2, 3, 4, 5, 6]), + (1720568118.486107, [], [1, 2, 3, 4, 5, 6]), + (1720568219.230995, [], [1, 2, 3, 4, 5, 6]), + (1720568221.334344, [], [1, 2, 3, 4, 5, 6]), + (1720574746.351324, [], [1, 2, 3, 4, 5, 6]), + (1720574815.297689, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720233195.120615, [], [1, 2, 3, 4, 5, 6]), + (1720393731.964556, [], [1, 2, 3, 4, 5, 6]), + (1720570257.699261, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720149655.238203, [], [1, 2, 3, 4, 5, 6]), + (1720352361.227124, [], [1, 2, 3, 4, 5, 6]), + (1720578697.147852, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720411277.985506, [], [1, 2, 3, 4, 5, 6]), (1720572981.673421, [], [1, 2, 3, 4, 5, 6])], + [(1720569584.93865, [], [1, 2, 3, 4, 5, 6])], + [(1720311303.894177, [], [1, 2, 3, 4, 5, 6])], + [(1720576463.87807, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982989.782732, [], [1, 2, 3, 4, 5, 6]), + (1720080708.007665, [], [1, 2, 3, 4, 5, 6]), + (1720234553.333259, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719983711.203136, [], [1, 2, 3, 4, 5, 6]), (1720395076.590109, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968905.802345, [], [1, 2, 3, 4, 5, 6]), + (1720054751.228152, [], [1, 2, 3, 4, 5, 6]), + (1720393228.571573, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720151965.57438, [], [1, 2, 3, 4, 5, 6]), + (1720265917.280767, [], [1, 2, 3, 4, 5, 6]), + (1720414597.498797, [], 
[1, 2, 3, 4, 5, 6]), + (1720569352.211054, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720139623.448558, [], [1, 2, 3, 4, 5, 6]), (1720413909.371495, [], [1, 2, 3, 4, 5, 6])], + [ + (1720332156.972433, [], [1, 2, 3, 4, 5, 6]), + (1720486770.808084, [], [1, 2, 3, 4, 5, 6]), + (1720570506.129092, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720046377.309817, [], [1, 2, 3, 4, 5, 6]), (1720144405.103977, [], [1, 2, 3, 4, 5, 6])], + [(1720311749.460887, [], [1, 2, 3, 4, 5, 6]), (1720480404.801297, [], [1, 2, 3, 4, 5, 6])], + [(1719965504.779832, [], [1, 2, 3, 4, 5, 6])], + [(1720489219.425884, [], [1, 2, 3, 4, 5, 6])], + [(1720480774.306063, [], [1, 2, 3, 4, 5, 6])], + [(1719968486.759348, [], [1, 2, 3, 4, 5, 6]), (1720229505.650825, [], [1, 2, 3, 4, 5, 6])], + [(1720122526.844622, [], [1, 2, 3, 4, 5, 6]), (1720142320.524851, [], [1, 2, 3, 4, 5, 6])], + [(1720320995.267452, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984296.990609, [], [1, 2, 3, 4, 5, 6]), + (1719984350.22369, [], [1, 2, 3, 4, 5, 6]), + (1719984352.257572, [], [1, 2, 3, 4, 5, 6]), + (1720052997.807277, [], [1, 2, 3, 4, 5, 6]), + (1720053049.304507, [], [1, 2, 3, 4, 5, 6]), + (1720053051.378602, [], [1, 2, 3, 4, 5, 6]), + (1720150389.546489, [], [1, 2, 3, 4, 5, 6]), + (1720150391.660121, [], [1, 2, 3, 4, 5, 6]), + (1720168392.833864, [], [1, 2, 3, 4, 5, 6]), + (1720168394.931787, [], [1, 2, 3, 4, 5, 6]), + (1720218901.13549, [], [1, 2, 3, 4, 5, 6]), + (1720218984.364651, [], [1, 2, 3, 4, 5, 6]), + (1720218986.444335, [], [1, 2, 3, 4, 5, 6]), + (1720236871.413173, [], [1, 2, 3, 4, 5, 6]), + (1720236873.52, [], [1, 2, 3, 4, 5, 6]), + (1720318899.245869, [], [1, 2, 3, 4, 5, 6]), + (1720319077.080816, [], [1, 2, 3, 4, 5, 6]), + (1720319079.181328, [], [1, 2, 3, 4, 5, 6]), + (1720421585.966107, [], [1, 2, 3, 4, 5, 6]), + (1720421692.177002, [], [1, 2, 3, 4, 5, 6]), + (1720421694.269891, [], [1, 2, 3, 4, 5, 6]), + (1720484559.101295, [], [1, 2, 3, 4, 5, 6]), + (1720484634.686657, [], [1, 2, 3, 4, 5, 6]), + (1720484636.791229, [], [1, 2, 3, 4, 5, 6]), + (1720484640.876498, [], [1, 2, 3, 4, 5, 6]), + (1720484642.914839, [], [1, 2, 3, 4, 5, 6]), + (1720568978.955929, [], [1, 2, 3, 4, 5, 6]), + (1720569083.551067, [], [1, 2, 3, 4, 5, 6]), + (1720603472.634189, [], [1, 2, 3, 4, 5, 6]), + (1720603622.57534, [], [1, 2, 3, 4, 5, 6]), + (1720603624.69381, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720204831.715905, [], [1, 2, 3, 4, 5, 6])], + [(1719985836.773241, [], [1, 2, 3, 4, 5, 6]), (1720227063.151216, [], [1, 2, 3, 4, 5, 6])], + [(1720226998.434162, [], [1, 2, 3, 4, 5, 6])], + [(1720413326.470543, [], [1, 2, 3, 4, 5, 6])], + [(1720484747.542658, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975290.47648, [], [1, 2, 3, 4, 5, 6]), + (1719975421.539502, [], [1, 2, 3, 4, 5, 6]), + (1719975423.609947, [], [1, 2, 3, 4, 5, 6]), + (1719975424.130571, [], [1, 2, 3, 4, 5, 6]), + (1719975426.224942, [], [1, 2, 3, 4, 5, 6]), + (1720024823.782769, [], [1, 2, 3, 4, 5, 6]), + (1720024943.32157, [], [1, 2, 3, 4, 5, 6]), + (1720024945.350078, [], [1, 2, 3, 4, 5, 6]), + (1720024945.905366, [], [1, 2, 3, 4, 5, 6]), + (1720056619.663053, [], [1, 2, 3, 4, 5, 6]), + (1720056822.318399, [], [1, 2, 3, 4, 5, 6]), + (1720056824.36183, [], [1, 2, 3, 4, 5, 6]), + (1720072281.168332, [], [1, 2, 3, 4, 5, 6]), + (1720072283.222603, [], [1, 2, 3, 4, 5, 6]), + (1720134154.949613, [], [1, 2, 3, 4, 5, 6]), + (1720134376.323715, [], [1, 2, 3, 4, 5, 6]), + (1720134378.416906, [], [1, 2, 3, 4, 5, 6]), + (1720140112.874786, [], [1, 2, 3, 4, 5, 6]), + (1720140131.322854, [], [1, 2, 3, 4, 5, 6]), + (1720140133.38169, 
[], [1, 2, 3, 4, 5, 6]), + (1720238635.597737, [], [1, 2, 3, 4, 5, 6]), + (1720238637.672121, [], [1, 2, 3, 4, 5, 6]), + (1720418306.625113, [], [1, 2, 3, 4, 5, 6]), + (1720418333.5673, [], [1, 2, 3, 4, 5, 6]), + (1720487528.439985, [], [1, 2, 3, 4, 5, 6]), + (1720487546.337876, [], [1, 2, 3, 4, 5, 6]), + (1720487548.449392, [], [1, 2, 3, 4, 5, 6]), + (1720502509.125496, [], [1, 2, 3, 4, 5, 6]), + (1720502624.411704, [], [1, 2, 3, 4, 5, 6]), + (1720585053.028856, [], [1, 2, 3, 4, 5, 6]), + (1720585055.08891, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719982052.592392, [], [1, 2, 3, 4, 5, 6]), + (1719982258.203523, [], [1, 2, 3, 4, 5, 6]), + (1719985183.079021, [], [1, 2, 3, 4, 5, 6]), + (1719985305.780432, [], [1, 2, 3, 4, 5, 6]), + (1720055777.47168, [], [1, 2, 3, 4, 5, 6]), + (1720071284.174477, [], [1, 2, 3, 4, 5, 6]), + (1720152558.44266, [], [1, 2, 3, 4, 5, 6]), + (1720152671.333552, [], [1, 2, 3, 4, 5, 6]), + (1720152673.415669, [], [1, 2, 3, 4, 5, 6]), + (1720182134.94743, [], [1, 2, 3, 4, 5, 6]), + (1720182137.028427, [], [1, 2, 3, 4, 5, 6]), + (1720182138.842693, [], [1, 2, 3, 4, 5, 6]), + (1720182140.958073, [], [1, 2, 3, 4, 5, 6]), + (1720227575.333539, [], [1, 2, 3, 4, 5, 6]), + (1720227736.260264, [], [1, 2, 3, 4, 5, 6]), + (1720227738.300477, [], [1, 2, 3, 4, 5, 6]), + (1720236068.538351, [], [1, 2, 3, 4, 5, 6]), + (1720236070.60483, [], [1, 2, 3, 4, 5, 6]), + (1720317757.071545, [], [1, 2, 3, 4, 5, 6]), + (1720317866.130343, [], [1, 2, 3, 4, 5, 6]), + (1720317868.22788, [], [1, 2, 3, 4, 5, 6]), + (1720330462.980036, [], [1, 2, 3, 4, 5, 6]), + (1720330707.655663, [], [1, 2, 3, 4, 5, 6]), + (1720330709.750072, [], [1, 2, 3, 4, 5, 6]), + (1720373940.747523, [], [1, 2, 3, 4, 5, 6]), + (1720374027.697475, [], [1, 2, 3, 4, 5, 6]), + (1720374029.799405, [], [1, 2, 3, 4, 5, 6]), + (1720406670.90306, [], [1, 2, 3, 4, 5, 6]), + (1720406844.478936, [], [1, 2, 3, 4, 5, 6]), + (1720406846.592556, [], [1, 2, 3, 4, 5, 6]), + (1720406851.281701, [], [1, 2, 3, 4, 5, 6]), + (1720412576.66958, [], [1, 2, 3, 4, 5, 6]), + (1720412652.412012, [], [1, 2, 3, 4, 5, 6]), + (1720412654.533239, [], [1, 2, 3, 4, 5, 6]), + (1720486184.910769, [], [1, 2, 3, 4, 5, 6]), + (1720571066.49819, [], [1, 2, 3, 4, 5, 6]), + (1720626129.788949, [], [1, 2, 3, 4, 5, 6]), + (1720626261.332132, [], [1, 2, 3, 4, 5, 6]), + (1720626263.446226, [], [1, 2, 3, 4, 5, 6]), + (1720626267.27108, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719972691.157799, [], [1, 2, 3, 4, 5, 6]), + (1719972822.458675, [], [1, 2, 3, 4, 5, 6]), + (1719972824.589752, [], [1, 2, 3, 4, 5, 6]), + (1719993329.829737, [], [1, 2, 3, 4, 5, 6]), + (1720053155.834123, [], [1, 2, 3, 4, 5, 6]), + (1720053293.330528, [], [1, 2, 3, 4, 5, 6]), + (1720084358.281667, [], [1, 2, 3, 4, 5, 6]), + (1720084522.554824, [], [1, 2, 3, 4, 5, 6]), + (1720084524.658959, [], [1, 2, 3, 4, 5, 6]), + (1720153146.842639, [], [1, 2, 3, 4, 5, 6]), + (1720153159.620793, [], [1, 2, 3, 4, 5, 6]), + (1720223041.359927, [], [1, 2, 3, 4, 5, 6]), + (1720223107.178767, [], [1, 2, 3, 4, 5, 6]), + (1720223109.301943, [], [1, 2, 3, 4, 5, 6]), + (1720243608.145196, [], [1, 2, 3, 4, 5, 6]), + (1720243715.813915, [], [1, 2, 3, 4, 5, 6]), + (1720310055.295457, [], [1, 2, 3, 4, 5, 6]), + (1720310225.190394, [], [1, 2, 3, 4, 5, 6]), + (1720310227.250668, [], [1, 2, 3, 4, 5, 6]), + (1720374255.295948, [], [1, 2, 3, 4, 5, 6]), + (1720374257.390432, [], [1, 2, 3, 4, 5, 6]), + (1720397033.790744, [], [1, 2, 3, 4, 5, 6]), + (1720397192.93351, [], [1, 2, 3, 4, 5, 6]), + (1720489562.012912, [], [1, 2, 3, 4, 5, 6]), 
+ (1720489620.124167, [], [1, 2, 3, 4, 5, 6]), + (1720489622.2461, [], [1, 2, 3, 4, 5, 6]), + (1720577615.944083, [], [1, 2, 3, 4, 5, 6]), + (1720595908.263871, [], [1, 2, 3, 4, 5, 6]), + (1720596022.795818, [], [1, 2, 3, 4, 5, 6]), + (1720596024.909409, [], [1, 2, 3, 4, 5, 6]), + (1720596025.112291, [], [1, 2, 3, 4, 5, 6]), + (1720596027.181848, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720410184.878056, [], [1, 2, 3, 4, 5, 6])], + [(1720446097.457008, [], [1, 2, 3, 4, 5, 6]), (1720582142.651169, [], [1, 2, 3, 4, 5, 6])], + [(1720496385.651227, [], [1, 2, 3, 4, 5, 6]), (1720588018.159792, [], [1, 2, 3, 4, 5, 6])], + [(1719975737.968693, [], [1, 2, 3, 4, 5, 6]), (1720069758.403541, [], [1, 2, 3, 4, 5, 6])], + [(1720573220.196789, [], [1, 2, 3, 4, 5, 6])], + [(1720435170.4772, [], [1, 2, 3, 4, 5, 6])], + [(1720156838.062846, [], [1, 2, 3, 4, 5, 6]), (1720588244.606338, [], [1, 2, 3, 4, 5, 6])], + [(1720072706.921977, [], [1, 2, 3, 4, 5, 6]), (1720588899.722119, [], [1, 2, 3, 4, 5, 6])], + [(1720058343.607628, [], [1, 2, 3, 4, 5, 6])], + [(1720314365.315505, [], [1, 2, 3, 4, 5, 6])], + [(1720502417.751936, [], [1, 2, 3, 4, 5, 6])], + [(1720302708.367359, [], [1, 2, 3, 4, 5, 6]), (1720585704.559633, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984540.656777, [], [1, 2, 3, 4, 5, 6]), + (1720146973.8651, [], [1, 2, 3, 4, 5, 6]), + (1720341798.159409, [], [1, 2, 3, 4, 5, 6]), + (1720482149.273983, [], [1, 2, 3, 4, 5, 6]), + (1720570969.604085, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720311167.154928, [], [1, 2, 3, 4, 5, 6]), (1720404884.472902, [], [1, 2, 3, 4, 5, 6])], + [(1720481370.176729, [], [1, 2, 3, 4, 5, 6])], + [(1720098969.778426, [], [1, 2, 3, 4, 5, 6]), (1720542708.023885, [], [1, 2, 3, 4, 5, 6])], + [ + (1720149583.597081, [], [1, 2, 3, 4, 5, 6]), + (1720314574.406545, [], [1, 2, 3, 4, 5, 6]), + (1720416038.659142, [], [1, 2, 3, 4, 5, 6]), + (1720572347.697131, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720033450.660601, [], [1, 2, 3, 4, 5, 6]), + (1720033450.752555, [], [1, 2, 3, 4, 5, 6]), + (1720033455.770905, [], [1, 2, 3, 4, 5, 6]), + (1720400227.89913, [], [1, 2, 3, 4, 5, 6]), + (1720400250.299243, [], [1, 2, 3, 4, 5, 6]), + (1720400252.394995, [], [1, 2, 3, 4, 5, 6]), + (1720417432.186774, [], [1, 2, 3, 4, 5, 6]), + (1720417504.33498, [], [1, 2, 3, 4, 5, 6]), + (1720417506.39095, [], [1, 2, 3, 4, 5, 6]), + (1720417509.515927, [], [1, 2, 3, 4, 5, 6]), + (1720417511.647347, [], [1, 2, 3, 4, 5, 6]), + (1720417512.311827, [], [1, 2, 3, 4, 5, 6]), + (1720512787.544525, [], [1, 2, 3, 4, 5, 6]), + (1720512787.637452, [], [1, 2, 3, 4, 5, 6]), + (1720512790.509437, [], [1, 2, 3, 4, 5, 6]), + (1720546384.085434, [], [1, 2, 3, 4, 5, 6]), + (1720546402.635026, [], [1, 2, 3, 4, 5, 6]), + (1720546404.721606, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720091612.414745, [], [1, 2, 3, 4, 5, 6]), (1720394180.692823, [], [1, 2, 3, 4, 5, 6])], + [(1720499565.692616, [], [1, 2, 3, 4, 5, 6])], + [ + (1720050050.500015, [], [1, 2, 3, 4, 5, 6]), + (1720050118.092842, [], [1, 2, 3, 4, 5, 6]), + (1720050225.108458, [], [1, 2, 3, 4, 5, 6]), + (1720065727.65261, [], [1, 2, 3, 4, 5, 6]), + (1720065830.735126, [], [1, 2, 3, 4, 5, 6]), + (1720065888.617825, [], [1, 2, 3, 4, 5, 6]), + (1720065986.346168, [], [1, 2, 3, 4, 5, 6]), + (1720066099.675624, [], [1, 2, 3, 4, 5, 6]), + (1720066274.481661, [], [1, 2, 3, 4, 5, 6]), + (1720066349.195281, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720151340.048882, [], [1, 2, 3, 4, 5, 6]), + (1720314849.375028, [], [1, 2, 3, 4, 5, 6]), + (1720413869.9313, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720054419.118301, 
[], [1, 2, 3, 4, 5, 6]), (1720399142.008777, [], [1, 2, 3, 4, 5, 6])], + [ + (1720070403.925696, [], [1, 2, 3, 4, 5, 6]), + (1720244978.0606, [], [1, 2, 3, 4, 5, 6]), + (1720588083.280232, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720142944.669634, [], [1, 2, 3, 4, 5, 6]), + (1720317829.682224, [], [1, 2, 3, 4, 5, 6]), + (1720402172.873968, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720135813.27587, [], [1, 2, 3, 4, 5, 6]), + (1720344219.879026, [], [1, 2, 3, 4, 5, 6]), + (1720587780.127476, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720307062.497702, [], [1, 2, 3, 4, 5, 6])], + [(1720145491.250495, [], [1, 2, 3, 4, 5, 6]), (1720586242.178403, [], [1, 2, 3, 4, 5, 6])], + [(1720586566.015615, [], [1, 2, 3, 4, 5, 6])], + [(1720409077.369118, [], [1, 2, 3, 4, 5, 6]), (1720569722.833936, [], [1, 2, 3, 4, 5, 6])], + [(1720098442.029928, [], [1, 2, 3, 4, 5, 6]), (1720391796.053942, [], [1, 2, 3, 4, 5, 6])], + [(1720243240.863801, [], [1, 2, 3, 4, 5, 6]), (1720492060.909226, [], [1, 2, 3, 4, 5, 6])], + [ + (1719967064.859664, [], [1, 2, 3, 4, 5, 6]), + (1720055971.205432, [], [1, 2, 3, 4, 5, 6]), + (1720056017.075129, [], [1, 2, 3, 4, 5, 6]), + (1720069354.406111, [], [1, 2, 3, 4, 5, 6]), + (1720069356.466395, [], [1, 2, 3, 4, 5, 6]), + (1720153990.829373, [], [1, 2, 3, 4, 5, 6]), + (1720154034.130574, [], [1, 2, 3, 4, 5, 6]), + (1720232518.365492, [], [1, 2, 3, 4, 5, 6]), + (1720232668.084829, [], [1, 2, 3, 4, 5, 6]), + (1720232670.147567, [], [1, 2, 3, 4, 5, 6]), + (1720247796.598987, [], [1, 2, 3, 4, 5, 6]), + (1720310742.132713, [], [1, 2, 3, 4, 5, 6]), + (1720310784.012123, [], [1, 2, 3, 4, 5, 6]), + (1720320091.334971, [], [1, 2, 3, 4, 5, 6]), + (1720357505.367765, [], [1, 2, 3, 4, 5, 6]), + (1720357507.406388, [], [1, 2, 3, 4, 5, 6]), + (1720404625.988586, [], [1, 2, 3, 4, 5, 6]), + (1720404726.50447, [], [1, 2, 3, 4, 5, 6]), + (1720404728.609789, [], [1, 2, 3, 4, 5, 6]), + (1720417440.696768, [], [1, 2, 3, 4, 5, 6]), + (1720460381.831877, [], [1, 2, 3, 4, 5, 6]), + (1720460489.831088, [], [1, 2, 3, 4, 5, 6]), + (1720492881.459734, [], [1, 2, 3, 4, 5, 6]), + (1720492883.570789, [], [1, 2, 3, 4, 5, 6]), + (1720580680.591028, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719975556.382414, [], [1, 2, 3, 4, 5, 6]), (1720231475.932226, [], [1, 2, 3, 4, 5, 6])], + [(1720569569.754826, [], [1, 2, 3, 4, 5, 6])], + [(1720043952.413223, [], [1, 2, 3, 4, 5, 6]), (1720225500.222696, [], [1, 2, 3, 4, 5, 6])], + [ + (1719967819.052883, [], [1, 2, 3, 4, 5, 6]), + (1720234292.697748, [], [1, 2, 3, 4, 5, 6]), + (1720397113.348799, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720087477.672876, [], [1, 2, 3, 4, 5, 6]), (1720438489.760021, [], [1, 2, 3, 4, 5, 6])], + [(1720577383.739689, [], [1, 2, 3, 4, 5, 6])], + [ + (1720047896.111507, [], [1, 2, 3, 4, 5, 6]), + (1720200244.93862, [], [1, 2, 3, 4, 5, 6]), + (1720464543.942733, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719970307.394382, [], [1, 2, 3, 4, 5, 6])], + [(1719982131.954439, [], [1, 2, 3, 4, 5, 6]), (1720238111.874625, [], [1, 2, 3, 4, 5, 6])], + [(1719966189.201726, [], [1, 2, 3, 4, 5, 6]), (1720150700.452652, [], [1, 2, 3, 4, 5, 6])], + [(1720142072.057736, [], [1, 2, 3, 4, 5, 6])], + [(1720063956.632264, [], [1, 2, 3, 4, 5, 6])], + [(1720057020.243682, [], [1, 2, 3, 4, 5, 6])], + [ + (1719973656.343743, [], [1, 2, 3, 4, 5, 6]), + (1719973767.615562, [], [1, 2, 3, 4, 5, 6]), + (1719973769.702368, [], [1, 2, 3, 4, 5, 6]), + (1720050422.073716, [], [1, 2, 3, 4, 5, 6]), + (1720060932.515015, [], [1, 2, 3, 4, 5, 6]), + (1720061076.268193, [], [1, 2, 3, 4, 5, 6]), + (1720061078.375626, [], [1, 
2, 3, 4, 5, 6]), + (1720061082.695369, [], [1, 2, 3, 4, 5, 6]), + (1720061084.803345, [], [1, 2, 3, 4, 5, 6]), + (1720061088.695247, [], [1, 2, 3, 4, 5, 6]), + (1720061090.795592, [], [1, 2, 3, 4, 5, 6]), + (1720135057.814031, [], [1, 2, 3, 4, 5, 6]), + (1720135295.655111, [], [1, 2, 3, 4, 5, 6]), + (1720135297.734383, [], [1, 2, 3, 4, 5, 6]), + (1720156608.706907, [], [1, 2, 3, 4, 5, 6]), + (1720156610.771323, [], [1, 2, 3, 4, 5, 6]), + (1720208160.885537, [], [1, 2, 3, 4, 5, 6]), + (1720208274.821579, [], [1, 2, 3, 4, 5, 6]), + (1720208276.929569, [], [1, 2, 3, 4, 5, 6]), + (1720223270.224257, [], [1, 2, 3, 4, 5, 6]), + (1720223272.316827, [], [1, 2, 3, 4, 5, 6]), + (1720223275.15326, [], [1, 2, 3, 4, 5, 6]), + (1720261350.082829, [], [1, 2, 3, 4, 5, 6]), + (1720261423.329391, [], [1, 2, 3, 4, 5, 6]), + (1720261425.427693, [], [1, 2, 3, 4, 5, 6]), + (1720319448.712298, [], [1, 2, 3, 4, 5, 6]), + (1720319512.283877, [], [1, 2, 3, 4, 5, 6]), + (1720319514.384024, [], [1, 2, 3, 4, 5, 6]), + (1720397163.860459, [], [1, 2, 3, 4, 5, 6]), + (1720397201.824506, [], [1, 2, 3, 4, 5, 6]), + (1720397203.898302, [], [1, 2, 3, 4, 5, 6]), + (1720487465.098454, [], [1, 2, 3, 4, 5, 6]), + (1720487616.241062, [], [1, 2, 3, 4, 5, 6]), + (1720576783.799559, [], [1, 2, 3, 4, 5, 6]), + (1720576837.625767, [], [1, 2, 3, 4, 5, 6]), + (1720576839.747181, [], [1, 2, 3, 4, 5, 6]), + (1720621584.709553, [], [1, 2, 3, 4, 5, 6]), + (1720621686.14789, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720210871.32264, [], [1, 2, 3, 4, 5, 6]), (1720324564.785043, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976074.774068, [], [1, 2, 3, 4, 5, 6]), + (1720432422.057214, [], [1, 2, 3, 4, 5, 6]), + (1720498059.505822, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720537307.322626, [], [1, 2, 3, 4, 5, 6])], + [(1720051279.943281, [], [1, 2, 3, 4, 5, 6]), (1720326635.291429, [], [1, 2, 3, 4, 5, 6])], + [ + (1720317156.788498, [], [1, 2, 3, 4, 5, 6]), + (1720409687.362687, [], [1, 2, 3, 4, 5, 6]), + (1720499324.086042, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720058380.956986, [], [1, 2, 3, 4, 5, 6]), (1720150021.258812, [], [1, 2, 3, 4, 5, 6])], + [ + (1720053489.906165, [], [1, 2, 3, 4, 5, 6]), + (1720139673.741326, [], [1, 2, 3, 4, 5, 6]), + (1720257769.454421, [], [1, 2, 3, 4, 5, 6]), + (1720393246.483443, [], [1, 2, 3, 4, 5, 6]), + (1720653126.121555, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719980029.84453, [], [1, 2, 3, 4, 5, 6])], + [(1720579706.660021, [], [1, 2, 3, 4, 5, 6])], + [(1720044200.360238, [], [1, 2, 3, 4, 5, 6])], + [(1720081853.12837, [], [1, 2, 3, 4, 5, 6])], + [(1720052881.805602, [], [1, 2, 3, 4, 5, 6])], + [ + (1720072654.6007, [], [1, 2, 3, 4, 5, 6]), + (1720238922.414211, [], [1, 2, 3, 4, 5, 6]), + (1720410048.118631, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720307869.769519, [], [1, 2, 3, 4, 5, 6])], + [ + (1720016591.216479, [], [1, 2, 3, 4, 5, 6]), + (1720157097.134758, [], [1, 2, 3, 4, 5, 6]), + (1720238731.063819, [], [1, 2, 3, 4, 5, 6]), + (1720575486.387284, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965711.424761, [], [1, 2, 3, 4, 5, 6]), + (1719965713.531779, [], [1, 2, 3, 4, 5, 6]), + (1720049361.440421, [], [1, 2, 3, 4, 5, 6]), + (1720049363.495467, [], [1, 2, 3, 4, 5, 6]), + (1720070584.34326, [], [1, 2, 3, 4, 5, 6]), + (1720070586.439897, [], [1, 2, 3, 4, 5, 6]), + (1720070588.168108, [], [1, 2, 3, 4, 5, 6]), + (1720154076.359366, [], [1, 2, 3, 4, 5, 6]), + (1720154271.555582, [], [1, 2, 3, 4, 5, 6]), + (1720221139.621509, [], [1, 2, 3, 4, 5, 6]), + (1720221264.378154, [], [1, 2, 3, 4, 5, 6]), + (1720221266.479342, [], [1, 2, 3, 4, 5, 6]), 
+ (1720239415.452192, [], [1, 2, 3, 4, 5, 6]), + (1720239475.014596, [], [1, 2, 3, 4, 5, 6]), + (1720239477.07742, [], [1, 2, 3, 4, 5, 6]), + (1720313035.217622, [], [1, 2, 3, 4, 5, 6]), + (1720313041.373706, [], [1, 2, 3, 4, 5, 6]), + (1720313043.420222, [], [1, 2, 3, 4, 5, 6]), + (1720406631.8599, [], [1, 2, 3, 4, 5, 6]), + (1720406659.023715, [], [1, 2, 3, 4, 5, 6]), + (1720484615.165994, [], [1, 2, 3, 4, 5, 6]), + (1720484638.913162, [], [1, 2, 3, 4, 5, 6]), + (1720497880.450011, [], [1, 2, 3, 4, 5, 6]), + (1720497934.842426, [], [1, 2, 3, 4, 5, 6]), + (1720497936.912581, [], [1, 2, 3, 4, 5, 6]), + (1720540604.563371, [], [1, 2, 3, 4, 5, 6]), + (1720540779.42356, [], [1, 2, 3, 4, 5, 6]), + (1720540781.553641, [], [1, 2, 3, 4, 5, 6]), + (1720570083.468668, [], [1, 2, 3, 4, 5, 6]), + (1720570174.103962, [], [1, 2, 3, 4, 5, 6]), + (1720570176.16906, [], [1, 2, 3, 4, 5, 6]), + (1720583667.401678, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719984388.470208, [], [1, 2, 3, 4, 5, 6]), + (1720057777.644161, [], [1, 2, 3, 4, 5, 6]), + (1720057953.012529, [], [1, 2, 3, 4, 5, 6]), + (1720057955.119335, [], [1, 2, 3, 4, 5, 6]), + (1720148795.685708, [], [1, 2, 3, 4, 5, 6]), + (1720148805.802813, [], [1, 2, 3, 4, 5, 6]), + (1720148807.834184, [], [1, 2, 3, 4, 5, 6]), + (1720234649.035149, [], [1, 2, 3, 4, 5, 6]), + (1720234705.911474, [], [1, 2, 3, 4, 5, 6]), + (1720321956.32096, [], [1, 2, 3, 4, 5, 6]), + (1720321958.41442, [], [1, 2, 3, 4, 5, 6]), + (1720409662.220157, [], [1, 2, 3, 4, 5, 6]), + (1720409664.333692, [], [1, 2, 3, 4, 5, 6]), + (1720447795.315077, [], [1, 2, 3, 4, 5, 6]), + (1720447797.391813, [], [1, 2, 3, 4, 5, 6]), + (1720483016.135213, [], [1, 2, 3, 4, 5, 6]), + (1720483018.21033, [], [1, 2, 3, 4, 5, 6]), + (1720483019.948558, [], [1, 2, 3, 4, 5, 6]), + (1720573042.040836, [], [1, 2, 3, 4, 5, 6]), + (1720573166.473551, [], [1, 2, 3, 4, 5, 6]), + (1720624631.359534, [], [1, 2, 3, 4, 5, 6]), + (1720624673.478312, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720072684.014623, [], [1, 2, 3, 4, 5, 6]), + (1720308176.896274, [], [1, 2, 3, 4, 5, 6]), + (1720404626.250723, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719970599.437881, [], [1, 2, 3, 4, 5, 6]), + (1720066959.831691, [], [1, 2, 3, 4, 5, 6]), + (1720067155.392921, [], [1, 2, 3, 4, 5, 6]), + (1720067157.488109, [], [1, 2, 3, 4, 5, 6]), + (1720155932.952678, [], [1, 2, 3, 4, 5, 6]), + (1720156052.447154, [], [1, 2, 3, 4, 5, 6]), + (1720156054.565087, [], [1, 2, 3, 4, 5, 6]), + (1720176991.157569, [], [1, 2, 3, 4, 5, 6]), + (1720177007.156433, [], [1, 2, 3, 4, 5, 6]), + (1720197632.204363, [], [1, 2, 3, 4, 5, 6]), + (1720197634.246364, [], [1, 2, 3, 4, 5, 6]), + (1720245139.96838, [], [1, 2, 3, 4, 5, 6]), + (1720245142.060531, [], [1, 2, 3, 4, 5, 6]), + (1720313085.938317, [], [1, 2, 3, 4, 5, 6]), + (1720313087.991269, [], [1, 2, 3, 4, 5, 6]), + (1720382887.239454, [], [1, 2, 3, 4, 5, 6]), + (1720383102.784337, [], [1, 2, 3, 4, 5, 6]), + (1720383104.858248, [], [1, 2, 3, 4, 5, 6]), + (1720401645.882852, [], [1, 2, 3, 4, 5, 6]), + (1720401875.751914, [], [1, 2, 3, 4, 5, 6]), + (1720401877.871081, [], [1, 2, 3, 4, 5, 6]), + (1720401879.349072, [], [1, 2, 3, 4, 5, 6]), + (1720401881.439362, [], [1, 2, 3, 4, 5, 6]), + (1720414595.608826, [], [1, 2, 3, 4, 5, 6]), + (1720414633.289105, [], [1, 2, 3, 4, 5, 6]), + (1720414635.359202, [], [1, 2, 3, 4, 5, 6]), + (1720417003.580682, [], [1, 2, 3, 4, 5, 6]), + (1720417074.130853, [], [1, 2, 3, 4, 5, 6]), + (1720417076.197409, [], [1, 2, 3, 4, 5, 6]), + (1720480715.553431, [], [1, 2, 3, 4, 5, 6]), + 
(1720480828.705337, [], [1, 2, 3, 4, 5, 6]), + (1720480830.783164, [], [1, 2, 3, 4, 5, 6]), + (1720579482.271054, [], [1, 2, 3, 4, 5, 6]), + (1720579503.249382, [], [1, 2, 3, 4, 5, 6]), + (1720579505.309044, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720320578.9281, [], [1, 2, 3, 4, 5, 6]), + (1720320637.501704, [], [1, 2, 3, 4, 5, 6]), + (1720320671.3461, [], [1, 2, 3, 4, 5, 6]), + (1720320700.900689, [], [1, 2, 3, 4, 5, 6]), + (1720326925.640353, [], [1, 2, 3, 4, 5, 6]), + (1720326927.740401, [], [1, 2, 3, 4, 5, 6]), + (1720397880.433215, [], [1, 2, 3, 4, 5, 6]), + (1720397991.949085, [], [1, 2, 3, 4, 5, 6]), + (1720407395.883774, [], [1, 2, 3, 4, 5, 6]), + (1720478571.564518, [], [1, 2, 3, 4, 5, 6]), + (1720478573.689929, [], [1, 2, 3, 4, 5, 6]), + (1720496055.710657, [], [1, 2, 3, 4, 5, 6]), + (1720496250.423433, [], [1, 2, 3, 4, 5, 6]), + (1720496252.533919, [], [1, 2, 3, 4, 5, 6]), + (1720567595.861171, [], [1, 2, 3, 4, 5, 6]), + (1720567634.8402, [], [1, 2, 3, 4, 5, 6]), + (1720567636.90682, [], [1, 2, 3, 4, 5, 6]), + (1720652785.237133, [], [1, 2, 3, 4, 5, 6]), + (1720652836.758383, [], [1, 2, 3, 4, 5, 6]), + (1720652838.851539, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720076323.446198, [], [1, 2, 3, 4, 5, 6]), + (1720076387.202961, [], [1, 2, 3, 4, 5, 6]), + (1720076463.503404, [], [1, 2, 3, 4, 5, 6]), + (1720235511.111341, [], [1, 2, 3, 4, 5, 6]), + (1720235587.152102, [], [1, 2, 3, 4, 5, 6]), + (1720235604.104726, [], [1, 2, 3, 4, 5, 6]), + (1720235788.441489, [], [1, 2, 3, 4, 5, 6]), + (1720235897.829327, [], [1, 2, 3, 4, 5, 6]), + (1720235943.361057, [], [1, 2, 3, 4, 5, 6]), + (1720236088.122922, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719969865.146509, [], [1, 2, 3, 4, 5, 6])], + [ + (1720088372.900286, [], [1, 2, 3, 4, 5, 6]), + (1720220494.799398, [], [1, 2, 3, 4, 5, 6]), + (1720488909.409034, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720063420.61895, [], [1, 2, 3, 4, 5, 6]), + (1720320803.577679, [], [1, 2, 3, 4, 5, 6]), + (1720401999.385093, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720051291.94966, [], [1, 2, 3, 4, 5, 6]), (1720313692.587964, [], [1, 2, 3, 4, 5, 6])], + [(1720509709.170957, [], [1, 2, 3, 4, 5, 6]), (1720652928.475583, [], [1, 2, 3, 4, 5, 6])], + [(1719976500.586248, [], [1, 2, 3, 4, 5, 6])], + [(1720063184.061031, [], [1, 2, 3, 4, 5, 6])], + [(1720147998.634564, [], [1, 2, 3, 4, 5, 6]), (1720575037.093899, [], [1, 2, 3, 4, 5, 6])], + [(1720594897.858543, [], [1, 2, 3, 4, 5, 6])], + [ + (1720238660.290085, [], [1, 2, 3, 4, 5, 6]), + (1720306835.46462, [], [1, 2, 3, 4, 5, 6]), + (1720401110.356341, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719970976.422012, [], [1, 2, 3, 4, 5, 6]), + (1720051629.137902, [], [1, 2, 3, 4, 5, 6]), + (1720301759.327348, [], [1, 2, 3, 4, 5, 6]), + (1720646663.705407, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720077214.628033, [], [1, 2, 3, 4, 5, 6]), (1720579842.451312, [], [1, 2, 3, 4, 5, 6])], + [(1720403179.578891, [], [1, 2, 3, 4, 5, 6]), (1720573175.772465, [], [1, 2, 3, 4, 5, 6])], + [ + (1720418161.36421, [], [1, 2, 3, 4, 5, 6]), + (1720418260.71249, [], [1, 2, 3, 4, 5, 6]), + (1720418315.726571, [], [1, 2, 3, 4, 5, 6]), + (1720418380.031953, [], [1, 2, 3, 4, 5, 6]), + (1720491482.634884, [], [1, 2, 3, 4, 5, 6]), + (1720491528.332034, [], [1, 2, 3, 4, 5, 6]), + (1720491530.434212, [], [1, 2, 3, 4, 5, 6]), + (1720573999.084897, [], [1, 2, 3, 4, 5, 6]), + (1720574047.543743, [], [1, 2, 3, 4, 5, 6]), + (1720574049.630747, [], [1, 2, 3, 4, 5, 6]), + (1720616534.181229, [], [1, 2, 3, 4, 5, 6]), + (1720616536.227681, [], [1, 2, 3, 4, 5, 6]), + ], + [ + 
(1719971505.975792, [], [1, 2, 3, 4, 5, 6]), + (1720309386.016213, [], [1, 2, 3, 4, 5, 6]), + (1720570539.167487, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720026981.201845, [], [1, 2, 3, 4, 5, 6]), + (1720063393.228975, [], [1, 2, 3, 4, 5, 6]), + (1720569870.489752, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720151334.562097, [], [1, 2, 3, 4, 5, 6]), + (1720311454.894847, [], [1, 2, 3, 4, 5, 6]), + (1720483363.072169, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720051176.858561, [], [1, 2, 3, 4, 5, 6]), (1720567049.0934, [], [1, 2, 3, 4, 5, 6])], + [ + (1720300278.98565, [], [1, 2, 3, 4, 5, 6]), + (1720397622.151994, [], [1, 2, 3, 4, 5, 6]), + (1720487075.583534, [], [1, 2, 3, 4, 5, 6]), + (1720572927.092976, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720588311.663694, [], [1, 2, 3, 4, 5, 6])], + [(1720323338.447054, [], [1, 2, 3, 4, 5, 6])], + [(1720499740.21156, [], [1, 2, 3, 4, 5, 6])], + [(1720239215.924016, [], [1, 2, 3, 4, 5, 6]), (1720407168.197067, [], [1, 2, 3, 4, 5, 6])], + [(1720496803.3956, [], [1, 2, 3, 4, 5, 6])], + [(1720578053.220463, [], [1, 2, 3, 4, 5, 6])], + [(1720400566.962842, [], [1, 2, 3, 4, 5, 6]), (1720652817.676145, [], [1, 2, 3, 4, 5, 6])], + [ + (1720234566.739671, [], [1, 2, 3, 4, 5, 6]), + (1720335553.524142, [], [1, 2, 3, 4, 5, 6]), + (1720576366.993741, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720058095.694475, [], [1, 2, 3, 4, 5, 6])], + [(1720499846.305083, [], [1, 2, 3, 4, 5, 6])], + [(1720069736.856188, [], [1, 2, 3, 4, 5, 6]), (1720327054.018462, [], [1, 2, 3, 4, 5, 6])], + [(1720446989.50202, [], [1, 2, 3, 4, 5, 6]), (1720579246.321269, [], [1, 2, 3, 4, 5, 6])], + [ + (1720065515.046196, [], [1, 2, 3, 4, 5, 6]), + (1720237193.252454, [], [1, 2, 3, 4, 5, 6]), + (1720402549.014306, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719965737.195869, [], [1, 2, 3, 4, 5, 6]), + (1720057334.427369, [], [1, 2, 3, 4, 5, 6]), + (1720221205.840325, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720366343.985963, [], [1, 2, 3, 4, 5, 6])], + [(1720195598.557753, [], [1, 2, 3, 4, 5, 6])], + [(1719980678.939359, [], [1, 2, 3, 4, 5, 6]), (1720144995.169791, [], [1, 2, 3, 4, 5, 6])], + [(1720583721.214132, [], [1, 2, 3, 4, 5, 6])], + [ + (1720054537.756175, [], [1, 2, 3, 4, 5, 6]), + (1720182546.976397, [], [1, 2, 3, 4, 5, 6]), + (1720415420.418491, [], [1, 2, 3, 4, 5, 6]), + (1720491014.558376, [], [1, 2, 3, 4, 5, 6]), + (1720568712.840731, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720061881.320656, [], [1, 2, 3, 4, 5, 6]), + (1720352102.705661, [], [1, 2, 3, 4, 5, 6]), + (1720405007.368123, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720578542.112522, [], [1, 2, 3, 4, 5, 6]), (1720652582.644175, [], [1, 2, 3, 4, 5, 6])], + [(1720243826.422296, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980177.888359, [], [1, 2, 3, 4, 5, 6]), + (1720151191.015847, [], [1, 2, 3, 4, 5, 6]), + (1720578744.147878, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720401711.559261, [], [1, 2, 3, 4, 5, 6])], + [ + (1720166076.219132, [], [1, 2, 3, 4, 5, 6]), + (1720603429.438791, [], [1, 2, 3, 4, 5, 6]), + (1720653029.910009, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060428.267185, [], [1, 2, 3, 4, 5, 6]), (1720330543.204866, [], [1, 2, 3, 4, 5, 6])], + [(1720023596.681592, [], [1, 2, 3, 4, 5, 6]), (1720492712.578171, [], [1, 2, 3, 4, 5, 6])], + [ + (1719969468.250895, [], [1, 2, 3, 4, 5, 6]), + (1719969673.12603, [], [1, 2, 3, 4, 5, 6]), + (1719984075.563213, [], [1, 2, 3, 4, 5, 6]), + (1719984158.542506, [], [1, 2, 3, 4, 5, 6]), + (1719984160.639094, [], [1, 2, 3, 4, 5, 6]), + (1719984161.840978, [], [1, 2, 3, 4, 5, 6]), + (1720063496.706949, [], [1, 2, 3, 4, 5, 6]), + 
(1720063498.73801, [], [1, 2, 3, 4, 5, 6]), + (1720071811.706281, [], [1, 2, 3, 4, 5, 6]), + (1720071892.339384, [], [1, 2, 3, 4, 5, 6]), + (1720071894.401443, [], [1, 2, 3, 4, 5, 6]), + (1720141092.670014, [], [1, 2, 3, 4, 5, 6]), + (1720141281.129314, [], [1, 2, 3, 4, 5, 6]), + (1720141283.221705, [], [1, 2, 3, 4, 5, 6]), + (1720242813.561025, [], [1, 2, 3, 4, 5, 6]), + (1720242887.120065, [], [1, 2, 3, 4, 5, 6]), + (1720242889.197122, [], [1, 2, 3, 4, 5, 6]), + (1720314744.527265, [], [1, 2, 3, 4, 5, 6]), + (1720314759.642908, [], [1, 2, 3, 4, 5, 6]), + (1720314761.711826, [], [1, 2, 3, 4, 5, 6]), + (1720371097.307425, [], [1, 2, 3, 4, 5, 6]), + (1720371255.548011, [], [1, 2, 3, 4, 5, 6]), + (1720387058.372995, [], [1, 2, 3, 4, 5, 6]), + (1720387060.502073, [], [1, 2, 3, 4, 5, 6]), + (1720404975.528018, [], [1, 2, 3, 4, 5, 6]), + (1720405017.654969, [], [1, 2, 3, 4, 5, 6]), + (1720405019.759385, [], [1, 2, 3, 4, 5, 6]), + (1720415081.995346, [], [1, 2, 3, 4, 5, 6]), + (1720415260.662438, [], [1, 2, 3, 4, 5, 6]), + (1720415262.742795, [], [1, 2, 3, 4, 5, 6]), + (1720485117.023333, [], [1, 2, 3, 4, 5, 6]), + (1720485119.095263, [], [1, 2, 3, 4, 5, 6]), + (1720499098.798714, [], [1, 2, 3, 4, 5, 6]), + (1720499235.930954, [], [1, 2, 3, 4, 5, 6]), + (1720499238.042676, [], [1, 2, 3, 4, 5, 6]), + (1720569739.038396, [], [1, 2, 3, 4, 5, 6]), + (1720569853.204944, [], [1, 2, 3, 4, 5, 6]), + (1720569855.2363, [], [1, 2, 3, 4, 5, 6]), + (1720602936.713875, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720242969.422529, [], [1, 2, 3, 4, 5, 6])], + [ + (1719964838.834772, [], [1, 2, 3, 4, 5, 6]), + (1720091821.44426, [], [1, 2, 3, 4, 5, 6]), + (1720233192.310563, [], [1, 2, 3, 4, 5, 6]), + (1720328141.79034, [], [1, 2, 3, 4, 5, 6]), + (1720496224.014897, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720152285.903631, [], [1, 2, 3, 4, 5, 6]), + (1720243334.372125, [], [1, 2, 3, 4, 5, 6]), + (1720379463.678962, [], [1, 2, 3, 4, 5, 6]), + (1720542353.379097, [], [1, 2, 3, 4, 5, 6]), + (1720649368.688756, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720226523.433881, [], [1, 2, 3, 4, 5, 6]), + (1720226623.918185, [], [1, 2, 3, 4, 5, 6]), + (1720226651.194519, [], [1, 2, 3, 4, 5, 6]), + (1720226742.683406, [], [1, 2, 3, 4, 5, 6]), + (1720330187.550014, [], [1, 2, 3, 4, 5, 6]), + (1720330210.231169, [], [1, 2, 3, 4, 5, 6]), + (1720410272.539479, [], [1, 2, 3, 4, 5, 6]), + (1720410274.655647, [], [1, 2, 3, 4, 5, 6]), + (1720480303.2625, [], [1, 2, 3, 4, 5, 6]), + (1720480415.671856, [], [1, 2, 3, 4, 5, 6]), + (1720480417.738288, [], [1, 2, 3, 4, 5, 6]), + (1720504693.498524, [], [1, 2, 3, 4, 5, 6]), + (1720504764.21831, [], [1, 2, 3, 4, 5, 6]), + (1720504766.268173, [], [1, 2, 3, 4, 5, 6]), + (1720568377.567722, [], [1, 2, 3, 4, 5, 6]), + (1720568452.706691, [], [1, 2, 3, 4, 5, 6]), + (1720568454.778127, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720150751.139246, [], [1, 2, 3, 4, 5, 6])], + [ + (1720064945.077586, [], [1, 2, 3, 4, 5, 6]), + (1720176851.77124, [], [1, 2, 3, 4, 5, 6]), + (1720413751.53369, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720067667.982332, [], [1, 2, 3, 4, 5, 6]), + (1720498323.491767, [], [1, 2, 3, 4, 5, 6]), + (1720640332.912224, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720586416.962046, [], [1, 2, 3, 4, 5, 6])], + [(1720403065.106609, [], [1, 2, 3, 4, 5, 6]), (1720468529.097883, [], [1, 2, 3, 4, 5, 6])], + [(1719976409.626599, [], [1, 2, 3, 4, 5, 6]), (1720617974.74258, [], [1, 2, 3, 4, 5, 6])], + [(1720155789.338418, [], [1, 2, 3, 4, 5, 6])], + [(1719965523.519862, [], [1, 2, 3, 4, 5, 6])], + [(1720492317.02938, [], [1, 
2, 3, 4, 5, 6])], + [ + (1719971602.527103, [], [1, 2, 3, 4, 5, 6]), + (1720069918.036547, [], [1, 2, 3, 4, 5, 6]), + (1720149900.77775, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720143447.493022, [], [1, 2, 3, 4, 5, 6]), (1720292005.708479, [], [1, 2, 3, 4, 5, 6])], + [(1720316731.010367, [], [1, 2, 3, 4, 5, 6])], + [(1720061643.180585, [], [1, 2, 3, 4, 5, 6])], + [ + (1719998587.453659, [], [1, 2, 3, 4, 5, 6]), + (1720141642.077196, [], [1, 2, 3, 4, 5, 6]), + (1720308402.56405, [], [1, 2, 3, 4, 5, 6]), + (1720416584.28358, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720587211.681421, [], [1, 2, 3, 4, 5, 6])], + [(1720155992.271335, [], [1, 2, 3, 4, 5, 6]), (1720572458.818891, [], [1, 2, 3, 4, 5, 6])], + [(1720052898.053761, [], [1, 2, 3, 4, 5, 6])], + [(1720221610.587492, [], [1, 2, 3, 4, 5, 6]), (1720312064.403238, [], [1, 2, 3, 4, 5, 6])], + [ + (1720147178.948005, [], [1, 2, 3, 4, 5, 6]), + (1720315892.264762, [], [1, 2, 3, 4, 5, 6]), + (1720484335.142158, [], [1, 2, 3, 4, 5, 6]), + (1720625426.867126, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720225964.225383, [], [1, 2, 3, 4, 5, 6]), (1720486617.901554, [], [1, 2, 3, 4, 5, 6])], + [(1720064206.744859, [], [1, 2, 3, 4, 5, 6]), (1720148855.512919, [], [1, 2, 3, 4, 5, 6])], + [ + (1719978022.859036, [], [1, 2, 3, 4, 5, 6]), + (1720225065.814898, [], [1, 2, 3, 4, 5, 6]), + (1720379679.901663, [], [1, 2, 3, 4, 5, 6]), + (1720486481.106043, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720072107.565425, [], [1, 2, 3, 4, 5, 6]), (1720244247.598994, [], [1, 2, 3, 4, 5, 6])], + [(1720418305.8177, [], [1, 2, 3, 4, 5, 6])], + [(1719982059.871483, [], [1, 2, 3, 4, 5, 6])], + [(1719978817.113542, [], [1, 2, 3, 4, 5, 6])], + [(1720138229.977179, [], [1, 2, 3, 4, 5, 6])], + [(1720069967.627991, [], [1, 2, 3, 4, 5, 6]), (1720327850.533176, [], [1, 2, 3, 4, 5, 6])], + [(1720047008.96098, [], [1, 2, 3, 4, 5, 6]), (1720399885.70332, [], [1, 2, 3, 4, 5, 6])], + [(1720241326.183043, [], [1, 2, 3, 4, 5, 6])], + [ + (1719974240.573502, [], [1, 2, 3, 4, 5, 6]), + (1719974319.570753, [], [1, 2, 3, 4, 5, 6]), + (1719974321.645618, [], [1, 2, 3, 4, 5, 6]), + (1720023281.111928, [], [1, 2, 3, 4, 5, 6]), + (1720023484.341612, [], [1, 2, 3, 4, 5, 6]), + (1720023486.442918, [], [1, 2, 3, 4, 5, 6]), + (1720056803.840688, [], [1, 2, 3, 4, 5, 6]), + (1720056805.906524, [], [1, 2, 3, 4, 5, 6]), + (1720056810.106719, [], [1, 2, 3, 4, 5, 6]), + (1720056812.20004, [], [1, 2, 3, 4, 5, 6]), + (1720070833.346034, [], [1, 2, 3, 4, 5, 6]), + (1720070835.405627, [], [1, 2, 3, 4, 5, 6]), + (1720070839.751918, [], [1, 2, 3, 4, 5, 6]), + (1720070841.8631, [], [1, 2, 3, 4, 5, 6]), + (1720143274.991396, [], [1, 2, 3, 4, 5, 6]), + (1720143497.358536, [], [1, 2, 3, 4, 5, 6]), + (1720205396.067954, [], [1, 2, 3, 4, 5, 6]), + (1720205567.621928, [], [1, 2, 3, 4, 5, 6]), + (1720235968.291387, [], [1, 2, 3, 4, 5, 6]), + (1720236089.012578, [], [1, 2, 3, 4, 5, 6]), + (1720236091.096483, [], [1, 2, 3, 4, 5, 6]), + (1720299309.484376, [], [1, 2, 3, 4, 5, 6]), + (1720299348.029909, [], [1, 2, 3, 4, 5, 6]), + (1720299350.111093, [], [1, 2, 3, 4, 5, 6]), + (1720320587.866423, [], [1, 2, 3, 4, 5, 6]), + (1720320589.944508, [], [1, 2, 3, 4, 5, 6]), + (1720400218.389368, [], [1, 2, 3, 4, 5, 6]), + (1720400220.487059, [], [1, 2, 3, 4, 5, 6]), + (1720492976.763916, [], [1, 2, 3, 4, 5, 6]), + (1720493019.503907, [], [1, 2, 3, 4, 5, 6]), + (1720493021.579652, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720122662.758187, [], [1, 2, 3, 4, 5, 6])], + [ + (1720005466.226878, [], [1, 2, 3, 4, 5, 6]), + (1720152200.991189, [], [1, 2, 3, 4, 5, 6]), 
+ (1720481835.844194, [], [1, 2, 3, 4, 5, 6]), + (1720580183.828864, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720070662.89957, [], [1, 2, 3, 4, 5, 6])], + [(1720142183.650314, [], [1, 2, 3, 4, 5, 6]), (1720411252.551579, [], [1, 2, 3, 4, 5, 6])], + [(1720072507.675137, [], [1, 2, 3, 4, 5, 6]), (1720238913.204492, [], [1, 2, 3, 4, 5, 6])], + [(1720218011.114445, [], [1, 2, 3, 4, 5, 6])], + [(1720302385.101079, [], [1, 2, 3, 4, 5, 6])], + [(1720570186.762002, [], [1, 2, 3, 4, 5, 6])], + [(1720157661.668233, [], [1, 2, 3, 4, 5, 6])], + [ + (1720002096.475822, [], [1, 2, 3, 4, 5, 6]), + (1720002098.548489, [], [1, 2, 3, 4, 5, 6]), + (1720066947.715378, [], [1, 2, 3, 4, 5, 6]), + (1720066949.844651, [], [1, 2, 3, 4, 5, 6]), + (1720105199.7729, [], [1, 2, 3, 4, 5, 6]), + (1720105370.978068, [], [1, 2, 3, 4, 5, 6]), + (1720105373.072451, [], [1, 2, 3, 4, 5, 6]), + (1720134686.761223, [], [1, 2, 3, 4, 5, 6]), + (1720134688.87969, [], [1, 2, 3, 4, 5, 6]), + (1720154938.58341, [], [1, 2, 3, 4, 5, 6]), + (1720219977.007902, [], [1, 2, 3, 4, 5, 6]), + (1720219979.064108, [], [1, 2, 3, 4, 5, 6]), + (1720250673.20197, [], [1, 2, 3, 4, 5, 6]), + (1720250796.014257, [], [1, 2, 3, 4, 5, 6]), + (1720313894.440119, [], [1, 2, 3, 4, 5, 6]), + (1720313896.495204, [], [1, 2, 3, 4, 5, 6]), + (1720313896.774105, [], [1, 2, 3, 4, 5, 6]), + (1720396853.1643, [], [1, 2, 3, 4, 5, 6]), + (1720396855.257174, [], [1, 2, 3, 4, 5, 6]), + (1720489852.456446, [], [1, 2, 3, 4, 5, 6]), + (1720489986.86769, [], [1, 2, 3, 4, 5, 6]), + (1720569682.852233, [], [1, 2, 3, 4, 5, 6]), + (1720569767.225411, [], [1, 2, 3, 4, 5, 6]), + (1720569769.334261, [], [1, 2, 3, 4, 5, 6]), + (1720581192.763754, [], [1, 2, 3, 4, 5, 6]), + (1720581262.544992, [], [1, 2, 3, 4, 5, 6]), + (1720581264.629216, [], [1, 2, 3, 4, 5, 6]), + (1720581266.203535, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720226441.129883, [], [1, 2, 3, 4, 5, 6]), + (1720226519.948161, [], [1, 2, 3, 4, 5, 6]), + (1720226639.444269, [], [1, 2, 3, 4, 5, 6]), + (1720226731.198095, [], [1, 2, 3, 4, 5, 6]), + (1720226779.385516, [], [1, 2, 3, 4, 5, 6]), + (1720226788.674966, [], [1, 2, 3, 4, 5, 6]), + (1720226923.560385, [], [1, 2, 3, 4, 5, 6]), + (1720487951.436457, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720038002.665527, [], [1, 2, 3, 4, 5, 6]), + (1720488750.698306, [], [1, 2, 3, 4, 5, 6]), + (1720589885.270178, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720580260.501907, [], [1, 2, 3, 4, 5, 6])], + [(1720237662.32438, [], [1, 2, 3, 4, 5, 6]), (1720309542.106408, [], [1, 2, 3, 4, 5, 6])], + [(1720573441.412558, [], [1, 2, 3, 4, 5, 6])], + [(1720598006.382998, [], [1, 2, 3, 4, 5, 6])], + [(1720249262.676111, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968867.096755, [], [1, 2, 3, 4, 5, 6]), + (1720406515.635103, [], [1, 2, 3, 4, 5, 6]), + (1720568473.896114, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720063183.618875, [], [1, 2, 3, 4, 5, 6])], + [(1720313109.473272, [], [1, 2, 3, 4, 5, 6]), (1720418083.174351, [], [1, 2, 3, 4, 5, 6])], + [(1720106024.609, [], [1, 2, 3, 4, 5, 6])], + [(1720058454.321955, [], [1, 2, 3, 4, 5, 6])], + [ + (1720062639.625591, [], [1, 2, 3, 4, 5, 6]), + (1720221670.858026, [], [1, 2, 3, 4, 5, 6]), + (1720496857.495022, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974856.395033, [], [1, 2, 3, 4, 5, 6]), + (1720053608.017225, [], [1, 2, 3, 4, 5, 6]), + (1720053669.477447, [], [1, 2, 3, 4, 5, 6]), + (1720053671.560518, [], [1, 2, 3, 4, 5, 6]), + (1720062649.578553, [], [1, 2, 3, 4, 5, 6]), + (1720062651.66265, [], [1, 2, 3, 4, 5, 6]), + (1720062653.377042, [], [1, 2, 3, 4, 5, 6]), + 
(1720062655.44862, [], [1, 2, 3, 4, 5, 6]), + (1720082692.43711, [], [1, 2, 3, 4, 5, 6]), + (1720082771.847834, [], [1, 2, 3, 4, 5, 6]), + (1720153671.808662, [], [1, 2, 3, 4, 5, 6]), + (1720153713.489374, [], [1, 2, 3, 4, 5, 6]), + (1720153715.599402, [], [1, 2, 3, 4, 5, 6]), + (1720153716.387598, [], [1, 2, 3, 4, 5, 6]), + (1720153718.446591, [], [1, 2, 3, 4, 5, 6]), + (1720239246.152588, [], [1, 2, 3, 4, 5, 6]), + (1720239248.241024, [], [1, 2, 3, 4, 5, 6]), + (1720239250.77294, [], [1, 2, 3, 4, 5, 6]), + (1720309956.683905, [], [1, 2, 3, 4, 5, 6]), + (1720310071.602061, [], [1, 2, 3, 4, 5, 6]), + (1720325462.049867, [], [1, 2, 3, 4, 5, 6]), + (1720325503.88631, [], [1, 2, 3, 4, 5, 6]), + (1720325506.004479, [], [1, 2, 3, 4, 5, 6]), + (1720412942.492135, [], [1, 2, 3, 4, 5, 6]), + (1720458829.36376, [], [1, 2, 3, 4, 5, 6]), + (1720458949.800013, [], [1, 2, 3, 4, 5, 6]), + (1720458951.887504, [], [1, 2, 3, 4, 5, 6]), + (1720492407.820081, [], [1, 2, 3, 4, 5, 6]), + (1720492444.404348, [], [1, 2, 3, 4, 5, 6]), + (1720492446.466946, [], [1, 2, 3, 4, 5, 6]), + (1720575932.543872, [], [1, 2, 3, 4, 5, 6]), + (1720576033.410802, [], [1, 2, 3, 4, 5, 6]), + (1720576035.469127, [], [1, 2, 3, 4, 5, 6]), + (1720576036.44253, [], [1, 2, 3, 4, 5, 6]), + (1720647287.059052, [], [1, 2, 3, 4, 5, 6]), + (1720647289.160943, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720330899.088291, [], [1, 2, 3, 4, 5, 6])], + [ + (1720081793.462679, [], [1, 2, 3, 4, 5, 6]), + (1720081976.131384, [], [1, 2, 3, 4, 5, 6]), + (1720081976.187009, [], [1, 2, 3, 4, 5, 6]), + (1720081980.325716, [], [1, 2, 3, 4, 5, 6]), + (1720091170.691618, [], [1, 2, 3, 4, 5, 6]), + (1720091356.840132, [], [1, 2, 3, 4, 5, 6]), + (1720091358.928927, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720238399.047753, [], [1, 2, 3, 4, 5, 6]), (1720567368.683366, [], [1, 2, 3, 4, 5, 6])], + [(1720583646.429286, [], [1, 2, 3, 4, 5, 6])], + [(1720330049.16818, [], [1, 2, 3, 4, 5, 6])], + [(1720578076.222297, [], [1, 2, 3, 4, 5, 6])], + [(1719972331.112716, [], [1, 2, 3, 4, 5, 6]), (1720051745.533132, [], [1, 2, 3, 4, 5, 6])], + [(1720146463.601388, [], [1, 2, 3, 4, 5, 6]), (1720474541.840768, [], [1, 2, 3, 4, 5, 6])], + [ + (1720051898.891617, [], [1, 2, 3, 4, 5, 6]), + (1720173013.609275, [], [1, 2, 3, 4, 5, 6]), + (1720320493.657042, [], [1, 2, 3, 4, 5, 6]), + (1720345690.851927, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720408818.140609, [], [1, 2, 3, 4, 5, 6]), (1720486964.900766, [], [1, 2, 3, 4, 5, 6])], + [ + (1720043728.363322, [], [1, 2, 3, 4, 5, 6]), + (1720311975.271982, [], [1, 2, 3, 4, 5, 6]), + (1720571578.431424, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719972681.535491, [], [1, 2, 3, 4, 5, 6]), (1720481018.937328, [], [1, 2, 3, 4, 5, 6])], + [ + (1720026382.553301, [], [1, 2, 3, 4, 5, 6]), + (1720149100.706808, [], [1, 2, 3, 4, 5, 6]), + (1720307942.507634, [], [1, 2, 3, 4, 5, 6]), + (1720570495.6023, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720488173.923373, [], [1, 2, 3, 4, 5, 6]), (1720568447.027354, [], [1, 2, 3, 4, 5, 6])], + [(1720567270.126647, [], [1, 2, 3, 4, 5, 6])], + [(1720499324.726509, [], [1, 2, 3, 4, 5, 6])], + [(1719967975.358552, [], [1, 2, 3, 4, 5, 6]), (1720326137.056104, [], [1, 2, 3, 4, 5, 6])], + [(1720328587.433829, [], [1, 2, 3, 4, 5, 6])], + [ + (1720055336.001624, [], [1, 2, 3, 4, 5, 6]), + (1720138863.165013, [], [1, 2, 3, 4, 5, 6]), + (1720395924.519387, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720069032.516709, [], [1, 2, 3, 4, 5, 6])], + [(1720055010.518163, [], [1, 2, 3, 4, 5, 6]), (1720099339.150894, [], [1, 2, 3, 4, 5, 6])], + 
[(1720585549.317132, [], [1, 2, 3, 4, 5, 6])], + [ + (1720409221.504822, [], [1, 2, 3, 4, 5, 6]), + (1720519728.003909, [], [1, 2, 3, 4, 5, 6]), + (1720567616.396835, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720244626.143033, [], [1, 2, 3, 4, 5, 6]), + (1720408635.777109, [], [1, 2, 3, 4, 5, 6]), + (1720575490.310245, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720650559.825556, [], [1, 2, 3, 4, 5, 6])], + [(1720582328.399682, [], [1, 2, 3, 4, 5, 6])], + [(1720394085.218172, [], [1, 2, 3, 4, 5, 6])], + [ + (1720066065.444143, [], [1, 2, 3, 4, 5, 6]), + (1720191993.66672, [], [1, 2, 3, 4, 5, 6]), + (1720491329.586545, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720071639.073353, [], [1, 2, 3, 4, 5, 6]), + (1720147406.207381, [], [1, 2, 3, 4, 5, 6]), + (1720311129.234658, [], [1, 2, 3, 4, 5, 6]), + (1720573354.037576, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720240131.733437, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984704.452873, [], [1, 2, 3, 4, 5, 6]), + (1720154844.587696, [], [1, 2, 3, 4, 5, 6]), + (1720322613.231449, [], [1, 2, 3, 4, 5, 6]), + (1720569714.825725, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720486946.256934, [], [1, 2, 3, 4, 5, 6]), (1720647691.877446, [], [1, 2, 3, 4, 5, 6])], + [ + (1719993805.960143, [], [1, 2, 3, 4, 5, 6]), + (1720143738.25635, [], [1, 2, 3, 4, 5, 6]), + (1720309437.19401, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720398437.265534, [], [1, 2, 3, 4, 5, 6]), + (1720509793.976335, [], [1, 2, 3, 4, 5, 6]), + (1720629661.586274, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720062670.596985, [], [1, 2, 3, 4, 5, 6]), (1720438472.029491, [], [1, 2, 3, 4, 5, 6])], + [(1720329032.038648, [], [1, 2, 3, 4, 5, 6])], + [(1719984110.370743, [], [1, 2, 3, 4, 5, 6])], + [(1719986035.664892, [], [1, 2, 3, 4, 5, 6]), (1720241225.374801, [], [1, 2, 3, 4, 5, 6])], + [ + (1720105075.810739, [], [1, 2, 3, 4, 5, 6]), + (1720150414.362845, [], [1, 2, 3, 4, 5, 6]), + (1720308174.216706, [], [1, 2, 3, 4, 5, 6]), + (1720412581.759663, [], [1, 2, 3, 4, 5, 6]), + (1720567554.209717, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720537016.009206, [], [1, 2, 3, 4, 5, 6]), (1720589963.468469, [], [1, 2, 3, 4, 5, 6])], + [(1720487391.820061, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975756.8334, [], [1, 2, 3, 4, 5, 6]), + (1720155078.652264, [], [1, 2, 3, 4, 5, 6]), + (1720400698.199527, [], [1, 2, 3, 4, 5, 6]), + (1720496832.791723, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720018681.490249, [], [1, 2, 3, 4, 5, 6]), + (1720136925.765051, [], [1, 2, 3, 4, 5, 6]), + (1720299976.82238, [], [1, 2, 3, 4, 5, 6]), + (1720569027.317754, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720154725.361924, [], [1, 2, 3, 4, 5, 6]), + (1720243905.535885, [], [1, 2, 3, 4, 5, 6]), + (1720315512.512864, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720588095.521256, [], [1, 2, 3, 4, 5, 6])], + [(1719983442.727069, [], [1, 2, 3, 4, 5, 6]), (1720152453.391462, [], [1, 2, 3, 4, 5, 6])], + [(1720405182.158931, [], [1, 2, 3, 4, 5, 6]), (1720575807.583852, [], [1, 2, 3, 4, 5, 6])], + [(1720069878.030512, [], [1, 2, 3, 4, 5, 6]), (1720168504.534482, [], [1, 2, 3, 4, 5, 6])], + [(1720239119.238416, [], [1, 2, 3, 4, 5, 6])], + [(1720576430.211377, [], [1, 2, 3, 4, 5, 6])], + [ + (1719972965.48613, [], [1, 2, 3, 4, 5, 6]), + (1720240976.867243, [], [1, 2, 3, 4, 5, 6]), + (1720404977.394327, [], [1, 2, 3, 4, 5, 6]), + (1720537004.359466, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720487019.528315, [], [1, 2, 3, 4, 5, 6]), (1720581959.239135, [], [1, 2, 3, 4, 5, 6])], + [(1720652926.990055, [], [1, 2, 3, 4, 5, 6])], + [ + (1720052099.960777, [], [1, 2, 3, 4, 5, 6]), + (1720138855.98453, [], [1, 2, 3, 4, 5, 
6]), + (1720138921.586511, [], [1, 2, 3, 4, 5, 6]), + (1720139014.744606, [], [1, 2, 3, 4, 5, 6]), + (1720139182.595009, [], [1, 2, 3, 4, 5, 6]), + (1720139192.3206, [], [1, 2, 3, 4, 5, 6]), + (1720581909.908771, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720065955.899244, [], [1, 2, 3, 4, 5, 6]), + (1720236828.579322, [], [1, 2, 3, 4, 5, 6]), + (1720308640.597753, [], [1, 2, 3, 4, 5, 6]), + (1720579634.738256, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719980159.276761, [], [1, 2, 3, 4, 5, 6]), + (1720308552.424302, [], [1, 2, 3, 4, 5, 6]), + (1720653256.063729, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719985271.443965, [], [1, 2, 3, 4, 5, 6]), + (1720220543.115385, [], [1, 2, 3, 4, 5, 6]), + (1720315297.143816, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720330015.747888, [], [1, 2, 3, 4, 5, 6]), (1720481610.656596, [], [1, 2, 3, 4, 5, 6])], + [(1720224041.283015, [], [1, 2, 3, 4, 5, 6]), (1720567371.834809, [], [1, 2, 3, 4, 5, 6])], + [ + (1720244217.827624, [], [1, 2, 3, 4, 5, 6]), + (1720402557.505715, [], [1, 2, 3, 4, 5, 6]), + (1720502124.284452, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720415547.576014, [], [1, 2, 3, 4, 5, 6]), (1720531682.711315, [], [1, 2, 3, 4, 5, 6])], + [(1720407411.272993, [], [1, 2, 3, 4, 5, 6]), (1720574508.629738, [], [1, 2, 3, 4, 5, 6])], + [(1720257290.163002, [], [1, 2, 3, 4, 5, 6]), (1720492975.717018, [], [1, 2, 3, 4, 5, 6])], + [(1720144145.711511, [], [1, 2, 3, 4, 5, 6]), (1720401163.125022, [], [1, 2, 3, 4, 5, 6])], + [(1720355601.346282, [], [1, 2, 3, 4, 5, 6]), (1720572069.286814, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968339.260056, [], [1, 2, 3, 4, 5, 6]), + (1719968341.365428, [], [1, 2, 3, 4, 5, 6]), + (1719968343.993978, [], [1, 2, 3, 4, 5, 6]), + (1719968346.031381, [], [1, 2, 3, 4, 5, 6]), + (1719968349.431552, [], [1, 2, 3, 4, 5, 6]), + (1719968351.559689, [], [1, 2, 3, 4, 5, 6]), + (1719976134.941126, [], [1, 2, 3, 4, 5, 6]), + (1719976278.477066, [], [1, 2, 3, 4, 5, 6]), + (1719976280.56988, [], [1, 2, 3, 4, 5, 6]), + (1720052757.855887, [], [1, 2, 3, 4, 5, 6]), + (1720052759.915085, [], [1, 2, 3, 4, 5, 6]), + (1720110094.313929, [], [1, 2, 3, 4, 5, 6]), + (1720142517.707832, [], [1, 2, 3, 4, 5, 6]), + (1720142570.10559, [], [1, 2, 3, 4, 5, 6]), + (1720142572.151412, [], [1, 2, 3, 4, 5, 6]), + (1720142576.179553, [], [1, 2, 3, 4, 5, 6]), + (1720237055.807105, [], [1, 2, 3, 4, 5, 6]), + (1720237203.321556, [], [1, 2, 3, 4, 5, 6]), + (1720237205.419793, [], [1, 2, 3, 4, 5, 6]), + (1720316912.566247, [], [1, 2, 3, 4, 5, 6]), + (1720317055.804333, [], [1, 2, 3, 4, 5, 6]), + (1720317057.925258, [], [1, 2, 3, 4, 5, 6]), + (1720317058.399149, [], [1, 2, 3, 4, 5, 6]), + (1720317060.481448, [], [1, 2, 3, 4, 5, 6]), + (1720393849.766518, [], [1, 2, 3, 4, 5, 6]), + (1720393921.300236, [], [1, 2, 3, 4, 5, 6]), + (1720406796.853939, [], [1, 2, 3, 4, 5, 6]), + (1720406798.933918, [], [1, 2, 3, 4, 5, 6]), + (1720482599.505433, [], [1, 2, 3, 4, 5, 6]), + (1720482663.255581, [], [1, 2, 3, 4, 5, 6]), + (1720482665.27704, [], [1, 2, 3, 4, 5, 6]), + (1720492023.699542, [], [1, 2, 3, 4, 5, 6]), + (1720492025.737059, [], [1, 2, 3, 4, 5, 6]), + (1720500142.609638, [], [1, 2, 3, 4, 5, 6]), + (1720500250.895423, [], [1, 2, 3, 4, 5, 6]), + (1720525828.241699, [], [1, 2, 3, 4, 5, 6]), + (1720525830.335737, [], [1, 2, 3, 4, 5, 6]), + (1720543473.185403, [], [1, 2, 3, 4, 5, 6]), + (1720543629.193018, [], [1, 2, 3, 4, 5, 6]), + (1720543631.258205, [], [1, 2, 3, 4, 5, 6]), + (1720566115.315069, [], [1, 2, 3, 4, 5, 6]), + (1720566235.252146, [], [1, 2, 3, 4, 5, 6]), + (1720566237.371673, [], [1, 
2, 3, 4, 5, 6]), + (1720566239.622085, [], [1, 2, 3, 4, 5, 6]), + (1720566241.74061, [], [1, 2, 3, 4, 5, 6]), + (1720652422.12376, [], [1, 2, 3, 4, 5, 6]), + (1720652589.161105, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720332940.235911, [], [1, 2, 3, 4, 5, 6])], + [(1720065527.859172, [], [1, 2, 3, 4, 5, 6])], + [(1720568368.543876, [], [1, 2, 3, 4, 5, 6]), (1720635472.219669, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968828.538353, [], [1, 2, 3, 4, 5, 6]), + (1720051948.377763, [], [1, 2, 3, 4, 5, 6]), + (1720299205.556357, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720235830.179481, [], [1, 2, 3, 4, 5, 6]), + (1720235953.147018, [], [1, 2, 3, 4, 5, 6]), + (1720236018.20472, [], [1, 2, 3, 4, 5, 6]), + (1720236089.138704, [], [1, 2, 3, 4, 5, 6]), + (1720236119.593712, [], [1, 2, 3, 4, 5, 6]), + (1720236141.83499, [], [1, 2, 3, 4, 5, 6]), + (1720311050.201652, [], [1, 2, 3, 4, 5, 6]), + (1720311052.280309, [], [1, 2, 3, 4, 5, 6]), + (1720395484.534496, [], [1, 2, 3, 4, 5, 6]), + (1720491406.080018, [], [1, 2, 3, 4, 5, 6]), + (1720491430.598198, [], [1, 2, 3, 4, 5, 6]), + (1720491432.661821, [], [1, 2, 3, 4, 5, 6]), + (1720572678.481313, [], [1, 2, 3, 4, 5, 6]), + (1720572808.45491, [], [1, 2, 3, 4, 5, 6]), + (1720572810.563889, [], [1, 2, 3, 4, 5, 6]), + (1720603175.70942, [], [1, 2, 3, 4, 5, 6]), + (1720603202.06502, [], [1, 2, 3, 4, 5, 6]), + (1720603204.156746, [], [1, 2, 3, 4, 5, 6]), + (1720652491.405509, [], [1, 2, 3, 4, 5, 6]), + (1720652598.039059, [], [1, 2, 3, 4, 5, 6]), + (1720652600.082367, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720153161.725292, [], [1, 2, 3, 4, 5, 6])], + [(1720494662.408865, [], [1, 2, 3, 4, 5, 6]), (1720568597.855422, [], [1, 2, 3, 4, 5, 6])], + [(1720416466.753208, [], [1, 2, 3, 4, 5, 6])], + [(1719978991.390168, [], [1, 2, 3, 4, 5, 6])], + [(1720072031.976782, [], [1, 2, 3, 4, 5, 6]), (1720584690.251602, [], [1, 2, 3, 4, 5, 6])], + [(1720575076.950008, [], [1, 2, 3, 4, 5, 6])], + [(1720231712.798613, [], [1, 2, 3, 4, 5, 6]), (1720398045.987903, [], [1, 2, 3, 4, 5, 6])], + [(1720056840.047309, [], [1, 2, 3, 4, 5, 6]), (1720625851.477544, [], [1, 2, 3, 4, 5, 6])], + [ + (1720053429.359158, [], [1, 2, 3, 4, 5, 6]), + (1720053508.29626, [], [1, 2, 3, 4, 5, 6]), + (1720053570.605172, [], [1, 2, 3, 4, 5, 6]), + (1720053639.380777, [], [1, 2, 3, 4, 5, 6]), + (1720066181.00997, [], [1, 2, 3, 4, 5, 6]), + (1720066215.053405, [], [1, 2, 3, 4, 5, 6]), + (1720066217.116517, [], [1, 2, 3, 4, 5, 6]), + (1720143779.66573, [], [1, 2, 3, 4, 5, 6]), + (1720143781.713526, [], [1, 2, 3, 4, 5, 6]), + (1720222105.35254, [], [1, 2, 3, 4, 5, 6]), + (1720317654.056711, [], [1, 2, 3, 4, 5, 6]), + (1720317708.563828, [], [1, 2, 3, 4, 5, 6]), + (1720480329.549535, [], [1, 2, 3, 4, 5, 6]), + (1720480520.417693, [], [1, 2, 3, 4, 5, 6]), + (1720480522.54519, [], [1, 2, 3, 4, 5, 6]), + (1720480523.499363, [], [1, 2, 3, 4, 5, 6]), + (1720577037.242221, [], [1, 2, 3, 4, 5, 6]), + (1720577039.306434, [], [1, 2, 3, 4, 5, 6]), + (1720639329.717862, [], [1, 2, 3, 4, 5, 6]), + (1720639469.331454, [], [1, 2, 3, 4, 5, 6]), + (1720639471.36127, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720024453.629276, [], [1, 2, 3, 4, 5, 6]), (1720316176.77774, [], [1, 2, 3, 4, 5, 6])], + [ + (1720068883.919311, [], [1, 2, 3, 4, 5, 6]), + (1720319773.101818, [], [1, 2, 3, 4, 5, 6]), + (1720586957.747953, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720056927.404781, [], [1, 2, 3, 4, 5, 6])], + [ + (1720054270.638059, [], [1, 2, 3, 4, 5, 6]), + (1720153565.870327, [], [1, 2, 3, 4, 5, 6]), + (1720334693.538652, [], [1, 2, 3, 4, 5, 6]), + ], + 
[(1720152641.309698, [], [1, 2, 3, 4, 5, 6]), (1720576318.23299, [], [1, 2, 3, 4, 5, 6])], + [ + (1720404934.034946, [], [1, 2, 3, 4, 5, 6]), + (1720476937.980269, [], [1, 2, 3, 4, 5, 6]), + (1720652925.317718, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720583121.659416, [], [1, 2, 3, 4, 5, 6])], + [(1719974918.036807, [], [1, 2, 3, 4, 5, 6])], + [(1720052839.789235, [], [1, 2, 3, 4, 5, 6]), (1720309756.887443, [], [1, 2, 3, 4, 5, 6])], + [(1720214816.601474, [], [1, 2, 3, 4, 5, 6]), (1720628643.350003, [], [1, 2, 3, 4, 5, 6])], + [(1719990813.089809, [], [1, 2, 3, 4, 5, 6]), (1720312746.860016, [], [1, 2, 3, 4, 5, 6])], + [(1720072040.294779, [], [1, 2, 3, 4, 5, 6]), (1720573666.820699, [], [1, 2, 3, 4, 5, 6])], + [(1720221192.01312, [], [1, 2, 3, 4, 5, 6])], + [(1720143165.437476, [], [1, 2, 3, 4, 5, 6])], + [ + (1719965593.95106, [], [1, 2, 3, 4, 5, 6]), + (1720062861.422969, [], [1, 2, 3, 4, 5, 6]), + (1720503564.270709, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720310060.645551, [], [1, 2, 3, 4, 5, 6]), + (1720482309.217878, [], [1, 2, 3, 4, 5, 6]), + (1720591036.349001, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720157768.51272, [], [1, 2, 3, 4, 5, 6])], + [(1720149445.473387, [], [1, 2, 3, 4, 5, 6]), (1720438026.869011, [], [1, 2, 3, 4, 5, 6])], + [ + (1720060489.013219, [], [1, 2, 3, 4, 5, 6]), + (1720415886.629529, [], [1, 2, 3, 4, 5, 6]), + (1720580867.871164, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720414273.276015, [], [1, 2, 3, 4, 5, 6]), (1720487097.033528, [], [1, 2, 3, 4, 5, 6])], + [(1719966569.062726, [], [1, 2, 3, 4, 5, 6])], + [ + (1719966533.368239, [], [1, 2, 3, 4, 5, 6]), + (1720318867.369239, [], [1, 2, 3, 4, 5, 6]), + (1720424434.84027, [], [1, 2, 3, 4, 5, 6]), + (1720566607.607309, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720608001.068576, [], [1, 2, 3, 4, 5, 6])], + [ + (1720062804.379267, [], [1, 2, 3, 4, 5, 6]), + (1720226534.49236, [], [1, 2, 3, 4, 5, 6]), + (1720321084.499585, [], [1, 2, 3, 4, 5, 6]), + (1720450358.303395, [], [1, 2, 3, 4, 5, 6]), + (1720577114.020932, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720138095.209939, [], [1, 2, 3, 4, 5, 6])], + [ + (1720109654.682718, [], [1, 2, 3, 4, 5, 6]), + (1720109832.815741, [], [1, 2, 3, 4, 5, 6]), + (1720109959.88633, [], [1, 2, 3, 4, 5, 6]), + (1720110033.900336, [], [1, 2, 3, 4, 5, 6]), + (1720110090.159457, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719973270.54798, [], [1, 2, 3, 4, 5, 6])], + [(1720404747.93651, [], [1, 2, 3, 4, 5, 6])], + [(1719975896.00888, [], [1, 2, 3, 4, 5, 6]), (1720412877.994145, [], [1, 2, 3, 4, 5, 6])], + [(1720488415.324703, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979329.168652, [], [1, 2, 3, 4, 5, 6]), + (1719979433.834943, [], [1, 2, 3, 4, 5, 6]), + (1719979537.706541, [], [1, 2, 3, 4, 5, 6]), + (1720054525.517489, [], [1, 2, 3, 4, 5, 6]), + (1720054576.808031, [], [1, 2, 3, 4, 5, 6]), + (1720054637.310552, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720008269.86362, [], [1, 2, 3, 4, 5, 6]), (1720228186.690288, [], [1, 2, 3, 4, 5, 6])], + [(1719980906.390651, [], [1, 2, 3, 4, 5, 6])], + [(1720049804.519108, [], [1, 2, 3, 4, 5, 6])], + [(1719982824.989151, [], [1, 2, 3, 4, 5, 6])], + [ + (1720052793.231176, [], [1, 2, 3, 4, 5, 6]), + (1720416383.522419, [], [1, 2, 3, 4, 5, 6]), + (1720576203.462386, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720141956.096351, [], [1, 2, 3, 4, 5, 6]), (1720501761.653384, [], [1, 2, 3, 4, 5, 6])], + [(1719968214.670337, [], [1, 2, 3, 4, 5, 6])], + [ + (1720061582.741936, [], [1, 2, 3, 4, 5, 6]), + (1720148352.805998, [], [1, 2, 3, 4, 5, 6]), + (1720320650.836088, [], [1, 2, 3, 4, 5, 6]), + (1720480540.757287, 
[], [1, 2, 3, 4, 5, 6]), + ], + [(1720066746.296404, [], [1, 2, 3, 4, 5, 6]), (1720470635.779369, [], [1, 2, 3, 4, 5, 6])], + [(1720036143.99876, [], [1, 2, 3, 4, 5, 6]), (1720235115.25383, [], [1, 2, 3, 4, 5, 6])], + [ + (1720325739.301864, [], [1, 2, 3, 4, 5, 6]), + (1720366758.48691, [], [1, 2, 3, 4, 5, 6]), + (1720579671.285769, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720406674.612278, [], [1, 2, 3, 4, 5, 6])], + [(1720569843.609239, [], [1, 2, 3, 4, 5, 6])], + [ + (1720060843.491976, [], [1, 2, 3, 4, 5, 6]), + (1720147665.305258, [], [1, 2, 3, 4, 5, 6]), + (1720501529.904655, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720046373.265399, [], [1, 2, 3, 4, 5, 6]), + (1720046375.332994, [], [1, 2, 3, 4, 5, 6]), + (1720068144.411279, [], [1, 2, 3, 4, 5, 6]), + (1720068261.195225, [], [1, 2, 3, 4, 5, 6]), + (1720131629.331825, [], [1, 2, 3, 4, 5, 6]), + (1720131717.731289, [], [1, 2, 3, 4, 5, 6]), + (1720143208.108341, [], [1, 2, 3, 4, 5, 6]), + (1720224375.336718, [], [1, 2, 3, 4, 5, 6]), + (1720224489.89131, [], [1, 2, 3, 4, 5, 6]), + (1720239983.991454, [], [1, 2, 3, 4, 5, 6]), + (1720240023.957522, [], [1, 2, 3, 4, 5, 6]), + (1720240026.023994, [], [1, 2, 3, 4, 5, 6]), + (1720288870.449116, [], [1, 2, 3, 4, 5, 6]), + (1720319014.330473, [], [1, 2, 3, 4, 5, 6]), + (1720319153.071162, [], [1, 2, 3, 4, 5, 6]), + (1720319155.11854, [], [1, 2, 3, 4, 5, 6]), + (1720356008.754634, [], [1, 2, 3, 4, 5, 6]), + (1720356170.017209, [], [1, 2, 3, 4, 5, 6]), + (1720414281.753569, [], [1, 2, 3, 4, 5, 6]), + (1720466888.126284, [], [1, 2, 3, 4, 5, 6]), + (1720466890.175399, [], [1, 2, 3, 4, 5, 6]), + (1720496852.884055, [], [1, 2, 3, 4, 5, 6]), + (1720496900.157534, [], [1, 2, 3, 4, 5, 6]), + (1720496902.257177, [], [1, 2, 3, 4, 5, 6]), + (1720517711.484252, [], [1, 2, 3, 4, 5, 6]), + (1720517785.617389, [], [1, 2, 3, 4, 5, 6]), + (1720517787.722386, [], [1, 2, 3, 4, 5, 6]), + (1720574636.301281, [], [1, 2, 3, 4, 5, 6]), + (1720574638.402501, [], [1, 2, 3, 4, 5, 6]), + (1720631962.467861, [], [1, 2, 3, 4, 5, 6]), + (1720632073.350096, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720222983.951049, [], [1, 2, 3, 4, 5, 6]), (1720416117.193584, [], [1, 2, 3, 4, 5, 6])], + [ + (1719971140.695348, [], [1, 2, 3, 4, 5, 6]), + (1720135102.372106, [], [1, 2, 3, 4, 5, 6]), + (1720241528.560118, [], [1, 2, 3, 4, 5, 6]), + (1720494221.442123, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720321074.976216, [], [1, 2, 3, 4, 5, 6]), (1720409891.326419, [], [1, 2, 3, 4, 5, 6])], + [ + (1720048681.074873, [], [1, 2, 3, 4, 5, 6]), + (1720048799.279747, [], [1, 2, 3, 4, 5, 6]), + (1720048801.388645, [], [1, 2, 3, 4, 5, 6]), + (1720078114.283264, [], [1, 2, 3, 4, 5, 6]), + (1720078331.228227, [], [1, 2, 3, 4, 5, 6]), + (1720143813.123392, [], [1, 2, 3, 4, 5, 6]), + (1720143818.771163, [], [1, 2, 3, 4, 5, 6]), + (1720235193.521271, [], [1, 2, 3, 4, 5, 6]), + (1720235195.596896, [], [1, 2, 3, 4, 5, 6]), + (1720325140.395885, [], [1, 2, 3, 4, 5, 6]), + (1720325267.478348, [], [1, 2, 3, 4, 5, 6]), + (1720411316.426439, [], [1, 2, 3, 4, 5, 6]), + (1720411410.991238, [], [1, 2, 3, 4, 5, 6]), + (1720411413.049352, [], [1, 2, 3, 4, 5, 6]), + (1720585972.027756, [], [1, 2, 3, 4, 5, 6]), + (1720586043.355429, [], [1, 2, 3, 4, 5, 6]), + (1720586045.457795, [], [1, 2, 3, 4, 5, 6]), + (1720615162.541609, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719966754.275194, [], [1, 2, 3, 4, 5, 6]), + (1720490216.464205, [], [1, 2, 3, 4, 5, 6]), + (1720553382.681907, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720002644.294528, [], [1, 2, 3, 4, 5, 6]), (1720230090.842106, [], [1, 2, 3, 4, 5, 
6])], + [(1720146769.696788, [], [1, 2, 3, 4, 5, 6])], + [ + (1720165756.310512, [], [1, 2, 3, 4, 5, 6]), + (1720410912.566749, [], [1, 2, 3, 4, 5, 6]), + (1720570647.832366, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719976353.576494, [], [1, 2, 3, 4, 5, 6]), + (1720377185.297147, [], [1, 2, 3, 4, 5, 6]), + (1720377289.215042, [], [1, 2, 3, 4, 5, 6]), + (1720377308.468995, [], [1, 2, 3, 4, 5, 6]), + (1720377428.407697, [], [1, 2, 3, 4, 5, 6]), + (1720377485.735576, [], [1, 2, 3, 4, 5, 6]), + (1720377529.508166, [], [1, 2, 3, 4, 5, 6]), + (1720377719.383399, [], [1, 2, 3, 4, 5, 6]), + (1720377809.666048, [], [1, 2, 3, 4, 5, 6]), + (1720377903.918773, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719980213.693083, [], [1, 2, 3, 4, 5, 6]), (1720140384.252931, [], [1, 2, 3, 4, 5, 6])], + [(1720063871.378525, [], [1, 2, 3, 4, 5, 6]), (1720573199.935932, [], [1, 2, 3, 4, 5, 6])], + [(1719985832.719319, [], [1, 2, 3, 4, 5, 6]), (1720406386.34727, [], [1, 2, 3, 4, 5, 6])], + [(1720235695.246766, [], [1, 2, 3, 4, 5, 6]), (1720397629.747797, [], [1, 2, 3, 4, 5, 6])], + [ + (1719968874.938189, [], [1, 2, 3, 4, 5, 6]), + (1719969009.652547, [], [1, 2, 3, 4, 5, 6]), + (1719985700.913806, [], [1, 2, 3, 4, 5, 6]), + (1719985791.512554, [], [1, 2, 3, 4, 5, 6]), + (1720148300.05719, [], [1, 2, 3, 4, 5, 6]), + (1720148304.627225, [], [1, 2, 3, 4, 5, 6]), + (1720148306.685703, [], [1, 2, 3, 4, 5, 6]), + (1720227741.213642, [], [1, 2, 3, 4, 5, 6]), + (1720227808.775173, [], [1, 2, 3, 4, 5, 6]), + (1720295172.811284, [], [1, 2, 3, 4, 5, 6]), + (1720295262.745855, [], [1, 2, 3, 4, 5, 6]), + (1720295264.827116, [], [1, 2, 3, 4, 5, 6]), + (1720295269.130924, [], [1, 2, 3, 4, 5, 6]), + (1720295271.214758, [], [1, 2, 3, 4, 5, 6]), + (1720295276.000757, [], [1, 2, 3, 4, 5, 6]), + (1720295278.050173, [], [1, 2, 3, 4, 5, 6]), + (1720295281.951474, [], [1, 2, 3, 4, 5, 6]), + (1720314791.722567, [], [1, 2, 3, 4, 5, 6]), + (1720314793.809493, [], [1, 2, 3, 4, 5, 6]), + (1720314794.091414, [], [1, 2, 3, 4, 5, 6]), + (1720314796.180282, [], [1, 2, 3, 4, 5, 6]), + (1720334621.693568, [], [1, 2, 3, 4, 5, 6]), + (1720334651.208509, [], [1, 2, 3, 4, 5, 6]), + (1720334653.289286, [], [1, 2, 3, 4, 5, 6]), + (1720405990.429231, [], [1, 2, 3, 4, 5, 6]), + (1720406072.368399, [], [1, 2, 3, 4, 5, 6]), + (1720406074.479232, [], [1, 2, 3, 4, 5, 6]), + (1720502938.778116, [], [1, 2, 3, 4, 5, 6]), + (1720502940.865312, [], [1, 2, 3, 4, 5, 6]), + (1720575136.293441, [], [1, 2, 3, 4, 5, 6]), + (1720575193.500487, [], [1, 2, 3, 4, 5, 6]), + (1720575195.593287, [], [1, 2, 3, 4, 5, 6]), + (1720584066.099625, [], [1, 2, 3, 4, 5, 6]), + (1720584068.181406, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720401234.664166, [], [1, 2, 3, 4, 5, 6])], + [(1720338995.593562, [], [1, 2, 3, 4, 5, 6]), (1720569421.058122, [], [1, 2, 3, 4, 5, 6])], + [ + (1720152780.103003, [], [1, 2, 3, 4, 5, 6]), + (1720318021.21162, [], [1, 2, 3, 4, 5, 6]), + (1720568682.771219, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720153767.474128, [], [1, 2, 3, 4, 5, 6]), + (1720153989.99445, [], [1, 2, 3, 4, 5, 6]), + (1720154117.339532, [], [1, 2, 3, 4, 5, 6]), + (1720154188.47243, [], [1, 2, 3, 4, 5, 6]), + (1720227169.047901, [], [1, 2, 3, 4, 5, 6]), + (1720227171.113334, [], [1, 2, 3, 4, 5, 6]), + (1720290328.719511, [], [1, 2, 3, 4, 5, 6]), + (1720290370.18464, [], [1, 2, 3, 4, 5, 6]), + (1720308949.583306, [], [1, 2, 3, 4, 5, 6]), + (1720309021.205367, [], [1, 2, 3, 4, 5, 6]), + (1720309023.255742, [], [1, 2, 3, 4, 5, 6]), + (1720400031.238045, [], [1, 2, 3, 4, 5, 6]), + (1720400050.534361, [], [1, 2, 
3, 4, 5, 6]), + (1720400052.591865, [], [1, 2, 3, 4, 5, 6]), + (1720475517.057002, [], [1, 2, 3, 4, 5, 6]), + (1720475631.221119, [], [1, 2, 3, 4, 5, 6]), + (1720489341.891834, [], [1, 2, 3, 4, 5, 6]), + (1720489520.813888, [], [1, 2, 3, 4, 5, 6]), + (1720522911.41822, [], [1, 2, 3, 4, 5, 6]), + (1720522980.415637, [], [1, 2, 3, 4, 5, 6]), + (1720522982.44762, [], [1, 2, 3, 4, 5, 6]), + (1720590435.585175, [], [1, 2, 3, 4, 5, 6]), + (1720590601.259611, [], [1, 2, 3, 4, 5, 6]), + (1720590603.315457, [], [1, 2, 3, 4, 5, 6]), + (1720590604.638539, [], [1, 2, 3, 4, 5, 6]), + (1720590606.730642, [], [1, 2, 3, 4, 5, 6]), + (1720612053.860624, [], [1, 2, 3, 4, 5, 6]), + (1720612129.921877, [], [1, 2, 3, 4, 5, 6]), + (1720612132.011818, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720426949.173587, [], [1, 2, 3, 4, 5, 6])], + [(1720569102.034496, [], [1, 2, 3, 4, 5, 6])], + [(1720034790.744638, [], [1, 2, 3, 4, 5, 6]), (1720307075.973997, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226287.705985, [], [1, 2, 3, 4, 5, 6]), + (1720397684.607266, [], [1, 2, 3, 4, 5, 6]), + (1720488997.884315, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720228730.18975, [], [1, 2, 3, 4, 5, 6]), + (1720407495.769529, [], [1, 2, 3, 4, 5, 6]), + (1720486995.921451, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720489080.310875, [], [1, 2, 3, 4, 5, 6]), (1720579311.992295, [], [1, 2, 3, 4, 5, 6])], + [(1720138007.433538, [], [1, 2, 3, 4, 5, 6]), (1720243741.609597, [], [1, 2, 3, 4, 5, 6])], + [(1720528666.459324, [], [1, 2, 3, 4, 5, 6])], + [(1719970772.701878, [], [1, 2, 3, 4, 5, 6]), (1720141632.061506, [], [1, 2, 3, 4, 5, 6])], + [(1720068110.038208, [], [1, 2, 3, 4, 5, 6])], + [(1720047191.032235, [], [1, 2, 3, 4, 5, 6])], + [(1719976436.118248, [], [1, 2, 3, 4, 5, 6]), (1720307037.853977, [], [1, 2, 3, 4, 5, 6])], + [(1719972036.639217, [], [1, 2, 3, 4, 5, 6]), (1720057689.829017, [], [1, 2, 3, 4, 5, 6])], + [(1720110461.39165, [], [1, 2, 3, 4, 5, 6]), (1720507249.36072, [], [1, 2, 3, 4, 5, 6])], + [(1719973197.847086, [], [1, 2, 3, 4, 5, 6])], + [ + (1720069338.721539, [], [1, 2, 3, 4, 5, 6]), + (1720524537.017222, [], [1, 2, 3, 4, 5, 6]), + (1720603176.268707, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720142471.621654, [], [1, 2, 3, 4, 5, 6]), (1720221877.173433, [], [1, 2, 3, 4, 5, 6])], + [(1720141771.947396, [], [1, 2, 3, 4, 5, 6])], + [(1720563222.366935, [], [1, 2, 3, 4, 5, 6])], + [(1720237444.558492, [], [1, 2, 3, 4, 5, 6]), (1720586951.821255, [], [1, 2, 3, 4, 5, 6])], + [ + (1720066787.226665, [], [1, 2, 3, 4, 5, 6]), + (1720138501.376918, [], [1, 2, 3, 4, 5, 6]), + (1720332897.490345, [], [1, 2, 3, 4, 5, 6]), + (1720498267.095353, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719979972.999784, [], [1, 2, 3, 4, 5, 6]), (1720326547.318274, [], [1, 2, 3, 4, 5, 6])], + [(1720156142.458551, [], [1, 2, 3, 4, 5, 6]), (1720222674.830191, [], [1, 2, 3, 4, 5, 6])], + [ + (1719976247.399205, [], [1, 2, 3, 4, 5, 6]), + (1719976421.138728, [], [1, 2, 3, 4, 5, 6]), + (1719976457.121053, [], [1, 2, 3, 4, 5, 6]), + (1719976566.522486, [], [1, 2, 3, 4, 5, 6]), + (1720060116.807539, [], [1, 2, 3, 4, 5, 6]), + (1720142201.528128, [], [1, 2, 3, 4, 5, 6]), + (1720142320.790244, [], [1, 2, 3, 4, 5, 6]), + (1720142322.873716, [], [1, 2, 3, 4, 5, 6]), + (1720142323.209429, [], [1, 2, 3, 4, 5, 6]), + (1720142325.32365, [], [1, 2, 3, 4, 5, 6]), + (1720158309.47212, [], [1, 2, 3, 4, 5, 6]), + (1720158464.699924, [], [1, 2, 3, 4, 5, 6]), + (1720158466.803077, [], [1, 2, 3, 4, 5, 6]), + (1720235827.780639, [], [1, 2, 3, 4, 5, 6]), + (1720235829.873017, [], [1, 2, 3, 4, 5, 6]), + 
(1720235831.516786, [], [1, 2, 3, 4, 5, 6]), + (1720235833.64015, [], [1, 2, 3, 4, 5, 6]), + (1720308111.792929, [], [1, 2, 3, 4, 5, 6]), + (1720308113.917634, [], [1, 2, 3, 4, 5, 6]), + (1720330424.153222, [], [1, 2, 3, 4, 5, 6]), + (1720330426.274619, [], [1, 2, 3, 4, 5, 6]), + (1720397440.529792, [], [1, 2, 3, 4, 5, 6]), + (1720397517.527169, [], [1, 2, 3, 4, 5, 6]), + (1720397519.567891, [], [1, 2, 3, 4, 5, 6]), + (1720489794.692916, [], [1, 2, 3, 4, 5, 6]), + (1720489888.559008, [], [1, 2, 3, 4, 5, 6]), + (1720489890.678539, [], [1, 2, 3, 4, 5, 6]), + (1720577363.385966, [], [1, 2, 3, 4, 5, 6]), + (1720577493.034855, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720042932.668232, [], [1, 2, 3, 4, 5, 6])], + [ + (1720059740.224971, [], [1, 2, 3, 4, 5, 6]), + (1720141837.883794, [], [1, 2, 3, 4, 5, 6]), + (1720405792.751871, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720050443.838424, [], [1, 2, 3, 4, 5, 6]), (1720124719.146549, [], [1, 2, 3, 4, 5, 6])], + [(1719974887.67444, [], [1, 2, 3, 4, 5, 6]), (1720568466.68215, [], [1, 2, 3, 4, 5, 6])], + [(1720065592.314345, [], [1, 2, 3, 4, 5, 6]), (1720205463.888972, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226332.701569, [], [1, 2, 3, 4, 5, 6]), + (1720396620.155135, [], [1, 2, 3, 4, 5, 6]), + (1720492327.218299, [], [1, 2, 3, 4, 5, 6]), + (1720574416.447233, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720045016.618738, [], [1, 2, 3, 4, 5, 6])], + [ + (1720041923.403398, [], [1, 2, 3, 4, 5, 6]), + (1720041925.529856, [], [1, 2, 3, 4, 5, 6]), + (1720041929.344695, [], [1, 2, 3, 4, 5, 6]), + (1720071082.001532, [], [1, 2, 3, 4, 5, 6]), + (1720071201.167727, [], [1, 2, 3, 4, 5, 6]), + (1720071203.259989, [], [1, 2, 3, 4, 5, 6]), + (1720113938.478909, [], [1, 2, 3, 4, 5, 6]), + (1720114019.141219, [], [1, 2, 3, 4, 5, 6]), + (1720147954.114345, [], [1, 2, 3, 4, 5, 6]), + (1720148058.186186, [], [1, 2, 3, 4, 5, 6]), + (1720148060.244565, [], [1, 2, 3, 4, 5, 6]), + (1720230463.28606, [], [1, 2, 3, 4, 5, 6]), + (1720230465.339869, [], [1, 2, 3, 4, 5, 6]), + (1720306384.513301, [], [1, 2, 3, 4, 5, 6]), + (1720306386.603107, [], [1, 2, 3, 4, 5, 6]), + (1720327575.975525, [], [1, 2, 3, 4, 5, 6]), + (1720327821.751969, [], [1, 2, 3, 4, 5, 6]), + (1720327823.781901, [], [1, 2, 3, 4, 5, 6]), + (1720410348.159738, [], [1, 2, 3, 4, 5, 6]), + (1720410448.341114, [], [1, 2, 3, 4, 5, 6]), + (1720457570.237639, [], [1, 2, 3, 4, 5, 6]), + (1720457731.119754, [], [1, 2, 3, 4, 5, 6]), + (1720457733.248545, [], [1, 2, 3, 4, 5, 6]), + (1720499264.385485, [], [1, 2, 3, 4, 5, 6]), + (1720499470.033411, [], [1, 2, 3, 4, 5, 6]), + (1720499472.085357, [], [1, 2, 3, 4, 5, 6]), + (1720543986.94937, [], [1, 2, 3, 4, 5, 6]), + (1720570525.581032, [], [1, 2, 3, 4, 5, 6]), + (1720570749.619565, [], [1, 2, 3, 4, 5, 6]), + (1720585164.854344, [], [1, 2, 3, 4, 5, 6]), + (1720585249.748529, [], [1, 2, 3, 4, 5, 6]), + (1720585251.810485, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720489237.597076, [], [1, 2, 3, 4, 5, 6]), + (1720489298.656835, [], [1, 2, 3, 4, 5, 6]), + (1720489340.853931, [], [1, 2, 3, 4, 5, 6]), + (1720489414.715662, [], [1, 2, 3, 4, 5, 6]), + (1720489578.362748, [], [1, 2, 3, 4, 5, 6]), + (1720489722.080922, [], [1, 2, 3, 4, 5, 6]), + (1720489846.161597, [], [1, 2, 3, 4, 5, 6]), + (1720489902.616032, [], [1, 2, 3, 4, 5, 6]), + (1720489979.179271, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720226541.700259, [], [1, 2, 3, 4, 5, 6]), (1720459357.712795, [], [1, 2, 3, 4, 5, 6])], + [ + (1720137147.179349, [], [1, 2, 3, 4, 5, 6]), + (1720241160.346244, [], [1, 2, 3, 4, 5, 6]), + (1720393844.000636, [], [1, 2, 
3, 4, 5, 6]), + ], + [(1720233961.944441, [], [1, 2, 3, 4, 5, 6]), (1720475993.227865, [], [1, 2, 3, 4, 5, 6])], + [(1720339946.523179, [], [1, 2, 3, 4, 5, 6])], + [(1720314335.142585, [], [1, 2, 3, 4, 5, 6])], + [(1720307070.122528, [], [1, 2, 3, 4, 5, 6]), (1720396806.823553, [], [1, 2, 3, 4, 5, 6])], + [(1720134885.254524, [], [1, 2, 3, 4, 5, 6]), (1720582472.172677, [], [1, 2, 3, 4, 5, 6])], + [ + (1720064989.52196, [], [1, 2, 3, 4, 5, 6]), + (1720317471.54011, [], [1, 2, 3, 4, 5, 6]), + (1720405569.646675, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720230240.375249, [], [1, 2, 3, 4, 5, 6])], + [ + (1719981989.67667, [], [1, 2, 3, 4, 5, 6]), + (1719982209.356573, [], [1, 2, 3, 4, 5, 6]), + (1719982211.412871, [], [1, 2, 3, 4, 5, 6]), + (1719990729.511533, [], [1, 2, 3, 4, 5, 6]), + (1719990731.571772, [], [1, 2, 3, 4, 5, 6]), + (1720052018.486278, [], [1, 2, 3, 4, 5, 6]), + (1720052084.895512, [], [1, 2, 3, 4, 5, 6]), + (1720052086.959136, [], [1, 2, 3, 4, 5, 6]), + (1720063752.458627, [], [1, 2, 3, 4, 5, 6]), + (1720063775.402517, [], [1, 2, 3, 4, 5, 6]), + (1720131365.952047, [], [1, 2, 3, 4, 5, 6]), + (1720131512.217778, [], [1, 2, 3, 4, 5, 6]), + (1720131514.344434, [], [1, 2, 3, 4, 5, 6]), + (1720155954.588913, [], [1, 2, 3, 4, 5, 6]), + (1720156049.221823, [], [1, 2, 3, 4, 5, 6]), + (1720326057.45718, [], [1, 2, 3, 4, 5, 6]), + (1720404760.882693, [], [1, 2, 3, 4, 5, 6]), + (1720404806.729924, [], [1, 2, 3, 4, 5, 6]), + (1720404808.834418, [], [1, 2, 3, 4, 5, 6]), + (1720416517.018963, [], [1, 2, 3, 4, 5, 6]), + (1720494367.532053, [], [1, 2, 3, 4, 5, 6]), + (1720500247.551019, [], [1, 2, 3, 4, 5, 6]), + (1720500294.606063, [], [1, 2, 3, 4, 5, 6]), + (1720500296.694825, [], [1, 2, 3, 4, 5, 6]), + (1720500299.259697, [], [1, 2, 3, 4, 5, 6]), + (1720500301.365635, [], [1, 2, 3, 4, 5, 6]), + (1720572338.244531, [], [1, 2, 3, 4, 5, 6]), + (1720572428.794186, [], [1, 2, 3, 4, 5, 6]), + (1720572430.860491, [], [1, 2, 3, 4, 5, 6]), + (1720600877.354363, [], [1, 2, 3, 4, 5, 6]), + (1720601092.109844, [], [1, 2, 3, 4, 5, 6]), + (1720601094.164843, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720230142.987461, [], [1, 2, 3, 4, 5, 6])], + [(1720040834.068992, [], [1, 2, 3, 4, 5, 6])], + [ + (1719984036.646786, [], [1, 2, 3, 4, 5, 6]), + (1720138775.775437, [], [1, 2, 3, 4, 5, 6]), + (1720337436.06649, [], [1, 2, 3, 4, 5, 6]), + (1720567415.54222, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720501680.278938, [], [1, 2, 3, 4, 5, 6]), (1720645969.459141, [], [1, 2, 3, 4, 5, 6])], + [(1719981148.135955, [], [1, 2, 3, 4, 5, 6]), (1720574648.013669, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982323.222591, [], [1, 2, 3, 4, 5, 6]), + (1720101646.380659, [], [1, 2, 3, 4, 5, 6]), + (1720493833.121559, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719977634.84991, [], [1, 2, 3, 4, 5, 6])], + [(1720494761.805988, [], [1, 2, 3, 4, 5, 6]), (1720595943.849674, [], [1, 2, 3, 4, 5, 6])], + [ + (1720155432.624618, [], [1, 2, 3, 4, 5, 6]), + (1720500643.020756, [], [1, 2, 3, 4, 5, 6]), + (1720584683.624928, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720245035.196003, [], [1, 2, 3, 4, 5, 6])], + [(1720313673.855042, [], [1, 2, 3, 4, 5, 6]), (1720381149.495776, [], [1, 2, 3, 4, 5, 6])], + [(1720019520.657419, [], [1, 2, 3, 4, 5, 6])], + [(1719981920.434976, [], [1, 2, 3, 4, 5, 6]), (1720128572.036838, [], [1, 2, 3, 4, 5, 6])], + [(1720136202.220094, [], [1, 2, 3, 4, 5, 6]), (1720279940.922158, [], [1, 2, 3, 4, 5, 6])], + [(1720462395.987553, [], [1, 2, 3, 4, 5, 6])], + [ + (1720653383.244, [], [1, 2, 3, 4, 5, 6]), + (1720653383.325, [], [1, 2, 3, 4, 5, 6]), + 
(1720653391.627, [], [1, 2, 3, 4, 5, 6]), + (1720653392.102, [], [1, 2, 3, 4, 5, 6]), + (1720653392.298, [], [1, 2, 3, 4, 5, 6]), + (1720653394.934, [], [1, 2, 3, 4, 5, 6]), + (1720653396.411, [], [1, 2, 3, 4, 5, 6]), + (1720653433.093, [], [1, 2, 3, 4, 5, 6]), + (1720653433.236, [], [1, 2, 3, 4, 5, 6]), + (1720653434.991, [], [1, 2, 3, 4, 5, 6]), + (1720653435.037, [], [1, 2, 3, 4, 5, 6]), + (1720653501.654, [], [1, 2, 3, 4, 5, 6]), + (1720653501.71, [], [1, 2, 3, 4, 5, 6]), + (1720653504.799, [], [1, 2, 3, 4, 5, 6]), + (1720653506.446, [], [1, 2, 3, 4, 5, 6]), + (1720653507.872, [], [1, 2, 3, 4, 5, 6]), + (1720654003.023, [], [1, 2, 3, 4, 5, 6]), + (1720654003.148, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719977539.575178, [], [1, 2, 3, 4, 5, 6]), + (1720223726.13705, [], [1, 2, 3, 4, 5, 6]), + (1720396336.894644, [], [1, 2, 3, 4, 5, 6]), + (1720587683.68083, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720327049.710872, [], [1, 2, 3, 4, 5, 6])], + [(1720223003.678086, [], [1, 2, 3, 4, 5, 6]), (1720320656.874702, [], [1, 2, 3, 4, 5, 6])], + [(1720149475.628567, [], [1, 2, 3, 4, 5, 6]), (1720228859.277837, [], [1, 2, 3, 4, 5, 6])], + [(1720049864.230497, [], [1, 2, 3, 4, 5, 6])], + [(1720198432.201466, [], [1, 2, 3, 4, 5, 6])], + [ + (1719975613.439717, [], [1, 2, 3, 4, 5, 6]), + (1719975648.049123, [], [1, 2, 3, 4, 5, 6]), + (1720056914.90013, [], [1, 2, 3, 4, 5, 6]), + (1720057026.542911, [], [1, 2, 3, 4, 5, 6]), + (1720061926.526142, [], [1, 2, 3, 4, 5, 6]), + (1720062007.614611, [], [1, 2, 3, 4, 5, 6]), + (1720147419.43368, [], [1, 2, 3, 4, 5, 6]), + (1720147421.485277, [], [1, 2, 3, 4, 5, 6]), + (1720234139.651394, [], [1, 2, 3, 4, 5, 6]), + (1720234141.758276, [], [1, 2, 3, 4, 5, 6]), + (1720315191.984726, [], [1, 2, 3, 4, 5, 6]), + (1720315194.093018, [], [1, 2, 3, 4, 5, 6]), + (1720315195.836394, [], [1, 2, 3, 4, 5, 6]), + (1720395738.54726, [], [1, 2, 3, 4, 5, 6]), + (1720395740.684533, [], [1, 2, 3, 4, 5, 6]), + (1720410342.218884, [], [1, 2, 3, 4, 5, 6]), + (1720410455.568303, [], [1, 2, 3, 4, 5, 6]), + (1720496479.412713, [], [1, 2, 3, 4, 5, 6]), + (1720496636.329168, [], [1, 2, 3, 4, 5, 6]), + (1720568810.362519, [], [1, 2, 3, 4, 5, 6]), + (1720569040.475975, [], [1, 2, 3, 4, 5, 6]), + (1720652557.884167, [], [1, 2, 3, 4, 5, 6]), + (1720652630.129755, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720233899.203213, [], [1, 2, 3, 4, 5, 6]), + (1720463796.00711, [], [1, 2, 3, 4, 5, 6]), + (1720567454.878169, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720218589.331391, [], [1, 2, 3, 4, 5, 6]), + (1720572246.733219, [], [1, 2, 3, 4, 5, 6]), + (1720585861.133309, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720030179.060188, [], [1, 2, 3, 4, 5, 6]), + (1720330759.17762, [], [1, 2, 3, 4, 5, 6]), + (1720494515.69797, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719979902.797865, [], [1, 2, 3, 4, 5, 6])], + [ + (1720070638.173492, [], [1, 2, 3, 4, 5, 6]), + (1720070819.083453, [], [1, 2, 3, 4, 5, 6]), + (1720070899.802295, [], [1, 2, 3, 4, 5, 6]), + (1720318683.767078, [], [1, 2, 3, 4, 5, 6]), + (1720318886.533145, [], [1, 2, 3, 4, 5, 6]), + (1720318966.57212, [], [1, 2, 3, 4, 5, 6]), + (1720318995.968059, [], [1, 2, 3, 4, 5, 6]), + (1720319139.50433, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720146380.443011, [], [1, 2, 3, 4, 5, 6]), (1720567703.854081, [], [1, 2, 3, 4, 5, 6])], + [(1720485224.936769, [], [1, 2, 3, 4, 5, 6])], + [(1720039180.636756, [], [1, 2, 3, 4, 5, 6]), (1720320703.675688, [], [1, 2, 3, 4, 5, 6])], + [(1720572877.111346, [], [1, 2, 3, 4, 5, 6])], + [(1720149009.624794, [], [1, 2, 3, 4, 5, 6])], + [ + 
(1720146066.037668, [], [1, 2, 3, 4, 5, 6]), + (1720233446.585623, [], [1, 2, 3, 4, 5, 6]), + (1720397647.223612, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720071535.98588, [], [1, 2, 3, 4, 5, 6]), + (1720222696.675857, [], [1, 2, 3, 4, 5, 6]), + (1720581710.534385, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720320564.297259, [], [1, 2, 3, 4, 5, 6])], + [(1720221596.179609, [], [1, 2, 3, 4, 5, 6]), (1720502714.197196, [], [1, 2, 3, 4, 5, 6])], + [(1720553799.408143, [], [1, 2, 3, 4, 5, 6])], + [ + (1720244362.654861, [], [1, 2, 3, 4, 5, 6]), + (1720412405.21556, [], [1, 2, 3, 4, 5, 6]), + (1720566429.648086, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720225993.003411, [], [1, 2, 3, 4, 5, 6]), (1720576860.143664, [], [1, 2, 3, 4, 5, 6])], + [(1720050680.82043, [], [1, 2, 3, 4, 5, 6])], + [(1719979231.190542, [], [1, 2, 3, 4, 5, 6]), (1720395944.084001, [], [1, 2, 3, 4, 5, 6])], + [(1720483644.896944, [], [1, 2, 3, 4, 5, 6])], + [ + (1720238837.512808, [], [1, 2, 3, 4, 5, 6]), + (1720400917.965225, [], [1, 2, 3, 4, 5, 6]), + (1720499924.896186, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978670.650293, [], [1, 2, 3, 4, 5, 6]), (1720312908.844749, [], [1, 2, 3, 4, 5, 6])], + [ + (1720064475.615309, [], [1, 2, 3, 4, 5, 6]), + (1720311005.200102, [], [1, 2, 3, 4, 5, 6]), + (1720398033.682041, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1719974842.261587, [], [1, 2, 3, 4, 5, 6]), + (1720070482.809945, [], [1, 2, 3, 4, 5, 6]), + (1720492054.306253, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719985346.86067, [], [1, 2, 3, 4, 5, 6])], + [(1720494586.311794, [], [1, 2, 3, 4, 5, 6])], + [ + (1720151598.2787, [], [1, 2, 3, 4, 5, 6]), + (1720243336.399964, [], [1, 2, 3, 4, 5, 6]), + (1720394460.006175, [], [1, 2, 3, 4, 5, 6]), + (1720584803.786632, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720044414.812182, [], [1, 2, 3, 4, 5, 6]), (1720161374.32603, [], [1, 2, 3, 4, 5, 6])], + [(1720242818.837344, [], [1, 2, 3, 4, 5, 6])], + [(1720407806.948096, [], [1, 2, 3, 4, 5, 6]), (1720511793.967125, [], [1, 2, 3, 4, 5, 6])], + [(1720177465.166679, [], [1, 2, 3, 4, 5, 6])], + [(1720152899.613121, [], [1, 2, 3, 4, 5, 6])], + [(1720144169.768087, [], [1, 2, 3, 4, 5, 6])], + [(1720050900.326256, [], [1, 2, 3, 4, 5, 6]), (1720592651.789908, [], [1, 2, 3, 4, 5, 6])], + [(1720136535.399876, [], [1, 2, 3, 4, 5, 6]), (1720345016.561725, [], [1, 2, 3, 4, 5, 6])], + [(1720148677.039505, [], [1, 2, 3, 4, 5, 6])], + [(1720103982.765975, [], [1, 2, 3, 4, 5, 6]), (1720223275.492349, [], [1, 2, 3, 4, 5, 6])], + [(1719966246.265247, [], [1, 2, 3, 4, 5, 6])], + [(1720048787.249996, [], [1, 2, 3, 4, 5, 6]), (1720588475.186395, [], [1, 2, 3, 4, 5, 6])], + [ + (1720406823.932911, [], [1, 2, 3, 4, 5, 6]), + (1720406854.864424, [], [1, 2, 3, 4, 5, 6]), + (1720406898.943281, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720106989.608336, [], [1, 2, 3, 4, 5, 6])], + [(1719965156.233365, [], [1, 2, 3, 4, 5, 6]), (1720156113.65034, [], [1, 2, 3, 4, 5, 6])], + [(1720237894.767081, [], [1, 2, 3, 4, 5, 6])], + [ + (1720236335.89358, [], [1, 2, 3, 4, 5, 6]), + (1720311377.453215, [], [1, 2, 3, 4, 5, 6]), + (1720406308.416613, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720580297.715893, [], [1, 2, 3, 4, 5, 6])], + [(1719983515.156201, [], [1, 2, 3, 4, 5, 6]), (1720243011.26844, [], [1, 2, 3, 4, 5, 6])], + [(1720412740.206646, [], [1, 2, 3, 4, 5, 6])], + [(1720573676.882026, [], [1, 2, 3, 4, 5, 6])], + [(1720069113.016836, [], [1, 2, 3, 4, 5, 6])], + [(1720065156.88711, [], [1, 2, 3, 4, 5, 6]), (1720342013.62189, [], [1, 2, 3, 4, 5, 6])], + [(1720414414.37316, [], [1, 2, 3, 4, 5, 6]), (1720576057.542994, [], 
[1, 2, 3, 4, 5, 6])], + [ + (1719965980.977528, [], [1, 2, 3, 4, 5, 6]), + (1720328208.291947, [], [1, 2, 3, 4, 5, 6]), + (1720586256.843288, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719993285.557781, [], [1, 2, 3, 4, 5, 6])], + [ + (1720157474.360894, [], [1, 2, 3, 4, 5, 6]), + (1720317049.692797, [], [1, 2, 3, 4, 5, 6]), + (1720418157.354486, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720492206.117992, [], [1, 2, 3, 4, 5, 6])], + [(1720566094.344648, [], [1, 2, 3, 4, 5, 6])], + [(1719974058.930323, [], [1, 2, 3, 4, 5, 6]), (1720059173.893018, [], [1, 2, 3, 4, 5, 6])], + [(1720517061.661773, [], [1, 2, 3, 4, 5, 6])], + [(1720052300.009912, [], [1, 2, 3, 4, 5, 6]), (1720447926.535749, [], [1, 2, 3, 4, 5, 6])], + [(1720060153.321408, [], [1, 2, 3, 4, 5, 6]), (1720498576.79341, [], [1, 2, 3, 4, 5, 6])], + [(1720415193.154478, [], [1, 2, 3, 4, 5, 6]), (1720494529.74019, [], [1, 2, 3, 4, 5, 6])], + [ + (1719980354.732889, [], [1, 2, 3, 4, 5, 6]), + (1719980356.852338, [], [1, 2, 3, 4, 5, 6]), + (1719980359.805901, [], [1, 2, 3, 4, 5, 6]), + (1719980361.898886, [], [1, 2, 3, 4, 5, 6]), + (1719980364.204398, [], [1, 2, 3, 4, 5, 6]), + (1720063622.210305, [], [1, 2, 3, 4, 5, 6]), + (1720063766.011158, [], [1, 2, 3, 4, 5, 6]), + (1720063768.060823, [], [1, 2, 3, 4, 5, 6]), + (1720134932.57792, [], [1, 2, 3, 4, 5, 6]), + (1720148426.91756, [], [1, 2, 3, 4, 5, 6]), + (1720148428.987966, [], [1, 2, 3, 4, 5, 6]), + (1720237634.85931, [], [1, 2, 3, 4, 5, 6]), + (1720237687.961173, [], [1, 2, 3, 4, 5, 6]), + (1720321596.679301, [], [1, 2, 3, 4, 5, 6]), + (1720394727.592533, [], [1, 2, 3, 4, 5, 6]), + (1720394743.278857, [], [1, 2, 3, 4, 5, 6]), + (1720408408.443408, [], [1, 2, 3, 4, 5, 6]), + (1720419213.527306, [], [1, 2, 3, 4, 5, 6]), + (1720419321.981, [], [1, 2, 3, 4, 5, 6]), + (1720419324.073269, [], [1, 2, 3, 4, 5, 6]), + (1720497577.385151, [], [1, 2, 3, 4, 5, 6]), + (1720497694.789568, [], [1, 2, 3, 4, 5, 6]), + (1720497696.883431, [], [1, 2, 3, 4, 5, 6]), + (1720584999.597212, [], [1, 2, 3, 4, 5, 6]), + (1720585001.687849, [], [1, 2, 3, 4, 5, 6]), + (1720585005.063862, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720134251.830614, [], [1, 2, 3, 4, 5, 6]), + (1720407054.526951, [], [1, 2, 3, 4, 5, 6]), + (1720543564.686466, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720395362.215692, [], [1, 2, 3, 4, 5, 6]), (1720500480.122778, [], [1, 2, 3, 4, 5, 6])], + [ + (1720058412.695383, [], [1, 2, 3, 4, 5, 6]), + (1720228775.865928, [], [1, 2, 3, 4, 5, 6]), + (1720503282.31697, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720614350.980502, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982519.149294, [], [1, 2, 3, 4, 5, 6]), + (1719982589.903207, [], [1, 2, 3, 4, 5, 6]), + (1719982591.966235, [], [1, 2, 3, 4, 5, 6]), + (1719982594.824529, [], [1, 2, 3, 4, 5, 6]), + (1720056059.106374, [], [1, 2, 3, 4, 5, 6]), + (1720056099.353137, [], [1, 2, 3, 4, 5, 6]), + (1720056101.438857, [], [1, 2, 3, 4, 5, 6]), + (1720084141.137944, [], [1, 2, 3, 4, 5, 6]), + (1720084195.578773, [], [1, 2, 3, 4, 5, 6]), + (1720136869.202173, [], [1, 2, 3, 4, 5, 6]), + (1720223064.342828, [], [1, 2, 3, 4, 5, 6]), + (1720223120.591787, [], [1, 2, 3, 4, 5, 6]), + (1720223122.696149, [], [1, 2, 3, 4, 5, 6]), + (1720239556.237398, [], [1, 2, 3, 4, 5, 6]), + (1720239630.045363, [], [1, 2, 3, 4, 5, 6]), + (1720239632.137037, [], [1, 2, 3, 4, 5, 6]), + (1720312988.468776, [], [1, 2, 3, 4, 5, 6]), + (1720313161.594176, [], [1, 2, 3, 4, 5, 6]), + (1720313163.656358, [], [1, 2, 3, 4, 5, 6]), + (1720413652.862676, [], [1, 2, 3, 4, 5, 6]), + (1720413773.395596, [], [1, 2, 3, 4, 5, 6]), + 
(1720484458.010065, [], [1, 2, 3, 4, 5, 6]), + (1720484503.114542, [], [1, 2, 3, 4, 5, 6]), + (1720484505.173957, [], [1, 2, 3, 4, 5, 6]), + (1720570920.862746, [], [1, 2, 3, 4, 5, 6]), + (1720571065.994777, [], [1, 2, 3, 4, 5, 6]), + (1720571068.086575, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720138634.579801, [], [1, 2, 3, 4, 5, 6]), (1720394701.653755, [], [1, 2, 3, 4, 5, 6])], + [(1720404840.88735, [], [1, 2, 3, 4, 5, 6]), (1720570759.329975, [], [1, 2, 3, 4, 5, 6])], + [(1720474997.255842, [], [1, 2, 3, 4, 5, 6])], + [ + (1719964981.812038, [], [1, 2, 3, 4, 5, 6]), + (1719965079.656724, [], [1, 2, 3, 4, 5, 6]), + (1719965081.766625, [], [1, 2, 3, 4, 5, 6]), + (1720017945.346535, [], [1, 2, 3, 4, 5, 6]), + (1720018196.228851, [], [1, 2, 3, 4, 5, 6]), + (1720018198.332037, [], [1, 2, 3, 4, 5, 6]), + (1720071944.789981, [], [1, 2, 3, 4, 5, 6]), + (1720071989.860765, [], [1, 2, 3, 4, 5, 6]), + (1720071991.963241, [], [1, 2, 3, 4, 5, 6]), + (1720226601.357382, [], [1, 2, 3, 4, 5, 6]), + (1720226662.671017, [], [1, 2, 3, 4, 5, 6]), + (1720226664.720854, [], [1, 2, 3, 4, 5, 6]), + (1720226666.697991, [], [1, 2, 3, 4, 5, 6]), + (1720245432.525672, [], [1, 2, 3, 4, 5, 6]), + (1720245586.690365, [], [1, 2, 3, 4, 5, 6]), + (1720245588.811888, [], [1, 2, 3, 4, 5, 6]), + (1720313288.75101, [], [1, 2, 3, 4, 5, 6]), + (1720313438.935319, [], [1, 2, 3, 4, 5, 6]), + (1720313440.997298, [], [1, 2, 3, 4, 5, 6]), + (1720325185.461926, [], [1, 2, 3, 4, 5, 6]), + (1720325279.708469, [], [1, 2, 3, 4, 5, 6]), + (1720325281.823994, [], [1, 2, 3, 4, 5, 6]), + (1720325284.895173, [], [1, 2, 3, 4, 5, 6]), + (1720325286.963747, [], [1, 2, 3, 4, 5, 6]), + (1720351212.007507, [], [1, 2, 3, 4, 5, 6]), + (1720351417.722923, [], [1, 2, 3, 4, 5, 6]), + (1720351419.786979, [], [1, 2, 3, 4, 5, 6]), + (1720410234.644402, [], [1, 2, 3, 4, 5, 6]), + (1720410236.746729, [], [1, 2, 3, 4, 5, 6]), + (1720484087.598816, [], [1, 2, 3, 4, 5, 6]), + (1720484089.656452, [], [1, 2, 3, 4, 5, 6]), + (1720560975.588946, [], [1, 2, 3, 4, 5, 6]), + (1720561062.767708, [], [1, 2, 3, 4, 5, 6]), + (1720576170.001406, [], [1, 2, 3, 4, 5, 6]), + (1720576274.339938, [], [1, 2, 3, 4, 5, 6]), + (1720634969.318238, [], [1, 2, 3, 4, 5, 6]), + (1720634971.383262, [], [1, 2, 3, 4, 5, 6]), + (1720634973.669218, [], [1, 2, 3, 4, 5, 6]), + (1720634975.727614, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720047138.987663, [], [1, 2, 3, 4, 5, 6]), + (1720239116.860589, [], [1, 2, 3, 4, 5, 6]), + (1720567216.089602, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720062114.160483, [], [1, 2, 3, 4, 5, 6])], + [(1719974901.32474, [], [1, 2, 3, 4, 5, 6]), (1720224712.94567, [], [1, 2, 3, 4, 5, 6])], + [ + (1719985511.407849, [], [1, 2, 3, 4, 5, 6]), + (1720140363.584567, [], [1, 2, 3, 4, 5, 6]), + (1720573348.34834, [], [1, 2, 3, 4, 5, 6]), + (1720649971.95392, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720325668.53657, [], [1, 2, 3, 4, 5, 6]), (1720407800.484026, [], [1, 2, 3, 4, 5, 6])], + [(1720313988.784401, [], [1, 2, 3, 4, 5, 6])], + [ + (1720137608.121513, [], [1, 2, 3, 4, 5, 6]), + (1720230219.916298, [], [1, 2, 3, 4, 5, 6]), + (1720576804.122481, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060264.40999, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982324.891431, [], [1, 2, 3, 4, 5, 6]), + (1719982326.95929, [], [1, 2, 3, 4, 5, 6]), + (1720072365.964045, [], [1, 2, 3, 4, 5, 6]), + (1720072368.013382, [], [1, 2, 3, 4, 5, 6]), + (1720185779.887725, [], [1, 2, 3, 4, 5, 6]), + (1720185782.013458, [], [1, 2, 3, 4, 5, 6]), + (1720230424.054008, [], [1, 2, 3, 4, 5, 6]), + (1720329463.631365, [], [1, 2, 
3, 4, 5, 6]), + (1720329503.210461, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719978020.337077, [], [1, 2, 3, 4, 5, 6]), (1720378278.012205, [], [1, 2, 3, 4, 5, 6])], + [ + (1720198700.302556, [], [1, 2, 3, 4, 5, 6]), + (1720417057.718199, [], [1, 2, 3, 4, 5, 6]), + (1720584860.786802, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720627126.675399, [], [1, 2, 3, 4, 5, 6])], + [(1720072022.286212, [], [1, 2, 3, 4, 5, 6]), (1720366423.980574, [], [1, 2, 3, 4, 5, 6])], + [(1720221042.039954, [], [1, 2, 3, 4, 5, 6])], + [ + (1720060635.731519, [], [1, 2, 3, 4, 5, 6]), + (1720210299.946067, [], [1, 2, 3, 4, 5, 6]), + (1720283154.070272, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720402469.930097, [], [1, 2, 3, 4, 5, 6])], + [ + (1719972611.185894, [], [1, 2, 3, 4, 5, 6]), + (1720227219.185837, [], [1, 2, 3, 4, 5, 6]), + (1720565623.051185, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720060770.015072, [], [1, 2, 3, 4, 5, 6])], + [ + (1719979906.872722, [], [1, 2, 3, 4, 5, 6]), + (1719980094.376717, [], [1, 2, 3, 4, 5, 6]), + (1719980096.476308, [], [1, 2, 3, 4, 5, 6]), + (1720067152.682142, [], [1, 2, 3, 4, 5, 6]), + (1720067174.598435, [], [1, 2, 3, 4, 5, 6]), + (1720137635.975558, [], [1, 2, 3, 4, 5, 6]), + (1720137733.593423, [], [1, 2, 3, 4, 5, 6]), + (1720224539.774939, [], [1, 2, 3, 4, 5, 6]), + (1720323664.982932, [], [1, 2, 3, 4, 5, 6]), + (1720400336.830381, [], [1, 2, 3, 4, 5, 6]), + (1720400497.747426, [], [1, 2, 3, 4, 5, 6]), + (1720400499.843107, [], [1, 2, 3, 4, 5, 6]), + (1720486404.88152, [], [1, 2, 3, 4, 5, 6]), + (1720486460.387837, [], [1, 2, 3, 4, 5, 6]), + (1720486462.465262, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720494960.521565, [], [1, 2, 3, 4, 5, 6])], + [(1720492283.522426, [], [1, 2, 3, 4, 5, 6])], + [(1720588131.39025, [], [1, 2, 3, 4, 5, 6])], + [(1719965172.184078, [], [1, 2, 3, 4, 5, 6])], + [(1720313653.224728, [], [1, 2, 3, 4, 5, 6])], + [(1720133961.331413, [], [1, 2, 3, 4, 5, 6])], + [(1719969914.979558, [], [1, 2, 3, 4, 5, 6])], + [ + (1720051155.959984, [], [1, 2, 3, 4, 5, 6]), + (1720318569.685111, [], [1, 2, 3, 4, 5, 6]), + (1720499729.951734, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720328273.411971, [], [1, 2, 3, 4, 5, 6])], + [ + (1719982315.965122, [], [1, 2, 3, 4, 5, 6]), + (1720423276.150804, [], [1, 2, 3, 4, 5, 6]), + (1720586911.740203, [], [1, 2, 3, 4, 5, 6]), + ], + [(1719968679.211527, [], [1, 2, 3, 4, 5, 6])], + [(1720063388.278848, [], [1, 2, 3, 4, 5, 6]), (1720416336.796001, [], [1, 2, 3, 4, 5, 6])], + [(1720398479.735494, [], [1, 2, 3, 4, 5, 6]), (1720493260.033312, [], [1, 2, 3, 4, 5, 6])], + [ + (1720489609.661573, [], [1, 2, 3, 4, 5, 6]), + (1720489700.750791, [], [1, 2, 3, 4, 5, 6]), + (1720489717.546997, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720500732.208908, [], [1, 2, 3, 4, 5, 6])], + [ + (1720153118.225066, [], [1, 2, 3, 4, 5, 6]), + (1720314031.634943, [], [1, 2, 3, 4, 5, 6]), + (1720590337.724401, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720070140.554847, [], [1, 2, 3, 4, 5, 6]), (1720137932.433554, [], [1, 2, 3, 4, 5, 6])], + [(1719992154.926275, [], [1, 2, 3, 4, 5, 6]), (1720320574.945082, [], [1, 2, 3, 4, 5, 6])], + [(1719984916.520671, [], [1, 2, 3, 4, 5, 6]), (1720569849.178614, [], [1, 2, 3, 4, 5, 6])], + [(1720140614.641046, [], [1, 2, 3, 4, 5, 6]), (1720395184.350061, [], [1, 2, 3, 4, 5, 6])], + [(1720310387.035179, [], [1, 2, 3, 4, 5, 6]), (1720473940.199193, [], [1, 2, 3, 4, 5, 6])], + [(1720062920.051834, [], [1, 2, 3, 4, 5, 6]), (1720226181.474055, [], [1, 2, 3, 4, 5, 6])], + [(1720470329.222623, [], [1, 2, 3, 4, 5, 6])], + [(1720582334.499662, [], [1, 2, 3, 4, 5, 6])], + 
[(1720443828.896214, [], [1, 2, 3, 4, 5, 6]), (1720580682.756419, [], [1, 2, 3, 4, 5, 6])], + [ + (1720226425.344326, [], [1, 2, 3, 4, 5, 6]), + (1720310598.961662, [], [1, 2, 3, 4, 5, 6]), + (1720589761.631011, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720584698.862455, [], [1, 2, 3, 4, 5, 6])], + [(1720164879.185564, [], [1, 2, 3, 4, 5, 6]), (1720323846.480885, [], [1, 2, 3, 4, 5, 6])], + [(1720051096.071376, [], [1, 2, 3, 4, 5, 6]), (1720157299.452758, [], [1, 2, 3, 4, 5, 6])], + [ + (1720223524.412388, [], [1, 2, 3, 4, 5, 6]), + (1720326592.782923, [], [1, 2, 3, 4, 5, 6]), + (1720578100.065601, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720313647.455076, [], [1, 2, 3, 4, 5, 6]), (1720559337.211802, [], [1, 2, 3, 4, 5, 6])], + [ + (1719981335.449573, [], [1, 2, 3, 4, 5, 6]), + (1720067595.1521, [], [1, 2, 3, 4, 5, 6]), + (1720319132.823969, [], [1, 2, 3, 4, 5, 6]), + (1720491547.165147, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720019886.753884, [], [1, 2, 3, 4, 5, 6]), + (1720153101.556554, [], [1, 2, 3, 4, 5, 6]), + (1720313536.357232, [], [1, 2, 3, 4, 5, 6]), + (1720485395.202604, [], [1, 2, 3, 4, 5, 6]), + (1720568839.562655, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720310841.194378, [], [1, 2, 3, 4, 5, 6]), + (1720310912.574061, [], [1, 2, 3, 4, 5, 6]), + (1720310914.655803, [], [1, 2, 3, 4, 5, 6]), + (1720587828.804404, [], [1, 2, 3, 4, 5, 6]), + (1720588071.078858, [], [1, 2, 3, 4, 5, 6]), + (1720588073.115074, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720055953.618503, [], [1, 2, 3, 4, 5, 6]), + (1720223652.080905, [], [1, 2, 3, 4, 5, 6]), + (1720308372.703732, [], [1, 2, 3, 4, 5, 6]), + (1720624033.359415, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720173756.125859, [], [1, 2, 3, 4, 5, 6]), + (1720315128.683231, [], [1, 2, 3, 4, 5, 6]), + (1720466410.646777, [], [1, 2, 3, 4, 5, 6]), + ], + [(1720157263.810637, [], [1, 2, 3, 4, 5, 6]), (1720235081.49838, [], [1, 2, 3, 4, 5, 6])], + [ + (1720229216.005254, [], [1, 2, 3, 4, 5, 6]), + (1720557735.625871, [], [1, 2, 3, 4, 5, 6]), + (1720627294.521232, [], [1, 2, 3, 4, 5, 6]), + ], + [ + (1720394336.326148, [], [1, 2, 3, 4, 5, 6]), + (1720394382.775033, [], [1, 2, 3, 4, 5, 6]), + (1720394404.054186, [], [1, 2, 3, 4, 5, 6]), + (1720394488.655765, [], [1, 2, 3, 4, 5, 6]), + (1720394583.815862, [], [1, 2, 3, 4, 5, 6]), + (1720394609.744123, [], [1, 2, 3, 4, 5, 6]), + (1720394643.351845, [], [1, 2, 3, 4, 5, 6]), + (1720394661.447752, [], [1, 2, 3, 4, 5, 6]), + (1720394715.354794, [], [1, 2, 3, 4, 5, 6]), + ], + ] + for b in a: + calculate_funnel_from_user_events(6, 1123200, "first_touch", "ordered", [[]], b) diff --git a/posthog/user_scripts/aggregate_funnel.py b/posthog/user_scripts/aggregate_funnel.py new file mode 100755 index 0000000000000..162918a819625 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel.py @@ -0,0 +1,144 @@ +#!/usr/bin/python3 +import json +import sys +from dataclasses import dataclass, replace +from itertools import groupby, permutations +from typing import Any, cast +from collections.abc import Sequence + + +def parse_args(line): + args = json.loads(line) + return [ + int(args["num_steps"]), + int(args["conversion_window_limit"]), + str(args["breakdown_attribution_type"]), + str(args["funnel_order_type"]), + args["prop_vals"], # Array(Array(String)) + args["value"], # Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + ] + + +@dataclass(frozen=True) +class EnteredTimestamp: + timestamp: Any + timings: Any + + +# each one can be multiple steps here +# it only matters when they entered the funnel - you can 
propagate the time from the previous step when you update +# This function is defined for ClickHouse in user_defined_function.xml along with types +# num_steps is the total number of steps in the funnel +# conversion_window_limit is in seconds +# events is an array of tuples of (timestamp, breakdown, [steps]) +# steps is an array of integers which represent the steps that this event qualifies for. it looks like [1,3,5,6]. +# negative integers represent an exclusion on that step. each event is either all exclusions or all steps. +def calculate_funnel_from_user_events( + num_steps: int, + conversion_window_limit_seconds: int, + breakdown_attribution_type: str, + funnel_order_type: str, + prop_vals: list[Any], + events: Sequence[tuple[float, list[str] | int | str, list[int]]], +): + default_entered_timestamp = EnteredTimestamp(0, []) + max_step = [0, default_entered_timestamp] + # If the attribution mode is a breakdown step, set this to the integer that represents that step + breakdown_step = int(breakdown_attribution_type[5:]) if breakdown_attribution_type.startswith("step_") else None + + # This function returns an Array. We build up an array of result tuples to return here. + results: list[tuple[int, Any, list[float]]] = [] + + # Process an event. If this hits an exclusion, return False, else return True. + def process_event(timestamp, breakdown, steps, *, entered_timestamp, prop_val) -> bool: + # iterate the steps in reverse so we don't count this event multiple times + for step in reversed(steps): + exclusion = False + if step < 0: + exclusion = True + step = -step + + in_match_window = timestamp - entered_timestamp[step - 1].timestamp <= conversion_window_limit_seconds + already_reached_this_step_with_same_entered_timestamp = ( + entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp + ) + + if in_match_window and not already_reached_this_step_with_same_entered_timestamp: + if exclusion: + results.append((-1, prop_val, [])) + return False + is_unmatched_step_attribution = ( + breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown + ) + if not is_unmatched_step_attribution: + entered_timestamp[step] = replace( + entered_timestamp[step - 1], timings=[*entered_timestamp[step - 1].timings, timestamp] + ) + if step > max_step[0]: + max_step[:] = (step, entered_timestamp[step]) + + if funnel_order_type == "strict": + for i in range(len(entered_timestamp)): + if i not in steps: + entered_timestamp[i] = default_entered_timestamp + + return True + + # We call this for each possible breakdown value. 
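(Editor's aside: the comments above describe the wire format only loosely, so here is a minimal, self-contained sketch of one stdin line as parse_args expects it and the call it turns into. The field names come from parse_args; the timestamps, window, and expected output are invented for illustration.)

import json
from aggregate_funnel import calculate_funnel_from_user_events, parse_args

# One 2-step funnel, 1-day conversion window, no breakdown ([[]]).
# Each event is (timestamp, breakdown, [steps]); steps are 1-based, and a
# negative step number would mark the event as an exclusion for that step.
line = json.dumps(
    {
        "num_steps": 2,
        "conversion_window_limit": 86400,
        "breakdown_attribution_type": "first_touch",
        "funnel_order_type": "ordered",
        "prop_vals": [[]],
        "value": [[1720000000.0, [], [1]], [1720000100.0, [], [2]]],
    }
)
calculate_funnel_from_user_events(*parse_args(line))
# prints {"result": [[1, [], [100.0]]]}: the user reached zero-indexed step 1
# (the second and final step, i.e. converted), with 100.0s between the steps.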
+ def loop_prop_val(prop_val): + # an array of when the user entered the funnel + # entered_timestamp = [(0, "", [])] * (num_steps + 1) + max_step[:] = [0, default_entered_timestamp] + entered_timestamp: list[EnteredTimestamp] = [default_entered_timestamp] * (num_steps + 1) + + def add_max_step(): + i = cast(int, max_step[0]) + final = cast(EnteredTimestamp, max_step[1]) + results.append((i - 1, prop_val, [final.timings[i] - final.timings[i - 1] for i in range(1, i)])) + + filtered_events = ( + ((timestamp, breakdown, steps) for (timestamp, breakdown, steps) in events if breakdown == prop_val) + if breakdown_attribution_type == "all_events" + else events + ) + for timestamp, events_with_same_timestamp_iterator in groupby(filtered_events, key=lambda x: x[0]): + events_with_same_timestamp = tuple(events_with_same_timestamp_iterator) + entered_timestamp[0] = EnteredTimestamp(timestamp, []) + if len(events_with_same_timestamp) == 1: + if not process_event( + *events_with_same_timestamp[0], entered_timestamp=entered_timestamp, prop_val=prop_val + ): + return + else: + # This is a special case for events with the same timestamp + # We play all of their permutations and most generously take the ones that advanced the furthest + # This has quite bad performance, and can probably be optimized through clever but annoying logic + # but shouldn't be hit too often + entered_timestamps = [] + for events_group_perm in permutations(events_with_same_timestamp): + entered_timestamps.append(list(entered_timestamp)) + for event in events_group_perm: + if not process_event(*event, entered_timestamp=entered_timestamps[-1], prop_val=prop_val): + # If any of the permutations hits an exclusion, we exclude this user. + # This isn't an important implementation detail and we could do something smarter here. 
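+ # Editor's illustration (hypothetical values): if two events share t=100 and tag steps [2] and [1], the ([1], [2]) ordering reaches step 2 while ([2], [1]) stalls at step 1, since step 2 cannot fire before step 1 has an entered timestamp; the per-index max taken below keeps the furthest progress, so ties resolve in the user's favor.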
+ return + for i in range(len(entered_timestamp)): + entered_timestamp[i] = max((x[i] for x in entered_timestamps), key=lambda x: x.timestamp) + + # If we have hit the goal, we can terminate early + if entered_timestamp[num_steps].timestamp > 0: + add_max_step() + return + + # Find the furthest step we have made it to and print it + add_max_step() + return + + [loop_prop_val(prop_val) for prop_val in prop_vals] + print(json.dumps({"result": results}), end="\n") # noqa: T201 + + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_array.py b/posthog/user_scripts/aggregate_funnel_array.py new file mode 100755 index 0000000000000..17b053bb7d448 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_array.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel import parse_args, calculate_funnel_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_array_trends.py b/posthog/user_scripts/aggregate_funnel_array_trends.py new file mode 100755 index 0000000000000..15e93f5452797 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_array_trends.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_trends_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_array_trends_test.py b/posthog/user_scripts/aggregate_funnel_array_trends_test.py new file mode 100755 index 0000000000000..44d3cc9b8f059 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_array_trends_test.py @@ -0,0 +1,13 @@ +#!/usr/bin/python3 + +from aggregate_funnel_trends import calculate_funnel_trends_from_user_events, parse_args +import sys +import json + +if __name__ == "__main__": + for line in sys.stdin: + try: + calculate_funnel_trends_from_user_events(*parse_args(line)) + except Exception as e: + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_cohort.py b/posthog/user_scripts/aggregate_funnel_cohort.py new file mode 100755 index 0000000000000..17b053bb7d448 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_cohort.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel import parse_args, calculate_funnel_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_cohort_trends.py b/posthog/user_scripts/aggregate_funnel_cohort_trends.py new file mode 100755 index 0000000000000..15e93f5452797 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_cohort_trends.py @@ -0,0 +1,9 @@ +#!/usr/bin/python3 +import sys + +from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_trends_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_test.py b/posthog/user_scripts/aggregate_funnel_test.py new file mode 100755 index 0000000000000..e0689b82af21c --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_test.py 
@@ -0,0 +1,13 @@ +#!/usr/bin/python3 +import json + +from aggregate_funnel import calculate_funnel_from_user_events, parse_args +import sys + +if __name__ == "__main__": + for line in sys.stdin: + try: + calculate_funnel_from_user_events(*parse_args(line)) + except Exception as e: + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_trends.py b/posthog/user_scripts/aggregate_funnel_trends.py new file mode 100755 index 0000000000000..0aa96b7a19b96 --- /dev/null +++ b/posthog/user_scripts/aggregate_funnel_trends.py @@ -0,0 +1,131 @@ +#!/usr/bin/python3 +import sys +from dataclasses import dataclass, replace +from typing import Any +from collections.abc import Sequence +import json + + +def parse_args(line): + args = json.loads(line) + return [ + int(args["from_step"]), + int(args["num_steps"]), + int(args["conversion_window_limit"]), + str(args["breakdown_attribution_type"]), + str(args["funnel_order_type"]), + args["prop_vals"], # Array(Array(String)) + args["value"], # Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + ] + + +@dataclass(frozen=True) +class EnteredTimestamp: + timestamp: Any + timings: Any + + +# each one can be multiple steps here
# it only matters when they entered the funnel - you can propagate the time from the previous step when you update +# This function is defined for ClickHouse in user_defined_function.xml along with types +# num_steps is the total number of steps in the funnel +# conversion_window_limit is in seconds +# events is an array of tuples of (timestamp, interval_start, breakdown, [steps]) +# steps is an array of integers which represent the steps that this event qualifies for. it looks like [1,3,5,6]. +# negative integers represent an exclusion on that step. each event is either all exclusions or all steps. +def calculate_funnel_trends_from_user_events( + from_step: int, + num_steps: int, + conversion_window_limit_seconds: int, + breakdown_attribution_type: str, + funnel_order_type: str, + prop_vals: list[Any], + events: Sequence[tuple[float, int, list[str] | int | str, list[int]]], +): + default_entered_timestamp = EnteredTimestamp(0, []) + # If the attribution mode is a breakdown step, set this to the integer that represents that step + breakdown_step = int(breakdown_attribution_type[5:]) if breakdown_attribution_type.startswith("step_") else None + + # Results is a map of start intervals to success or failure. If an interval isn't here, it means the + # user didn't enter + results = {} + + # We call this for each possible breakdown value. + def loop_prop_val(prop_val): + # we need to track every distinct entry into the funnel through to the end + filtered_events = ( + ( + (timestamp, interval_start, breakdown, steps) + for (timestamp, interval_start, breakdown, steps) in events + if breakdown == prop_val + ) + if breakdown_attribution_type == "all_events" + else events + ) + list_of_entered_timestamps = [] + + for timestamp, interval_start, breakdown, steps in filtered_events: + for step in reversed(steps): + exclusion = False + if step < 0: + exclusion = True + step = -step + # Special code to handle the first step + # Potential Optimization: we could skip tracking here if the user has already completed the funnel for this interval + if step == 1: + entered_timestamp = [default_entered_timestamp] * (num_steps + 1) + # Record the interval start at index 0 - its timestamp is what we return if this interval converts. 
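+ # Editor's illustration (hypothetical values): after a step-1 event at t=1720000042 inside the interval starting at 1720000000, the tracker is + # [EnteredTimestamp(1720000000, [True]), EnteredTimestamp(1720000042, [1720000042]), ...], + # where the [True] in slot 0's timings doubles as the "reached from_step" flag described next.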
+ # For strict funnels, we need to track if the "from_step" has been hit + # Abuse the timings field on the 0th index entered_timestamp to have the elt True if we have + entered_timestamp[0] = EnteredTimestamp(interval_start, [True] if from_step == 0 else []) + entered_timestamp[1] = EnteredTimestamp(timestamp, [timestamp]) + list_of_entered_timestamps.append(entered_timestamp) + else: + for entered_timestamp in list_of_entered_timestamps[:]: + in_match_window = ( + timestamp - entered_timestamp[step - 1].timestamp <= conversion_window_limit_seconds + ) + already_reached_this_step_with_same_entered_timestamp = ( + entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp + ) + if in_match_window and not already_reached_this_step_with_same_entered_timestamp: + if exclusion: + # this is a complete failure, exclude this person, don't print anything, don't count + return False + is_unmatched_step_attribution = ( + breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown + ) + if not is_unmatched_step_attribution: + entered_timestamp[step] = replace( + entered_timestamp[step - 1], + timings=[*entered_timestamp[step - 1].timings, timestamp], + ) + # check if we have hit the goal. if we have, remove it from the list and add it to the successful_timestamps + if entered_timestamp[num_steps].timestamp > 0: + results[entered_timestamp[0].timestamp] = (1, prop_val) + list_of_entered_timestamps.remove(entered_timestamp) + # If we have hit the from_step threshold, record it (abuse the timings field) + elif step == from_step + 1: + entered_timestamp[0].timings.append(True) + + # At the end of the event, clear all steps that weren't done by that event + if funnel_order_type == "strict": + for entered_timestamp in list_of_entered_timestamps[:]: + for i in range(1, len(entered_timestamp)): + if i not in steps: + entered_timestamp[i] = default_entered_timestamp + + # At this point, everything left in entered_timestamps is a failure, if it has made it to from_step + for entered_timestamp in list_of_entered_timestamps: + if entered_timestamp[0].timestamp not in results and len(entered_timestamp[0].timings) > 0: + results[entered_timestamp[0].timestamp] = (-1, prop_val) + + [loop_prop_val(prop_val) for prop_val in prop_vals] + result = [(interval_start, success_bool, prop_val) for interval_start, (success_bool, prop_val) in results.items()] + print(json.dumps({"result": result}), end="\n") # noqa: T201 + + +if __name__ == "__main__": + for line in sys.stdin: + calculate_funnel_trends_from_user_events(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/utils.py b/posthog/utils.py index 39bf6d606982f..aaf02658b42d1 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -58,6 +58,7 @@ from posthog.models import Team, User DATERANGE_MAP = { + "second": datetime.timedelta(seconds=1), "minute": datetime.timedelta(minutes=1), "hour": datetime.timedelta(hours=1), "day": datetime.timedelta(days=1), diff --git a/posthog/warehouse/api/external_data_schema.py b/posthog/warehouse/api/external_data_schema.py index 154fd848ff524..3f98702db64f7 100644 --- a/posthog/warehouse/api/external_data_schema.py +++ b/posthog/warehouse/api/external_data_schema.py @@ -46,6 +46,7 @@ class ExternalDataSchemaSerializer(serializers.ModelSerializer): incremental_field = serializers.SerializerMethodField(read_only=True) incremental_field_type = serializers.SerializerMethodField(read_only=True) sync_frequency = serializers.SerializerMethodField(read_only=True) + status = 
serializers.SerializerMethodField(read_only=True) class Meta: model = ExternalDataSchema @@ -74,6 +75,12 @@ class Meta: "status", ] + def get_status(self, schema: ExternalDataSchema) -> str | None: + if schema.status == ExternalDataSchema.Status.CANCELLED: + return "Billing limits" + + return schema.status + def get_incremental(self, schema: ExternalDataSchema) -> bool: return schema.is_incremental diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 54a3960f0960a..24439fcecdc19 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -24,6 +24,7 @@ from posthog.hogql.database.database import create_hogql_database from posthog.temporal.data_imports.pipelines.stripe import validate_credentials as validate_stripe_credentials from posthog.temporal.data_imports.pipelines.zendesk import validate_credentials as validate_zendesk_credentials +from posthog.temporal.data_imports.pipelines.vitally import validate_credentials as validate_vitally_credentials from posthog.temporal.data_imports.pipelines.schemas import ( PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING, PIPELINE_TYPE_INCREMENTAL_FIELDS_MAPPING, @@ -89,6 +90,7 @@ def get_generic_sql_error(source_type: ExternalDataSource.Type): class ExternalDataJobSerializers(serializers.ModelSerializer): schema = serializers.SerializerMethodField(read_only=True) + status = serializers.SerializerMethodField(read_only=True) class Meta: model = ExternalDataJob @@ -113,6 +115,12 @@ class Meta: "workflow_run_id", ] + def get_status(self, instance: ExternalDataJob): + if instance.status == ExternalDataJob.Status.CANCELLED: + return "Billing limits" + + return instance.status + def get_schema(self, instance: ExternalDataJob): return SimpleExternalDataSchemaSerializer( instance.schema, many=False, read_only=True, context=self.context @@ -167,7 +175,7 @@ def get_status(self, instance: ExternalDataSource) -> str: if any_failures: return ExternalDataSchema.Status.ERROR elif any_cancelled: - return ExternalDataSchema.Status.CANCELLED + return "Billing limits" elif any_paused: return ExternalDataSchema.Status.PAUSED elif any_running: @@ -273,6 +281,8 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Response: new_source_model = self._handle_zendesk_source(request, *args, **kwargs) elif source_type == ExternalDataSource.Type.SALESFORCE: new_source_model = self._handle_salesforce_source(request, *args, **kwargs) + elif source_type == ExternalDataSource.Type.VITALLY: + new_source_model = self._handle_vitally_source(request, *args, **kwargs) elif source_type in [ ExternalDataSource.Type.POSTGRES, ExternalDataSource.Type.MYSQL, @@ -388,6 +398,28 @@ def _handle_stripe_source(self, request: Request, *args: Any, **kwargs: Any) -> return new_source_model + def _handle_vitally_source(self, request: Request, *args: Any, **kwargs: Any) -> ExternalDataSource: + payload = request.data["payload"] + secret_token = payload.get("secret_token") + region = payload.get("region") + subdomain = payload.get("subdomain", None) + prefix = request.data.get("prefix", None) + source_type = request.data["source_type"] + + # TODO: remove dummy vars + new_source_model = ExternalDataSource.objects.create( + source_id=str(uuid.uuid4()), + connection_id=str(uuid.uuid4()), + destination_id=str(uuid.uuid4()), + team=self.team, + status="Running", + source_type=source_type, + job_inputs={"secret_token": secret_token, "region": region, "subdomain": subdomain}, + prefix=prefix, 
+ ) + + return new_source_model + def _handle_zendesk_source(self, request: Request, *args: Any, **kwargs: Any) -> ExternalDataSource: payload = request.data["payload"] api_key = payload.get("api_key") @@ -683,6 +715,15 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any): status=status.HTTP_400_BAD_REQUEST, data={"message": "Invalid credentials: Zendesk credentials are incorrect"}, ) + elif source_type == ExternalDataSource.Type.VITALLY: + secret_token = request.data.get("secret_token", "") + region = request.data.get("region", "") + subdomain = request.data.get("subdomain", "") + if not validate_vitally_credentials(subdomain=subdomain, secret_token=secret_token, region=region): + return Response( + status=status.HTTP_400_BAD_REQUEST, + data={"message": "Invalid credentials: Vitally credentials are incorrect"}, + ) # Get schemas and validate SQL credentials if source_type in [ diff --git a/posthog/warehouse/api/saved_query.py b/posthog/warehouse/api/saved_query.py index db437861f967a..0c233001af365 100644 --- a/posthog/warehouse/api/saved_query.py +++ b/posthog/warehouse/api/saved_query.py @@ -160,25 +160,25 @@ def ancestors(self, request: request.Request, *args, **kwargs) -> response.Respo look further back into the ancestor tree. If `level` overshoots (i.e. points beyond the root), we return every ancestor back to the root. """ - level = request.data.get("level", 1) + up_to_level = request.data.get("level", None) saved_query = self.get_object() saved_query_id = saved_query.id.hex - lquery = f"*{{{level},}}.{saved_query_id}" + lquery = f"*{{1,}}.{saved_query_id}" paths = DataWarehouseModelPath.objects.filter(team=saved_query.team, path__lquery=lquery) if not paths: return response.Response({"ancestors": []}) - ancestors = set() + ancestors: set[str] = set() for model_path in paths: - offset = len(model_path.path) - level - 1 # -1 corrects for level being 1-indexed + if up_to_level is None: + start = 0 + else: + start = (int(up_to_level) * -1) - 1 - if offset < 0: - continue - - ancestors.add(model_path.path[offset]) + ancestors = ancestors.union(model_path.path[start:-1]) return response.Response({"ancestors": ancestors}) @@ -190,25 +190,25 @@ def descendants(self, request: request.Request, *args, **kwargs) -> response.Res look further ahead into the descendants tree. If `level` overshoots (i.e. points further than any leaf), we return every descendant down to the leaves.
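As a standalone sketch of the slicing both endpoints rely on (plain Python lists stand in for the stored ltree paths; the names are illustrative, not part of this module):

    level = 2
    # ancestors: each matched path ends at this query, so take the last
    # `level` entries before it (everything back to the root when omitted)
    path = ["events", "parent_id", "child_id"]
    assert path[-level - 1:-1] == ["events", "parent_id"]
    # descendants: the slice starts just past this query's position and
    # spans `level` generations (down to the leaf when omitted)
    path = ["parent_id", "child_id", "grandchild_id"]
    start = path.index("parent_id") + 1
    assert path[start:start + level] == ["child_id", "grandchild_id"]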
""" - level = request.data.get("level", 1) + up_to_level = request.data.get("level", None) saved_query = self.get_object() saved_query_id = saved_query.id.hex - lquery = f"*.{saved_query_id}.*{{{level},}}" + lquery = f"*.{saved_query_id}.*{{1,}}" paths = DataWarehouseModelPath.objects.filter(team=saved_query.team, path__lquery=lquery) if not paths: return response.Response({"descendants": []}) - descendants = set() - + descendants: set[str] = set() for model_path in paths: - offset = model_path.path.index(saved_query_id) + level - - if offset > len(model_path.path): - continue + start = model_path.path.index(saved_query_id) + 1 + if up_to_level is None: + end = len(model_path.path) + else: + end = start + up_to_level - descendants.add(model_path.path[offset]) + descendants = descendants.union(model_path.path[start:end]) return response.Response({"descendants": descendants}) diff --git a/posthog/warehouse/api/test/test_saved_query.py b/posthog/warehouse/api/test/test_saved_query.py index 80deaad72ca3a..a0abdf02c5e98 100644 --- a/posthog/warehouse/api/test/test_saved_query.py +++ b/posthog/warehouse/api/test/test_saved_query.py @@ -230,24 +230,33 @@ def test_ancestors(self): self.assertEqual(response.status_code, 200, response.content) child_ancestors = response.json()["ancestors"] - self.assertEqual(child_ancestors, [uuid.UUID(saved_query_parent_id).hex]) + child_ancestors.sort() + self.assertEqual(child_ancestors, sorted([uuid.UUID(saved_query_parent_id).hex, "events", "persons"])) response = self.client.post( - f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/ancestors", {"level": 2} + f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/ancestors", {"level": 1} ) + self.assertEqual(response.status_code, 200, response.content) + child_ancestors_level_1 = response.json()["ancestors"] + child_ancestors_level_1.sort() + self.assertEqual(child_ancestors_level_1, [uuid.UUID(saved_query_parent_id).hex]) + + response = self.client.post( + f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/ancestors", {"level": 2} + ) self.assertEqual(response.status_code, 200, response.content) child_ancestors_level_2 = response.json()["ancestors"] child_ancestors_level_2.sort() - self.assertEqual(child_ancestors_level_2, ["events", "persons"]) + self.assertEqual(child_ancestors_level_2, sorted([uuid.UUID(saved_query_parent_id).hex, "events", "persons"])) response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/ancestors", {"level": 10} ) - self.assertEqual(response.status_code, 200, response.content) child_ancestors_level_10 = response.json()["ancestors"] - self.assertEqual(child_ancestors_level_10, []) + child_ancestors_level_10.sort() + self.assertEqual(child_ancestors_level_2, sorted([uuid.UUID(saved_query_parent_id).hex, "events", "persons"])) def test_descendants(self): query = """\ @@ -281,23 +290,69 @@ def test_descendants(self): }, ) + response_grand_child = self.client.post( + f"/api/projects/{self.team.id}/warehouse_saved_queries/", + { + "name": "event_view_3", + "query": { + "kind": "HogQLQuery", + "query": "select event as event from event_view_2", + }, + }, + ) + self.assertEqual(response_parent.status_code, 201, response_parent.content) self.assertEqual(response_child.status_code, 201, response_child.content) + self.assertEqual(response_grand_child.status_code, 201, response_grand_child.content) saved_query_parent_id = response_parent.json()["id"] saved_query_child_id 
= response_child.json()["id"] + saved_query_grand_child_id = response_grand_child.json()["id"] response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_parent_id}/descendants", ) self.assertEqual(response.status_code, 200, response.content) parent_descendants = response.json()["descendants"] - self.assertEqual(parent_descendants, [uuid.UUID(saved_query_child_id).hex]) + self.assertEqual( + sorted(parent_descendants), + sorted([uuid.UUID(saved_query_child_id).hex, uuid.UUID(saved_query_grand_child_id).hex]), + ) + + response = self.client.post( + f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_parent_id}/descendants", {"level": 1} + ) + + self.assertEqual(response.status_code, 200, response.content) + parent_descendants_level_1 = response.json()["descendants"] + self.assertEqual( + parent_descendants_level_1, + [uuid.UUID(saved_query_child_id).hex], + ) + + response = self.client.post( + f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_parent_id}/descendants", {"level": 2} + ) + + self.assertEqual(response.status_code, 200, response.content) + parent_descendants_level_2 = response.json()["descendants"] + self.assertEqual( + sorted(parent_descendants_level_2), + sorted([uuid.UUID(saved_query_child_id).hex, uuid.UUID(saved_query_grand_child_id).hex]), + ) response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_child_id}/descendants", ) + self.assertEqual(response.status_code, 200, response.content) + child_ancestors = response.json()["descendants"] + self.assertEqual(child_ancestors, [uuid.UUID(saved_query_grand_child_id).hex]) + + response = self.client.post( + f"/api/projects/{self.team.id}/warehouse_saved_queries/{saved_query_grand_child_id}/descendants", + ) + self.assertEqual(response.status_code, 200, response.content) child_ancestors = response.json()["descendants"] self.assertEqual(child_ancestors, []) diff --git a/posthog/warehouse/external_data_source/jobs.py b/posthog/warehouse/external_data_source/jobs.py index 0bfbd95760391..27d2ae22b6b8d 100644 --- a/posthog/warehouse/external_data_source/jobs.py +++ b/posthog/warehouse/external_data_source/jobs.py @@ -1,5 +1,5 @@ from uuid import UUID - +from posthog.warehouse.util import database_sync_to_async from posthog.warehouse.models.external_data_job import ExternalDataJob from posthog.warehouse.models.external_data_schema import ExternalDataSchema from posthog.warehouse.models.external_data_source import ExternalDataSource @@ -29,16 +29,19 @@ def create_external_data_job( return job -def update_external_job_status(run_id: UUID, team_id: int, status: str, latest_error: str | None) -> ExternalDataJob: - model = ExternalDataJob.objects.get(id=run_id, team_id=team_id) +@database_sync_to_async +def aupdate_external_job_status( + job_id: str, team_id: int, status: ExternalDataJob.Status, latest_error: str | None +) -> ExternalDataJob: + model = ExternalDataJob.objects.get(id=job_id, team_id=team_id) model.status = status model.latest_error = latest_error model.save() if status == ExternalDataJob.Status.FAILED: - schema_status = ExternalDataSchema.Status.ERROR + schema_status: ExternalDataSchema.Status = ExternalDataSchema.Status.ERROR else: - schema_status = status + schema_status = status # type: ignore schema = ExternalDataSchema.objects.get(id=model.schema_id, team_id=team_id) schema.status = schema_status diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py 
index 83f16eaa9aa1f..a3ba7730aaaa3 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -90,7 +90,9 @@ def aget_schema_if_exists(schema_name: str, team_id: int, source_id: uuid.UUID) @database_sync_to_async def aget_schema_by_id(schema_id: str, team_id: int) -> ExternalDataSchema | None: - return ExternalDataSchema.objects.prefetch_related("source").get(id=schema_id, team_id=team_id) + return ( + ExternalDataSchema.objects.prefetch_related("source").exclude(deleted=True).get(id=schema_id, team_id=team_id) + ) @database_sync_to_async diff --git a/posthog/warehouse/models/external_data_source.py b/posthog/warehouse/models/external_data_source.py index 6f9fe14e01dd9..14dd7c99dd88c 100644 --- a/posthog/warehouse/models/external_data_source.py +++ b/posthog/warehouse/models/external_data_source.py @@ -23,6 +23,7 @@ class Type(models.TextChoices): SALESFORCE = "Salesforce", "Salesforce" MYSQL = "MySQL", "MySQL" MSSQL = "MSSQL", "MSSQL" + VITALLY = "Vitally", "Vitally" class Status(models.TextChoices): RUNNING = "Running", "Running" diff --git a/posthog/warehouse/types.py b/posthog/warehouse/types.py index 57455ac361232..910367854f23c 100644 --- a/posthog/warehouse/types.py +++ b/posthog/warehouse/types.py @@ -1,8 +1,8 @@ -from enum import Enum +from enum import StrEnum from typing import TypedDict -class IncrementalFieldType(Enum): +class IncrementalFieldType(StrEnum): Integer = "integer" Numeric = "numeric" # For snowflake DateTime = "datetime" diff --git a/requirements-dev.in b/requirements-dev.in index 70206b9805c6d..f1158cbc54960 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -22,7 +22,7 @@ Faker==17.5.0 fakeredis[lua]==2.23.3 freezegun==1.2.2 inline-snapshot==0.12.* -packaging==23.1 +packaging==24.1 black~=23.9.1 boto3-stubs[s3] types-markdown==3.3.9 diff --git a/requirements-dev.txt b/requirements-dev.txt index a93ee65d48acb..72fe00092b60e 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -171,7 +171,7 @@ openapi-schema-validator==0.6.2 # via openapi-spec-validator openapi-spec-validator==0.7.1 # via -r requirements-dev.in -packaging==23.1 +packaging==24.1 # via # -c requirements.txt # -r requirements-dev.in diff --git a/requirements.in b/requirements.in index 2e0332d76ec58..959a0b2dc75de 100644 --- a/requirements.in +++ b/requirements.in @@ -45,12 +45,15 @@ gunicorn==20.1.0 infi-clickhouse-orm@ git+https://github.com/PostHog/infi.clickhouse_orm@9578c79f29635ee2c1d01b7979e89adab8383de2 kafka-python==2.0.2 kombu==5.3.2 +langchain==0.2.15 +langchain-openai==0.1.23 +langsmith==0.1.106 lzstring==1.0.4 natsort==8.4.0 nanoid==2.0.0 numpy==1.23.3 openpyxl==3.1.2 -orjson==3.9.10 +orjson==3.10.7 pandas==2.2.0 paramiko==3.4.0 Pillow==10.2.0 @@ -87,7 +90,7 @@ sshtunnel==0.4.0 statshog==1.0.6 structlog==23.2.0 sqlparse==0.4.4 -temporalio==1.6.0 +temporalio==1.7.0 token-bucket==0.3.0 toronado==0.1.0 webdriver_manager==4.0.1 @@ -96,8 +99,8 @@ mimesis==5.2.1 more-itertools==9.0.0 django-two-factor-auth==1.14.0 phonenumberslite==8.13.6 -openai==1.10.0 -tiktoken==0.6.0 +openai==1.43.0 +tiktoken==0.7.0 nh3==0.2.14 hogql-parser==1.0.40 zxcvbn==4.4.28 diff --git a/requirements.txt b/requirements.txt index 484a579627303..e31cbdd5d09bd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,6 +11,7 @@ aiohttp==3.9.3 # -r requirements.in # aiobotocore # geoip2 + # langchain # s3fs aioitertools==0.11.0 # via aiobotocore @@ -278,7 +279,9 @@ hogql-parser==1.0.40 httpcore==1.0.2 # via httpx httpx==0.26.0 - # via 
openai + # via + # langsmith + # openai humanize==4.9.0 # via dlt idna==2.8 @@ -300,14 +303,20 @@ isodate==0.6.1 # via # python3-saml # zeep +jiter==0.5.0 + # via openai jmespath==1.0.0 # via # boto3 # botocore joblib==1.3.2 # via scikit-learn +jsonpatch==1.33 + # via langchain-core jsonpath-ng==1.6.0 # via dlt +jsonpointer==3.0.0 + # via jsonpatch jsonschema==4.20.0 # via drf-spectacular jsonschema-specifications==2023.12.1 @@ -320,6 +329,22 @@ kombu==5.3.2 # via # -r requirements.in # celery +langchain==0.2.15 + # via -r requirements.in +langchain-core==0.2.36 + # via + # langchain + # langchain-openai + # langchain-text-splitters +langchain-openai==0.1.23 + # via -r requirements.in +langchain-text-splitters==0.2.2 + # via langchain +langsmith==0.1.106 + # via + # -r requirements.in + # langchain + # langchain-core lxml==4.9.4 # via # -r requirements.in @@ -354,6 +379,7 @@ nh3==0.2.14 numpy==1.23.3 # via # -r requirements.in + # langchain # pandas # pyarrow # scikit-learn @@ -362,23 +388,26 @@ oauthlib==3.1.0 # via # requests-oauthlib # social-auth-core -openai==1.10.0 +openai==1.43.0 # via # -r requirements.in + # langchain-openai # sentry-sdk openpyxl==3.1.2 # via -r requirements.in -orjson==3.9.10 +orjson==3.10.7 # via # -r requirements.in # dlt + # langsmith outcome==1.3.0.post0 # via trio -packaging==23.1 +packaging==24.1 # via # aiokafka # dlt # google-cloud-bigquery + # langchain-core # snowflake-connector-python # webdriver-manager pandas==2.2.0 @@ -443,6 +472,9 @@ pycparser==2.20 pydantic==2.5.3 # via # -r requirements.in + # langchain + # langchain-core + # langsmith # openai pydantic-core==2.14.6 # via pydantic @@ -502,6 +534,8 @@ pyyaml==6.0.1 # via # dlt # drf-spectacular + # langchain + # langchain-core qrcode==7.4.2 # via django-two-factor-auth redis==4.5.4 @@ -523,6 +557,8 @@ requests==2.32.0 # google-api-core # google-cloud-bigquery # infi-clickhouse-orm + # langchain + # langsmith # pdpyras # posthoganalytics # requests-file @@ -613,6 +649,7 @@ sortedcontainers==2.4.0 sqlalchemy==2.0.31 # via # -r requirements.in + # langchain # snowflake-sqlalchemy sqlparse==0.4.4 # via @@ -628,17 +665,20 @@ structlog==23.2.0 # via # -r requirements.in # django-structlog -temporalio==1.6.0 +temporalio==1.7.0 # via -r requirements.in tenacity==8.2.3 # via # celery-redbeat # dlt + # langchain + # langchain-core threadpoolctl==3.3.0 # via scikit-learn -tiktoken==0.6.0 +tiktoken==0.7.0 # via # -r requirements.in + # langchain-openai # sentry-sdk token-bucket==0.3.0 # via -r requirements.in @@ -663,6 +703,7 @@ types-setuptools==69.0.0.0 typing-extensions==4.12.2 # via # dlt + # langchain-core # openai # psycopg # pydantic diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 55c64258d5ec2..117d4bae8a717 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -620,6 +620,7 @@ dependencies = [ "axum-test-helper", "base64 0.22.0", "bytes", + "common-alloc", "envconfig", "flate2", "futures", @@ -702,6 +703,13 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "common-alloc" +version = "0.1.0" +dependencies = [ + "tikv-jemallocator", +] + [[package]] name = "common-dns" version = "0.1.0" @@ -877,6 +885,7 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "chrono", + "common-alloc", "common-dns", "common-kafka", "common-metrics", @@ -906,6 +915,7 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "chrono", + "common-alloc", "common-kafka", "common-metrics", "cyclotron-core", @@ -1156,6 +1166,7 @@ dependencies = [ "axum 0.7.5", "axum-client-ip", "bytes", + "common-alloc", "envconfig", 
"maxminddb", "once_cell", @@ -1586,6 +1597,7 @@ name = "hook-api" version = "0.1.0" dependencies = [ "axum 0.7.5", + "common-alloc", "common-metrics", "envconfig", "eyre", @@ -1627,6 +1639,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum 0.7.5", + "common-alloc", "common-kafka", "common-metrics", "envconfig", @@ -1652,6 +1665,7 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "chrono", + "common-alloc", "common-dns", "common-kafka", "common-metrics", @@ -2847,6 +2861,7 @@ dependencies = [ "ahash", "axum 0.7.5", "chrono", + "common-alloc", "common-metrics", "envconfig", "futures", @@ -3980,6 +3995,26 @@ dependencies = [ "once_cell", ] +[[package]] +name = "tikv-jemalloc-sys" +version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "tikv-jemallocator" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865" +dependencies = [ + "libc", + "tikv-jemalloc-sys", +] + [[package]] name = "time" version = "0.3.36" diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 39fbcb8c48449..5c30dd1a8cf46 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -7,6 +7,7 @@ members = [ "common/health", "common/metrics", "common/dns", + "common/alloc", "feature-flags", "hook-api", "hook-common", diff --git a/rust/capture/Cargo.toml b/rust/capture/Cargo.toml index 55ccddd3a0311..7b50fe760b742 100644 --- a/rust/capture/Cargo.toml +++ b/rust/capture/Cargo.toml @@ -17,6 +17,7 @@ envconfig = { workspace = true } flate2 = { workspace = true } governor = { workspace = true } health = { path = "../common/health" } +common-alloc = { path = "../common/alloc" } metrics = { workspace = true } metrics-exporter-prometheus = { workspace = true } opentelemetry = { workspace = true } diff --git a/rust/capture/src/main.rs b/rust/capture/src/main.rs index 12b91941c7f6c..150cf29835291 100644 --- a/rust/capture/src/main.rs +++ b/rust/capture/src/main.rs @@ -16,6 +16,8 @@ use tracing_subscriber::{EnvFilter, Layer}; use capture::config::Config; use capture::server::serve; +common_alloc::used!(); + async fn shutdown() { let mut term = signal::unix::signal(signal::unix::SignalKind::terminate()) .expect("failed to register SIGTERM handler"); diff --git a/rust/common/alloc/Cargo.toml b/rust/common/alloc/Cargo.toml new file mode 100644 index 0000000000000..c000c381d3c1d --- /dev/null +++ b/rust/common/alloc/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "common-alloc" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +[target.'cfg(not(target_env = "msvc"))'.dependencies] +tikv-jemallocator = "0.6" diff --git a/rust/common/alloc/README.md b/rust/common/alloc/README.md new file mode 100644 index 0000000000000..f35e8a6437ba7 --- /dev/null +++ b/rust/common/alloc/README.md @@ -0,0 +1,12 @@ +# What is this? + +We use jemalloc everywhere we can, for any binary that we expect to run in a long-lived process. The reason for this is that our workloads are: +- multi-threaded +- extremely prone to memory fragmentation (due to our heavy use of `serde_json`, or json generally) + +jemalloc helps reduce memory fragmentation hugely, to the point of solving production OOMs that would have made use of capture-rs for replay a non-starter with the default system allocator. 
+ +At time of writing (2024-09-04), Rust workspaces don't have good support for specifying dependencies on a per-target basis, so this crate does the work of pulling in jemalloc only when compiling for supported targets, and then exposes a simple macro to use jemalloc as the global allocator. Anyone writing a binary crate should put this macro at the top of their `main.rs`. Libraries should not make use of this crate. + +## Future work +Functions could be added to this crate to report a set of metrics about the allocator when jemalloc is in use, as well as to provide other functionality (health/liveness checks, a way to specify hooks to execute when memory usage exceeds a certain threshold, etc.). Right now, it's pretty barebones. \ No newline at end of file diff --git a/rust/common/alloc/src/lib.rs b/rust/common/alloc/src/lib.rs new file mode 100644 index 0000000000000..af560a96b3dc0 --- /dev/null +++ b/rust/common/alloc/src/lib.rs @@ -0,0 +1,12 @@ +#[cfg(target_env = "msvc")] +pub use std::alloc::System as DefaultAllocator; +#[cfg(not(target_env = "msvc"))] +pub use tikv_jemallocator::Jemalloc as DefaultAllocator; + +#[macro_export] +macro_rules! used { + () => { + #[global_allocator] + static GLOBAL: $crate::DefaultAllocator = $crate::DefaultAllocator; + }; +} diff --git a/rust/cyclotron-fetch/Cargo.toml b/rust/cyclotron-fetch/Cargo.toml index 69f6f4ac2adf1..8de85020ea106 100644 --- a/rust/cyclotron-fetch/Cargo.toml +++ b/rust/cyclotron-fetch/Cargo.toml @@ -19,6 +19,7 @@ cyclotron-core = { path = "../cyclotron-core" } common-metrics = { path = "../common/metrics" } common-dns = { path = "../common/dns" } common-kafka = { path = "../common/kafka" } +common-alloc = { path = "../common/alloc" } health = { path = "../common/health" } reqwest = { workspace = true } serde = { workspace = true } diff --git a/rust/cyclotron-fetch/src/main.rs b/rust/cyclotron-fetch/src/main.rs index 2013f1b6c7218..ebefa9f01d787 100644 --- a/rust/cyclotron-fetch/src/main.rs +++ b/rust/cyclotron-fetch/src/main.rs @@ -10,6 +10,8 @@ use health::HealthRegistry; use std::{future::ready, sync::Arc}; use tracing::{error, info}; +common_alloc::used!(); + async fn listen(app: Router, bind: String) -> Result<(), std::io::Error> { let listener = tokio::net::TcpListener::bind(bind).await?; diff --git a/rust/cyclotron-janitor/Cargo.toml b/rust/cyclotron-janitor/Cargo.toml index d6eb553d3e72f..15a0ae4e412f1 100644 --- a/rust/cyclotron-janitor/Cargo.toml +++ b/rust/cyclotron-janitor/Cargo.toml @@ -19,6 +19,7 @@ cyclotron-core = { path = "../cyclotron-core" } common-metrics = { path = "../common/metrics" } common-kafka = { path = "../common/kafka" } health = { path = "../common/health" } +common-alloc = { path = "../common/alloc" } time = { workspace = true } rdkafka = { workspace = true } diff --git a/rust/cyclotron-janitor/src/main.rs b/rust/cyclotron-janitor/src/main.rs index fa0f682601e61..a4a9274e08f3c 100644 --- a/rust/cyclotron-janitor/src/main.rs +++ b/rust/cyclotron-janitor/src/main.rs @@ -7,8 +7,7 @@ use health::{HealthHandle, HealthRegistry}; use std::{future::ready, time::Duration}; use tracing::{error, info}; -/// Most of this stuff is stolen pretty shamelessly from the rustyhook janitor. It'll diverge more -once we introduce the management command stuff, but for now it's a good starting point.
+common_alloc::used!(); async fn cleanup_loop(janitor: Janitor, livenes: HealthHandle, interval_secs: u64) -> Result<()> { let mut interval = tokio::time::interval(Duration::from_secs(interval_secs)); diff --git a/rust/cyclotron-janitor/tests/janitor.rs b/rust/cyclotron-janitor/tests/janitor.rs index 7dceae4969c19..90afcfbdec45e 100644 --- a/rust/cyclotron-janitor/tests/janitor.rs +++ b/rust/cyclotron-janitor/tests/janitor.rs @@ -1,14 +1,12 @@ -use chrono::{DateTime, Duration, Timelike, Utc}; +use chrono::{Duration, Timelike, Utc}; use common_kafka::kafka_messages::app_metrics2::{ AppMetric2, Kind as AppMetric2Kind, Source as AppMetric2Source, }; use cyclotron_core::{JobInit, JobState, QueueManager, Worker}; use cyclotron_janitor::{config::JanitorSettings, janitor::Janitor}; use rdkafka::consumer::{Consumer, StreamConsumer}; -use rdkafka::types::{RDKafkaApiKey, RDKafkaRespErr}; use rdkafka::{ClientConfig, Message}; use sqlx::PgPool; -use std::str::FromStr; use uuid::Uuid; use common_kafka::{test::create_mock_kafka, APP_METRICS2_TOPIC}; @@ -58,7 +56,7 @@ async fn janitor_test(db: PgPool) { queue_name: queue_name.clone(), priority: 0, scheduled: now, - function_id: Some(uuid.clone()), + function_id: Some(uuid), vm_state: None, parameters: None, blob: None, diff --git a/rust/feature-flags/Cargo.toml b/rust/feature-flags/Cargo.toml index b43d09cc93d2f..3d898dfdbfa72 100644 --- a/rust/feature-flags/Cargo.toml +++ b/rust/feature-flags/Cargo.toml @@ -31,6 +31,7 @@ regex = "1.10.4" maxminddb = "0.17" sqlx = { workspace = true } uuid = { workspace = true } +common-alloc = { path = "../common/alloc" } [lints] workspace = true diff --git a/rust/feature-flags/src/main.rs b/rust/feature-flags/src/main.rs index 980db6973893f..46cc1be270b27 100644 --- a/rust/feature-flags/src/main.rs +++ b/rust/feature-flags/src/main.rs @@ -7,6 +7,8 @@ use tracing_subscriber::{EnvFilter, Layer}; use feature_flags::config::Config; use feature_flags::server::serve; +common_alloc::used!(); + async fn shutdown() { let mut term = signal::unix::signal(signal::unix::SignalKind::terminate()) .expect("failed to register SIGTERM handler"); diff --git a/rust/hook-api/Cargo.toml b/rust/hook-api/Cargo.toml index 7887e8e49a8e2..87057fa8c74fd 100644 --- a/rust/hook-api/Cargo.toml +++ b/rust/hook-api/Cargo.toml @@ -23,3 +23,4 @@ tracing = { workspace = true } tracing-subscriber = { workspace = true } url = { workspace = true } common-metrics = { path = "../common/metrics" } +common-alloc = { path = "../common/alloc" } diff --git a/rust/hook-api/src/main.rs b/rust/hook-api/src/main.rs index 1f84abb4e4665..0491d49eea3be 100644 --- a/rust/hook-api/src/main.rs +++ b/rust/hook-api/src/main.rs @@ -9,6 +9,8 @@ use hook_common::pgqueue::PgQueue; mod config; mod handlers; +common_alloc::used!(); + async fn listen(app: Router, bind: String) -> Result<()> { let listener = tokio::net::TcpListener::bind(bind).await?; diff --git a/rust/hook-janitor/Cargo.toml b/rust/hook-janitor/Cargo.toml index dba9bef7e7046..70d6e263296e6 100644 --- a/rust/hook-janitor/Cargo.toml +++ b/rust/hook-janitor/Cargo.toml @@ -26,3 +26,4 @@ tracing = { workspace = true } tracing-subscriber = { workspace = true } common-metrics = { path = "../common/metrics" } common-kafka = { path = "../common/kafka" } +common-alloc = { path = "../common/alloc" } diff --git a/rust/hook-janitor/src/main.rs b/rust/hook-janitor/src/main.rs index 6ca27fa6e6d6f..de8910bcff97b 100644 --- a/rust/hook-janitor/src/main.rs +++ b/rust/hook-janitor/src/main.rs @@ -17,6 +17,8 @@ mod config; mod 
handlers; mod webhooks; +common_alloc::used!(); + async fn listen(app: Router, bind: String) -> Result<()> { let listener = tokio::net::TcpListener::bind(bind).await?; diff --git a/rust/hook-worker/Cargo.toml b/rust/hook-worker/Cargo.toml index fdc6f150dfec9..4670116572a04 100644 --- a/rust/hook-worker/Cargo.toml +++ b/rust/hook-worker/Cargo.toml @@ -28,6 +28,7 @@ url = { version = "2.2" } common-metrics = { path = "../common/metrics" } common-dns = { path = "../common/dns" } common-kafka = { path = "../common/kafka" } +common-alloc = { path = "../common/alloc" } [dev-dependencies] httpmock = { workspace = true } diff --git a/rust/hook-worker/src/main.rs b/rust/hook-worker/src/main.rs index 0aeae27e0a3a0..7aa0845247a92 100644 --- a/rust/hook-worker/src/main.rs +++ b/rust/hook-worker/src/main.rs @@ -13,6 +13,8 @@ use hook_worker::config::Config; use hook_worker::error::WorkerError; use hook_worker::worker::WebhookWorker; +common_alloc::used!(); + #[tokio::main] async fn main() -> Result<(), WorkerError> { tracing_subscriber::fmt::init(); diff --git a/rust/property-defs-rs/Cargo.toml b/rust/property-defs-rs/Cargo.toml index 6deb3bc0c22f7..f0ec58d5a6fac 100644 --- a/rust/property-defs-rs/Cargo.toml +++ b/rust/property-defs-rs/Cargo.toml @@ -21,6 +21,7 @@ metrics = { workspace = true } chrono = { workspace = true } quick_cache = { workspace = true } common-metrics = { path = "../common/metrics" } +common-alloc = { path = "../common/alloc" } ahash = { workspace = true } uuid = { workspace = true } diff --git a/rust/property-defs-rs/src/main.rs b/rust/property-defs-rs/src/main.rs index 2fa7b94614081..044104b30cb9f 100644 --- a/rust/property-defs-rs/src/main.rs +++ b/rust/property-defs-rs/src/main.rs @@ -31,6 +31,8 @@ use tokio::{ use tracing::{info, warn}; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer}; +common_alloc::used!(); + fn setup_tracing() { let log_layer: tracing_subscriber::filter::Filtered< tracing_subscriber::fmt::Layer,