Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into 17839-fix-update-…
Browse files Browse the repository at this point in the history
…and-run
  • Loading branch information
nikitaevg committed Jun 19, 2024
2 parents c98790c + 5d761a1 commit 8a77e1c
Show file tree
Hide file tree
Showing 86 changed files with 3,428 additions and 1,333 deletions.
16 changes: 0 additions & 16 deletions .github/workflows/build-and-deploy-prod.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,22 +12,6 @@ on:
- 'livestream/**'

jobs:
slack:
name: Notify Slack of start of deploy
runs-on: ubuntu-20.04
if: github.repository == 'posthog/posthog'
steps:
- name: Notify Platform team on slack
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: platform-bots
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_ICON: https://github.com/posthog.png?size=48
SLACK_MESSAGE: 'Production Cloud Deploy Beginning :rocket: - ${{ github.event.head_commit.message }}'
SLACK_TITLE: Message
SLACK_USERNAME: Max Hedgehog
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}

sentry:
name: Notify Sentry of a production release
runs-on: ubuntu-20.04
Expand Down
54 changes: 27 additions & 27 deletions .github/workflows/livestream-docker-image.yml
Original file line number Diff line number Diff line change
Expand Up @@ -58,31 +58,31 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

# deploy:
# runs-on: ubuntu-latest
# needs: build
# steps:
# - name: get deployer token
# id: deployer
# uses: getsentry/action-github-app-token@v3
# with:
# app_id: ${{ secrets.DEPLOYER_APP_ID }}
# private_key: ${{ secrets.DEPLOYER_APP_PRIVATE_KEY }}
deploy:
runs-on: ubuntu-latest
needs: build
steps:
- name: get deployer token
id: deployer
uses: getsentry/action-github-app-token@v3
with:
app_id: ${{ secrets.DEPLOYER_APP_ID }}
private_key: ${{ secrets.DEPLOYER_APP_PRIVATE_KEY }}

# - name: Trigger livestream deployment
# uses: peter-evans/repository-dispatch@v3
# with:
# token: ${{ steps.deployer.outputs.token }}
# repository: PostHog/charts
# event-type: commit_state_update
# client-payload: |
# {
# "values": {
# "image": {
# "sha": "${{ needs.build.outputs.sha }}"
# }
# },
# "release": "livestream",
# "commit": ${{ toJson(github.event.head_commit) }},
# "repository": ${{ toJson(github.repository) }}
# }
- name: Trigger livestream deployment
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ steps.deployer.outputs.token }}
repository: PostHog/charts
event-type: commit_state_update
client-payload: |
{
"values": {
"image": {
"sha": "${{ needs.build.outputs.sha }}"
}
},
"release": "livestream",
"commit": ${{ toJson(github.event.head_commit) }},
"repository": ${{ toJson(github.repository) }}
}

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from itertools import product
from unittest import mock
from uuid import uuid4

from dateutil.relativedelta import relativedelta
Expand All @@ -9,6 +8,10 @@

from ee.clickhouse.materialized_columns.columns import materialize
from posthog.clickhouse.client import sync_execute
from posthog.hogql.ast import CompareOperation, And, SelectQuery
from posthog.hogql.base import Expr
from posthog.hogql.context import HogQLContext
from posthog.hogql.printer import print_ast
from posthog.models import Person
from posthog.models.filters import SessionRecordingsFilter
from posthog.schema import PersonsOnEventsMode
Expand All @@ -26,9 +29,16 @@
)


# The HogQL pair of TestClickhouseSessionRecordingsListFromSessionReplay can be renamed when delete the old one
@freeze_time("2021-01-01T13:46:23")
class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest):
__test__ = False
class TestClickhouseSessionRecordingsListFromFilters(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest):
def _print_query(self, query: SelectQuery) -> str:
return print_ast(
query,
HogQLContext(team_id=self.team.pk, enable_select_queries=True),
"clickhouse",
pretty=True,
)

def tearDown(self) -> None:
sync_execute(TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL())
Expand Down Expand Up @@ -65,30 +75,22 @@ def create_event(
False,
False,
PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"kperson_filter_pre__0": "rgInternal",
"kpersonquery_person_filter_fin__0": "rgInternal",
"person_uuid": None,
"vperson_filter_pre__0": ["false"],
"vpersonquery_person_filter_fin__0": ["false"],
},
True,
False,
],
[
"test_poe_being_unavailable_we_fall_back_to_person_subquery",
False,
False,
False,
PersonsOnEventsMode.DISABLED,
{
"kperson_filter_pre__0": "rgInternal",
"kpersonquery_person_filter_fin__0": "rgInternal",
"person_uuid": None,
"vperson_filter_pre__0": ["false"],
"vpersonquery_person_filter_fin__0": ["false"],
},
True,
],
[
"test_poe_being_unavailable_we_fall_back_to_person_subquery_but_still_use_mat_props",
False,
False,
False,
PersonsOnEventsMode.DISABLED,
False,
],
[
Expand All @@ -97,31 +99,15 @@ def create_event(
True,
False,
PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"event_names": [],
"event_start_time": mock.ANY,
"event_end_time": mock.ANY,
"kglobal_0": "rgInternal",
"vglobal_0": ["false"],
},
False,
True,
],
[
"test_poe_v2_available_person_properties_are_used_in_replay_listing",
False,
True,
True,
PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
{
"event_end_time": mock.ANY,
"event_names": [],
"event_start_time": mock.ANY,
"kglobal_0": "rgInternal",
"vglobal_0": ["false"],
},
False,
True,
],
]
)
Expand All @@ -132,9 +118,7 @@ def test_effect_of_poe_settings_on_query_generated(
poe_v2: bool,
allow_denormalized_props: bool,
expected_poe_mode: PersonsOnEventsMode,
expected_query_params: dict,
unmaterialized_person_column_used: bool,
materialized_event_column_used: bool,
) -> None:
with self.settings(
PERSON_ON_EVENTS_OVERRIDE=poe_v1,
Expand All @@ -160,30 +144,58 @@ def test_effect_of_poe_settings_on_query_generated(
session_recording_list_instance = SessionRecordingListFromFilters(
filter=filter, team=self.team, hogql_query_modifiers=None
)
[generated_query, query_params] = session_recording_list_instance.get_query()
assert query_params == {
"clamped_to_storage_ttl": mock.ANY,
"end_time": mock.ANY,
"limit": 51,
"offset": 0,
"start_time": mock.ANY,
"team_id": self.team.id,
**expected_query_params,
}

json_extract_fragment = (
"has(%(vperson_filter_pre__0)s, replaceRegexpAll(JSONExtractRaw(properties, %(kperson_filter_pre__0)s)"
)
materialized_column_fragment = 'AND ( has(%(vglobal_0)s, "mat_pp_rgInternal"))'

# it will always have one of these fragments
assert (json_extract_fragment in generated_query) or (materialized_column_fragment in generated_query)
hogql_parsed_select = session_recording_list_instance.get_query()
printed_query = self._print_query(hogql_parsed_select)

person_filtering_expr = self._matching_person_filter_expr_from(hogql_parsed_select)

if poe_v1 or poe_v2:
# when poe is off we will join to events, so we can get person properties directly off them
self._assert_is_events_person_filter(person_filtering_expr)

assert "ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null')" in printed_query
else:
# when poe is off we join to person_distinct_ids, so we can get persons, so we can query their properties
self._assert_is_pdi_filter(person_filtering_expr)

if unmaterialized_person_column_used:
assert (
"argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_6)s), ''), 'null'), '^\"|\"$', ''), person.version) AS properties___rgInternal"
in printed_query
)
else:
# we should use materialized column
# assert (
# "argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_6)s), ''), 'null'), '^\"|\"$', ''), person.version) AS properties___rgInternal"
# not in printed_query
# )
# TODO frustratingly this doesn't pass - but since we're migrating to PoE maybe we can ignore it
pass

self.assertQueryMatchesSnapshot(printed_query)

def _assert_is_pdi_filter(self, person_filtering_expr: list[Expr]) -> None:
assert person_filtering_expr[0].right.select_from.table.chain == ["person_distinct_ids"]
assert person_filtering_expr[0].right.where.left.chain == ["person", "properties", "rgInternal"]

def _assert_is_events_person_filter(self, person_filtering_expr: list[Expr]) -> None:
assert person_filtering_expr[0].right.select_from.table.chain == ["events"]
event_person_condition = [
x
for x in person_filtering_expr[0].right.where.exprs
if isinstance(x, CompareOperation) and x.left.chain == ["person", "properties", "rgInternal"]
]
assert len(event_person_condition) == 1

# the unmaterialized person column
assert (json_extract_fragment in generated_query) is unmaterialized_person_column_used
# materialized event column
assert (materialized_column_fragment in generated_query) is materialized_event_column_used
self.assertQueryMatchesSnapshot(generated_query)
def _matching_person_filter_expr_from(self, hogql_parsed_select: SelectQuery) -> list[Expr]:
where_conditions: list[Expr] = hogql_parsed_select.where.exprs
ands = [x for x in where_conditions if isinstance(x, And)]
assert len(ands) == 1
and_comparisons = [x for x in ands[0].exprs if isinstance(x, CompareOperation)]
assert len(and_comparisons) == 1
assert isinstance(and_comparisons[0].right, SelectQuery)
return and_comparisons

settings_combinations = [
["poe v2 and materialized columns allowed", False, True, True],
Expand Down Expand Up @@ -262,7 +274,7 @@ def test_event_filter_with_person_properties_materialized(
session_recording_list_instance = SessionRecordingListFromFilters(
filter=match_everyone_filter, team=self.team, hogql_query_modifiers=None
)
(session_recordings, _) = session_recording_list_instance.run()
(session_recordings, _, _) = session_recording_list_instance.run()

assert sorted([x["session_id"] for x in session_recordings]) == sorted([session_id_one, session_id_two])

Expand All @@ -283,19 +295,19 @@ def test_event_filter_with_person_properties_materialized(
session_recording_list_instance = SessionRecordingListFromFilters(
filter=match_bla_filter, team=self.team, hogql_query_modifiers=None
)
(session_recordings, _) = session_recording_list_instance.run()
(session_recordings, _, _) = session_recording_list_instance.run()

assert len(session_recordings) == 1
assert session_recordings[0]["session_id"] == session_id_one

def _add_replay_with_pageview(self, session_id: str, user_one):
def _add_replay_with_pageview(self, session_id: str, user: str) -> None:
self.create_event(
user_one,
user,
self.base_time,
properties={"$session_id": session_id, "$window_id": str(uuid4())},
)
produce_replay_summary(
distinct_id=user_one,
distinct_id=user,
session_id=session_id,
first_timestamp=self.base_time,
team_id=self.team.id,
Expand Down Expand Up @@ -349,5 +361,5 @@ def test_person_id_filter(
session_recording_list_instance = SessionRecordingListFromFilters(
filter=filter, team=self.team, hogql_query_modifiers=None
)
(session_recordings, _) = session_recording_list_instance.run()
(session_recordings, _, _) = session_recording_list_instance.run()
assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one])
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
16 changes: 7 additions & 9 deletions frontend/src/layout/navigation-3000/navigationLogic.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -436,15 +436,13 @@ export const navigation3000Logic = kea<navigation3000LogicType>([
identifier: Scene.Insight,
},
},
featureFlags[FEATURE_FLAGS.WEB_ANALYTICS]
? {
identifier: Scene.WebAnalytics,
label: 'Web analytics',
icon: <IconPieChart />,
to: isUsingSidebar ? undefined : urls.webAnalytics(),
tag: 'beta' as const,
}
: null,
{
identifier: Scene.WebAnalytics,
label: 'Web analytics',
icon: <IconPieChart />,
to: isUsingSidebar ? undefined : urls.webAnalytics(),
tag: 'beta' as const,
},
{
identifier: Scene.Replay,
label: 'Session replay',
Expand Down
18 changes: 7 additions & 11 deletions frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -560,17 +560,13 @@ export const commandPaletteLogic = kea<commandPaletteLogicType>([
push(urls.cohorts())
},
},
...(values.featureFlags[FEATURE_FLAGS.WEB_ANALYTICS]
? [
{
icon: IconPieChart,
display: 'Go to Web analytics',
executor: () => {
push(urls.webAnalytics())
},
},
]
: []),
{
icon: IconPieChart,
display: 'Go to Web analytics',
executor: () => {
push(urls.webAnalytics())
},
},
...(values.featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE]
? [
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,9 +110,9 @@ export function OperatorValueSelect({
if (tentativeValidationError) {
setValidationError(tentativeValidationError)
return
} else {
setValidationError(null)
}
setValidationError(null)

setCurrentOperator(newOperator)
if (isOperatorFlag(newOperator)) {
onChange(newOperator, newOperator)
Expand Down Expand Up @@ -151,9 +151,9 @@ export function OperatorValueSelect({
if (tentativeValidationError) {
setValidationError(tentativeValidationError)
return
} else {
setValidationError(null)
}
setValidationError(null)

onChange(currentOperator || PropertyOperator.Exact, newValue)
}}
// open automatically only if new filter
Expand Down
2 changes: 2 additions & 0 deletions frontend/src/lib/components/PropertyFilters/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -315,6 +315,8 @@ export function propertyFilterTypeToPropertyDefinitionType(
? PropertyDefinitionType.Group
: filterType === PropertyFilterType.Session
? PropertyDefinitionType.Session
: filterType === PropertyFilterType.Recording
? PropertyDefinitionType.Session
: PropertyDefinitionType.Event
}

Expand Down
Loading

0 comments on commit 8a77e1c

Please sign in to comment.