Skip to content

Commit

Permalink
merge
Browse files Browse the repository at this point in the history
  • Loading branch information
neilkakkar committed Apr 17, 2024
2 parents 0d520b6 + a6899dd commit e42ff97
Show file tree
Hide file tree
Showing 36 changed files with 982 additions and 64 deletions.
3 changes: 1 addition & 2 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -72,8 +72,7 @@
"DATABASE_URL": "postgres://posthog:posthog@localhost:5432/posthog",
"SKIP_SERVICE_VERSION_REQUIREMENTS": "1",
"PRINT_SQL": "1",
"BILLING_SERVICE_URL": "https://billing.dev.posthog.dev",
"RECORDINGS_INGESTER_URL": "http://localhost:6738"
"BILLING_SERVICE_URL": "https://billing.dev.posthog.dev"
},
"console": "integratedTerminal",
"python": "${workspaceFolder}/env/bin/python",
Expand Down
1 change: 0 additions & 1 deletion bin/start
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ trap "trap - SIGTERM && kill -- -$$" SIGINT SIGTERM EXIT
export DEBUG=${DEBUG:-1}
export SKIP_SERVICE_VERSION_REQUIREMENTS=1
export BILLING_SERVICE_URL=${BILLING_SERVICE_URL:-https://billing.dev.posthog.dev}
export RECORDINGS_INGESTER_URL=${RECORDINGS_INGESTER_URL:-http://localhost:6738}

service_warning() {
echo -e "\033[0;31m$1 isn't ready. You can run the stack with:\ndocker compose -f docker-compose.dev.yml up\nIf you have already ran that, just make sure that services are starting properly, and sit back.\nWaiting for $1 to start...\033[0m"
Expand Down
1 change: 1 addition & 0 deletions docker-compose.base.yml
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ services:
volumes:
- /var/lib/elasticsearch/data
temporal:
restart: on-failure
environment:
- DB=postgresql
- DB_PORT=5432
Expand Down
1 change: 0 additions & 1 deletion docker-compose.hobby.yml
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,6 @@ services:
SENTRY_DSN: $SENTRY_DSN
SITE_URL: https://$DOMAIN
SECRET_KEY: $POSTHOG_SECRET
RECORDINGS_INGESTER_URL: http://plugins:6738
depends_on:
- db
- redis
Expand Down
89 changes: 89 additions & 0 deletions ee/clickhouse/views/experiments.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
ClickhouseTrendExperimentResult,
)
from ee.clickhouse.queries.experiments.utils import requires_flag_warning
from posthog.api.cohort import CohortSerializer
from posthog.api.feature_flag import FeatureFlagSerializer, MinimalFeatureFlagSerializer
from posthog.api.routing import TeamAndOrgViewSetMixin
from posthog.api.shared import UserBasicSerializer
Expand Down Expand Up @@ -150,6 +151,7 @@ class Meta:
"end_date",
"feature_flag_key",
"feature_flag",
"exposure_cohort",
"parameters",
"secondary_metrics",
"filters",
Expand All @@ -164,6 +166,7 @@ class Meta:
"created_at",
"updated_at",
"feature_flag",
"exposure_cohort",
]

def validate_parameters(self, value):
Expand Down Expand Up @@ -353,3 +356,89 @@ def requires_flag_implementation(self, request: Request, *args: Any, **kwargs: A
warning = requires_flag_warning(filter, self.team)

return Response({"result": warning})

@action(methods=["POST"], detail=True)
def create_exposure_cohort_for_experiment(self, request: Request, *args: Any, **kwargs: Any) -> Response:
    """Create and attach a behavioral cohort of users exposed to this experiment.

    The cohort matches everyone who performed the exposure event since the
    experiment's start date. By default that is the ``$feature_flag_called``
    event filtered to this experiment's feature-flag key; if the experiment
    defines ``parameters["custom_exposure_filter"]``, its first entity
    (event or action) is used instead.

    Returns:
        201 response with the serialized cohort under ``"cohort"``.

    Raises:
        ValidationError: if the experiment has no feature flag, has no start
            date, already has an exposure cohort, or a custom exposure action
            ID is not an integer.
    """
    experiment = self.get_object()
    flag = getattr(experiment, "feature_flag", None)
    if not flag:
        raise ValidationError("Experiment does not have a feature flag")

    if not experiment.start_date:
        raise ValidationError("Experiment does not have a start date")

    if experiment.exposure_cohort:
        raise ValidationError("Experiment already has an exposure cohort")

    exposure_filter_data = (experiment.parameters or {}).get("custom_exposure_filter")
    exposure_filter = None
    if exposure_filter_data:
        # NOTE(review): "is_simplified" is forced on here — presumably the stored
        # custom exposure filter is already simplified; confirm with the writer side.
        exposure_filter = Filter(data={**exposure_filter_data, "is_simplified": True}, team=experiment.team)

    # Default exposure: the flag-evaluation event scoped to this experiment's flag key.
    target_entity: int | str = "$feature_flag_called"
    target_entity_type = "events"
    target_filters = [
        {
            "key": "$feature_flag",
            "value": [flag.key],
            "operator": "exact",
            "type": "event",
        }
    ]

    if exposure_filter:
        # A custom exposure filter overrides the default entirely: use its first
        # entity and keep only the property-filter types the behavioral cohort
        # query supports.
        entity = exposure_filter.entities[0]
        if entity.id:
            target_entity_type = entity.type if entity.type in ["events", "actions"] else "events"
            target_entity = entity.id
            if entity.type == "actions":
                try:
                    target_entity = int(target_entity)
                except ValueError as e:
                    # Chain the original error so the offending value stays visible
                    # in tracebacks/logs instead of being silently discarded.
                    raise ValidationError("Invalid action ID") from e

            target_filters = [
                prop.to_dict()
                for prop in entity.property_groups.flat
                if prop.type in ("event", "feature", "element", "hogql")
            ]

    # Dynamic (non-static) behavioral cohort: performed the exposure event at any
    # time from the experiment's start date onward.
    cohort_serializer = CohortSerializer(
        data={
            "is_static": False,
            "name": f'Users exposed to experiment "{experiment.name}"',
            "is_calculating": True,
            "filters": {
                "properties": {
                    "type": "OR",
                    "values": [
                        {
                            "type": "OR",
                            "values": [
                                {
                                    "type": "behavioral",
                                    "value": "performed_event",
                                    "key": target_entity,
                                    "negation": False,
                                    "event_type": target_entity_type,
                                    "event_filters": target_filters,
                                    "explicit_datetime": experiment.start_date.isoformat(),
                                }
                            ],
                        }
                    ],
                }
            },
        },
        context={
            "request": request,
            "team": self.team,
            "team_id": self.team_id,
        },
    )

    cohort_serializer.is_valid(raise_exception=True)
    cohort = cohort_serializer.save()
    # Persist only the new link; avoids clobbering concurrent experiment edits.
    experiment.exposure_cohort = cohort
    experiment.save(update_fields=["exposure_cohort"])
    return Response({"cohort": cohort_serializer.data}, status=201)
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# serializer version: 1
# name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results
'''
/* user_id:101 celery:posthog.tasks.tasks.sync_insight_caching_state */
/* user_id:106 celery:posthog.tasks.tasks.sync_insight_caching_state */
SELECT team_id,
date_diff('second', max(timestamp), now()) AS age
FROM events
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1913,3 +1913,106 @@
ORDER BY breakdown_value
'''
# ---
# name: TestExperimentAuxiliaryEndpoints.test_create_exposure_cohort_for_experiment_with_custom_action_filters_exposure
'''
/* cohort_calculation: */
INSERT INTO cohortpeople
SELECT id,
2 as cohort_id,
2 as team_id,
1 AS sign,
1 AS version
FROM
(SELECT id
FROM person
WHERE team_id = 2
AND id IN
(SELECT id
FROM person
WHERE team_id = 2
AND ((has(['http://example.com'], replaceRegexpAll(JSONExtractRaw(properties, '$pageview'), '^"|"$', '')))) )
GROUP BY id
HAVING max(is_deleted) = 0
AND ((has(['http://example.com'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$pageview'), '^"|"$', '')))) SETTINGS optimize_aggregation_in_order = 1) as person
UNION ALL
SELECT person_id,
cohort_id,
team_id,
-1,
version
FROM cohortpeople
WHERE team_id = 2
AND cohort_id = 2
AND version < 1
AND sign = 1
'''
# ---
# name: TestExperimentAuxiliaryEndpoints.test_create_exposure_cohort_for_experiment_with_custom_action_filters_exposure.1
'''
/* user_id:115 cohort_calculation:posthog.tasks.calculate_cohort.calculate_cohort_ch */
INSERT INTO cohortpeople
SELECT id,
2 as cohort_id,
2 as team_id,
1 AS sign,
1 AS version
FROM
(SELECT behavior_query.person_id AS id
FROM
(SELECT pdi.person_id AS person_id,
countIf(timestamp > '2024-01-01 10:23:00'
AND timestamp < now()
AND ((event = 'insight viewed'
AND (has(['RETENTION'], replaceRegexpAll(JSONExtractRaw(properties, 'insight'), '^"|"$', ''))
AND distinct_id IN
(SELECT distinct_id
FROM
(SELECT distinct_id, argMax(person_id, version) as person_id
FROM person_distinct_id2
WHERE team_id = 2
GROUP BY distinct_id
HAVING argMax(is_deleted, version) = 0)
WHERE person_id IN
(SELECT id
FROM person
WHERE team_id = 2
AND id IN
(SELECT id
FROM person
WHERE team_id = 2
AND ((has(['http://example.com'], replaceRegexpAll(JSONExtractRaw(properties, '$pageview'), '^"|"$', '')))) )
GROUP BY id
HAVING max(is_deleted) = 0
AND ((has(['http://example.com'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$pageview'), '^"|"$', '')))) SETTINGS optimize_aggregation_in_order = 1) )))
OR (event = 'insight viewed'
AND (toFloat64OrNull(replaceRegexpAll(replaceRegexpAll(replaceRegexpAll(JSONExtractRaw(properties, 'filters_count'), '^"|"$', ''), ' ', ''), '^"|"$', '')) > '1'))
OR (match(replaceRegexpAll(JSONExtractRaw(properties, '$current_url'), '^"|"$', ''), '/123')
AND event = '$autocapture'))
AND (has(['bonk'], replaceRegexpAll(JSONExtractRaw(properties, 'bonk'), '^"|"$', ''))
AND ifNull(in(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$current_url'), ''), 'null'), '^"|"$', ''), tuple('x', 'y')), 0))) > 0 AS performed_event_condition_22_level_level_0_level_0_0
FROM events e
INNER JOIN
(SELECT distinct_id,
argMax(person_id, version) as person_id
FROM person_distinct_id2
WHERE team_id = 2
GROUP BY distinct_id
HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
WHERE team_id = 2
AND event IN ['insight viewed', 'insight viewed', '$autocapture']
GROUP BY person_id) behavior_query
WHERE 1 = 1
AND (((performed_event_condition_22_level_level_0_level_0_0))) ) as person
UNION ALL
SELECT person_id,
cohort_id,
team_id,
-1,
version
FROM cohortpeople
WHERE team_id = 2
AND cohort_id = 2
AND version < 1
AND sign = 1
'''
# ---
Loading

0 comments on commit e42ff97

Please sign in to comment.