Skip to content

Commit

Permalink
Merge branch 'master' into feature-management-ui-setup
Browse files Browse the repository at this point in the history
  • Loading branch information
havenbarnes authored Dec 5, 2024
2 parents 7bc7837 + f04d14c commit a21c9fb
Show file tree
Hide file tree
Showing 5 changed files with 167 additions and 11 deletions.
61 changes: 52 additions & 9 deletions ee/clickhouse/materialized_columns/columns.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import logging
import re
from collections.abc import Callable, Iterator
from copy import copy
Expand All @@ -22,6 +23,9 @@
from posthog.models.utils import generate_random_short_suffix
from posthog.settings import CLICKHOUSE_DATABASE, CLICKHOUSE_PER_TEAM_SETTINGS, TEST


logger = logging.getLogger(__name__)

T = TypeVar("T")

DEFAULT_TABLE_COLUMN: Literal["properties"] = "properties"
Expand Down Expand Up @@ -161,6 +165,10 @@ def map_data_nodes(self, cluster: ClickhouseCluster, fn: Callable[[Client], T])
}


def get_minmax_index_name(column: str) -> str:
    """Build the canonical name of the minmax data-skipping index for *column*.

    Shared by the create and drop tasks so the index name is derived in
    exactly one place.
    """
    return "minmax_" + column


@dataclass
class CreateColumnOnDataNodesTask:
table: str
Expand All @@ -182,7 +190,7 @@ def execute(self, client: Client) -> None:
parameters["comment"] = self.column.details.as_column_comment()

if self.create_minmax_index:
index_name = f"minmax_{self.column.name}"
index_name = get_minmax_index_name(self.column.name)
actions.append(f"ADD INDEX IF NOT EXISTS {index_name} {self.column.name} TYPE minmax GRANULARITY 1")

client.execute(
Expand Down Expand Up @@ -289,26 +297,61 @@ def update_column_is_disabled(table: TablesWithMaterializedColumns, column_name:
).result()


def check_index_exists(client: Client, table: str, index: str) -> bool:
    """Return whether a data-skipping index named *index* exists on *table*.

    Looks the index up in ``system.data_skipping_indices`` for the current
    database on the node the client is connected to.
    """
    result = client.execute(
        """
        SELECT count()
        FROM system.data_skipping_indices
        WHERE database = currentDatabase() AND table = %(table)s AND name = %(name)s
        """,
        {"table": table, "name": index},
    )
    [(count,)] = result
    # (database, table, name) identifies at most one index, so the count is 0 or 1.
    assert 0 <= count <= 1
    return count > 0


def check_column_exists(client: Client, table: str, column: str) -> bool:
    """Return whether *table* has a column named *column*.

    Looks the column up in ``system.columns`` for the current database on the
    node the client is connected to.
    """
    result = client.execute(
        """
        SELECT count()
        FROM system.columns
        WHERE database = currentDatabase() AND table = %(table)s AND name = %(name)s
        """,
        {"table": table, "name": column},
    )
    [(count,)] = result
    # (database, table, name) identifies at most one column, so the count is 0 or 1.
    assert 0 <= count <= 1
    return count > 0


@dataclass
class DropColumnTask:
    """Drops a materialized column — and optionally its minmax index — from a table.

    Both drops are gated on existence checks so that no mutation is enqueued
    on nodes where the index/column is already gone (the sibling tests assert
    that no DROP INDEX / DROP COLUMN mutation runs in those cases).

    NOTE(review): the scraped diff interleaved removed pre-merge lines (the
    unconditional two-statement ALTER path) with the merged code; this body is
    the coherent post-merge version with the stale duplicates removed.
    """

    table: str
    column_name: str
    try_drop_index: bool

    def execute(self, client: Client) -> None:
        # Collect only the ALTER sub-commands that actually need to run.
        actions = []

        if self.try_drop_index:
            index_name = get_minmax_index_name(self.column_name)
            drop_index_action = f"DROP INDEX IF EXISTS {index_name}"
            if check_index_exists(client, self.table, index_name):
                actions.append(drop_index_action)
            else:
                logger.info("Skipping %r, nothing to do...", drop_index_action)

        drop_column_action = f"DROP COLUMN IF EXISTS {self.column_name}"
        if check_column_exists(client, self.table, self.column_name):
            actions.append(drop_column_action)
        else:
            logger.info("Skipping %r, nothing to do...", drop_column_action)

        if actions:
            # Single combined ALTER; alter_sync=2 (wait for all replicas) in
            # tests for determinism, 1 (wait for own changes) otherwise.
            client.execute(
                f"ALTER TABLE {self.table} " + ", ".join(actions),
                settings={"alter_sync": 2 if TEST else 1},
            )


def drop_column(table: TablesWithMaterializedColumns, column_name: str) -> None:
cluster = get_cluster()
Expand Down
68 changes: 68 additions & 0 deletions ee/clickhouse/materialized_columns/test/test_columns.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from datetime import timedelta
from time import sleep
from collections.abc import Iterable
from unittest import TestCase
from unittest.mock import patch

Expand Down Expand Up @@ -336,3 +337,70 @@ def test_lifecycle(self):
assert key not in get_materialized_columns(table, exclude_disabled_columns=True)
with self.assertRaises(ValueError):
MaterializedColumn.get(table, destination_column)

def _get_latest_mutation_id(self, table: str) -> str:
    """Return the largest mutation_id recorded for *table* in the current database."""
    rows = sync_execute(
        """
        SELECT max(mutation_id)
        FROM system.mutations
        WHERE
            database = currentDatabase()
            AND table = %(table)s
        """,
        {"table": table},
    )
    # Exactly one row with one value comes back from the aggregate query.
    ((mutation_id,),) = rows
    return mutation_id

def _get_mutations_since_id(self, table: str, id: str) -> Iterable[str]:
    """List the mutation commands applied to *table* strictly after mutation *id*."""
    rows = sync_execute(
        """
        SELECT command
        FROM system.mutations
        WHERE
            database = currentDatabase()
            AND table = %(table)s
            AND mutation_id > %(mutation_id)s
        ORDER BY mutation_id
        """,
        {"table": table, "mutation_id": id},
    )
    return [row[0] for row in rows]

def test_drop_optimized_no_index(self):
    """drop_column should not enqueue a DROP INDEX mutation for a column that
    was materialized without a minmax index."""
    # person is a little bit easier than events because no shard awareness is needed
    table: TablesWithMaterializedColumns = "person"
    property_name: PropertyName = "myprop"
    source_column: TableColumn = "properties"

    destination_column = materialize(table, property_name, table_column=source_column, create_minmax_index=False)
    assert destination_column is not None

    last_mutation_before_drop = self._get_latest_mutation_id(table)

    drop_column(table, destination_column)

    commands = self._get_mutations_since_id(table, last_mutation_before_drop)
    assert not any("DROP INDEX" in command for command in commands)

def test_drop_optimized_no_column(self):
    """drop_column should not enqueue a DROP COLUMN mutation when the column
    has already been removed out-of-band."""
    # person is a little bit easier than events because no shard awareness is needed
    table: TablesWithMaterializedColumns = "person"
    property_name: PropertyName = "myprop"
    source_column: TableColumn = "properties"

    # create the materialized column
    destination_column = materialize(table, property_name, table_column=source_column, create_minmax_index=False)
    assert destination_column is not None

    # remove it directly, behind drop_column's back
    sync_execute(f"ALTER TABLE {table} DROP COLUMN {destination_column}", settings={"alter_sync": 1})

    last_mutation_before_drop = self._get_latest_mutation_id(table)

    drop_column(table, destination_column)

    commands = self._get_mutations_since_id(table, last_mutation_before_drop)
    assert not any("DROP COLUMN" in command for command in commands)
Original file line number Diff line number Diff line change
Expand Up @@ -323,6 +323,13 @@ export function SummaryTable(): JSX.Element {
],
},
]
if (experiment.filters.insight === InsightType.FUNNELS) {
if (experiment.filters?.events?.[0]) {
filters.push(experiment.filters.events[0])
} else if (experiment.filters?.actions?.[0]) {
filters.push(experiment.filters.actions[0])
}
}
const filterGroup: Partial<RecordingUniversalFilters> = {
filter_group: {
type: FilterLogicalOperator.And,
Expand Down
25 changes: 24 additions & 1 deletion posthog/api/organization.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,16 @@
from django.shortcuts import get_object_or_404
from rest_framework import exceptions, permissions, serializers, viewsets
from rest_framework.request import Request
from rest_framework.response import Response
import posthoganalytics

from posthog import settings
from posthog.api.routing import TeamAndOrgViewSetMixin
from posthog.api.shared import ProjectBasicSerializer, TeamBasicSerializer
from posthog.auth import PersonalAPIKeyAuthentication
from posthog.cloud_utils import is_cloud
from posthog.constants import INTERNAL_BOT_EMAIL_SUFFIX, AvailableFeature
from posthog.event_usage import report_organization_deleted
from posthog.event_usage import report_organization_deleted, groups
from posthog.models import Organization, User
from posthog.models.async_deletion import AsyncDeletion, DeletionType
from posthog.rbac.user_access_control import UserAccessControlSerializerMixin
Expand Down Expand Up @@ -240,3 +242,24 @@ def get_serializer_context(self) -> dict[str, Any]:
**super().get_serializer_context(),
"user_permissions": UserPermissions(cast(User, self.request.user)),
}

def update(self, request: Request, *args: Any, **kwargs: Any) -> Response:
    """Handle organization updates, emitting an analytics event whenever the
    2FA-enforcement flag is part of the payload.

    NOTE(review): the event is captured before ``super().update`` runs, so it
    fires even if the update itself is subsequently rejected — confirm this is
    intentional.
    """
    if "enforce_2fa" not in request.data:
        return super().update(request, *args, **kwargs)

    organization = self.get_object()
    user = cast(User, request.user)
    membership = user.organization_memberships.get(organization=organization)

    # Report the 2FA enforcement change, grouped under the organization.
    posthoganalytics.capture(
        str(user.distinct_id),
        "organization 2fa enforcement toggled",
        properties={
            "enabled": request.data["enforce_2fa"],
            "organization_id": str(organization.id),
            "organization_name": organization.name,
            "user_role": membership.level,
        },
        groups=groups(organization),
    )

    return super().update(request, *args, **kwargs)
17 changes: 16 additions & 1 deletion posthog/api/test/test_organization.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from rest_framework import status
from unittest.mock import patch, ANY

from posthog.models import Organization, OrganizationMembership, Team
from posthog.models.personal_api_key import PersonalAPIKey, hash_key_value
Expand Down Expand Up @@ -128,7 +129,8 @@ def test_cant_update_plugins_access_level(self):
self.organization.refresh_from_db()
self.assertEqual(self.organization.plugins_access_level, 3)

def test_enforce_2fa_for_everyone(self):
@patch("posthoganalytics.capture")
def test_enforce_2fa_for_everyone(self, mock_capture):
# Only admins should be able to enforce 2fa
response = self.client.patch(f"/api/organizations/{self.organization.id}/", {"enforce_2fa": True})
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
Expand All @@ -142,6 +144,19 @@ def test_enforce_2fa_for_everyone(self):
self.organization.refresh_from_db()
self.assertEqual(self.organization.enforce_2fa, True)

# Verify the capture event was called correctly
mock_capture.assert_any_call(
self.user.distinct_id,
"organization 2fa enforcement toggled",
properties={
"enabled": True,
"organization_id": str(self.organization.id),
"organization_name": self.organization.name,
"user_role": OrganizationMembership.Level.ADMIN,
},
groups={"instance": ANY, "organization": str(self.organization.id)},
)

def test_projects_outside_personal_api_key_scoped_organizations_not_listed(self):
other_org, _, _ = Organization.objects.bootstrap(self.user)
personal_api_key = generate_random_token_personal()
Expand Down

0 comments on commit a21c9fb

Please sign in to comment.