Merge branch 'master' into feature/celery-queue-configure
# Conflicts:
#	requirements-dev.txt
#	requirements.in
#	requirements.txt
webjunkie committed Jan 16, 2024
2 parents 7473fb1 + 9700a2b commit 40b985c
Showing 904 changed files with 19,691 additions and 19,305 deletions.
2 changes: 1 addition & 1 deletion .dockerignore
@@ -12,8 +12,8 @@
!package.json
!pnpm-lock.yaml
!webpack.config.js
!postcss.config.js
!playwright.config.ts
!tailwind.config.js
!.kearc
!.storybook
!tsconfig.json
5 changes: 5 additions & 0 deletions .eslintrc.js
@@ -51,6 +51,11 @@ module.exports = {
'import',
],
rules: {
// PyCharm always adds curly braces around JSX props; VS Code apparently doesn't, and PR reviewers often complain about braces on props that don't need them
// let's save the humans time and let the machines do the work
// "never" means that if a prop doesn't need the curly braces, they are removed (or reported as an error)
// see https://github.com/jsx-eslint/eslint-plugin-react/blob/master/docs/rules/jsx-curly-brace-presence.md
"react/jsx-curly-brace-presence": ['error', { "props": "never", "children": "never", "propElementValues": "always" }],
'no-console': ['error', { allow: ['warn', 'error'] }],
'no-debugger': 'error',
'no-only-tests/no-only-tests': 'error',
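For orientation only (not part of the diff), this is roughly what the new `react/jsx-curly-brace-presence` setting enforces — a minimal TSX sketch with hypothetical components:

```tsx
// Illustration only; Button and Tooltip are hypothetical components, not ones from this repo.
import React from 'react'

const Button = ({ label }: { label: string }): JSX.Element => <button>{label}</button>
const Tooltip = ({ content }: { content: JSX.Element }): JSX.Element => <span>{content}</span>

export function Example(): JSX.Element {
    return (
        <div>
            {/* "props": "never" — <Button label={'Save'} /> would be flagged as redundant braces */}
            <Button label="Save" />
            {/* "children": "never" — <span>{'Saved'}</span> would be flagged */}
            <span>Saved</span>
            {/* "propElementValues": "always" — JSX element prop values keep their braces */}
            <Tooltip content={<Button label="Info" />} />
        </div>
    )
}
```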
2 changes: 1 addition & 1 deletion .github/workflows/build-hogql-parser.yml
@@ -52,7 +52,7 @@ jobs:
curl -s -u posthog-bot:${{ secrets.POSTHOG_BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} -X POST -d "{ \"body\": \"$message_body\" }" "https://api.github.com/repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments"
fi
fi
echo "::set-output name=parser-release-needed::$parser_release_needed"
echo "parser-release-needed=$parser_release_needed" >> $GITHUB_OUTPUT
build-wheels:
name: Build wheels on ${{ matrix.os }}
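The hunk above replaces the deprecated `::set-output` workflow command with a write to the `$GITHUB_OUTPUT` file. For comparison only — this workflow step is plain shell, not a JS action — the same output could be set from a TypeScript action via `@actions/core`, a minimal sketch:

```ts
// Hypothetical JS-action equivalent, for illustration only.
import * as core from '@actions/core'

// Shell form used in the workflow: echo "parser-release-needed=$parser_release_needed" >> $GITHUB_OUTPUT
const parserReleaseNeeded = process.env.PARSER_RELEASE_NEEDED ?? 'false' // assumed env var name
core.setOutput('parser-release-needed', parserReleaseNeeded)
```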
6 changes: 4 additions & 2 deletions .github/workflows/ci-backend.yml
@@ -61,8 +61,10 @@ jobs:
# the dependencies more clear if we separated the backend/frontend
# code completely
# really we should ignore ee/frontend/** but dorny doesn't support that
# - '!ee/frontend/**'
# including the negated rule appears to work
# but makes it always match because the checked file always isn't `ee/frontend/**` 🙈
- 'ee/**/*'
- '!ee/frontend/**'
- 'posthog/**/*'
- 'bin/*.py'
- requirements.txt
@@ -135,7 +137,7 @@ jobs:
- name: Check static typing
run: |
mypy -p posthog --exclude bin/migrate_kafka_data.py --exclude posthog/hogql/grammar/HogQLParser.py --exclude gunicorn.config.py --enable-recursive-aliases
mypy -p posthog | mypy-baseline filter
- name: Check if "schema.py" is up to date
run: |
1 change: 0 additions & 1 deletion .github/workflows/ci-frontend.yml
@@ -46,7 +46,6 @@ jobs:
- tsconfig.json
- tsconfig.*.json
- webpack.config.js
- postcss.config.js
- stylelint*
frontend-code-quality:
4 changes: 2 additions & 2 deletions .github/workflows/codespaces.yml
@@ -42,7 +42,7 @@ jobs:
# for more details
- name: Docker image metadata
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: ghcr.io/${{ steps.lowercase.outputs.repository }}/codespaces
tags: |
@@ -54,7 +54,7 @@
# This creates a scope similar to the github cache action scoping
- name: Docker cache-from/cache-to metadata
id: meta-for-cache
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: ghcr.io/${{ steps.lowercase.outputs.repository }}/codespaces
tags: |
6 changes: 3 additions & 3 deletions .github/workflows/container-images-cd.yml
@@ -98,7 +98,7 @@ jobs:
- name: Check for changes in plugins directory
id: check_changes_plugins
run: |
echo "::set-output name=changed::$(git diff --name-only HEAD^ HEAD | grep '^plugin-server/' || true)"
echo "changed=$(git diff --name-only HEAD^ HEAD | grep '^plugin-server/' || true)" >> $GITHUB_OUTPUT
- name: Trigger Ingestion Cloud deployment
if: steps.check_changes_plugins.outputs.changed != ''
@@ -116,7 +116,7 @@
- name: Check for changes that affect batch exports temporal worker
id: check_changes_batch_exports_temporal_worker
run: |
echo "::set-output name=changed::$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/batch_exports|^posthog/batch_exports/|^posthog/management/commands/start_temporal_worker.py$' || true)"
echo "changed=$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/batch_exports|^posthog/batch_exports/|^posthog/management/commands/start_temporal_worker.py$' || true)" >> $GITHUB_OUTPUT
- name: Trigger Batch Exports Temporal Worker Cloud deployment
if: steps.check_changes_batch_exports_temporal_worker.outputs.changed != ''
@@ -135,7 +135,7 @@
- name: Check for changes that affect data warehouse temporal worker
id: check_changes_data_warehouse_temporal_worker
run: |
echo "::set-output name=changed::$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/data_imports|^posthog/warehouse/|^posthog/management/commands/start_temporal_worker.py$' || true)"
echo "changed=$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/data_imports|^posthog/warehouse/|^posthog/management/commands/start_temporal_worker.py$' || true)" >> $GITHUB_OUTPUT
- name: Trigger Data Warehouse Temporal Worker Cloud deployment
if: steps.check_changes_data_warehouse_temporal_worker.outputs.changed != ''
6 changes: 3 additions & 3 deletions .github/workflows/customer-data-pipeline.yml
@@ -40,7 +40,7 @@ jobs:
run: |
echo "repository=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT"
- uses: docker/metadata-action@v4
- uses: docker/metadata-action@v5
id: meta
with:
images: ghcr.io/${{ steps.lowercase.outputs.repository }}/cdp
@@ -49,7 +49,7 @@
# ${{ github.repository }} directly because the repository
# organization name has upper case characters, which are not
# allowed in docker image names.
- uses: docker/metadata-action@v4
- uses: docker/metadata-action@v5
id: meta-cache
with:
images: ghcr.io/${{ steps.lowercase.outputs.repository }}/cdp
@@ -140,7 +140,7 @@ jobs:
- name: Generate docker latest tag
if: github.ref == 'refs/heads/master'
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
id: meta
with:
images: ghcr.io/${{ steps.lowercase.outputs.repository }}/cdp
10 changes: 1 addition & 9 deletions .github/workflows/storybook-chromatic.yml
@@ -146,15 +146,7 @@ jobs:
VARIANT: ${{ github.event.pull_request.head.repo.full_name == github.repository && 'update' || 'verify' }}
STORYBOOK_SKIP_TAGS: 'test-skip,test-skip-${{ matrix.browser }}'
run: |
pnpm test:visual-regression:stories:ci:$VARIANT --browsers ${{ matrix.browser }} --shard ${{ matrix.shard }}/$SHARD_COUNT
- name: Run @playwright/test (legacy, Chromium-only)
if: matrix.browser == 'chromium' && matrix.shard == 1
env:
# Update snapshots for PRs on the main repo, verify on forks, which don't have access to PostHog Bot
VARIANT: ${{ github.event.pull_request.head.repo.full_name == github.repository && 'update' || 'verify' }}
run: |
pnpm test:visual-regression:legacy:ci:$VARIANT
pnpm test:visual-regression:ci:$VARIANT --browsers ${{ matrix.browser }} --shard ${{ matrix.shard }}/$SHARD_COUNT
- name: Archive failure screenshots
if: ${{ failure() }}
6 changes: 6 additions & 0 deletions .stylelintrc.js
@@ -40,6 +40,12 @@ module.exports = {
message: 'Expected variable to match kebab-case or snake_case',
},
],
'scss/at-rule-no-unknown': [
true,
{
'ignoreAtRules': ['tailwind']
}
],
'scss/operator-no-newline-after': null, // Doesn't always play well with prettier
'scss/at-extend-no-missing-placeholder': null,
'scss/comment-no-empty': null,
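Several hunks in this merge point at Tailwind being wired into the frontend build: tailwind.config.js is now allowed through .dockerignore, the `@tailwind` at-rule is whitelisted for stylelint above, and bin/start-frontend (further down) silences Tailwind's DEBUG logging. For orientation only, a minimal Tailwind config sketch — hypothetical, not the repository's actual tailwind.config.js:

```ts
// Hypothetical minimal config, for illustration only; the content glob is an assumption.
import type { Config } from 'tailwindcss'

const config: Config = {
    content: ['./frontend/src/**/*.{ts,tsx}'], // where Tailwind scans for class names
    theme: {
        extend: {},
    },
    plugins: [],
}

export default config
```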
28 changes: 28 additions & 0 deletions bin/build-schema.mjs
@@ -0,0 +1,28 @@
#!/usr/bin/env node

// replaces ts-json-schema-generator -f tsconfig.json --path 'frontend/src/queries/schema.ts' --no-type-check > frontend/src/queries/schema.json

import fs from "fs";
import stableStringify from "safe-stable-stringify";
import tsj from "ts-json-schema-generator";

/** @type {import('ts-json-schema-generator/dist/src/Config').Config} */
const config = {
...tsj.DEFAULT_CONFIG,
path: "frontend/src/queries/schema.ts",
tsconfig: "tsconfig.json",
discriminatorType: "open-api",
skipTypeCheck: true,
};

const output_path = "frontend/src/queries/schema.json";

const schema = tsj.createGenerator(config).createSchema(config.type);
const stringify = config.sortProps ? stableStringify : JSON.stringify;
const schemaString = (config.minify ? stringify(schema) : stringify(schema, null, 2));

fs.writeFile(output_path, schemaString, (err) => {
if (err) {
throw err;
}
});
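One design choice worth noting in bin/build-schema.mjs: when `config.sortProps` is enabled it serializes with `safe-stable-stringify` instead of `JSON.stringify`, so the generated schema.json is deterministic regardless of property insertion order and diffs cleanly. A tiny sketch of the difference (illustration only):

```ts
// Why stable stringification matters for a generated, version-controlled artifact.
import stableStringify from 'safe-stable-stringify'

const a = { b: 1, a: 2 }
const b = { a: 2, b: 1 }

console.log(JSON.stringify(a) === JSON.stringify(b))     // false — key order follows insertion order
console.log(stableStringify(a) === stableStringify(b))   // true — keys are sorted deterministically
```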
4 changes: 4 additions & 0 deletions bin/start-frontend
@@ -4,5 +4,9 @@ set -e
# pass first argument to WEBPACK_HOT_RELOAD_HOST
[ $# -ge 1 ] && export WEBPACK_HOT_RELOAD_HOST=$1

# DEBUG=1 might be exported to this script from a parent, but we don't want it in the frontend build process
# In particular, DEBUG=1 enables a lot of Tailwind logging we don't need, so let's set 0 instead
export DEBUG=0

pnpm install
pnpm start
2 changes: 1 addition & 1 deletion bin/tests
@@ -30,4 +30,4 @@ PG_PASSWORD="${PGPASSWORD:=posthog}"
PG_PORT="${PGPORT:=5432}"
PGOPTIONS='--client-min-messages=warning' psql posthog -d "postgres://${PG_USER}:${PG_PASSWORD}@${PG_HOST}:${PG_PORT}" -c "drop database if exists test_posthog" 1> /dev/null

nodemon -w ./posthog -w ./ee --ext py --exec "OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES pytest --reuse-db --durations-min=2.0 ${MIGRATIONS} -s $* --snapshot-update; mypy -p posthog --exclude bin/migrate_kafka_data.py --exclude posthog/hogql/grammar/HogQLParser.py --exclude gunicorn.config.py --exclude posthog/batch_exports/models.py"
nodemon -w ./posthog -w ./ee --ext py --exec "OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES pytest --reuse-db --durations-min=2.0 ${MIGRATIONS} -s $* --snapshot-update; mypy -p posthog | mypy-baseline filter"
2 changes: 1 addition & 1 deletion cypress/e2e/actions.cy.ts
@@ -5,7 +5,7 @@ const createAction = (actionName: string): void => {
cy.get('input[name="item-name-large"]').should('exist')

cy.get('input[name="item-name-large"]').type(actionName)
cy.get('.LemonSegmentedButton > ul > :nth-child(3)').click()
cy.get('.LemonSegmentedButton > ul > :nth-child(2)').click() // Click "Pageview"
cy.get('[data-attr=edit-action-url-input]').click().type(Cypress.config().baseUrl)

cy.get('[data-attr=save-action-button]').first().click()
Expand Down
8 changes: 3 additions & 5 deletions cypress/e2e/auth.cy.ts
@@ -1,5 +1,3 @@
import { urls } from 'scenes/urls'

describe('Auth', () => {
beforeEach(() => {
cy.get('[data-attr=menu-item-me]').click()
@@ -20,7 +18,7 @@ describe('Auth', () => {

cy.get('[type=submit]').click()
// Login should have succeeded
cy.location('pathname').should('eq', '/home')
cy.location('pathname').should('eq', '/')
})

it('Logout and verify that Google login button has correct link', () => {
@@ -48,7 +46,7 @@ describe('Auth', () => {
cy.get('[data-attr=password]').clear().type('12345678')
cy.get('[type=submit]').click()
// Login should have succeeded
cy.location('pathname').should('eq', '/home')
cy.location('pathname').should('eq', '/')
})

it('Redirect to appropriate place after login', () => {
@@ -84,6 +82,6 @@

it('Cannot access signup page if authenticated', () => {
cy.visit('/signup')
cy.location('pathname').should('eq', urls.projectHomepage())
cy.location('pathname').should('eq', '/project/1')
})
})
15 changes: 0 additions & 15 deletions cypress/e2e/cohorts.cy.ts
@@ -42,21 +42,6 @@ describe('Cohorts', () => {
cy.get('tbody').contains('Test Cohort')
cy.contains('Create your first cohort').should('not.exist')

it('Cohorts new and list', () => {
cy.get('[data-row-key]').first().click()
cy.get('[data-test-goto-person]').first().click()
cy.url().should('include', '/person/')

cy.get('[data-attr="persons-cohorts-tab"]').click()
cy.get('[data-row-key]').first().click()

cy.get('div:not(disabled) > [data-attr="persons-cohorts-tab"]').click()
cy.get('[data-row-key]').first().click()

cy.url().should('include', '/cohorts/')
cy.get('[data-attr="cohort-name"]').should('have.value', 'Test Cohort')
})

// back into cohort
cy.get('tbody').contains('Test Cohort').click()

2 changes: 1 addition & 1 deletion cypress/e2e/insights.cy.ts
@@ -91,7 +91,7 @@ describe('Insights', () => {

it('Shows not found error with invalid short URL', () => {
cy.visit('/i/i_dont_exist')
cy.location('pathname').should('eq', '/insights/i_dont_exist')
cy.location('pathname').should('contain', '/insights/i_dont_exist')
cy.get('.LemonSkeleton').should('exist')
})

2 changes: 1 addition & 1 deletion cypress/e2e/invites.cy.ts
@@ -8,7 +8,7 @@ describe('Invite Signup', () => {
cy.get('[data-attr=menu-item-me]').click()
cy.get('[data-attr=top-menu-item-org-settings]').click()

cy.location('pathname').should('eq', '/settings/organization')
cy.location('pathname').should('contain', '/settings/organization')
cy.get('[id="invites"]').should('exist')
cy.contains('Pending Invites').should('exist')

2 changes: 1 addition & 1 deletion cypress/e2e/person.cy.ts
@@ -1,7 +1,7 @@
describe('Person Visualization Check', () => {
beforeEach(() => {
cy.clickNavMenu('personsmanagement')
cy.location('pathname').should('eq', '/persons')
cy.location('pathname').should('contain', '/persons')
cy.wait(1000)
cy.get('[data-attr=persons-search]').type('deb').should('have.value', 'deb')
cy.contains('[email protected]').should('not.exist')
2 changes: 1 addition & 1 deletion cypress/e2e/systemStatus.cy.ts
@@ -2,7 +2,7 @@ import { urls } from 'scenes/urls'

describe('System Status', () => {
it('System Status loaded', () => {
cy.location('pathname').should('eq', urls.savedInsights())
cy.location('pathname').should('eq', '/project/1/insights')
cy.wait(500)
cy.get('[data-attr=menu-item-me]').click()
cy.get('[data-attr=system-status-badge]').click()
3 changes: 1 addition & 2 deletions cypress/e2e/trends.cy.ts
@@ -43,8 +43,7 @@ describe('Trends', () => {
cy.get('[data-attr=math-selector-0]').click()
cy.get('[data-attr=math-total-0]').should('be.visible')

cy.get('[data-attr=math-node-property-value-0]').click()
cy.get('[data-attr=math-avg-0]').click()
cy.get('[data-attr=math-node-property-value-0]').click('left')
cy.get('[data-attr=math-property-select]').should('exist')
})

3 changes: 3 additions & 0 deletions ee/api/ee_event_definition.py
@@ -10,6 +10,8 @@
Detail,
)

from loginas.utils import is_impersonated_session


class EnterpriseEventDefinitionSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer):
updated_by = UserBasicSerializer(read_only=True)
@@ -98,6 +100,7 @@ def update(self, event_definition: EnterpriseEventDefinition, validated_data):
item_id=str(event_definition.id),
scope="EventDefinition",
activity="changed",
was_impersonated=is_impersonated_session(self.context["request"]),
detail=Detail(name=str(event_definition.name), changes=changes),
)

2 changes: 2 additions & 0 deletions ee/api/ee_property_definition.py
@@ -9,6 +9,7 @@
log_activity,
Detail,
)
from loginas.utils import is_impersonated_session


class EnterprisePropertyDefinitionSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer):
@@ -77,6 +78,7 @@ def update(self, property_definition: EnterprisePropertyDefinition, validated_da
organization_id=None,
team_id=self.context["team_id"],
user=self.context["request"].user,
was_impersonated=is_impersonated_session(self.context["request"]),
item_id=str(property_definition.id),
scope="PropertyDefinition",
activity="changed",
4 changes: 2 additions & 2 deletions ee/api/integration.py
@@ -19,8 +19,8 @@ class PublicIntegrationViewSet(viewsets.GenericViewSet):
queryset = Integration.objects.all()
serializer_class = IntegrationSerializer

authentication_classes = [] # type: ignore
permission_classes = [] # type: ignore
authentication_classes = []
permission_classes = []

@action(methods=["POST"], detail=False, url_path="slack/events")
def slack_events(self, request: Request, *args: Any, **kwargs: Any) -> Response:
13 changes: 7 additions & 6 deletions ee/api/test/__snapshots__/test_instance_settings.ambr
@@ -1,12 +1,13 @@
# serializer version: 1
# name: TestInstanceSettings.test_update_recordings_performance_events_ttl_setting
'
'''
/* user_id:0 request:_snapshot_ */
ALTER TABLE sharded_performance_events ON CLUSTER 'posthog' MODIFY TTL toDate(timestamp) + toIntervalWeek(5)
'
---
'''
# ---
# name: TestInstanceSettings.test_update_recordings_ttl_setting
'
'''
/* user_id:0 request:_snapshot_ */
ALTER TABLE sharded_session_recording_events ON CLUSTER 'posthog' MODIFY TTL toDate(created_at) + toIntervalWeek(5)
'
---
'''
# ---