Commit
Merge remote-tracking branch 'origin/master' into xvello/replay-overflow-capture
Showing 81 changed files with 6,033 additions and 1,447 deletions.
@@ -0,0 +1,278 @@
#
# This workflow runs CI E2E tests with Cypress.
#
# It relies on the container image built by 'container-images-ci.yml'.
#
name: E2E CI (depot)

on:
    pull_request:

concurrency:
    group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
    cancel-in-progress: true

jobs:
    changes:
        runs-on: depot-ubuntu-latest
        timeout-minutes: 5
        if: github.repository == 'PostHog/posthog'
        name: Determine need to run E2E checks
        # Set job outputs to values from filter step
        outputs:
            shouldTriggerCypress: ${{ steps.changes.outputs.shouldTriggerCypress }}
        steps:
            # For pull requests it's not necessary to check out the code
            - uses: dorny/paths-filter@v2
              id: changes
              with:
                  filters: |
                      shouldTriggerCypress:
                          # Avoid running E2E tests for irrelevant changes
                          # NOTE: we are at risk of missing a dependency here. We could make
                          # the dependencies more clear if we separated the backend/frontend
                          # code completely
                          - 'ee/**'
                          - 'posthog/**'
                          - 'hogvm/**'
                          - 'bin/*'
                          - frontend/**/*
                          - requirements.txt
                          - requirements-dev.txt
                          - package.json
                          - pnpm-lock.yaml
                          # Make sure we run if someone is explicitly changing the workflow
                          - .github/workflows/ci-e2e.yml
                          - .github/actions/build-n-cache-image/action.yml
                          # We use docker compose for tests, make sure we rerun on
                          # changes to docker-compose.dev.yml e.g. dependency
                          # version changes
                          - docker-compose.dev.yml
                          - Dockerfile
                          - cypress/**

    # Job that lists and chunks spec file names and caches node modules
    chunks:
        needs: changes
        name: Cypress preparation
        runs-on: depot-ubuntu-latest
        timeout-minutes: 5
        outputs:
            chunks: ${{ steps.chunk.outputs.chunks }}

        steps:
            - name: Check out
              uses: actions/checkout@v3

            - name: Group spec files into chunks of three
              id: chunk
              run: echo "chunks=$(ls cypress/e2e/* | jq --slurp --raw-input -c 'split("\n")[:-1] | _nwise(2) | join("\n")' | jq --slurp -c .)" >> $GITHUB_OUTPUT

    container:
        name: Build and cache container image
        runs-on: depot-ubuntu-latest
        timeout-minutes: 60
        needs: [changes]
        permissions:
            contents: read
            id-token: write # allow issuing OIDC tokens for this workflow run
        outputs:
            tag: ${{ steps.build.outputs.tag }}
            build-id: ${{ steps.build.outputs.build-id }}
        steps:
            - name: Checkout
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/checkout@v3
            - name: Build the Docker image with Depot
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              # Build the container image in preparation for the E2E tests
              uses: ./.github/actions/build-n-cache-image
              id: build
              with:
                  save: true
                  actions-id-token-request-url: ${{ env.ACTIONS_ID_TOKEN_REQUEST_URL }}

    cypress:
        name: Cypress E2E tests (${{ strategy.job-index }})
        runs-on: depot-ubuntu-latest
        timeout-minutes: 60
        needs: [chunks, changes, container]
        permissions:
            id-token: write # allow issuing OIDC tokens for this workflow run

        strategy:
            # when one test fails, DO NOT cancel the other
            # containers, as there may be other spec failures
            # we want to know about.
            fail-fast: false
            matrix:
                chunk: ${{ fromJson(needs.chunks.outputs.chunks) }}

        steps:
            - name: Checkout
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/checkout@v3

            - name: Install pnpm
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: pnpm/action-setup@v2
              with:
                  version: 8.x.x

            - name: Set up Node.js
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/setup-node@v4
              with:
                  node-version: 18.12.1

            - name: Get pnpm cache directory path
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              id: pnpm-cache-dir
              run: echo "PNPM_STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

            - name: Get cypress cache directory path
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              id: cypress-cache-dir
              run: echo "CYPRESS_BIN_PATH=$(npx cypress cache path)" >> $GITHUB_OUTPUT

            - uses: actions/cache@v4
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              id: pnpm-cache
              with:
                  path: |
                      ${{ steps.pnpm-cache-dir.outputs.PNPM_STORE_PATH }}
                      ${{ steps.cypress-cache-dir.outputs.CYPRESS_BIN_PATH }}
                  key: ${{ runner.os }}-pnpm-cypress-${{ hashFiles('**/pnpm-lock.yaml') }}
                  restore-keys: |
                      ${{ runner.os }}-pnpm-cypress-
            - name: Install package.json dependencies with pnpm
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: pnpm install --frozen-lockfile

            - name: Stop/Start stack with Docker Compose
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: |
                  docker compose -f docker-compose.dev.yml down
                  docker compose -f docker-compose.dev.yml up -d
            - name: Wait for ClickHouse
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: ./bin/check_kafka_clickhouse_up

            - name: Install Depot CLI
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: depot/setup-action@v1

            - name: Get Docker image cached in Depot
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: depot/pull-action@v1
              with:
                  build-id: ${{ needs.container.outputs.build-id }}
                  tags: ${{ needs.container.outputs.tag }}

            - name: Write .env # This step intentionally has no if, so that GH always considers the action as having run
              run: |
                  cat <<EOT >> .env
                  SECRET_KEY=6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da
                  REDIS_URL=redis://localhost
                  DATABASE_URL=postgres://posthog:posthog@localhost:5432/posthog
                  KAFKA_HOSTS=kafka:9092
                  DISABLE_SECURE_SSL_REDIRECT=1
                  SECURE_COOKIES=0
                  OPT_OUT_CAPTURE=0
                  E2E_TESTING=1
                  SKIP_SERVICE_VERSION_REQUIREMENTS=1
                  EMAIL_HOST=email.test.posthog.net
                  SITE_URL=http://localhost:8000
                  NO_RESTART_LOOP=1
                  CLICKHOUSE_SECURE=0
                  OBJECT_STORAGE_ENABLED=1
                  OBJECT_STORAGE_ENDPOINT=http://localhost:19000
                  OBJECT_STORAGE_ACCESS_KEY_ID=object_storage_root_user
                  OBJECT_STORAGE_SECRET_ACCESS_KEY=object_storage_root_password
                  GITHUB_ACTION_RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
                  CELERY_METRICS_PORT=8999
                  EOT
            - name: Start PostHog
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              run: |
                  mkdir -p /tmp/logs
                  echo "Starting PostHog using the container image ${{ needs.container.outputs.tag }}"
                  DOCKER_RUN="docker run --rm --network host --add-host kafka:127.0.0.1 --env-file .env ${{ needs.container.outputs.tag }}"
                  $DOCKER_RUN ./bin/migrate
                  $DOCKER_RUN python manage.py setup_dev
                  # only starts the plugin server so that the "wait for PostHog" step passes
                  $DOCKER_RUN ./bin/docker-worker &> /tmp/logs/worker.txt &
                  $DOCKER_RUN ./bin/docker-server &> /tmp/logs/server.txt &
            - name: Wait for PostHog
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              # this action might be abandoned - but v1 doesn't point to latest of v1 (which it should)
              # so pointing to v1.1.0 to remove warnings about node version with v1
              # todo check https://github.com/iFaxity/wait-on-action/releases for new releases
              uses: iFaxity/[email protected]
              timeout-minutes: 3
              with:
                  verbose: true
                  log: true
                  resource: http://localhost:8000

            - name: Cypress run
              # these are required checks, so we can't skip entire sections
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: cypress-io/github-action@v6
              with:
                  config-file: cypress.e2e.config.ts
                  config: retries=2
                  spec: ${{ matrix.chunk }}
                  install: false
              env:
                  E2E_TESTING: 1
                  OPT_OUT_CAPTURE: 0
                  GITHUB_ACTION_RUN_URL: '${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'

            - name: Archive test screenshots
              uses: actions/upload-artifact@v3
              with:
                  name: screenshots
                  path: cypress/screenshots
              if: ${{ failure() }}

            - name: Archive test downloads
              uses: actions/upload-artifact@v3
              with:
                  name: downloads
                  path: cypress/downloads
              if: ${{ failure() }}

            - name: Archive test videos
              uses: actions/upload-artifact@v3
              with:
                  name: videos
                  path: cypress/videos
              if: ${{ failure() }}

            - name: Archive accessibility violations
              if: needs.changes.outputs.shouldTriggerCypress == 'true'
              uses: actions/upload-artifact@v3
              with:
                  name: accessibility-violations
                  path: '**/a11y/'
                  if-no-files-found: 'ignore'

            - name: Show logs on failure
              # use artefact here, as I think the output will be too large for display in an action
              uses: actions/upload-artifact@v3
              with:
                  name: logs-${{ strategy.job-index }}
                  path: /tmp/logs
              if: ${{ failure() }}
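
The "Group spec files into chunks of three" step above is what feeds the `cypress` job's matrix: it shells out to `jq` to turn the `cypress/e2e/*` listing into newline-joined batches (note that `_nwise(2)` actually batches the specs in pairs, despite the step name), and each batch becomes one matrix entry passed straight to the Cypress action's `spec` input. As a rough illustration only, the same grouping logic could be written in TypeScript; the helper name and the use of Node's `fs` module below are assumptions for the sketch, not part of the commit.

```typescript
// Minimal sketch (not part of the commit) of what the jq one-liner computes:
// list the spec files, batch them, and join each batch with newlines so a single
// matrix entry can be handed to the Cypress action's `spec` input as-is.
import { readdirSync } from 'fs'

function chunkSpecs(dir: string, size: number): string[] {
    const specs = readdirSync(dir).map((file) => `${dir}/${file}`)
    const chunks: string[] = []
    for (let i = 0; i < specs.length; i += size) {
        chunks.push(specs.slice(i, i + size).join('\n'))
    }
    return chunks
}

// e.g. chunkSpecs('cypress/e2e', 2) → ['cypress/e2e/a.cy.ts\ncypress/e2e/b.cy.ts', ...]
```

Because `fail-fast` is disabled on the matrix, a failing chunk does not cancel the others, so a single run still reports every failing spec group.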
@@ -0,0 +1,57 @@
import { decideResponse } from '../fixtures/api/decide'
import * as fflate from 'fflate'

// Mainly testing to make sure events are fired as expected

describe('Billing Upgrade CTA', () => {
    beforeEach(() => {
        cy.intercept('**/decide/*', (req) =>
            req.reply(
                decideResponse({
                    'billing-upgrade-language': 'credit_card',
                })
            )
        )

        cy.intercept('/api/billing-v2/', { fixture: 'api/billing-v2/billing-v2-unsubscribed.json' })
    })

    it('Check that events are being sent on each page visit', () => {
        cy.visit('/organization/billing')
        cy.get('[data-attr=product_analytics-upgrade-cta] .LemonButton__content').should('have.text', 'Add credit card')
        cy.window().then((win) => {
            const events = (win as any)._cypress_posthog_captures

            const matchingEvents = events.filter((event) => event.event === 'billing CTA shown')
            // One for each product card
            expect(matchingEvents.length).to.equal(4)
        })

        // Mock billing response with subscription
        cy.intercept('/api/billing-v2/', { fixture: 'api/billing-v2/billing-v2.json' })
        cy.reload()

        cy.get('[data-attr=session_replay-upgrade-cta] .LemonButton__content').should('have.text', 'Add paid plan')
        cy.intercept('POST', '**/e/?compression=gzip-js*').as('capture3')
        cy.window().then((win) => {
            const events = (win as any)._cypress_posthog_captures

            const matchingEvents = events.filter((event) => event.event === 'billing CTA shown')
            expect(matchingEvents.length).to.equal(3)
        })

        cy.intercept('/api/billing-v2/', { fixture: 'api/billing-v2/billing-v2-unsubscribed.json' })
        // Navigate to the onboarding billing step
        cy.visit('/products')
        cy.get('[data-attr=product_analytics-onboarding-card]').click()
        cy.get('[data-attr=onboarding-breadcrumbs] > :nth-child(5)').click()

        cy.intercept('POST', '**/e/?compression=gzip-js*').as('capture4')
        cy.window().then((win) => {
            const events = (win as any)._cypress_posthog_captures

            const matchingEvents = events.filter((event) => event.event === 'billing CTA shown')
            expect(matchingEvents.length).to.equal(3)
        })
    })
})
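
The spec above leans on two pieces of shared test infrastructure: the `decideResponse` helper imported from the fixtures directory, which stubs the `/decide/` endpoint so posthog-js sees the `billing-upgrade-language` flag, and a `window._cypress_posthog_captures` array that the repo's Cypress support code is presumably filling with every capture call, which is what each `cy.window()` block filters for `billing CTA shown` events. As a hypothetical sketch only (the real fixture lives at `cypress/fixtures/api/decide` and may set more fields), `decideResponse` could look roughly like this:

```typescript
// Hypothetical sketch of a decideResponse fixture helper: merge the feature flag
// overrides a test needs into a canned /decide/ payload. The actual helper in the
// repo may differ and set additional fields.
export const decideResponse = (featureFlags: Record<string, string | boolean>): object => ({
    featureFlags,
    sessionRecording: false, // keep session recording out of the way of the assertions
})
```

With `billing-upgrade-language` stubbed to `credit_card`, the first assertion expects the product analytics CTA to read "Add credit card", and the expected counts (4, then 3, then 3) track how many "billing CTA shown" captures each page visit should produce.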