
Commit

merge master in
zlwaterfield committed Mar 22, 2024
2 parents 24a9c84 + ced62f4 commit 0a701a4
Showing 81 changed files with 2,829 additions and 934 deletions.
10 changes: 1 addition & 9 deletions .github/workflows/ci-backend-depot.yml
@@ -5,15 +5,7 @@
name: Backend CI (depot)

on:
push:
branches:
- master
pull_request:
workflow_dispatch:
inputs:
clickhouseServerVersion:
description: ClickHouse server version. Leave blank for default
type: string

concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@@ -41,7 +33,7 @@ jobs:
changes:
runs-on: depot-ubuntu-latest-4
timeout-minutes: 5
- if: github.repository == 'PostHog/posthog'
+ if: ${{ contains(github.event.pull_request.labels.*.name, 'test-depot') }}
name: Determine need to run backend checks
# Set job outputs to values from filter step
outputs:
2 changes: 1 addition & 1 deletion .github/workflows/ci-e2e-depot.yml
@@ -16,7 +16,7 @@ jobs:
changes:
runs-on: depot-ubuntu-latest-4
timeout-minutes: 5
- if: github.repository == 'PostHog/posthog'
+ if: ${{ contains(github.event.pull_request.labels.*.name, 'test-depot') }}
name: Determine need to run E2E checks
# Set job outputs to values from filter step
outputs:
@@ -191,6 +191,13 @@
"x": 66,
"y": 556
}
},
{
"parentId": 209272202,
"wireframe": {
"id": 52129787123,
"type": "text"
}
}
],
"removes": [
162 changes: 162 additions & 0 deletions ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap
@@ -160,6 +160,147 @@ exports[`replay/transform transform can convert images 1`] = `
]
`;

exports[`replay/transform transform can convert invalid text wireframe 1`] = `
[
{
"data": {
"height": 600,
"href": "",
"width": 300,
},
"timestamp": 1,
"type": 4,
},
{
"data": {
"initialOffset": {
"left": 0,
"top": 0,
},
"node": {
"childNodes": [
{
"id": 2,
"name": "html",
"publicId": "",
"systemId": "",
"type": 1,
},
{
"attributes": {
"data-rrweb-id": 3,
"style": "height: 100vh; width: 100vw;",
},
"childNodes": [
{
"attributes": {
"data-rrweb-id": 4,
},
"childNodes": [
{
"attributes": {
"type": "text/css",
},
"childNodes": [
{
"id": 102,
"textContent": "
body {
margin: unset;
}
input, button, select, textarea {
font: inherit;
margin: 0;
padding: 0;
border: 0;
outline: 0;
background: transparent;
padding-block: 0 !important;
}
.input:focus {
outline: none;
}
img {
border-style: none;
}
",
"type": 3,
},
],
"id": 101,
"tagName": "style",
"type": 2,
},
],
"id": 4,
"tagName": "head",
"type": 2,
},
{
"attributes": {
"data-rrweb-id": 5,
"style": "height: 100vh; width: 100vw;",
},
"childNodes": [
{
"attributes": {
"data-rrweb-id": 12345,
"style": "border-width: 4px;border-radius: 10px;border-color: #ee3ee4;border-style: solid;color: #ee3ee4;width: 100px;height: 30px;position: fixed;left: 11px;top: 12px;overflow:hidden;white-space:normal;",
},
"childNodes": [],
"id": 12345,
"tagName": "div",
"type": 2,
},
{
"attributes": {
"data-render-reason": "a fixed placeholder to contain the keyboard in the correct stacking position",
"data-rrweb-id": 9,
},
"childNodes": [],
"id": 9,
"tagName": "div",
"type": 2,
},
{
"attributes": {
"data-rrweb-id": 7,
},
"childNodes": [],
"id": 7,
"tagName": "div",
"type": 2,
},
{
"attributes": {
"data-rrweb-id": 11,
},
"childNodes": [],
"id": 11,
"tagName": "div",
"type": 2,
},
],
"id": 5,
"tagName": "body",
"type": 2,
},
],
"id": 3,
"tagName": "html",
"type": 2,
},
],
"id": 1,
"type": 0,
},
},
"timestamp": 1,
"type": 2,
},
]
`;

exports[`replay/transform transform can convert navigation bar 1`] = `
[
{
@@ -1453,6 +1594,20 @@ exports[`replay/transform transform incremental mutations de-duplicate the tree
},
"parentId": 52129787,
},
{
"nextId": null,
"node": {
"attributes": {
"data-rrweb-id": 52129787123,
"style": "position: fixed;left: 0px;top: 0px;overflow:hidden;white-space:normal;",
},
"childNodes": [],
"id": 52129787123,
"tagName": "div",
"type": 2,
},
"parentId": 209272202,
},
],
"attributes": [],
"removes": [
@@ -1689,6 +1844,13 @@ AAAAAAAAAAAAAAAAAAAAAAAAgCN/AW0xMqHnNQceAAAAAElFTkSuQmCC
"y": 556,
},
},
{
"parentId": 209272202,
"wireframe": {
"id": 52129787123,
"type": "text",
},
},
],
"removes": [
{
37 changes: 37 additions & 0 deletions ee/frontend/mobile-replay/transform.test.ts
@@ -480,6 +480,43 @@ describe('replay/transform', () => {
expect(converted).toMatchSnapshot()
})

test('can convert invalid text wireframe', () => {
const converted = posthogEEModule.mobileReplay?.transformToWeb([
{
data: {
width: 300,
height: 600,
},
timestamp: 1,
type: 4,
},
{
type: 2,
data: {
wireframes: [
{
id: 12345,
type: 'text',
x: 11,
y: 12,
width: 100,
height: 30,
style: {
color: '#ee3ee4',
borderColor: '#ee3ee4',
borderWidth: '4',
borderRadius: '10px',
},
// text property is missing
},
],
},
timestamp: 1,
},
])
expect(converted).toMatchSnapshot()
})

test('can set background image to base64 png', () => {
const converted = posthogEEModule.mobileReplay?.transformToWeb([
{
26 changes: 17 additions & 9 deletions ee/frontend/mobile-replay/transformer/transformers.ts
@@ -105,6 +105,10 @@ export function _isPositiveInteger(id: unknown): id is number {
return typeof id === 'number' && id > 0 && id % 1 === 0
}

function _isNullish(x: unknown): x is null | undefined {
return x === null || x === undefined
}

function isRemovedNodeMutation(x: addedNodeMutation | removedNodeMutation): x is removedNodeMutation {
return isObject(x) && 'id' in x
}
@@ -218,6 +222,17 @@ function makeTextElement(
// because we might have to style the text, we always wrap it in a div
// and apply styles to that
const id = context.idSequence.next().value

const childNodes = [...children]
if (!_isNullish(wireframe.text)) {
childNodes.unshift({
type: NodeType.Text,
textContent: wireframe.text,
// since the text node is wrapped, we assign it a synthetic id
id,
})
}

return {
result: {
type: NodeType.Element,
@@ -227,15 +242,7 @@
'data-rrweb-id': wireframe.id,
},
id: wireframe.id,
- childNodes: [
- {
- type: NodeType.Text,
- textContent: wireframe.text,
- // since the text node is wrapped, we assign it a synthetic id
- id: id,
- },
- ...children,
- ],
+ childNodes,
},
context,
}
@@ -983,6 +990,7 @@ function isMobileIncrementalSnapshotEvent(x: unknown): x is MobileIncrementalSna

function makeIncrementalAdd(add: MobileNodeMutation, context: ConversionContext): addedNodeMutation[] | null {
const converted = convertWireframe(add.wireframe, context)

if (!converted) {
return null
}
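A note on the makeTextElement change above: with the new _isNullish guard, a text wireframe whose text property is missing (as in the new 'can convert invalid text wireframe' test) still gets its styled wrapper div, just with no synthetic text child. A minimal TypeScript sketch of that behaviour, using simplified stand-in types rather than the real mobile-replay ones:

// Sketch only: SimpleNode and the wireframe parameter below are simplified stand-ins.
type SimpleNode =
    | { type: 'element'; tagName: string; id: number; attributes: Record<string, string | number>; childNodes: SimpleNode[] }
    | { type: 'text'; id: number; textContent: string }

function isNullish(x: unknown): x is null | undefined {
    return x === null || x === undefined
}

// Mirrors the new logic: only prepend a text child when wireframe.text is present.
function sketchTextElement(
    wireframe: { id: number; text?: string },
    children: SimpleNode[],
    nextId: () => number
): SimpleNode {
    const childNodes: SimpleNode[] = [...children]
    if (!isNullish(wireframe.text)) {
        childNodes.unshift({ type: 'text', textContent: wireframe.text, id: nextId() })
    }
    return {
        type: 'element',
        tagName: 'div',
        id: wireframe.id,
        attributes: { 'data-rrweb-id': wireframe.id },
        childNodes,
    }
}

// A wireframe with no text now yields an empty styled div (childNodes: []) instead of a
// text node whose textContent would have been undefined.
let nextSyntheticId = 100
sketchTextElement({ id: 12345 }, [], () => nextSyntheticId++)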
15 changes: 12 additions & 3 deletions ee/session_recordings/ai/error_clustering.py
@@ -6,6 +6,7 @@
import pandas as pd
import numpy as np
from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
from datetime import date

CLUSTER_REPLAY_ERRORS_TIMING = Histogram(
"posthog_session_recordings_cluster_replay_errors",
@@ -30,7 +31,7 @@ def error_clustering(team: Team, user: User):
if not results:
return []

- df = pd.DataFrame(results, columns=["session_id", "input", "embeddings"])
+ df = pd.DataFrame(results, columns=["session_id", "error", "embeddings", "timestamp"])

df["cluster"] = cluster_embeddings(df["embeddings"].tolist())

@@ -42,7 +43,7 @@ def fetch_error_embeddings(team_id: int):
def fetch_error_embeddings(team_id: int):
query = """
SELECT
- session_id, input, embeddings
+ session_id, input, embeddings, generation_timestamp
FROM
session_replay_embeddings
WHERE
@@ -76,13 +77,21 @@ def construct_response(df: pd.DataFrame, team: Team, user: User):
clusters = []
for cluster, rows in df.groupby("cluster"):
session_ids = rows["session_id"].unique()
- sample = rows.sample(n=1)[["session_id", "input"]].rename(columns={"input": "error"}).to_dict("records")[0]
+ sample = rows.sample(n=1)[["session_id", "error"]].to_dict("records")[0]

date_series = (
df.groupby([df["timestamp"].dt.date])
.size()
.reindex(pd.date_range(end=date.today(), periods=7), fill_value=0)
)
sparkline = dict(zip(date_series.index.astype(str), date_series))
clusters.append(
{
"cluster": cluster,
"sample": sample.get("error"),
"session_ids": np.random.choice(session_ids, size=DBSCAN_MIN_SAMPLES - 1),
"occurrences": rows.size,
"sparkline": sparkline,
"unique_sessions": len(session_ids),
"viewed": len(np.intersect1d(session_ids, viewed_session_ids, assume_unique=True)),
}
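For reference, a rough TypeScript shape for the per-cluster payload that construct_response now builds, read off the Python dict in the diff above; the field names and types here are an inference from this commit, not a documented API:

// Inferred sketch of one entry in the error-clustering response; not the canonical type.
interface ErrorClusterSketch {
    cluster: number
    sample: string // one representative error message sampled from the cluster
    session_ids: string[] // a small sample of session ids drawn from the cluster
    occurrences: number
    sparkline: Record<string, number> // date string -> count, covering roughly the last 7 days
    unique_sessions: number
    viewed: number // how many of the unique sessions the requesting user has already viewed
}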
Binary file modified frontend/__snapshots__/scenes-other-signup--cloud--light.png
Binary file modified frontend/__snapshots__/scenes-other-signup--self-hosted--light.png
8 changes: 7 additions & 1 deletion frontend/src/queries/query.ts
@@ -220,7 +220,13 @@ export async function query<N extends DataNode = DataNode>(
(hogQLInsightsFunnelsFlagEnabled && isFunnelsQuery(queryNode))
) {
if (hogQLInsightsLiveCompareEnabled) {
- const legacyFunction = legacyUrl ? fetchLegacyUrl : fetchLegacyInsights
+ const legacyFunction = (): any => {
+     try {
+         return legacyUrl ? fetchLegacyUrl : fetchLegacyInsights
+     } catch (e) {
+         console.error('Error fetching legacy insights', e)
+     }
+ }
let legacyResponse: any
;[response, legacyResponse] = await Promise.all([
executeQuery(queryNode, methodOptions, refresh, queryId),

0 comments on commit 0a701a4
