diff --git a/.vscode/launch.json b/.vscode/launch.json index 51101a38d3d73..5ec947cc0ee99 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -71,6 +71,7 @@ "SKIP_SERVICE_VERSION_REQUIREMENTS": "1", "PRINT_SQL": "1", "REPLAY_EVENTS_NEW_CONSUMER_RATIO": "1.0", + "AUTO_LOGIN": "True", "BILLING_SERVICE_URL": "https://billing.dev.posthog.dev" }, "console": "integratedTerminal", diff --git a/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap b/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap index 13717b20f47e5..849d883b906cf 100644 --- a/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap +++ b/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap @@ -27,7 +27,9 @@ exports[`replay/transform transform can convert images 1`] = ` "type": 1, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -37,7 +39,9 @@ exports[`replay/transform transform can convert images 1`] = ` "type": 2, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -122,7 +126,9 @@ exports[`replay/transform transform can convert rect with text 1`] = ` "type": 1, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -132,7 +138,9 @@ exports[`replay/transform transform can convert rect with text 1`] = ` "type": 2, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -232,7 +240,9 @@ exports[`replay/transform transform can ignore unknown wireframe types 1`] = ` "type": 1, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -242,7 +252,9 @@ exports[`replay/transform transform can ignore unknown wireframe types 1`] = ` "type": 2, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -311,7 +323,9 @@ exports[`replay/transform transform can process unknown types without error 1`] "type": 1, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -321,7 +335,9 @@ exports[`replay/transform transform can process unknown types without error 1`] "type": 2, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -414,7 +430,9 @@ exports[`replay/transform transform child wireframes are processed 1`] = ` "type": 1, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, @@ -424,7 +442,9 @@ exports[`replay/transform transform child wireframes are processed 1`] = ` "type": 2, }, { - "attributes": {}, + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, "childNodes": [ { "attributes": {}, diff --git a/ee/frontend/mobile-replay/transformers.ts b/ee/frontend/mobile-replay/transformers.ts index 97f426912d0cf..acdd9f723238f 100644 --- a/ee/frontend/mobile-replay/transformers.ts +++ b/ee/frontend/mobile-replay/transformers.ts @@ -12,7 +12,7 @@ import { wireframeRectangle, wireframeText, } from './mobile.types' -import { makePositionStyles, makeStylesString, makeSvgBorder } from './wireframeStyle' +import { makeBodyStyles, makeHTMLStyles, makePositionStyles, makeStylesString, makeSvgBorder } from './wireframeStyle' 
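The two helpers imported above are defined in the wireframeStyle.ts hunk further down; the snapshot changes at the top of this diff show the effect: the synthetic html and body wrappers now carry full-viewport inline styles. A minimal sketch of the idea (assuming rrweb-snapshot's serialized-node shape; the ids are stand-ins for the constants used in transformers.ts):

import { NodeType } from 'rrweb-snapshot'

import { makeBodyStyles, makeHTMLStyles } from './wireframeStyle'

// Mobile wireframes are positioned in absolute device pixels, so the synthetic
// document must span the viewport; otherwise children would be laid out
// against a zero-height html/body and the replayed screen would collapse.
export const htmlNode = {
    type: NodeType.Element,
    tagName: 'html',
    attributes: { style: makeHTMLStyles() }, // 'height: 100vh; width: 100vw;'
    id: 3,
    childNodes: [
        {
            type: NodeType.Element,
            tagName: 'body',
            attributes: { style: makeBodyStyles() },
            id: 5, // stand-in for BODY_ID
            childNodes: [],
        },
    ],
}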
const PLACEHOLDER_IMAGE = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAZAAAAGQCAIAAAAP3aGbAAAViUlEQVR4nO3d0XLjOA5GYfTWPuO8Y+Yp98JTGa8lSyJBEvjBcy5nA4mbbn/lKDT7z8/PjxERKfSf6AUQET0NsIhIJsAiIpkAi4hkAiwikum/z7/0r7/+uvhf//777+5LeWaJyFPUK7dp9reGd1jXN2hy5ONSnlki6k5LK2v9kRCziMokodXHbPMzLMwiKpCiVtb30B2ziKQT1cq6f0uIWUSi6Wplnm0Nt2Y9p+doVvcsEV0krZWdgjUQC8+lMItobIpafcyev8PCLKJiJRHHOfv1R0LMIipTHnGcs1fPsDCLqECpxHHO3jx0xywi6bKJ45y9/y0hZhGJllAc5+wJWFOxwCyiNeUUxzl7/g4Ls4ikSyuOc/brj4SYRSRaZnGcs1fPsDCLSK7k4jhnbx66O7EY9ZFDzCJ6Un5xnLP3vyU8ujPw88mYRTQqCXGcJ/k9/fAzZhFlTkIc/7mj/UckYxZRkiTEWX2m+/EemEUUnoQ4MWe6H++EWUSBSYgTeab78X6KZsEWFUhRHM+sjToieeAZoWvMul0GUfIUxXFqZWOPSA4xa97bPaK0KYrj18qGH5G83qzjpTCLaqcozhCtbMYRyZhFNC9FcQbOTjkiGbOIZpREjcDZWUckYxbR2PKoETg78YjkJGaxRYsKlEqNwNm5RyRnMGvgMohCyqZG4Oz0I5IHmpVhGUSLS6hG4OyKI5IHYpFkGURryqlG4OyiI5Ixi6i1tGoEzq47IhmziJ6XWY3A2blHJA+czbkMohklVyNw9tERydc3mzeb4WPSmEWLy69G4OyjDz9HmXU6vn4ZmEXLklAjata6z3Q/3njS7On4+mVgFi1IQo1Arcxzpvvx9tez8z43g1lUIAk1YrUy55nux0U0jWMW0SsJNcK1sr4z3Qe6g1lEEmokmR12RHLI7EeYRXIpqhE1awOPSE5ypgJmkVCKagRqZR6wjrdPcqYCZpFEimrEamXOM92Pi8AsoicpqhGulfnPdD8uxfnznZxZsEWtKaqRQSsbcqb76YL2Met2GUTvKaqRRCu7PdM9yh3MopIpqpFHK8t8RDJmUbEU1UillT38LeFWZs3bJkY7p6hGNq3s+baGfcw6XgqzyJmiGgm1sqZ9WJg1ZBm0W4pq5NTKWjeObm4WW7SoNUU10mplHTvddzZr4DJohxTVyKyV9X00B7OGLINqp6hGcq2s+7OEEmZdfAcxi6amqEZ+rczz4ef8Zp2Ov18nw9s9qpeiGhJamfO0BnWzli0Ds/ZJUQ0VrcwJVtMKMOvhV5JuimoIaWV+sJrWMfaZFGZRqhTV0NLKhoBlce+VMIuSpKiGnFY2CizDLMzaOEU1FLWygWBZ3M93A/egYxa1pqiGqFY2FiwLfa+EWbQ+RTV0tbLhYBlmYdY2KaohrZXNAMswC7M2SFENda1sElgW+kwKs2h2imoU0MrmgWWh75Uwi+alqEYNrWwqWIZZmFUuRTXKaGWzwbLQn+8wi8amqEYlrWwBWK8w6+EsbKVNUY1iWtkysAyzHodZCVNUo55WthIsw6zHYVaqFNUoqZUtBssw63GYlSRFNapqZevBss3MGrstlhanqEZhrSwELNvJrOOlMEslRTVqa2VRYJmIWRd/hJhVO0U1ymtlgWCZglmn40Ou49yeRlNTVGMHrSwWLNvbrKZxzFqWohqbaGXhYBlmYVamFNXYRyvLAJbFPZPCLHpPUY2ttLIkYFnce6WBZmX49SV1p6jGblpZHrBM36ymS2FWqhTV2FArSwWWhZqltRUeswamqMaeWlk2sCz0OTpmbZiiGttqZQnBstDn6Ji1VYpq7KyV5QTLQp9JYdYmKaqxuVaWFiwLfSaFWeVTVAOtLDNYFvpeCbMKp6gGWr1KDZZhFmaNTlENtPotO1iGWZg1LkU10Oo9AbAMszBrRIpqoNVHGmBZ6HN0zCqQohpodUwGrFeY5V/DhimqgVaniYFlmPV4FrZeKaqBVt/SA8sw63GYpagGWl0kCZaJmHXxtwezFqSoBlpdpwqWKZh1Op5qDYVTVAOtbhMGyzBrxBpKpqgGWj1JGyzDrBFrKJaiGmj1MHmwbDOzPq6DWR8pqoFWz6sAlsU9R19v1vE6zi21lVJUA62aKgKWxb1XymBW03hVsxTVQKvW6oBloWYN8QKzulNUA606KgWWhT6TwqyoFNVAq76qgWWYtZlZimqgVXcFwTLM2sYsRTXQylNNsCz0d3+jzIrarqGSohpo5awsWBb6HH2UFxnWkDNFNdDKX2WwLPS9EmbNS1ENtBpScbAMs8qZpagGWo2qPliGWYXMUlQDrQa2BViGWSXMUlQDrca2C1iGWeJmKaqBVsPbCCwL/d0fZnlSVAOtZrQXWK8wy7+GlSmqgVaT2hEsEzFryFZ4dbMU1UCreW0KlimYdTq+eA2xZimqgVZT2xcsw6zHsyFsKaqBVrPbGizDrBFrmJGiGmi1oN3BMswasYaxKaqBVmsCLDPMGrGGUSmqgVbLAqx/wiz/GvwpqoFWKwOsf4t6hu3cztq3hu7Z6zV4UlQDrRYHWP9X4Gt+c7MU1UCr9QHWZ5ubFbJFS1ENtAoJsE7ayqzwbaWKaqBVVIB13j5mzVvD8PEkaqBVYID1Nczyr2HgYBI10Co2wLrK+bu/5zfa0CxFNdAqPMC6yfOadz7DLmyWohpolSHAui/wNV/SLEU10CpJgPUozPKv4eEXXNx3t1k6BlhPK2BW+BoU1UCrVAFWQ+pmxa5BUQ20yhZgtYVZfbOKaqBVwgCrucCfrZxb2Nev4dtFLkqiBlrlDLA6y2/W6fjiNVy7ebuq3WbpNsDqD7PGlkQNtMocYLnCrFElUQOtkgdY3jDLXxI10Cp/gDUgzPKURA20kgiwxoRZfSVRA61UAqxhBZq1+OM7o8xKogZaCQVYIwvcKyBnVhI10EorwBpc4M93sWY1vXo/vl5RHLQKCbDGt6FZrVq9TymKg1ZRAdaUtjKrT6vbNRxLIg5aBQZYs9rHrDUlEQetYgOsiWHWqJKIg1bhAdbcMMtfEnHQKkOANT3P8yDnHqsCZiURB62SBFgrCnyvFG5Wk7nX90UrAqxF7WnW71THeBJx0CpVgLWu3cza/DOGNCPAWpq0Wcv2sntm0ap2gLU6UbOW7WX3zKJV+QArIBWznJ+8WTyLVjsEWDEF7ldo5dIjzrLQapMAKzIJs/KHVvsEWMFhljO02irAig+zukOr3QKsFEmb5dzLrr4PnlYGWFkSNWvIXnbdffC0OMBKVKBZzk/exM6i1T4BVq7W77HqGD/9+t32wVNIgJWulXusrq/WfammKcV98BQVYGVsvVkdamy7D54C+/Pz8/Pxn/jjSdLzF9Xxj0xie/qy0Eq69z8v3mHlLXy/Qo3QqlKAlTrMcoZWxQKs7GFWd2hVL8ASKGSP1agK7IOnPAGWRs7zQsN3eOrug6dUAZZMi/eFvk
9p7YP/CK0qBVhKefaFto5/fL3QPnjr3SOGVvkDLLG09rJ7Zj374K39rFS0kgiw9JLYy/7rRZSSTaGVSux0V4297KPiL3zy2OleIV5mQ+LbqBVgCceLzRnfQLkAS7vwfaGxJdk5QcsCrArpvvb8e9nX7xGjwACrSOF72T2zzr3szj1iJBRg1SlqL3vfu6SovewkHWCVKnAvu2d2/R6x1hFKEmCVauUuzZB/ReLjIp7ZvkGKDbDqtP5c9sX/isTYMEsxdroXiZdfX/xtzx873auFVt3xrdMKsOTjJeeMb6BQgKVd+Rfbmj1inuf3tDLAEq78a2zIHrHnI+W/nwUCLNWWnU6X4V+RWLa/DLOSB1iSdWjl/Myd0L8icXu16zArc4ClV/d7q8C97J7ZIXvZMatGgCWW5ydBlb3so/bBf/wXzCoQYCm1Uqv3qZBZ/2/uMKtegCXTeq1iZ4fk/Keww9dPHwGWRlFaFcj5T2Fv/t3LFmAJhFbOMKtMgJU9tLrIiQ5myQVYqUOri1r3l2FWgQArb8u0CtzLvngf/OlvHjFLKMBK2kqtTi+ycta/o9W5XwGzVAKsjK3XqqPYvey317wOs0QDrHRFaSW0l/3bCGaVD7BypaLV+1TIme6YtWeAlSgtrWJnL8Yxq3CAlSVFrcLDrN0CrBShVXcXZnncwaycAVZ8aOXs4v8IZhULsIJDq4uGkIFZlQKsyCS0it3Ljln0HmCFpaLV6UWWzTaNY1b5ACsmLa068sx6roZZtQOsgBS1Ct/LrmUWbE0KsFYnIU74PvjTwSFkrDHrdJz8AdbSJMRJsg/+dHwIGZilG2CtS0KcbLsfMIveA6xFSYiTTatXmEW/AdaKJMTJqdUrzKJXgDU9CXGitAonI3wB1BRgzU1CnMB98Me7X5TTLLZorQywJqYijn82cC/7EDKcV8CsZQHWrLTE8c86CycjfAH0JMCakqI4i595Dd/Ljlk7BFjjk1AjwxN6zKLWAGtwEmpk0OrbeDgZzg8AYdbUAGtkEmrk0erbRcLJcP7yEbPmBVjDklAjm1bfLhVORvgC6DTAGpOEGjm1+nbBcDLCF0DHAGtAEmokmb0Is+g2wPKmqMbiWczyLIDeAyxXQmrEzmKWZwH0G2D1J6dG4Ozpf/lWTrPWrB+zrgOszhTVCH9uFfiZwSFkhC+AAKsnRTXCtXryv97eNJyM8AVsHmA1p6hGEq2ef83FrZeRMePjO5jlDLDaUlQjlVatXxlo1re7L1sAZh0DrIYU1UioVevXD//8jZZZsPUeYD1NUY20WnVMbWvWxQI2DLAepahGcq06ZjGLAOs+RTUktOq4AmZtHmDdpKiGkFYd18GsnQOsqxTVkNOq42rDzVr58R3M8gRYX1NUQ1SrjmsO3y6AWRIB1nmKakhr9XvlwC1O4WaxRes2wDpJUY0CWnXcophZQxZQO8D6TFGNSlq13giztgqw/i9FNepp1Xq74VvJMSttgPVvimpU1ar1psMfY2NWzgDrnxTVqK1V660xa4cAy0xTjR20al0AZpUPsCTV2EerV/uYNWMBldodLEU1dtPq1SZmTVpAmbYGS1GNPbV6hVmeBdRoX7AU1dhZq1eBW8kxK0ObgqWoBlq9CnyM7Xwa1XQF/wKe30ioHcFSVAOt3pMwy3935wJKmrUdWIpqoNUxzOq+u3R7gaWoBlp9S92s2Adqom0ElqIaaHWdtFnLFlDJrF3AUlQDrZ4UuC0Ts9a3BViKaqDV8wIfCWmZVYCt+mApqoFWrcWatfjjO92zFwtQqThYimqgVV+xv3rDrDVVBktRDbTyhFnOBeSvLFiKaqCVP8xyLiB5NcFSVAOtRoVZzgVkriBYimqg1dhit2Vi1ryqgaWoBlpNKnC7QLhZVbdolQJLUQ20mlrgW5VYs4YsIGF1wFJUA60WhFmeBWSrCFiKaqDVsjDLs4BUVQBLUQ20WhxmeRaQJ3mwFNVAq5Awy7OAJGmDpagGWgWGWZ4FZEgYLEU10Cq8wO0Czq0SrVcYPp4hVbAU1UCrJAW+7P27Ojc3SxIsRTXQKlX7mDXjh9PA9MBSVAOtEraJWc4FZDNLDCxFNdAqbZjVffeolMBSVAOtkodZ3XcPSQYsRTXQSiLM6r77+jTAUlQDrYQKfIzt3CrRdPdvC/DcfXECYCmqgVZyxb7sMeth2cFSVAOtRFM3a9mm1odfOaPUYCmqgVbSSZvlvIKEWXnBUlQDrQqEWQ9nQ9hKCpaiGmhVJsxyLmBeGcFSVAOtioVZzgVMKh1YimqgVcliH2Nj1mm5wFJUA61qF/hICLOOJQJLUQ202qHAlz1mfZQFLEU10GqfMMuzgIGlAEtRDbTarZ3NyrNFKx4sRTXQas+2NWvIAoYUDJaiGmi1cypmTfr0j2d8SJFgKaqBViRh1ul40xVymhUGlqIaaEWvAh8JbW5WDFiKaqAVvRf4st/ZrACwFNVAKzqGWd3j3a0GS1ENtKJvYVb3eF9LwVJUA63oOszqHu9oHViKaqAVPSnWrJV3n7FBrKlFYCmqgVb0vNi3KsvGT6+w0qwVYCmqgVbUWuxblU3Mmg6WohpoRX3Fvux3MGsuWIpqoBV5wqzuuz9pIliKaqAV+cOs7rvfNgssRTXQikaFWd13v24KWIpqoBWNTd2sNRvEWs0aD5aiGmhFM5I2y3kF5+9MvzUYLEU10IrmtfKtyox3OrEb8Y+NBEtRDbSiBQW+7IuZNQwsRTXQipaFWZ4F/DYGLEU10IoWh1meBbwaAJaiGmhFIWGWZwHmB0tRDbSiwFTMGv6bxyFmucBSVAOtKDwJs07Hm65wenfnG71+sBTVQCtKUuDLPtYs53gnWIpqoBWlKvBlr2tWD1iKaqAVJQyzWmsGS1ENtKK0YVZTbWApqoFWlDzMel4DWIpqoBVJFGtW4N2bxu05WIpqoBUJpaJG7PgjsBTVQCuSS0WNwPF7sBTVQCsSzbkZXQIdz/gNWIpqoBVJF/uZweTjV2ApqoFWVKB9zGpd/FewFNVAKyrTJmadXuFi/BwsRTXQioqFWcdOwFJUA62oZJj1ket4mSRqoBUVbqVZgeSdXuE43g9WEjXQisrn3Iyu8jbtyd07wUqiBlrRPm1r1ns9YCVRA61ot6TNcl7hVTNYSdRAK9ozFbNmPMW3VrCSqIFWtHMSZp2ON13hdLwBrCRqoBXRtmb9+fn5eTKZRA20Ivrt+V/p07/PnnH/q6npCr89eoeVRA20InovcF/oyvdZ792DlUQNtCI6tptZN2AlUQOtiL61lVlXYCVRA62Iros1a+W20q9gJVEDrYiepPI5Z+f4OVhJ1EArouepoOMZPwEriRpoRdSaBDqecc50RysqVX50POOc6U5UreToeMY5052oYM4PKqc1izPdiWq28mwGz92bxjnTnahs9cza/Ux3otoVM2vrM92JdmilWbPJ2/dMd6J9WmaWc/z2Fb3pme5Eu1XDrB3PdCfaM+cHlWN/tHy13ZnuRJsnYdbpuO12pjsRmbJZT8FKIg5aEQ1J1KxdznQnoo+cZi0j770tznQno
tMC94X2vUjrn+lORBdpmVX8THciui3WrKbXbOUz3YnoYdk+M/itsme6E1FTEmbVPNOdiDrKb1bBM92JqLvkZlU7052InGU2q9SZ7kQ0pLRm1TnTnYgGltOsPz8/Pw+vexpaERXO+ap8Pn76iu7/8POTe6AVUbFiz+07jsuf6U5EU0tllvaZ7kS0oFiz3hM+052IlpXhfGTTPdOdiBYXfj6yiZ7pTkQhhZuld6Y7EQW28qxR+TPdiShDUWYpnelORHniTPerWSLKFme6n88SUc5Wno9sEme6E1Hmln1M2vKf6U5E+Vtmlve0BiKiZblOayAiWhlgEZFMgEVEMv0PU/uJezostYUAAAAASUVORK5CYII=' @@ -262,7 +262,7 @@ export const makeFullEvent = ( { type: NodeType.Element, tagName: 'html', - attributes: {}, + attributes: { style: makeHTMLStyles() }, id: 3, childNodes: [ { @@ -275,7 +275,7 @@ export const makeFullEvent = ( { type: NodeType.Element, tagName: 'body', - attributes: {}, + attributes: { style: makeBodyStyles() }, id: BODY_ID, childNodes: [ { diff --git a/ee/frontend/mobile-replay/wireframeStyle.ts b/ee/frontend/mobile-replay/wireframeStyle.ts index 9c9fc516a20ee..4a7b3fd662a3b 100644 --- a/ee/frontend/mobile-replay/wireframeStyle.ts +++ b/ee/frontend/mobile-replay/wireframeStyle.ts @@ -105,3 +105,11 @@ export function makeStylesString(wireframe: wireframe): string { styles += makeFontStyles(wireframe) return styles } + +export function makeHTMLStyles(): string { + return 'height: 100vh; width: 100vw;' +} + +export function makeBodyStyles(): string { + return 'height: 100vh; width: 100vw;' +} diff --git a/frontend/__snapshots__/components-command-bar--search--dark.png b/frontend/__snapshots__/components-command-bar--search--dark.png index ba211a1759062..05bf95416f6e6 100644 Binary files a/frontend/__snapshots__/components-command-bar--search--dark.png and b/frontend/__snapshots__/components-command-bar--search--dark.png differ diff --git a/frontend/__snapshots__/components-command-bar--search--light.png b/frontend/__snapshots__/components-command-bar--search--light.png index d326636c898b8..d05ce7546b909 100644 Binary files a/frontend/__snapshots__/components-command-bar--search--light.png and b/frontend/__snapshots__/components-command-bar--search--light.png differ diff --git a/frontend/__snapshots__/components-command-bar--search.png b/frontend/__snapshots__/components-command-bar--search.png index f66404c3cde43..7f0c8830e3612 100644 Binary files a/frontend/__snapshots__/components-command-bar--search.png and b/frontend/__snapshots__/components-command-bar--search.png differ diff --git a/frontend/__snapshots__/components-integrations-slack--slack-integration-added.png b/frontend/__snapshots__/components-integrations-slack--slack-integration-added.png index 7b19245e899f8..3e7d2216654cd 100644 Binary files a/frontend/__snapshots__/components-integrations-slack--slack-integration-added.png and b/frontend/__snapshots__/components-integrations-slack--slack-integration-added.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states--dark.png b/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states--dark.png new file mode 100644 index 0000000000000..bda6b87f52ed2 Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states--light.png b/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states--light.png new file mode 100644 index 0000000000000..51f4a4f82268b Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states.png b/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states.png new file mode 100644 index 0000000000000..d33d03c99278f Binary 
files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-button--pseudo-states.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--dark.png b/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--dark.png index 4b360e20e22f0..44a5c6cc36111 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--light.png b/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--light.png index 3b141807c7f47..3b62973650bd8 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--light.png and b/frontend/__snapshots__/lemon-ui-lemon-button--types-3000--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-button--types-3000.png b/frontend/__snapshots__/lemon-ui-lemon-button--types-3000.png index 920416789b607..bcf468b94ebfe 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-button--types-3000.png and b/frontend/__snapshots__/lemon-ui-lemon-button--types-3000.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-tag--lemon-tag--dark.png b/frontend/__snapshots__/lemon-ui-lemon-tag--lemon-tag--dark.png index 1e0857115dd2a..5b3ba9e1479ff 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-tag--lemon-tag--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-tag--lemon-tag--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png index cb6204c5f7a95..e430a1e7e64b5 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png index c5acaaff61df6..3898222555311 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png index f8e556b479c86..06178fa43b12e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation.png index 795ebf75b07bd..33493e7f5666c 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-activation.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs.png index 00527eba0cf7c..c6fc370f946d6 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings.png index 569c9c9f33fba..04be7105dfeed 100644 Binary 
files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-settings.png differ diff --git a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.stories.tsx b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.stories.tsx index f7d7bdc8a9479..9c03a98d5763f 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.stories.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.stories.tsx @@ -6,7 +6,7 @@ import { useEffect } from 'react' import { App } from 'scenes/App' import { urls } from 'scenes/urls' -import { setFeatureFlags } from '~/mocks/browser' +import { mswDecorator, setFeatureFlags } from '~/mocks/browser' import { SidePanelTab } from '~/types' import { sidePanelStateLogic } from './sidePanelStateLogic' @@ -18,6 +18,14 @@ const meta: Meta = { viewMode: 'story', mockDate: '2023-07-04', // To stabilize relative dates }, + decorators: [ + mswDecorator({ + get: { + '/api/projects/:team_id/dashboard_templates/': {}, + '/api/projects/:id/integrations': { results: [] }, + }, + }), + ], } export default meta diff --git a/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx index e0ec4f75bfbae..4e2191614a118 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx @@ -32,8 +32,8 @@ export const sidePanelLogic = kea([ selectors({ enabledTabs: [ - (s) => [s.featureFlags, s.isCloudOrDev], - (featureFlags, isCloudOrDev) => { + (s) => [s.featureFlags, s.isCloudOrDev, s.isReady, s.hasCompletedAllTasks], + (featureFlags, isCloudOrDev, isReady, hasCompletedAllTasks) => { const tabs: SidePanelTab[] = [] if (featureFlags[FEATURE_FLAGS.NOTEBOOKS]) { @@ -46,7 +46,9 @@ export const sidePanelLogic = kea([ tabs.push(SidePanelTab.Docs) tabs.push(SidePanelTab.Settings) - tabs.push(SidePanelTab.Activation) + if (isReady && !hasCompletedAllTasks) { + tabs.push(SidePanelTab.Activation) + } tabs.push(SidePanelTab.Activity) if (featureFlags[FEATURE_FLAGS.EARLY_ACCESS_FEATURE_SITE_BUTTON]) { @@ -59,7 +61,7 @@ export const sidePanelLogic = kea([ visibleTabs: [ (s) => [s.enabledTabs, s.selectedTab, s.sidePanelOpen, s.isReady, s.hasCompletedAllTasks], - (enabledTabs, selectedTab, sidePanelOpen, isReady, hasCompletedAllTasks): SidePanelTab[] => { + (enabledTabs, selectedTab, sidePanelOpen): SidePanelTab[] => { return enabledTabs.filter((tab: any) => { if (tab === selectedTab && sidePanelOpen) { return true @@ -70,10 +72,6 @@ export const sidePanelLogic = kea([ return false } - if (tab === SidePanelTab.Activation && (!isReady || hasCompletedAllTasks)) { - return false - } - return true }) }, diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index c847c079f3013..591140cae3ce4 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -2,7 +2,7 @@ import { decompressSync, strFromU8 } from 'fflate' import { encodeParams } from 'kea-router' import { ActivityLogProps } from 'lib/components/ActivityLog/ActivityLog' import { ActivityLogItem, ActivityScope } from 'lib/components/ActivityLog/humanizeActivity' -import { toParams } from 'lib/utils' +import { objectClean, toParams } from 'lib/utils' import posthog from 'posthog-js' import { SavedSessionRecordingPlaylistsResult } from 'scenes/session-recordings/saved-playlists/savedSessionRecordingPlaylistsLogic' @@ -102,6 +102,7 @@ export interface ActivityLogPaginatedResponse 
extends PaginatedResponse {
 
 export interface ApiMethodOptions {
     signal?: AbortSignal
+    headers?: Record<string, any>
 }
 
 const CSRF_COOKIE_NAME = 'posthog_csrftoken'
@@ -1519,8 +1520,14 @@ const api = {
     },
 
     notebooks: {
-        async get(notebookId: NotebookType['short_id']): Promise<NotebookType> {
-            return await new ApiRequest().notebook(notebookId).get()
+        async get(
+            notebookId: NotebookType['short_id'],
+            params: Record<string, any> = {},
+            headers: Record<string, any> = {}
+        ): Promise<NotebookType> {
+            return await new ApiRequest().notebook(notebookId).withQueryString(toParams(params)).get({
+                headers,
+            })
         },
         async update(
             notebookId: NotebookType['short_id'],
@@ -1842,6 +1849,7 @@ const api = {
         response = await fetch(url, {
             signal: options?.signal,
             headers: {
+                ...objectClean(options?.headers ?? {}),
                 ...(getSessionId() ? { 'X-POSTHOG-SESSION-ID': getSessionId() } : {}),
             },
         })
@@ -1865,6 +1873,7 @@ const api = {
         const response = await fetch(url, {
             method: 'PATCH',
             headers: {
+                ...objectClean(options?.headers ?? {}),
                 ...(isFormData ? {} : { 'Content-Type': 'application/json' }),
                 'X-CSRFToken': getCookie(CSRF_COOKIE_NAME) || '',
                 ...(getSessionId() ? { 'X-POSTHOG-SESSION-ID': getSessionId() } : {}),
@@ -1897,6 +1906,7 @@ const api = {
         const response = await fetch(url, {
             method: 'POST',
             headers: {
+                ...objectClean(options?.headers ?? {}),
                 ...(isFormData ? {} : { 'Content-Type': 'application/json' }),
                 'X-CSRFToken': getCookie(CSRF_COOKIE_NAME) || '',
                 ...(getSessionId() ? { 'X-POSTHOG-SESSION-ID': getSessionId() } : {}),
diff --git a/frontend/src/lib/components/CommandBar/SearchBar.tsx b/frontend/src/lib/components/CommandBar/SearchBar.tsx
index a7d1d6898913a..3eba8e50e2aad 100644
--- a/frontend/src/lib/components/CommandBar/SearchBar.tsx
+++ b/frontend/src/lib/components/CommandBar/SearchBar.tsx
@@ -9,13 +9,16 @@ import { SearchTabs } from './SearchTabs'
 
 export const SearchBar = (): JSX.Element => {
     useMountedLogic(searchBarLogic) // load initial results
 
+    /** Ref to the search input for focusing after tab change. */
     const inputRef = useRef<HTMLInputElement>(null)
 
     return (
-
- - +
+
+ + +
) } diff --git a/frontend/src/lib/components/CommandBar/SearchBarTab.tsx b/frontend/src/lib/components/CommandBar/SearchBarTab.tsx index db9da0c03a724..b903f0306a118 100644 --- a/frontend/src/lib/components/CommandBar/SearchBarTab.tsx +++ b/frontend/src/lib/components/CommandBar/SearchBarTab.tsx @@ -19,9 +19,9 @@ export const SearchBarTab = ({ tab, inputRef }: SearchBarTabProps): JSX.Element return (
{ setActiveTab(tab) inputRef.current?.focus() @@ -40,16 +40,15 @@ type CountProps = { const Count = ({ tab }: CountProps): JSX.Element | null => { const { activeTab, tabsCount, tabsLoading } = useValues(searchBarLogic) - // TODO: replace todo with condition that time since search start > 1s - const isActive = tab === activeTab || true + const isLoading = tabsLoading.length > 0 - if (tab === Tab.All) { - return null - } else if (isActive && tabsLoading.includes(tab)) { + if (isLoading && tab === Tab.All && activeTab === Tab.All) { + return + } else if (tabsLoading.includes(tab) && activeTab !== Tab.All) { return - } else if (tabsCount[tab] != null) { + } else if (!isLoading && tabsCount[tab] != null) { return {tabsCount[tab]} } else { - return + return null } } diff --git a/frontend/src/lib/components/CommandBar/SearchResult.tsx b/frontend/src/lib/components/CommandBar/SearchResult.tsx index 554f8f0b1f3cb..ea640489569ac 100644 --- a/frontend/src/lib/components/CommandBar/SearchResult.tsx +++ b/frontend/src/lib/components/CommandBar/SearchResult.tsx @@ -14,7 +14,7 @@ import { Node } from '~/queries/schema' import { FilterType } from '~/types' import { tabToName } from './constants' -import { searchBarLogic, urlForResult } from './searchBarLogic' +import { searchBarLogic } from './searchBarLogic' import { SearchResult as ResultType } from './types' type SearchResultProps = { @@ -52,7 +52,7 @@ export const SearchResult = ({ result, resultIndex, focused, keyboardFocused }: return (
{ if (isAutoScrolling) { return @@ -79,10 +79,6 @@ export const SearchResult = ({ result, resultIndex, focused, keyboardFocused }: - - {location.host} - {urlForResult(result)} -
) @@ -90,9 +86,8 @@ export const SearchResult = ({ result, resultIndex, focused, keyboardFocused }: export const SearchResultSkeleton = (): JSX.Element => (
- - - + +
) diff --git a/frontend/src/lib/components/CommandBar/SearchResultPreview.tsx b/frontend/src/lib/components/CommandBar/SearchResultPreview.tsx index e79de19baaa3b..ecd2d4e46733b 100644 --- a/frontend/src/lib/components/CommandBar/SearchResultPreview.tsx +++ b/frontend/src/lib/components/CommandBar/SearchResultPreview.tsx @@ -2,7 +2,7 @@ import { useValues } from 'kea' import { ResultDescription, ResultName } from 'lib/components/CommandBar/SearchResult' import { tabToName } from './constants' -import { searchBarLogic } from './searchBarLogic' +import { searchBarLogic, urlForResult } from './searchBarLogic' export const SearchResultPreview = (): JSX.Element | null => { const { activeResultIndex, combinedSearchResults } = useValues(searchBarLogic) @@ -19,6 +19,10 @@ export const SearchResultPreview = (): JSX.Element | null => {
+ + {location.host} + {urlForResult(result)} +
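With the two hunks above, the `location.host` + URL line now renders once in the preview pane rather than on every result row — hence `urlForResult` moving from SearchResult.tsx's imports to SearchResultPreview.tsx's. A rough, hypothetical stand-in for what a `urlForResult`-style helper does (the real one lives in searchBarLogic and handles every result type):

import { urls } from 'scenes/urls'

// Hypothetical sketch only: map a search result to its in-app path, so the
// preview can display `location.host` plus this path as a pseudo-URL.
export function urlForResultSketch(result: { type: string; result_id: string }): string {
    switch (result.type) {
        case 'dashboard':
            return urls.dashboard(result.result_id)
        case 'feature_flag':
            return urls.featureFlag(result.result_id)
        default:
            return urls.projectHomepage()
    }
}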
diff --git a/frontend/src/lib/components/CommandBar/SearchResults.tsx b/frontend/src/lib/components/CommandBar/SearchResults.tsx index b77ed70363c79..9c0b6808b6cce 100644 --- a/frontend/src/lib/components/CommandBar/SearchResults.tsx +++ b/frontend/src/lib/components/CommandBar/SearchResults.tsx @@ -10,36 +10,39 @@ export const SearchResults = (): JSX.Element => { useValues(searchBarLogic) return ( -
-
- {combinedSearchLoading && ( - <> - - - - - )} - {!combinedSearchLoading && combinedSearchResults?.length === 0 && ( -
-

No results

-

This doesn't happen often, but we're stumped!

- +
+ {!combinedSearchLoading && combinedSearchResults?.length === 0 ? ( +
+

No results

+

This doesn't happen often, but we're stumped!

+ +
+ ) : ( +
+
+ {combinedSearchLoading && ( + <> + + + + + )} + {!combinedSearchLoading && + combinedSearchResults?.map((result, index) => ( + + ))}
- )} - {!combinedSearchLoading && - combinedSearchResults?.map((result, index) => ( - - ))} -
-
- -
+
+ +
+
+ )}
) } diff --git a/frontend/src/lib/components/CommandBar/SearchTabs.tsx b/frontend/src/lib/components/CommandBar/SearchTabs.tsx index 8b46753118332..d847de715b559 100644 --- a/frontend/src/lib/components/CommandBar/SearchTabs.tsx +++ b/frontend/src/lib/components/CommandBar/SearchTabs.tsx @@ -1,6 +1,7 @@ import { useValues } from 'kea' import { RefObject } from 'react' +import { groupToName } from './constants' import { searchBarLogic } from './searchBarLogic' import { SearchBarTab } from './SearchBarTab' @@ -9,11 +10,18 @@ type SearchTabsProps = { } export const SearchTabs = ({ inputRef }: SearchTabsProps): JSX.Element | null => { - const { tabs } = useValues(searchBarLogic) + const { tabsGrouped } = useValues(searchBarLogic) return ( -
- {tabs.map((tab) => ( - +
+ {Object.entries(tabsGrouped).map(([group, tabs]) => ( +
+ {group !== 'all' && ( + {groupToName[group]} + )} + {tabs.map((tab) => ( + + ))} +
))}
) diff --git a/frontend/src/lib/components/CommandBar/constants.ts b/frontend/src/lib/components/CommandBar/constants.ts index b6935ed5ae4c9..72a93d8151c7f 100644 --- a/frontend/src/lib/components/CommandBar/constants.ts +++ b/frontend/src/lib/components/CommandBar/constants.ts @@ -3,6 +3,18 @@ export const actionScopeToName: Record = { insights: 'Insights', } +export enum TabGroup { + All = 'all', + EventData = 'event_data', + PostHog = 'posthog', +} + +export const groupToName: Record = { + [TabGroup.All]: 'All', + [TabGroup.EventData]: 'Event data', + [TabGroup.PostHog]: 'PostHog', +} + export enum Tab { All = 'all', Action = 'action', diff --git a/frontend/src/lib/components/CommandBar/index.scss b/frontend/src/lib/components/CommandBar/index.scss index 765089691b6a8..caa7b400e575a 100644 --- a/frontend/src/lib/components/CommandBar/index.scss +++ b/frontend/src/lib/components/CommandBar/index.scss @@ -8,10 +8,15 @@ .SearchBarTab { &:hover { - border-top: 2px solid var(--border-3000); + border-left: 2px solid var(--border-3000); } &.SearchBarTab__active { border-color: var(--primary-3000); } } + +.SearchResults { + // offset container height by input + height: 'calc(100% - 2.375rem)'; +} diff --git a/frontend/src/lib/components/CommandBar/searchBarLogic.ts b/frontend/src/lib/components/CommandBar/searchBarLogic.ts index efb7bb4b85c39..7d0982be78d35 100644 --- a/frontend/src/lib/components/CommandBar/searchBarLogic.ts +++ b/frontend/src/lib/components/CommandBar/searchBarLogic.ts @@ -9,7 +9,7 @@ import { groupsModel } from '~/models/groupsModel' import { Group, InsightShortId, PersonType, SearchableEntity, SearchResponse } from '~/types' import { commandBarLogic } from './commandBarLogic' -import { clickhouseTabs, Tab } from './constants' +import { clickhouseTabs, Tab, TabGroup } from './constants' import type { searchBarLogicType } from './searchBarLogicType' import { BarStatus, GroupResult, PersonResult, SearchResult } from './types' @@ -66,7 +66,10 @@ export const searchBarLogic = kea([ loadSearchResponse: async (_, breakpoint) => { await breakpoint(DEBOUNCE_MS) - if (values.activeTab === Tab.All) { + if (clickhouseTabs.includes(values.activeTab)) { + // prevent race conditions when switching tabs quickly + return values.rawSearchResponse + } else if (values.activeTab === Tab.All) { return await api.search.list({ q: values.searchQuery }) } else { return await api.search.list({ @@ -277,27 +280,30 @@ export const searchBarLogic = kea([ group3Loading && group4Loading, ], - tabs: [ + tabsForGroups: [ (s) => [s.groupTypes], (groupTypes): Tab[] => { - return [ - Tab.All, - Tab.EventDefinition, - Tab.Action, - Tab.Person, - Tab.Cohort, - ...Array.from(groupTypes.values()).map( - ({ group_type_index }) => `group_${group_type_index}` as Tab - ), - Tab.Insight, - Tab.Dashboard, - Tab.Notebook, - Tab.Experiment, - Tab.FeatureFlag, - ] + return Array.from(groupTypes.values()).map(({ group_type_index }) => `group_${group_type_index}` as Tab) + }, + ], + tabsGrouped: [ + (s) => [s.tabsForGroups], + (tabsForGroups): Record => { + return { + all: [Tab.All], + event_data: [Tab.EventDefinition, Tab.Action, Tab.Person, Tab.Cohort, ...tabsForGroups], + posthog: [Tab.Insight, Tab.Dashboard, Tab.Notebook, Tab.Experiment, Tab.FeatureFlag], + } }, ], - tabsCount: [ + tabs: [ + (s) => [s.tabsGrouped], + (tabsGrouped): Tab[] => { + return Object.values(tabsGrouped).reduce((acc, val) => acc.concat(val), []) + }, + ], + tabsCount: [(s) => [s.tabsCountMemoized], (tabsCountMemoized) => tabsCountMemoized[0]], + 
tabsCountMemoized: [ (s) => [ s.rawSearchResponse, s.rawPersonsResponse, @@ -306,6 +312,7 @@ export const searchBarLogic = kea([ s.rawGroup2Response, s.rawGroup3Response, s.rawGroup4Response, + s.searchQuery, ], ( searchResponse, @@ -314,8 +321,11 @@ export const searchBarLogic = kea([ group1Response, group2Response, group3Response, - group4Response - ): Record => { + group4Response, + searchQuery + ): [Record, string] => { + /** :TRICKY: We need to pull in the searchQuery to memoize the counts. */ + const counts = {} Object.values(Tab).forEach((tab) => { @@ -332,11 +342,30 @@ export const searchBarLogic = kea([ ] clickhouseTabsResults.forEach(([tab, results]) => { if (results !== undefined) { - counts[tab] = results.length === 100 ? '>=100' : results.length.toString() + counts[tab] = results.length === 100 ? '100+' : results.length.toString() } }) - return counts as Record + return [counts as Record, searchQuery] + }, + { + resultEqualityCheck: (prev, next) => { + const [prevCounts, prevQuery] = prev + const [nextCounts, nextQuery] = next + + if (prevQuery !== nextQuery) { + return false + } + + const prevNulls = Object.values(prevCounts).filter((v) => v === null).length + const nextNulls = Object.values(nextCounts).filter((v) => v === null).length + + if (nextNulls !== prevNulls) { + return false + } + + return true + }, }, ], tabsLoading: [ diff --git a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx index d18e8a96d5bb9..f0762c235657d 100644 --- a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx +++ b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx @@ -46,7 +46,7 @@ import { actions, connect, events, kea, listeners, path, reducers, selectors } f import { router } from 'kea-router' import api from 'lib/api' import { FEATURE_FLAGS } from 'lib/constants' -import { IconFlare } from 'lib/lemon-ui/icons' +import { IconClose, IconFlare } from 'lib/lemon-ui/icons' import { ProfilePicture } from 'lib/lemon-ui/ProfilePicture' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { isMobile, isURL, uniqueBy } from 'lib/utils' @@ -59,10 +59,15 @@ import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' +import { SidePanelTabs } from '~/layout/navigation-3000/sidepanel/SidePanel' +import { sidePanelLogic } from '~/layout/navigation-3000/sidepanel/sidePanelLogic' +import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' import { dashboardsModel } from '~/models/dashboardsModel' import { DashboardType, InsightType } from '~/types' import { personalAPIKeysLogic } from '../../../scenes/settings/user/personalAPIKeysLogic' +import { commandBarLogic } from '../CommandBar/commandBarLogic' +import { BarStatus } from '../CommandBar/types' import { hedgehogbuddyLogic } from '../HedgehogBuddy/hedgehogbuddyLogic' import type { commandPaletteLogicType } from './commandPaletteLogicType' import { openCHQueriesDebugModal } from './DebugCHQueries' @@ -144,6 +149,10 @@ export const commandPaletteLogic = kea([ ['updateUser'], hedgehogbuddyLogic, ['setHedgehogModeEnabled'], + commandBarLogic, + ['setCommandBar'], + sidePanelStateLogic, + ['openSidePanel', 'closeSidePanel'], ], values: [ teamLogic, @@ -154,6 +163,10 @@ export const commandPaletteLogic = kea([ ['featureFlags'], hedgehogbuddyLogic, ['hedgehogModeEnabled'], + sidePanelLogic, + ['enabledTabs'], + sidePanelStateLogic, + 
['sidePanelOpen'], ], logic: [preflightLogic], }), @@ -935,6 +948,58 @@ export const commandPaletteLogic = kea([ }, } + const shortcuts: Command = { + key: 'shortcuts', + scope: GLOBAL_COMMAND_SCOPE, + resolver: { + icon: IconKeyboard, + display: 'Open keyboard shortcut overview', + executor: () => { + actions.setCommandBar(BarStatus.SHOW_SHORTCUTS) + + // :HACKY: we need to return a dummy flow here, as otherwise + // the executor will hide the command bar, which also displays + // the shortcut overview + const dummyFlow: CommandFlow = { + resolver: () => ({ + icon: <>, + display: '', + executor: true, + }), + } + return dummyFlow + }, + }, + } + + const sidepanel: Command = { + key: 'sidepanel', + scope: GLOBAL_COMMAND_SCOPE, + resolver: [ + ...values.enabledTabs.map((tab) => { + const { Icon, label } = SidePanelTabs[tab] + return { + icon: Icon, + display: `Open ${label} side panel`, + executor: () => { + actions.openSidePanel(tab) + }, + } + }), + ...(values.sidePanelOpen + ? [ + { + icon: IconClose, + display: 'Close side panel', + executor: () => { + actions.closeSidePanel() + }, + }, + ] + : []), + ], + } + actions.registerCommand(goTo) actions.registerCommand(openUrls) actions.registerCommand(debugClickhouseQueries) @@ -946,6 +1011,8 @@ export const commandPaletteLogic = kea([ if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000]) { actions.registerCommand(toggleTheme) actions.registerCommand(toggleHedgehogMode) + actions.registerCommand(shortcuts) + actions.registerCommand(sidepanel) } }, beforeUnmount: () => { @@ -959,6 +1026,8 @@ export const commandPaletteLogic = kea([ actions.deregisterCommand('debug-copy-session-recording-url') actions.deregisterCommand('toggle-theme') actions.deregisterCommand('toggle-hedgehog-mode') + actions.deregisterCommand('shortcuts') + actions.deregisterCommand('sidepanel') }, })), ]) diff --git a/frontend/src/lib/components/CopyToClipboard.tsx b/frontend/src/lib/components/CopyToClipboard.tsx index 03480644f2d5b..fd8e9c78dd3a7 100644 --- a/frontend/src/lib/components/CopyToClipboard.tsx +++ b/frontend/src/lib/components/CopyToClipboard.tsx @@ -1,10 +1,10 @@ +import clsx from 'clsx' import { IconCopy } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { copyToClipboard } from 'lib/utils/copyToClipboard' -import { HTMLProps } from 'react' -interface InlinePropsBase extends HTMLProps { +interface InlinePropsBase { description?: string /** Makes text selectable instead of copying on click anywhere */ selectable?: boolean @@ -12,6 +12,8 @@ interface InlinePropsBase extends HTMLProps { tooltipMessage?: string | null iconStyle?: Record iconPosition?: 'end' | 'start' + className?: string + /** @deprecated */ style?: React.CSSProperties } interface InlinePropsWithStringInside extends InlinePropsBase { @@ -33,41 +35,49 @@ export function CopyToClipboardInline({ tooltipMessage = null, iconStyle, iconPosition = 'end', + className, style, ...props }: InlineProps): JSX.Element { const copy = async (): Promise => await copyToClipboard((explicitValue ?? children) as string, description) - const content = ( - - {children && {children}} - } - noPadding - className="ml-1" - data-attr="copy-icon" - onClick={!selectable ? undefined : copy} - /> - + let content = ( + } + noPadding + className="ml-1" + data-attr="copy-icon" + onClick={!selectable ? 
undefined : copy} + /> ) + + if (children) { + content = ( + + {children} + } + noPadding + className="ml-1" + data-attr="copy-icon" + onClick={!selectable ? undefined : copy} + /> + + ) + } return !selectable || tooltipMessage !== null ? ( {content} ) : ( diff --git a/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx b/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx index 65d2f4ecb8229..47e0dcd57b105 100644 --- a/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx +++ b/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx @@ -1,6 +1,6 @@ import './TaxonomicPropertyFilter.scss' -import { LemonButtonWithDropdown } from '@posthog/lemon-ui' +import { LemonButton, LemonDropdown } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useMountedLogic, useValues } from 'kea' import { OperatorValueSelect } from 'lib/components/PropertyFilters/components/OperatorValueSelect' @@ -159,28 +159,28 @@ export function TaxonomicPropertyFilter({
)}
- (dropdownOpen ? closeDropdown() : openDropdown())} - type="secondary" - status={!valuePresent ? 'primary' : 'stealth'} - icon={!valuePresent ? : undefined} - sideIcon={null} - data-attr={'property-select-toggle-' + index} + - {filter?.type === 'cohort' ? ( - selectedCohortName || `Cohort #${filter?.value}` - ) : filter?.key ? ( - - ) : ( - addText || 'Add filter' - )} - + : undefined} + data-attr={'property-select-toggle-' + index} + onClick={() => (dropdownOpen ? closeDropdown() : openDropdown())} + > + {filter?.type === 'cohort' ? ( + selectedCohortName || `Cohort #${filter?.value}` + ) : filter?.key ? ( + + ) : ( + addText || 'Add filter' + )} + + {showOperatorValueSelect ? ( -
- {prefix} - {at && ' '} +
+ {prefix} {at && } - {by && ' by'} + {by && by}
{by && }
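The TaxonomicPropertyFilter hunk above replaces the monolithic LemonButtonWithDropdown with an explicit LemonDropdown wrapping a plain LemonButton: the trigger keeps ordinary button styling while the filter logic keeps controlling visibility. A trimmed sketch of the composition (the overlay content and most props are placeholders; `visible`/`overlay` assumed per the lemon-ui dropdown API used in the hunk):

import { useState } from 'react'

import { LemonButton, LemonDropdown } from '@posthog/lemon-ui'

// Sketch of the pattern: a controlled dropdown composed around a plain button.
export function FilterToggleSketch(): JSX.Element {
    const [open, setOpen] = useState(false)
    return (
        <LemonDropdown visible={open} overlay={<div>taxonomic picker goes here</div>}>
            <LemonButton type="secondary" onClick={() => setOpen(!open)}>
                Add filter
            </LemonButton>
        </LemonDropdown>
    )
}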
diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 556535e73ce27..43458d1dde12f 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -142,7 +142,7 @@ export const FEATURE_FLAGS = { POSTHOG_3000_NAV: 'posthog-3000-nav', // owner: @Twixes ENABLE_PROMPTS: 'enable-prompts', // owner: @lharries FEEDBACK_SCENE: 'feedback-scene', // owner: @lharries - NOTEBOOKS: 'notebooks', // owner: #team-monitoring + NOTEBOOKS: 'notebooks', // owner: #team-replay EARLY_ACCESS_FEATURE_SITE_BUTTON: 'early-access-feature-site-button', // owner: @neilkakkar HEDGEHOG_MODE_DEBUG: 'hedgehog-mode-debug', // owner: @benjackwhite GENERIC_SIGNUP_BENEFITS: 'generic-signup-benefits', // experiment, owner: @raquelmsmith @@ -160,7 +160,7 @@ export const FEATURE_FLAGS = { PRODUCT_SPECIFIC_ONBOARDING: 'product-specific-onboarding', // owner: @raquelmsmith REDIRECT_SIGNUPS_TO_INSTANCE: 'redirect-signups-to-instance', // owner: @raquelmsmith APPS_AND_EXPORTS_UI: 'apps-and-exports-ui', // owner: @benjackwhite - SESSION_REPLAY_CORS_PROXY: 'session-replay-cors-proxy', // owner: #team-monitoring + SESSION_REPLAY_CORS_PROXY: 'session-replay-cors-proxy', // owner: #team-replay HOGQL_INSIGHTS_LIFECYCLE: 'hogql-insights-lifecycle', // owner: @mariusandra HOGQL_INSIGHTS_TRENDS: 'hogql-insights-trends', // owner: @Gilbert09 HOGQL_INSIGHT_LIVE_COMPARE: 'hogql-insight-live-compare', // owner: @mariusandra @@ -168,17 +168,18 @@ export const FEATURE_FLAGS = { WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline SURVEYS_RESULTS_VISUALIZATIONS: 'surveys-results-visualizations', // owner: @jurajmajerik SURVEYS_PAYGATES: 'surveys-paygates', - CONSOLE_RECORDING_SEARCH: 'console-recording-search', // owner: #team-monitoring + CONSOLE_RECORDING_SEARCH: 'console-recording-search', // owner: #team-replay PERSONS_HOGQL_QUERY: 'persons-hogql-query', // owner: @mariusandra PIPELINE_UI: 'pipeline-ui', // owner: #team-pipeline - NOTEBOOK_CANVASES: 'notebook-canvases', // owner: #team-monitoring - SESSION_RECORDING_SAMPLING: 'session-recording-sampling', // owner: #team-monitoring + NOTEBOOK_CANVASES: 'notebook-canvases', // owner: #team-replay + SESSION_RECORDING_SAMPLING: 'session-recording-sampling', // owner: #team-replay PERSON_FEED_CANVAS: 'person-feed-canvas', // owner: #project-canvas MULTI_PROJECT_FEATURE_FLAGS: 'multi-project-feature-flags', // owner: @jurajmajerik #team-feature-success - NETWORK_PAYLOAD_CAPTURE: 'network-payload-capture', // owner: #team-monitoring + NETWORK_PAYLOAD_CAPTURE: 'network-payload-capture', // owner: #team-replay FEATURE_FLAG_COHORT_CREATION: 'feature-flag-cohort-creation', // owner: @neilkakkar #team-feature-success INSIGHT_HORIZONTAL_CONTROLS: 'insight-horizontal-controls', // owner: @benjackwhite ALWAYS_SHOW_SEEKBAR_PREVIEW: 'always-show-seekbar-preview', // owner: @pauldambra + SESSION_REPLAY_MOBILE: 'session-replay-mobile', // owner: #team-replay } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx index 7673cb3b3b84a..254e56643372d 100644 --- a/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx +++ b/frontend/src/lib/lemon-ui/LemonButton/LemonButton.stories.tsx @@ -74,78 +74,65 @@ const TypesAndStatusesTemplate: StoryFn = (props) => { ) } +const ButtonVariants3000 = ({ + tertiary = false, + active = false, +}: { + tertiary?: boolean + active?: 
LemonButtonProps['active'] +}): JSX.Element => { + const variants: LemonButtonProps[] = tertiary + ? [ + { type: 'tertiary', children: 'Primary' }, + { type: 'tertiary', status: 'danger', children: 'Danger' }, + ] + : [ + { type: 'primary', children: 'Primary' }, + { type: 'primary', status: 'primary-alt', children: 'Primary alt' }, + { type: 'secondary', children: 'Secondary' }, + { type: 'secondary', status: 'danger', children: 'Danger' }, + { type: 'secondary', stealth: true, status: 'primary', children: 'Stealth' }, + ] + return ( +
+ {variants.map((props, index) => ( + } /> + ))} +
+ ) +} + export const Types3000: Story = () => { return (
-
type=BLOCK
+
type=3D
-
- }> - Primary - - }> - Primary Alt - - }> - Secondary - - }> - Destroy - - }> - Stealth - +
+
Active
+ +
+
+
Light background
- }> - Primary - - }> - Primary Alt - - }> - Secondary - - }> - Destroy - - }> - Stealth - +
+
+
type=TERTIARY
+
+
+ +
Light background
- }> - Primary - - }> - Primary Alt - - }> - Secondary - - }> - Destroy - - }> - Stealth - +
-
type=MENU
-
- }> - Primary - - }> - Danger - -
) } @@ -245,6 +232,47 @@ export const Active = (): JSX.Element => { ) } +export const PseudoStates = (): JSX.Element => { + return ( +
+
+
+
TYPE=3D STATE=DEFAULT
+ +
+
+
TYPE=3D STATE=HOVER
+ +
+
+
TYPE=3D STATE=HOVER,ACTIVE
+ +
+
+
+
+
TYPE=TERTIARY STATE=DEFAULT
+ +
+
+
TYPE=TERTIARY STATE=HOVER
+ +
+
+
TYPE=TERTIARY STATE=HOVER,ACTIVE
+ +
+
+
+ ) +} +PseudoStates.parameters = { + pseudo: { + hover: ['#hover .LemonButton', '#active .LemonButton'], + active: ['#active .LemonButton'], + }, +} + export const MenuButtons = (): JSX.Element => { return (
diff --git a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss index 2eb5371890db7..fd474dbc2e865 100644 --- a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss +++ b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss @@ -36,7 +36,7 @@ } &.LemonTag--option { - color: var(--primary); + color: var(--primary-3000); background-color: var(--accent-3000); } diff --git a/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json b/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json index d805e902a4390..638a1618d5fb2 100644 --- a/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json +++ b/frontend/src/mocks/fixtures/api/projects/team_id/insights/dataTableEvents.json @@ -106,7 +106,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -147,7 +146,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -465,7 +463,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -506,7 +503,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -697,7 +693,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -738,7 +733,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -915,7 +909,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -956,7 +949,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -1137,7 +1129,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -1178,7 +1169,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -1442,7 +1432,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -1483,7 +1472,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - 
"$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -1822,7 +1810,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -1863,7 +1850,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -2170,7 +2156,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -2211,7 +2196,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -2568,7 +2552,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -2609,7 +2592,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -2786,7 +2768,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -2827,7 +2808,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -3005,7 +2985,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -3046,7 +3025,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -3403,7 +3381,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -3444,7 +3421,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -3622,7 +3598,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -3663,7 +3638,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, 
@@ -3854,7 +3828,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -3895,7 +3868,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -4074,7 +4046,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -4115,7 +4086,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -4294,7 +4264,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -4335,7 +4304,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -4512,7 +4480,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -4553,7 +4520,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -4923,7 +4889,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -4964,7 +4929,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -5155,7 +5119,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -5196,7 +5159,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -5373,7 +5335,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -5414,7 +5375,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -5744,7 +5704,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", 
"surveys", "new-empty-states", @@ -5785,7 +5744,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -5964,7 +5922,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -6005,7 +5962,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -6182,7 +6138,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -6223,7 +6178,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -6404,7 +6358,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -6445,7 +6398,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -6624,7 +6576,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -6665,7 +6616,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -6968,7 +6918,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -7009,7 +6958,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -7192,7 +7140,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -7233,7 +7180,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -7410,7 +7356,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -7451,7 +7396,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": 
true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -7630,7 +7574,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -7671,7 +7614,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -7958,7 +7900,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -7999,7 +7940,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -8178,7 +8118,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -8219,7 +8158,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -8398,7 +8336,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -8439,7 +8376,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -8618,7 +8554,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -8659,7 +8594,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -8837,7 +8771,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -8878,7 +8811,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -9057,7 +8989,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -9098,7 +9029,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, 
"$feature/new-empty-states": true, @@ -9277,7 +9207,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -9318,7 +9247,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -9497,7 +9425,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -9538,7 +9465,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -9717,7 +9643,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -9758,7 +9683,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -10024,7 +9948,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -10065,7 +9988,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -10252,7 +10174,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -10293,7 +10214,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -10481,7 +10401,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -10522,7 +10441,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -10701,7 +10619,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -10742,7 +10659,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -10919,7 +10835,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - 
"session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -10960,7 +10875,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -11137,7 +11051,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -11178,7 +11091,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -11355,7 +11267,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -11396,7 +11307,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -11684,7 +11594,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -11725,7 +11634,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -11907,7 +11815,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -11948,7 +11855,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -12125,7 +12031,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -12166,7 +12071,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -12345,7 +12249,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -12386,7 +12289,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -12565,7 +12467,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -12606,7 +12507,6 @@ 
"$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -12785,7 +12685,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -12826,7 +12725,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -13005,7 +12903,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -13046,7 +12943,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -13224,7 +13120,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -13265,7 +13160,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -13444,7 +13338,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -13485,7 +13378,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -13664,7 +13556,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -13705,7 +13596,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -13884,7 +13774,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -13925,7 +13814,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -14104,7 +13992,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -14145,7 +14032,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - 
"$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -14411,7 +14297,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -14452,7 +14337,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -14639,7 +14523,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -14680,7 +14563,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -14868,7 +14750,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -14909,7 +14790,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -15088,7 +14968,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -15129,7 +15008,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -15306,7 +15184,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -15347,7 +15224,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -15524,7 +15400,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -15565,7 +15440,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -15742,7 +15616,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -15783,7 +15656,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, 
"$feature/new-empty-states": true, @@ -15966,7 +15838,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -16007,7 +15878,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -16291,7 +16161,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -16332,7 +16201,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -16509,7 +16377,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -16550,7 +16417,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -16732,7 +16598,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -16773,7 +16638,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -16952,7 +16816,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -16993,7 +16856,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -17172,7 +17034,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -17213,7 +17074,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -17392,7 +17252,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -17433,7 +17292,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -17611,7 +17469,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - 
"session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -17652,7 +17509,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -17831,7 +17687,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -17872,7 +17727,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -18051,7 +17905,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -18092,7 +17945,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -18271,7 +18123,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -18312,7 +18163,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -18491,7 +18341,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -18532,7 +18381,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -18798,7 +18646,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -18839,7 +18686,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -19026,7 +18872,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -19067,7 +18912,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -19255,7 +19099,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -19296,7 +19139,6 @@ 
"$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -19473,7 +19315,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -19514,7 +19355,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -19693,7 +19533,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -19734,7 +19573,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -19911,7 +19749,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -19952,7 +19789,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -20129,7 +19965,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -20170,7 +20005,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -20353,7 +20187,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -20394,7 +20227,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -20572,7 +20404,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -20613,7 +20444,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -20790,7 +20620,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -20831,7 +20660,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - 
"$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -21185,7 +21013,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -21226,7 +21053,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -21403,7 +21229,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -21444,7 +21269,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -21625,7 +21449,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -21666,7 +21489,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -21997,7 +21819,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -22038,7 +21859,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -22221,7 +22041,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -22262,7 +22081,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -22555,7 +22373,6 @@ "billing-by-products", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -22596,7 +22413,6 @@ "$feature/billing-by-products": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, diff --git a/frontend/src/queries/examples.ts b/frontend/src/queries/examples.ts index 3761896f7b279..ae185ba6a1e03 100644 --- a/frontend/src/queries/examples.ts +++ b/frontend/src/queries/examples.ts @@ -317,6 +317,22 @@ const HogQLRaw: HogQLQuery = { }, } +const HogQLForDataVisualization: HogQLQuery = { + kind: NodeKind.HogQLQuery, + query: `select toDate(timestamp) as timestamp, count() +from events +where {filters} +group by timestamp +order by 
timestamp asc +limit 100`, + explain: true, + filters: { + dateRange: { + date_from: '-7d', + }, + }, +} + const HogQLTable: DataTableNode = { kind: NodeKind.DataTableNode, full: true, @@ -325,7 +341,7 @@ const HogQLTable: DataTableNode = { const DataVisualization: DataVisualizationNode = { kind: NodeKind.DataVisualizationNode, - source: HogQLRaw, + source: HogQLForDataVisualization, } /* a subset of examples including only those we can show all users and that don't use HogQL */ diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Chart.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Chart.tsx index 2e9d5fc9ef17b..a399cb9d2a43c 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Chart.tsx +++ b/frontend/src/queries/nodes/DataVisualization/Components/Chart.tsx @@ -16,7 +16,7 @@ export const Chart = (): JSX.Element => {
)} -
+
diff --git a/frontend/src/queries/nodes/DataVisualization/Components/ChartSelection.tsx b/frontend/src/queries/nodes/DataVisualization/Components/ChartSelection.tsx index 2d5b05677fd7e..34958263afa02 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/ChartSelection.tsx +++ b/frontend/src/queries/nodes/DataVisualization/Components/ChartSelection.tsx @@ -21,6 +21,7 @@ export const ChartSelection = (): JSX.Element => {
X-axis { /> Y-axis { return (
-
+
diff --git a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx index 4b619e282f4e5..4e9ad08a51963 100644 --- a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx +++ b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx @@ -7,9 +7,11 @@ import { QueryContext } from '~/queries/types' import { ChartDisplayType } from '~/types' import { dataNodeLogic, DataNodeLogicProps } from '../DataNode/dataNodeLogic' +import { DateRange } from '../DataNode/DateRange' import { ElapsedTime } from '../DataNode/ElapsedTime' import { Reload } from '../DataNode/Reload' import { DataTable } from '../DataTable/DataTable' +import { QueryFeature } from '../DataTable/queryFeatures' import { HogQLQueryEditor } from '../HogQLQuery/HogQLQueryEditor' import { Chart } from './Components/Chart' import { TableDisplay } from './Components/TableDisplay' @@ -45,7 +47,7 @@ export function DataTableVisualization(props: DataTableVisualizationProps): JSX. cachedResults: props.cachedResults, } - const { query, visualizationType, showEditingUI } = useValues(builtDataVisualizationLogic) + const { query, visualizationType, showEditingUI, sourceFeatures } = useValues(builtDataVisualizationLogic) const setQuerySource = useCallback( (source: HogQLQuery) => props.setQuery?.({ ...props.query, source }), @@ -73,7 +75,24 @@ export function DataTableVisualization(props: DataTableVisualizationProps): JSX.
- {showEditingUI && } + {showEditingUI && ( + <> + + {sourceFeatures.has(QueryFeature.dateRangePicker) && ( +
+ { + if (query.kind === NodeKind.HogQLQuery) { + setQuerySource(query) + } + }} + /> +
+ )} + + )}
diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts index 627483ddc2891..a79dc5eeac358 100644 --- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts @@ -8,6 +8,7 @@ import { QueryContext } from '~/queries/types' import { ChartDisplayType, ItemMode } from '~/types' import { dataNodeLogic } from '../DataNode/dataNodeLogic' +import { getQueryFeatures, QueryFeature } from '../DataTable/queryFeatures' import type { dataVisualizationLogicType } from './dataVisualizationLogicType' export interface DataVisualizationLogicProps { @@ -78,6 +79,7 @@ export const dataVisualizationLogic = kea([ selectors({ query: [(_state, props) => [props.query], (query) => query], showEditingUI: [(state) => [state.insightMode], (insightMode) => insightMode == ItemMode.Edit], + sourceFeatures: [(_, props) => [props.query], (query): Set => getQueryFeatures(query.source)], isShowingCachedResults: [ () => [(_, props) => props.cachedResults ?? null], (cachedResults: AnyResponseType | null): boolean => !!cachedResults, @@ -165,14 +167,27 @@ export const dataVisualizationLogic = kea([ } } }), - subscriptions(({ actions }) => ({ + subscriptions(({ actions, values }) => ({ columns: (value, oldValue) => { - if (!oldValue || !oldValue.length) { - return + if (oldValue && oldValue.length) { + if (JSON.stringify(value) !== JSON.stringify(oldValue)) { + actions.clearAxis() + } } - if (JSON.stringify(value) !== JSON.stringify(oldValue)) { - actions.clearAxis() + // Set default axis values + if (values.response && values.selectedXIndex === null && values.selectedYIndex === null) { + const types: string[][] = values.response['types'] + const yAxisIndex = types.findIndex((n) => n[1].indexOf('Int') !== -1) + const xAxisIndex = types.findIndex((n) => n[1].indexOf('Date') !== -1) + + if (yAxisIndex >= 0) { + actions.setYAxis(yAxisIndex) + } + + if (xAxisIndex >= 0) { + actions.setXAxis(xAxisIndex) + } } }, })), diff --git a/frontend/src/scenes/data-management/database/DatabaseTables.tsx b/frontend/src/scenes/data-management/database/DatabaseTables.tsx index 003d918cb7e07..8e2c7495fe721 100644 --- a/frontend/src/scenes/data-management/database/DatabaseTables.tsx +++ b/frontend/src/scenes/data-management/database/DatabaseTables.tsx @@ -4,7 +4,8 @@ import { FEATURE_FLAGS } from 'lib/constants' import { LemonTable, LemonTableColumns } from 'lib/lemon-ui/LemonTable' import { LemonTag } from 'lib/lemon-ui/LemonTag/LemonTag' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { databaseTableListLogic, DatabaseTableListRow } from 'scenes/data-management/database/databaseTableListLogic' +import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' +import { DatabaseTableListRow } from 'scenes/data-warehouse/types' import { viewLinkLogic } from 'scenes/data-warehouse/viewLinkLogic' import { ViewLinkModal } from 'scenes/data-warehouse/ViewLinkModal' import { urls } from 'scenes/urls' @@ -108,10 +109,12 @@ export function DatabaseTables({ title: 'Type', key: 'type', dataIndex: 'name', - render: function RenderType() { + render: function RenderType(_, obj: T) { return ( - PostHog + {obj.external_data_source + ? 
obj.external_data_source.source_type + : 'PostHog'} ) }, diff --git a/frontend/src/scenes/data-management/database/databaseTableListLogic.ts b/frontend/src/scenes/data-management/database/databaseTableListLogic.ts index 7fac0498924d4..8097add8fa1a2 100644 --- a/frontend/src/scenes/data-management/database/databaseTableListLogic.ts +++ b/frontend/src/scenes/data-management/database/databaseTableListLogic.ts @@ -1,16 +1,12 @@ import { actions, afterMount, kea, path, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' +import { DatabaseTableListRow } from 'scenes/data-warehouse/types' import { query } from '~/queries/query' -import { DatabaseSchemaQuery, DatabaseSchemaQueryResponseField, NodeKind } from '~/queries/schema' +import { DatabaseSchemaQuery, NodeKind } from '~/queries/schema' import type { databaseTableListLogicType } from './databaseTableListLogicType' -export interface DatabaseTableListRow { - name: string - columns: DatabaseSchemaQueryResponseField[] -} - export const databaseTableListLogic = kea([ path(['scenes', 'data-management', 'database', 'databaseTableListLogic']), actions({ diff --git a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx index 35b3270e71516..def8ccbbc5e29 100644 --- a/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx +++ b/frontend/src/scenes/data-warehouse/external/DataWarehouseTables.tsx @@ -21,8 +21,14 @@ export function DataWarehouseTablesContainer(): JSX.Element { return (
- Files URL pattern - {row.url_pattern} + {row.external_data_source ? ( + <> + ) : ( + <> + Files URL pattern + {row.url_pattern} + + )} File format {row.format} diff --git a/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.tsx b/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.tsx index 13ebc2dda9f32..46f87bd01b557 100644 --- a/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.tsx +++ b/frontend/src/scenes/data-warehouse/external/dataWarehouseSceneLogic.tsx @@ -49,6 +49,7 @@ export const dataWarehouseSceneLogic = kea([ columns: table.columns, url_pattern: table.url_pattern, format: table.format, + external_data_source: table.external_data_source, } as DataWarehouseSceneRow) ) }, diff --git a/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx b/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx index 510b3bf63f742..09876429772f8 100644 --- a/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx +++ b/frontend/src/scenes/data-warehouse/settings/DataWarehouseSettingsScene.tsx @@ -1,4 +1,5 @@ -import { LemonButton, LemonTable, LemonTag, Spinner } from '@posthog/lemon-ui' +import { TZLabel } from '@posthog/apps-common' +import { LemonButton, LemonDialog, LemonTable, LemonTag, Spinner } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { PageHeader } from 'lib/components/PageHeader' import { FEATURE_FLAGS } from 'lib/constants' @@ -16,9 +17,10 @@ export const scene: SceneExport = { } const StatusTagSetting = { - running: 'default', - succeeded: 'primary', - error: 'danger', + Running: 'primary', + Completed: 'success', + Error: 'danger', + Failed: 'danger', } export function DataWarehouseSettingsScene(): JSX.Element { @@ -66,21 +68,45 @@ export function DataWarehouseSettingsScene(): JSX.Element { { title: 'Source Type', key: 'name', - width: 0, render: function RenderName(_, source) { return source.source_type }, }, + { + title: 'Table Prefix', + key: 'prefix', + render: function RenderPrefix(_, source) { + return source.prefix + }, + }, { title: 'Status', key: 'status', - width: 0, render: function RenderStatus(_, source) { return ( {source.status} ) }, }, + { + title: 'Sync Frequency', + key: 'sync_frequency', + render: function RenderFrequency() { + return 'Every 24 hours' + }, + }, + { + title: 'Last Successful Run', + key: 'last_run_at', + tooltip: 'Time of the last run that completed a data import', + render: (_, run) => { + return run.last_run_at ? ( + + ) : ( + 'Never' + ) + }, + }, { key: 'actions', width: 0, @@ -106,12 +132,26 @@ export function DataWarehouseSettingsScene(): JSX.Element { > Reload + { - deleteSource(source) + LemonDialog.open({ + title: 'Delete data source?', + description: + 'Are you sure you want to delete this data source? 
All related tables will be deleted.', + + primaryButton: { + children: 'Delete', + status: 'danger', + onClick: () => deleteSource(source), + }, + secondaryButton: { + children: 'Cancel', + }, + }) }} > Delete diff --git a/frontend/src/scenes/data-warehouse/types.ts b/frontend/src/scenes/data-warehouse/types.ts index 6da8ea535c24a..e5e15c1246760 100644 --- a/frontend/src/scenes/data-warehouse/types.ts +++ b/frontend/src/scenes/data-warehouse/types.ts @@ -1,8 +1,10 @@ import { DatabaseSchemaQueryResponseField, HogQLQuery } from '~/queries/schema' +import { ExternalDataStripeSource } from '~/types' export interface DatabaseTableListRow { name: string columns: DatabaseSchemaQueryResponseField[] + external_data_source?: ExternalDataStripeSource } export interface DataWarehouseSceneRow extends DatabaseTableListRow { diff --git a/frontend/src/scenes/events/__mocks__/eventsQuery.json b/frontend/src/scenes/events/__mocks__/eventsQuery.json index 1b86d95737f0b..2ec1ff533774c 100644 --- a/frontend/src/scenes/events/__mocks__/eventsQuery.json +++ b/frontend/src/scenes/events/__mocks__/eventsQuery.json @@ -159,7 +159,6 @@ "hedgehog-mode-debug", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -199,7 +198,6 @@ "$feature/hedgehog-mode-debug": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -373,7 +371,6 @@ "hedgehog-mode-debug", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -413,7 +410,6 @@ "$feature/hedgehog-mode-debug": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -587,7 +583,6 @@ "hedgehog-mode-debug", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -627,7 +622,6 @@ "$feature/hedgehog-mode-debug": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -801,7 +795,6 @@ "hedgehog-mode-debug", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -841,7 +834,6 @@ "$feature/hedgehog-mode-debug": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, @@ -1029,7 +1021,6 @@ "hedgehog-mode-debug", "recordings-dom-explorer", "auto-redirect", - "session-recording-blob-replay", "session-recording-infinite-list", "surveys", "new-empty-states", @@ -1069,7 +1060,6 @@ "$feature/hedgehog-mode-debug": true, "$feature/recordings-dom-explorer": true, "$feature/auto-redirect": true, - "$feature/session-recording-blob-replay": true, "$feature/session-recording-infinite-list": true, "$feature/surveys": true, "$feature/new-empty-states": true, diff --git 
a/frontend/src/scenes/experiments/Experiment.scss b/frontend/src/scenes/experiments/Experiment.scss index 85059e647fa48..c9c4fe75b35cf 100644 --- a/frontend/src/scenes/experiments/Experiment.scss +++ b/frontend/src/scenes/experiments/Experiment.scss @@ -25,6 +25,10 @@ .ant-slider-rail { background-color: var(--primary-3000-highlight); } + + .ant-slider-handle:focus { + box-shadow: 0 0 0 5px var(--primary-3000-highlight); + } } } diff --git a/frontend/src/scenes/insights/summarizeInsight.ts b/frontend/src/scenes/insights/summarizeInsight.ts index a0f4aca384486..4fcf69fb0924c 100644 --- a/frontend/src/scenes/insights/summarizeInsight.ts +++ b/frontend/src/scenes/insights/summarizeInsight.ts @@ -1,3 +1,4 @@ +import { useValues } from 'kea' import { RETENTION_FIRST_TIME } from 'lib/constants' import { KEY_MAPPING } from 'lib/taxonomy' import { alphabet, capitalizeFirstLetter } from 'lib/utils' @@ -16,10 +17,12 @@ import { humanizePathsEventTypes, } from 'scenes/insights/utils' import { retentionOptions } from 'scenes/retention/constants' -import { apiValueToMathType, MathCategory, MathDefinition } from 'scenes/trends/mathsLogic' +import { apiValueToMathType, MathCategory, MathDefinition, mathsLogic } from 'scenes/trends/mathsLogic' import { mathsLogicType } from 'scenes/trends/mathsLogicType' +import { cohortsModel } from '~/models/cohortsModel' import { cohortsModelType } from '~/models/cohortsModelType' +import { groupsModel } from '~/models/groupsModel' import { groupsModelType } from '~/models/groupsModelType' import { extractExpressionComment } from '~/queries/nodes/DataTable/utils' import { BreakdownFilter, InsightQueryNode, Node } from '~/queries/schema' @@ -352,3 +355,12 @@ export function summarizeInsight( ? summarizeInsightFilters(filters, context) : '' } + +export function useSummarizeInsight(): (query: Node | undefined | null, filters?: Partial) => string { + const { aggregationLabel } = useValues(groupsModel) + const { cohortsById } = useValues(cohortsModel) + const { mathDefinitions } = useValues(mathsLogic) + + return (query, filters) => + summarizeInsight(query, filters || {}, { aggregationLabel, cohortsById, mathDefinitions }) +} diff --git a/frontend/src/scenes/instance/AsyncMigrations/SettingUpdateField.tsx b/frontend/src/scenes/instance/AsyncMigrations/SettingUpdateField.tsx index 415d832354f3e..a2a1584715f89 100644 --- a/frontend/src/scenes/instance/AsyncMigrations/SettingUpdateField.tsx +++ b/frontend/src/scenes/instance/AsyncMigrations/SettingUpdateField.tsx @@ -1,4 +1,4 @@ -import { Button, Col, Divider, Input, Row } from 'antd' +import { LemonButton, LemonDivider, LemonInput } from '@posthog/lemon-ui' import { useActions } from 'kea' import { useState } from 'react' @@ -15,21 +15,21 @@ export function SettingUpdateField({ setting }: { setting: InstanceSetting }): J

{setting.key}

{setting.description}

- - - setInputValue(e.target.value)} /> - - - - - - + +
+
+
) } diff --git a/frontend/src/scenes/instance/DeadLetterQueue/MetricsTab.tsx b/frontend/src/scenes/instance/DeadLetterQueue/MetricsTab.tsx index 430c80e85cdbf..80a951986dca4 100644 --- a/frontend/src/scenes/instance/DeadLetterQueue/MetricsTab.tsx +++ b/frontend/src/scenes/instance/DeadLetterQueue/MetricsTab.tsx @@ -1,4 +1,4 @@ -import { Button, Col, Divider, Row, Statistic } from 'antd' +import { LemonDivider } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { IconRefresh } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' @@ -36,19 +36,14 @@ export function MetricsTab(): JSX.Element {
- +
{singleValueMetrics.map((row) => ( - - - +
+
{row.metric}
+
{(row.value || '0').toLocaleString('en-US')}
+
))} - - - +
{tableMetrics.map((row) => (
@@ -72,15 +67,15 @@ export function MetricsTab(): JSX.Element { }} embedded /> -
- +
- +
))}
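The SettingUpdateField and MetricsTab hunks above continue the antd-to-Lemon migration (Button, Col, Divider, Input, Row and Statistic out; LemonButton, LemonDivider and LemonInput in). Most of the JSX tags in these hunks were lost when the diff was flattened, so what follows is only a hedged sketch of the pattern — attributes and layout classes are illustrative, not the PR's verbatim markup:

import { LemonButton, LemonDivider, LemonInput } from '@posthog/lemon-ui'
import { useState } from 'react'

// Illustrative shape of the swap: antd's <Input onChange={(e) => setInputValue(e.target.value)} />
// becomes a LemonInput, whose onChange hands over the string value directly.
export function SettingUpdateFieldSketch({
    initialValue,
    onUpdate,
}: {
    initialValue: string
    onUpdate: (value: string) => void
}): JSX.Element {
    const [inputValue, setInputValue] = useState(initialValue)
    return (
        <div className="space-y-2">
            <LemonInput value={inputValue} onChange={setInputValue} />
            <LemonButton type="primary" onClick={() => onUpdate(inputValue)}>
                Update
            </LemonButton>
            <LemonDivider />
        </div>
    )
}
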
diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx index e41e86158f806..dd9e194634abc 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx @@ -14,6 +14,7 @@ import { urls } from 'scenes/urls' import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { insightLogic } from 'scenes/insights/insightLogic' import { JSONContent } from '@tiptap/core' +import { useSummarizeInsight } from 'scenes/insights/summarizeInsight' const DEFAULT_QUERY: QuerySchema = { kind: NodeKind.DataTableNode, @@ -34,6 +35,7 @@ const Component = ({ const nodeLogic = useMountedLogic(notebookNodeLogic) const { expanded } = useValues(nodeLogic) const { setTitlePlaceholder } = useActions(nodeLogic) + const summarizeInsight = useSummarizeInsight() useEffect(() => { let title = 'Query' @@ -46,10 +48,14 @@ const Component = ({ } } if (query.kind === NodeKind.InsightVizNode) { - if (query.source.kind) { - title = query.source.kind.replace('Node', '').replace('Query', '') - } else { - title = 'Insight' + title = summarizeInsight(query) + + if (!title) { + if (query.source.kind) { + title = query.source.kind.replace('Node', '').replace('Query', '') + } else { + title = 'Insight' + } } } if (query.kind === NodeKind.SavedInsightNode) { diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx index 032eef1110452..90b9fc7d62aaa 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx @@ -44,7 +44,8 @@ const Component = (props: NodeViewProps): JSX.Element => { openNotebook(shortId, NotebookTarget.Popover) diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.tsx b/frontend/src/scenes/notebooks/Notebook/Notebook.tsx index 4b896c1cdb989..60340f4e0819c 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.tsx @@ -86,64 +86,62 @@ export function Notebook({ setContainerSize(size as 'small' | 'medium') }, [size]) - // TODO - Render a special state if the notebook is empty - - if (conflictWarningVisible) { - return - } else if (!notebook && notebookLoading) { - return - } else if (notebookMissing) { - return - } - return ( -
- {isTemplate && ( - - This is a template. You can create a copy of it to edit and use as your own. - - )} - - - {shortId === SCRATCHPAD_NOTEBOOK.short_id ? ( - - This is your scratchpad. It is only visible to you and is persisted only in this browser. It's a - great place to gather ideas before turning into a saved Notebook! - - ) : null} - -
- - - - - + {conflictWarningVisible ? ( + + ) : !notebook && notebookLoading ? ( + + ) : notebookMissing ? ( + + ) : ( +
+ {isTemplate && ( + + This is a template. You can create a copy of it to edit and use as your own. + + )} + + + {shortId === SCRATCHPAD_NOTEBOOK.short_id ? ( + + This is your scratchpad. It is only visible to you and is persisted only in this browser. + It's a great place to gather ideas before turning into a saved Notebook! + + ) : null} + +
+ + + + + +
-
+ )} ) } diff --git a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts index c76a7adb6657d..0ace283e8b22c 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts @@ -1,20 +1,8 @@ import { lemonToast } from '@posthog/lemon-ui' -import { - actions, - beforeUnmount, - BuiltLogic, - connect, - kea, - key, - listeners, - path, - props, - reducers, - selectors, - sharedListeners, -} from 'kea' +import { actions, beforeUnmount, BuiltLogic, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import { router, urlToAction } from 'kea-router' +import { subscriptions } from 'kea-subscriptions' import api from 'lib/api' import { downloadFile, slugify } from 'lib/utils' import posthog from 'posthog-js' @@ -34,6 +22,7 @@ import type { notebookLogicType } from './notebookLogicType' import { EditorRange, JSONContent, NotebookEditor } from './utils' const SYNC_DELAY = 1000 +const NOTEBOOK_REFRESH_MS = window.location.origin === 'http://localhost:8000' ? 5000 : 30000 export type NotebookLogicMode = 'notebook' | 'canvas' @@ -81,6 +70,7 @@ export const notebookLogic = kea([ setPreviewContent: (jsonContent: JSONContent) => ({ jsonContent }), clearPreviewContent: true, loadNotebook: true, + scheduleNotebookRefresh: true, saveNotebook: (notebook: Pick) => ({ notebook }), renameNotebook: (title: string) => ({ title }), setEditingNodeId: (editingNodeId: string | null) => ({ editingNodeId }), @@ -221,8 +211,14 @@ export const notebookLogic = kea([ } } else { try { - response = await api.notebooks.get(props.shortId) + response = await api.notebooks.get(props.shortId, undefined, { + 'If-None-Match': values.notebook?.version, + }) } catch (e: any) { + if (e.status === 304) { + // Indicates nothing has changed + return values.notebook + } if (e.status === 404) { return null } @@ -232,7 +228,7 @@ export const notebookLogic = kea([ const notebook = migrate(response) - if (!values.notebook && notebook.content) { + if (notebook.content && (!values.notebook || values.notebook.version !== notebook.version)) { // If this is the first load we need to override the content fully values.editor?.setContent(notebook.content) } @@ -421,15 +417,7 @@ export const notebookLogic = kea([ (shouldBeEditable, previewContent) => shouldBeEditable && !previewContent, ], }), - sharedListeners(({ values, actions }) => ({ - onNotebookChange: () => { - // Keep the list logic up to date with any changes - if (values.notebook && values.notebook.short_id !== SCRATCHPAD_NOTEBOOK.short_id) { - actions.receiveNotebookUpdate(values.notebook) - } - }, - })), - listeners(({ values, actions, sharedListeners, cache }) => ({ + listeners(({ values, actions, cache }) => ({ insertAfterLastNode: async ({ content }) => { await runWhenEditorIsReady( () => !!values.editor, @@ -560,8 +548,8 @@ export const notebookLogic = kea([ values.editor?.setContent(values.content) }, - saveNotebookSuccess: sharedListeners.onNotebookChange, - loadNotebookSuccess: sharedListeners.onNotebookChange, + saveNotebookSuccess: actions.scheduleNotebookRefresh, + loadNotebookSuccess: actions.scheduleNotebookRefresh, exportJSON: () => { const file = new File( @@ -592,6 +580,24 @@ export const notebookLogic = kea([ values.editor?.setTextSelection(selection) }) }, + + scheduleNotebookRefresh: () => { + clearTimeout(cache.refreshTimeout) + cache.refreshTimeout = setTimeout(() => { + 
actions.loadNotebook() + }, NOTEBOOK_REFRESH_MS) + }, + })), + + subscriptions(({ actions }) => ({ + notebook: (notebook?: NotebookType) => { + // Keep the list logic up to date with any changes + if (notebook && notebook.short_id !== SCRATCHPAD_NOTEBOOK.short_id) { + actions.receiveNotebookUpdate(notebook) + } + // If the notebook ever changes, we want to reset the scheduled refresh + actions.scheduleNotebookRefresh() + }, })), urlToAction(({ values, actions, cache }) => ({ @@ -606,7 +612,8 @@ export const notebookLogic = kea([ }, })), - beforeUnmount(() => { + beforeUnmount(({ cache }) => { + clearTimeout(cache.refreshTimeout) const hashParams = router.values.currentLocation.hashParams delete hashParams['🦔'] router.actions.replace( diff --git a/frontend/src/scenes/notebooks/Suggestions/ReplayTimestamp.tsx b/frontend/src/scenes/notebooks/Suggestions/ReplayTimestamp.tsx index 451dd067d7d9c..433ad93190e2b 100644 --- a/frontend/src/scenes/notebooks/Suggestions/ReplayTimestamp.tsx +++ b/frontend/src/scenes/notebooks/Suggestions/ReplayTimestamp.tsx @@ -38,7 +38,8 @@ const Component = ({ previousNode, editor }: InsertionSuggestionViewProps): JSX. insertTimestamp({ previousNode, editor })} > diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx index b43c7040f3ee2..6b8f14a78cc20 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx @@ -88,7 +88,6 @@ const meta: Meta = { const response = playlistId === '1234567' ? recordings : [] return [200, { has_next: false, results: response, version: 1 }] }, - // without the session-recording-blob-replay feature flag, we only load via ClickHouse '/api/projects/:team/session_recordings/:id/snapshots': (req, res, ctx) => { // with no sources, returns sources... if (req.url.searchParams.get('source') === 'blob') { diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx index fbb0e44330e07..f520ab947b815 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx @@ -89,7 +89,6 @@ const meta: Meta = { const response = playlistId === '1234567' ? recordings : [] return [200, { has_next: false, results: response, version: 1 }] }, - // without the session-recording-blob-replay feature flag, we only load via ClickHouse '/api/projects/:team/session_recordings/:id/snapshots': (req, res, ctx) => { // with no sources, returns sources... 
if (req.url.searchParams.get('source') === 'blob') { diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index c360a172388c6..0f6b87d473200 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -4,7 +4,9 @@ import { captureException } from '@sentry/react' import { actions, connect, defaults, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' +import { FEATURE_FLAGS } from 'lib/constants' import { Dayjs, dayjs } from 'lib/dayjs' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { toParams } from 'lib/utils' import { chainToElements } from 'lib/utils/elements-chain' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' @@ -41,9 +43,10 @@ let postHogEEModule: PostHogEE const parseEncodedSnapshots = async ( items: (EncodedRecordingSnapshot | string)[], - sessionId: string + sessionId: string, + withMobileTransformer: boolean ): Promise => { - if (!postHogEEModule) { + if (!postHogEEModule && withMobileTransformer) { postHogEEModule = await posthogEE() } return items.flatMap((l) => { @@ -167,6 +170,7 @@ export const sessionRecordingDataLogic = kea([ key(({ sessionRecordingId }) => sessionRecordingId || 'no-session-recording-id'), connect({ logic: [eventUsageLogic], + values: [featureFlagLogic, ['featureFlags']], }), defaults({ sessionPlayerMetaData: null as SessionRecordingType | null, @@ -342,7 +346,11 @@ export const sessionRecordingDataLogic = kea([ ) data.snapshots = prepareRecordingSnapshots( - await parseEncodedSnapshots(encodedResponse, props.sessionRecordingId), + await parseEncodedSnapshots( + encodedResponse, + props.sessionRecordingId, + !!values.featureFlags[FEATURE_FLAGS.SESSION_REPLAY_MOBILE] + ), values.sessionPlayerSnapshotData?.snapshots ?? [] ) } else { @@ -354,7 +362,11 @@ export const sessionRecordingDataLogic = kea([ const response = await api.recordings.listSnapshots(props.sessionRecordingId, params) if (response.snapshots) { data.snapshots = prepareRecordingSnapshots( - await parseEncodedSnapshots(response.snapshots, props.sessionRecordingId), + await parseEncodedSnapshots( + response.snapshots, + props.sessionRecordingId, + !!values.featureFlags[FEATURE_FLAGS.SESSION_REPLAY_MOBILE] + ), values.sessionPlayerSnapshotData?.snapshots ?? [] ) } diff --git a/frontend/src/styles/global.scss b/frontend/src/styles/global.scss index 4adf1069a4ee5..e72a2fc3cc1c7 100644 --- a/frontend/src/styles/global.scss +++ b/frontend/src/styles/global.scss @@ -548,6 +548,8 @@ body { &.posthog-3000 { --shadow-elevation: var(--shadow-elevation-3000); + --primary: var(--primary-3000); + --primary-highlight: var(--primary-3000-highlight); overflow: hidden; // Each area handles scrolling individually (e.g. 
navbar, scene, side panel) background: var(--bg-3000); diff --git a/frontend/src/types.ts b/frontend/src/types.ts index b7687676f72f2..71c748ba01325 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -3273,6 +3273,7 @@ export interface DataWarehouseTable { url_pattern: string credential: DataWarehouseCredential columns: DatabaseSchemaQueryResponseField[] + external_data_source?: ExternalDataStripeSource } export type DataWarehouseTableTypes = 'CSV' | 'Parquet' @@ -3307,6 +3308,8 @@ export interface ExternalDataStripeSource { connection_id: string status: string source_type: string + prefix: string + last_run_at?: Dayjs } export type BatchExportDestinationS3 = { diff --git a/latest_migrations.manifest b/latest_migrations.manifest index d4254016dde73..80c31c99f449a 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0015_add_verified_properties otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0370_cohort_query +posthog: 0371_cohort_query sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/package.json b/package.json index 48bf5eeb00972..5dda490a5e6f4 100644 --- a/package.json +++ b/package.json @@ -140,7 +140,7 @@ "monaco-editor": "^0.39.0", "papaparse": "^5.4.1", "pmtiles": "^2.11.0", - "posthog-js": "1.93.1", + "posthog-js": "1.93.6", "posthog-js-lite": "2.0.0-alpha5", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/plugin-server/src/main/graphile-worker/graphile-worker.ts b/plugin-server/src/main/graphile-worker/graphile-worker.ts index 9ba5a9724a318..041819a8fa86b 100644 --- a/plugin-server/src/main/graphile-worker/graphile-worker.ts +++ b/plugin-server/src/main/graphile-worker/graphile-worker.ts @@ -94,10 +94,9 @@ export class GraphileWorker { await instrument( this.hub.statsd, { - metricName: 'job_queues_enqueue', + metricName: `job_queues_enqueue_${jobName}`, key: instrumentationContext?.key ?? '?', tag: instrumentationContext?.tag ?? '?', - tags: { jobName, type: jobType }, data: { timestamp: job.timestamp, type: jobType, payload: jobPayload }, }, enqueueFn diff --git a/plugin-server/src/utils/metrics.ts b/plugin-server/src/utils/metrics.ts index f7196f21cc156..5341b662944eb 100644 --- a/plugin-server/src/utils/metrics.ts +++ b/plugin-server/src/utils/metrics.ts @@ -1,5 +1,5 @@ import { StatsD, Tags } from 'hot-shots' -import { Histogram } from 'prom-client' +import { Summary } from 'prom-client' import { runInSpan } from '../sentry' import { UUID } from './utils' @@ -12,13 +12,7 @@ export function instrumentQuery( tag: string | undefined, runQuery: () => Promise ): Promise { - const end = dataStoreQueryDuration - .labels({ - query: metricName, - tag: tag ?? 'null', - }) - .startTimer() - const result = instrument( + return instrument( statsd, { metricName, @@ -27,8 +21,6 @@ export function instrumentQuery( }, runQuery ) - end() - return result } export function instrument( @@ -37,12 +29,11 @@ export function instrument( metricName: string key?: string tag?: string - tags?: Tags data?: any }, runQuery: () => Promise ): Promise { - const tags: Tags | undefined = options.key ? { ...options.tags, [options.key]: options.tag! } : options.tags + const tags: Tags = options.key ? { [options.key]: options.tag! 
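// The Summary declared at the bottom of this file replaces the old Histogram: rather than
// fixed buckets it reports the configured percentiles per (metricName, key, tag) label set.
// A self-contained sketch of the same prom-client pattern (names here are illustrative):
//
//   import { Summary } from 'prom-client'
//
//   const durationMs = new Summary({
//       name: 'fn_duration_ms',
//       help: 'Duration of wrapped functions',
//       labelNames: ['fn'],
//       percentiles: [0.5, 0.9, 0.95, 0.99],
//   })
//
//   async function timed<T>(fn: string, run: () => Promise<T>): Promise<T> {
//       const start = Date.now()
//       try {
//           return await run()
//       } finally {
//           durationMs.labels(fn).observe(Date.now() - start) // record elapsed ms
//       }
//   }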
} : {} return runInSpan( { op: options.metricName, @@ -56,6 +47,9 @@ export function instrument( return await runQuery() } finally { statsd?.timing(options.metricName, timer, tags) + instrumentedFnSummary + .labels(options.metricName, String(options.key ?? 'null'), String(options.tag ?? 'null')) + .observe(Date.now() - timer.getTime()) } } ) @@ -76,8 +70,9 @@ export function captureEventLoopMetrics(statsd: StatsD, instanceId: UUID): StopC } } -export const dataStoreQueryDuration = new Histogram({ - name: 'data_store_query_duration', - help: 'Query latency to data stores, per query and tag', - labelNames: ['query', 'tag'], +const instrumentedFnSummary = new Summary({ + name: 'instrumented_fn_duration_ms', + help: 'Duration of instrumented functions', + labelNames: ['metricName', 'key', 'tag'], + percentiles: [0.5, 0.9, 0.95, 0.99], }) diff --git a/plugin-server/src/utils/retries.ts b/plugin-server/src/utils/retries.ts index 59ee55552f6b2..fe7fe22f932a4 100644 --- a/plugin-server/src/utils/retries.ts +++ b/plugin-server/src/utils/retries.ts @@ -1,5 +1,4 @@ import { RetryError } from '@posthog/plugin-scaffold' -import { Counter } from 'prom-client' import { runInTransaction } from '../sentry' import { Hub } from '../types' @@ -63,12 +62,6 @@ export type RetriableFunctionPayload = RetriableFunctionDefinition & Partial & MetricsDefinition & { hub: Hub } -const retryableFnCounter = new Counter({ - name: 'retryable_fn_status', - help: 'Number of times a retriable function status changed', - labelNames: ['status', 'name'], -}) - function iterateRetryLoop(retriableFunctionPayload: RetriableFunctionPayload, attempt = 1): Promise { const { metricName, @@ -112,8 +105,6 @@ function iterateRetryLoop(retriableFunctionPayload: RetriableFunctionPayload, at } if (error instanceof RetryError && attempt < maxAttempts) { const nextRetryMs = getNextRetryMs(retryBaseMs, retryMultiplier, attempt) - hub.statsd?.increment(`${metricName}.RETRY`) - retryableFnCounter.labels({ name: metricName, status: 'retry' }).inc() nextIterationPromise = new Promise((resolve, reject) => setTimeout(() => { // This is not awaited directly so that attempts beyond the first one don't stall the payload queue @@ -126,8 +117,6 @@ function iterateRetryLoop(retriableFunctionPayload: RetriableFunctionPayload, at await hub.promiseManager.awaitPromisesIfNeeded() } else { await catchFn?.(error) - hub.statsd?.increment(`${metricName}.ERROR`) - retryableFnCounter.labels({ name: metricName, status: 'error' }).inc() if (appMetric) { await hub.appMetrics.queueError( { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5ecf0f3ef8100..c197b0fc76021 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,4 +1,4 @@ -lockfileVersion: '6.1' +lockfileVersion: '6.0' settings: autoInstallPeers: true @@ -222,8 +222,8 @@ dependencies: specifier: ^2.11.0 version: 2.11.0 posthog-js: - specifier: 1.93.1 - version: 1.93.1 + specifier: 1.93.6 + version: 1.93.6 posthog-js-lite: specifier: 2.0.0-alpha5 version: 2.0.0-alpha5 @@ -324,7 +324,7 @@ dependencies: optionalDependencies: fsevents: specifier: ^2.3.2 - version: 2.3.2 + version: 2.3.3 devDependencies: '@babel/core': @@ -644,7 +644,7 @@ devDependencies: version: 7.5.1 storybook-addon-pseudo-states: specifier: 2.1.2 - version: 2.1.2(@storybook/components@7.5.1)(@storybook/core-events@7.5.1)(@storybook/manager-api@7.5.3)(@storybook/preview-api@7.5.3)(@storybook/theming@7.5.1)(react-dom@18.2.0)(react@18.2.0) + version: 
2.1.2(@storybook/components@7.5.1)(@storybook/core-events@7.5.1)(@storybook/manager-api@7.6.3)(@storybook/preview-api@7.6.3)(@storybook/theming@7.5.1)(react-dom@18.2.0)(react@18.2.0) style-loader: specifier: ^2.0.0 version: 2.0.0(webpack@5.88.2) @@ -1023,6 +1023,14 @@ packages: dependencies: '@babel/types': 7.23.4 + /@babel/parser@7.23.5: + resolution: {integrity: sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.23.5 + dev: true + /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.22.5(@babel/core@7.22.10): resolution: {integrity: sha512-NP1M5Rf+u2Gw9qfSO4ihjcTGW5zXTi36ITLd4/EoAcEhIZ0yjMqmftDNl3QC19CX7olhrjpyU454g/2W7X0jvQ==} engines: {node: '>=6.9.0'} @@ -2145,6 +2153,15 @@ packages: '@babel/helper-validator-identifier': 7.22.20 to-fast-properties: 2.0.0 + /@babel/types@7.23.5: + resolution: {integrity: sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.23.4 + '@babel/helper-validator-identifier': 7.22.20 + to-fast-properties: 2.0.0 + dev: true + /@base2/pretty-print-object@1.0.1: resolution: {integrity: sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA==} dev: true @@ -4728,6 +4745,17 @@ packages: tiny-invariant: 1.3.1 dev: true + /@storybook/channels@7.6.3: + resolution: {integrity: sha512-o9J0TBbFon16tUlU5V6kJgzAlsloJcS1cTHWqh3VWczohbRm+X1PLNUihJ7Q8kBWXAuuJkgBu7RQH7Ib46WyYg==} + dependencies: + '@storybook/client-logger': 7.6.3 + '@storybook/core-events': 7.6.3 + '@storybook/global': 5.0.0 + qs: 6.11.2 + telejson: 7.2.0 + tiny-invariant: 1.3.1 + dev: true + /@storybook/cli@7.5.1: resolution: {integrity: sha512-qKIJs8gqXTy0eSEbt0OW5nsJqiV/2+N1eWoiBiIxoZ+8b0ACXIAUcE/N6AsEDUqIq8AMK7lebqjEfIAt2Sp7Mg==} hasBin: true @@ -4792,6 +4820,12 @@ packages: '@storybook/global': 5.0.0 dev: true + /@storybook/client-logger@7.6.3: + resolution: {integrity: sha512-BpsCnefrBFdxD6ukMjAblm1D6zB4U5HR1I85VWw6LOqZrfzA6l/1uBxItz0XG96HTjngbvAabWf5k7ZFCx5UCg==} + dependencies: + '@storybook/global': 5.0.0 + dev: true + /@storybook/codemod@7.5.1: resolution: {integrity: sha512-PqHGOz/CZnRG9pWgshezCacu524CrXOJrCOwMUP9OMpH0Jk/NhBkHaBZrB8wMjn5hekTj0UmRa/EN8wJm9CCUQ==} dependencies: @@ -4886,6 +4920,12 @@ packages: ts-dedent: 2.2.0 dev: true + /@storybook/core-events@7.6.3: + resolution: {integrity: sha512-Vu3JX1mjtR8AX84lyqWsi2s2lhD997jKRWVznI3wx+UpTk8t7TTMLFk2rGYJRjaornhrqwvLYpnmtxRSxW9BOQ==} + dependencies: + ts-dedent: 2.2.0 + dev: true + /@storybook/core-server@7.5.1: resolution: {integrity: sha512-DD4BXCH91aZJoFuu0cQwG1ZUmE59kG5pazuE3S89zH1GwKS1jWyeAv4EwEfvynT5Ah1ctd8QdCZCSXVzjq0qcw==} dependencies: @@ -4987,6 +5027,12 @@ packages: type-fest: 2.19.0 dev: true + /@storybook/csf@0.1.2: + resolution: {integrity: sha512-ePrvE/pS1vsKR9Xr+o+YwdqNgHUyXvg+1Xjx0h9LrVx7Zq4zNe06pd63F5EvzTbCbJsHj7GHr9tkiaqm7U8WRA==} + dependencies: + type-fest: 2.19.0 + dev: true + /@storybook/docs-mdx@0.1.0: resolution: {integrity: sha512-JDaBR9lwVY4eSH5W8EGHrhODjygPd6QImRbwjAuJNEnY0Vw4ie3bPkeGfnacB3OBW6u/agqPv2aRlR46JcAQLg==} dev: true @@ -5034,29 +5080,27 @@ packages: ts-dedent: 2.2.0 dev: true - /@storybook/manager-api@7.5.3(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-d8mVLr/5BEG4bAS2ZeqYTy/aX4jPEpZHdcLaWoB4mAM+PAL9wcWsirUyApKtDVYLITJf/hd8bb2Dm2ok6E45gA==} - peerDependencies: - react: ^16.8.0 
|| ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + /@storybook/manager-api@7.6.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-soDH7GZuukkhYRGzlw4jhCm5EzjfkuIAtb37/DFplqxuVbvlyJEVzkMUM2KQO7kq0/8GlWPiZ5mn56wagYyhKQ==} dependencies: - '@storybook/channels': 7.5.3 - '@storybook/client-logger': 7.5.3 - '@storybook/core-events': 7.5.3 - '@storybook/csf': 0.1.1 + '@storybook/channels': 7.6.3 + '@storybook/client-logger': 7.6.3 + '@storybook/core-events': 7.6.3 + '@storybook/csf': 0.1.2 '@storybook/global': 5.0.0 - '@storybook/router': 7.5.3(react-dom@18.2.0)(react@18.2.0) - '@storybook/theming': 7.5.3(react-dom@18.2.0)(react@18.2.0) - '@storybook/types': 7.5.3 + '@storybook/router': 7.6.3 + '@storybook/theming': 7.6.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/types': 7.6.3 dequal: 2.0.3 lodash: 4.17.21 memoizerific: 1.11.3 - react: 18.2.0 - react-dom: 18.2.0(react@18.2.0) semver: 7.5.4 store2: 2.14.2 telejson: 7.2.0 ts-dedent: 2.2.0 + transitivePeerDependencies: + - react + - react-dom dev: true /@storybook/manager@7.5.1: @@ -5162,6 +5206,25 @@ packages: util-deprecate: 1.0.2 dev: true + /@storybook/preview-api@7.6.3: + resolution: {integrity: sha512-uPaK7yLE1P++F+IOb/1j9pgdCwfMYZrUPHogF/Mf9r4cfEjDCcIeKgGMcsbU1KnkzNQQGPh8JRzRr/iYnLjswg==} + dependencies: + '@storybook/channels': 7.6.3 + '@storybook/client-logger': 7.6.3 + '@storybook/core-events': 7.6.3 + '@storybook/csf': 0.1.2 + '@storybook/global': 5.0.0 + '@storybook/types': 7.6.3 + '@types/qs': 6.9.10 + dequal: 2.0.3 + lodash: 4.17.21 + memoizerific: 1.11.3 + qs: 6.11.2 + synchronous-promise: 2.0.17 + ts-dedent: 2.2.0 + util-deprecate: 1.0.2 + dev: true + /@storybook/preview@7.5.1: resolution: {integrity: sha512-nfZC103z9Cy27FrJKUr2IjDuVt8Mvn1Z5gZ0TtJihoK7sfLTv29nd/XU9zzrb/epM3o8UEzc63xZZsMaToDbAw==} dev: true @@ -5286,17 +5349,12 @@ packages: react-dom: 18.2.0(react@18.2.0) dev: true - /@storybook/router@7.5.3(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-/iNYCFore7R5n6eFHbBYoB0P2/sybTVpA+uXTNUd3UEt7Ro6CEslTaFTEiH2RVQwOkceBp/NpyWon74xZuXhMg==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + /@storybook/router@7.6.3: + resolution: {integrity: sha512-NZfhJqsXYca9mZCL/LGx6FmZDbrxX2S4ImW7Tqdtcc/sSlZ0BpCDkNUTesCA287cmoKMhXZRh/+bU+C2h2a+bw==} dependencies: - '@storybook/client-logger': 7.5.3 + '@storybook/client-logger': 7.6.3 memoizerific: 1.11.3 qs: 6.11.2 - react: 18.2.0 - react-dom: 18.2.0(react@18.2.0) dev: true /@storybook/source-loader@7.5.1(react-dom@18.2.0)(react@18.2.0): @@ -5387,14 +5445,14 @@ packages: react-dom: 18.2.0(react@18.2.0) dev: true - /@storybook/theming@7.5.3(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-Cjmthe1MAk0z4RKCZ7m72gAD8YD0zTAH97z5ryM1Qv84QXjiCQ143fGOmYz1xEQdNFpOThPcwW6FEccLHTkVcg==} + /@storybook/theming@7.6.3(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-9ToNU2LM6a2kVBjOXitXEeEOuMurVLhn+uaZO1dJjv8NGnJVYiLwNPwrLsImiUD8/XXNuil972aanBR6+Aj9jw==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: '@emotion/use-insertion-effect-with-fallbacks': 1.0.1(react@18.2.0) - '@storybook/client-logger': 7.5.3 + '@storybook/client-logger': 7.6.3 '@storybook/global': 5.0.0 memoizerific: 1.11.3 react: 18.2.0 @@ -5419,6 +5477,15 @@ packages: file-system-cache: 2.3.0 dev: true + /@storybook/types@7.6.3: + resolution: {integrity: 
sha512-vj9Jzg5eR52l8O9512QywbQpNdo67Z6BQWR8QoZRcG+/Bhzt08YI8IZMPQLFMKzcmWDPK0blQ4GfyKDYplMjPA==} + dependencies: + '@storybook/channels': 7.6.3 + '@types/babel__core': 7.20.5 + '@types/express': 4.17.21 + file-system-cache: 2.3.0 + dev: true + /@sucrase/jest-plugin@3.0.0(jest@29.7.0)(sucrase@3.29.0): resolution: {integrity: sha512-VRY6YKYImVWiRg1H3Yu24hwB1UPJDSDR62R/n+lOHR3+yDrfHEIAoddJivblMYN6U3vD+ndfTSrecZ9Jl+iGNw==} peerDependencies: @@ -5969,6 +6036,16 @@ packages: '@types/babel__traverse': 7.20.4 dev: true + /@types/babel__core@7.20.5: + resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} + dependencies: + '@babel/parser': 7.23.5 + '@babel/types': 7.23.5 + '@types/babel__generator': 7.6.7 + '@types/babel__template': 7.4.4 + '@types/babel__traverse': 7.20.4 + dev: true + /@types/babel__generator@7.6.6: resolution: {integrity: sha512-66BXMKb/sUWbMdBNdMvajU7i/44RkrA3z/Yt1c7R5xejt8qh84iU54yUWCtm0QwGJlDcf/gg4zd/x4mpLAlb/w==} dependencies: @@ -11210,6 +11287,7 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] requiresBuild: true + dev: true optional: true /fsevents@2.3.3: @@ -15510,8 +15588,8 @@ packages: resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==} dev: false - /posthog-js@1.93.1: - resolution: {integrity: sha512-tbzxNN86zqC/D/HEMi4dJgW4GmiUHmUbBJ+zHFKABXxeS53SVzuJKkQxeNL9GdfpS9304i2D7ALsXoJ8pszAvw==} + /posthog-js@1.93.6: + resolution: {integrity: sha512-Jou2awqQH5PDtDe9hZxpe9PO4D0uUOU7eA1lGni4pBxJckeA94P7WF3ieQj355xY96ZcXVQmGJykypO/FyBvpQ==} dependencies: fflate: 0.4.8 dev: false @@ -17713,7 +17791,7 @@ packages: resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==} dev: true - /storybook-addon-pseudo-states@2.1.2(@storybook/components@7.5.1)(@storybook/core-events@7.5.1)(@storybook/manager-api@7.5.3)(@storybook/preview-api@7.5.3)(@storybook/theming@7.5.1)(react-dom@18.2.0)(react@18.2.0): + /storybook-addon-pseudo-states@2.1.2(@storybook/components@7.5.1)(@storybook/core-events@7.5.1)(@storybook/manager-api@7.6.3)(@storybook/preview-api@7.6.3)(@storybook/theming@7.5.1)(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-AHv6q1JiQEUnMyZE3729iV6cNmBW7bueeytc4Lga4+8W1En8YNea5VjqAdrDNJhXVU0QEEIGtxkD3EoC9aVWLw==} peerDependencies: '@storybook/components': ^7.4.6 @@ -17731,8 +17809,8 @@ packages: dependencies: '@storybook/components': 7.5.1(@types/react-dom@18.2.14)(@types/react@17.0.52)(react-dom@18.2.0)(react@18.2.0) '@storybook/core-events': 7.5.1 - '@storybook/manager-api': 7.5.3(react-dom@18.2.0)(react@18.2.0) - '@storybook/preview-api': 7.5.3 + '@storybook/manager-api': 7.6.3(react-dom@18.2.0)(react@18.2.0) + '@storybook/preview-api': 7.6.3 '@storybook/theming': 7.5.1(react-dom@18.2.0)(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py index 4781523e059f2..dd122a8c0dd7c 100644 --- a/posthog/api/notebook.py +++ b/posthog/api/notebook.py @@ -12,7 +12,9 @@ extend_schema_view, OpenApiExample, ) -from rest_framework import request, serializers, viewsets +from rest_framework import serializers, viewsets +from rest_framework.request import Request +from rest_framework.response import Response from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated @@ -253,7 +255,7 @@ def get_queryset(self) -> QuerySet: return queryset - def 
_filter_request(self, request: request.Request, queryset: QuerySet) -> QuerySet: + def _filter_request(self, request: Request, queryset: QuerySet) -> QuerySet: filters = request.GET.dict() for key in filters: @@ -329,8 +331,17 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query return queryset + def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Response: + instance = self.get_object() + serializer = self.get_serializer(instance) + + if str(request.headers.get("If-None-Match")) == str(instance.version): + return Response(None, 304) + + return Response(serializer.data) + @action(methods=["GET"], url_path="activity", detail=False) - def all_activity(self, request: request.Request, **kwargs): + def all_activity(self, request: Request, **kwargs): limit = int(request.query_params.get("limit", "10")) page = int(request.query_params.get("page", "1")) @@ -338,7 +349,7 @@ def all_activity(self, request: request.Request, **kwargs): return activity_page_response(activity_page, limit, page, request) @action(methods=["GET"], url_path="activity", detail=True) - def activity(self, request: request.Request, **kwargs): + def activity(self, request: Request, **kwargs): notebook = self.get_object() limit = int(request.query_params.get("limit", "10")) page = int(request.query_params.get("page", "1")) diff --git a/posthog/api/services/query.py b/posthog/api/services/query.py index 1ef831bde1b82..48339aa38bad9 100644 --- a/posthog/api/services/query.py +++ b/posthog/api/services/query.py @@ -5,6 +5,7 @@ from rest_framework.exceptions import ValidationError from posthog.clickhouse.query_tagging import tag_queries +from posthog.hogql.constants import LimitContext from posthog.hogql.database.database import create_hogql_database, serialize_database from posthog.hogql.metadata import get_hogql_metadata from posthog.hogql.modifiers import create_default_modifiers_for_team @@ -54,7 +55,7 @@ def _unwrap_pydantic_dict(response: Any) -> Dict: def process_query( team: Team, query_json: Dict, - in_export_context: Optional[bool] = False, + limit_context: Optional[LimitContext] = None, refresh_requested: Optional[bool] = False, ) -> Dict: # query_json has been parsed by QuerySchemaParser @@ -63,10 +64,10 @@ def process_query( tag_queries(query=query_json) if query_kind in QUERY_WITH_RUNNER: - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + query_runner = get_query_runner(query_json, team, limit_context=limit_context) return _unwrap_pydantic_dict(query_runner.run(refresh_requested=refresh_requested)) elif query_kind in QUERY_WITH_RUNNER_NO_CACHE: - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + query_runner = get_query_runner(query_json, team, limit_context=limit_context) return _unwrap_pydantic_dict(query_runner.calculate()) elif query_kind == "HogQLMetadata": metadata_query = HogQLMetadata.model_validate(query_json) diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py index a82b8aef4062d..0325765001be0 100644 --- a/posthog/api/test/notebooks/test_notebook.py +++ b/posthog/api/test/notebooks/test_notebook.py @@ -229,3 +229,17 @@ def test_patching_does_not_leak_between_teams(self) -> None: data={"content": {"something": "here"}}, ) assert response.status_code == status.HTTP_403_FORBIDDEN + + def test_responds_not_modified_if_versions_match(self) -> None: + response = self.client.post( + f"/api/projects/{self.team.id}/notebooks", + data={"content": 
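        # The retrieve() override above is a lightweight conditional GET: the client echoes the
        # notebook's integer version in If-None-Match, and a match yields an empty 304 instead of
        # a full payload. A sketch of the exchange (the header carries the version, not a real ETag):
        #
        #   GET /api/projects/1/notebooks/abc123    If-None-Match: 3
        #       -> 304 Not Modified                 when instance.version == 3
        #       -> 200 + serialized notebook        otherwise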
{}, "text_content": ""}, + ) + assert response.status_code == status.HTTP_201_CREATED + + response = self.client.get( + f"/api/projects/{self.team.id}/notebooks/{response.json()['short_id']}", + HTTP_IF_NONE_MATCH=response.json()["version"], + ) + + assert response.status_code == status.HTTP_304_NOT_MODIFIED diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index ff03704605014..d538e5a241cdf 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -6,6 +6,7 @@ from rest_framework import status from posthog.api.services.query import process_query +from posthog.hogql.query import LimitContext from posthog.models.property_definition import PropertyDefinition, PropertyType from posthog.models.utils import UUIDT from posthog.schema import ( @@ -611,7 +612,7 @@ def test_full_hogql_query_limit_exported(self, MAX_SELECT_RETURNED_ROWS=15, DEFA "kind": "HogQLQuery", "query": f"select event from events where distinct_id='{random_uuid}'", }, - in_export_context=True, # This is the only difference + limit_context=LimitContext.EXPORT, # This is the only difference ) self.assertEqual(len(response.get("results", [])), 15) @@ -663,7 +664,7 @@ def test_full_events_query_limit_exported(self, MAX_SELECT_RETURNED_ROWS=15, DEF "select": ["event"], "where": [f"distinct_id = '{random_uuid}'"], }, - in_export_context=True, + limit_context=LimitContext.EXPORT, ) self.assertEqual(len(response.get("results", [])), 15) diff --git a/posthog/celery.py b/posthog/celery.py index 356a77c385f5b..d1804524760ac 100644 --- a/posthog/celery.py +++ b/posthog/celery.py @@ -150,12 +150,12 @@ def setup_periodic_tasks(sender: Celery, **kwargs): # Send all instance usage to the Billing service # Sends later on Sunday due to clickhouse things that happen on Sunday at ~00:00 UTC sender.add_periodic_task( - crontab(hour="2", minute="5", day_of_week="sun"), + crontab(hour="2", minute="15", day_of_week="mon"), send_org_usage_reports.s(), name="send instance usage report", ) sender.add_periodic_task( - crontab(hour="0", minute="5", day_of_week="mon,tue,wed,thu,fri,sat"), + crontab(hour="0", minute="15", day_of_week="tue,wed,thu,fri,sat,sun"), send_org_usage_reports.s(), name="send instance usage report", ) @@ -402,7 +402,7 @@ def redis_heartbeat(): @app.task(ignore_result=True, bind=True) -def process_query_task(self, team_id, query_id, query_json, in_export_context=False, refresh_requested=False): +def process_query_task(self, team_id, query_id, query_json, limit_context=None, refresh_requested=False): """ Kick off query Once complete save results to redis @@ -413,7 +413,7 @@ def process_query_task(self, team_id, query_id, query_json, in_export_context=Fa team_id=team_id, query_id=query_id, query_json=query_json, - in_export_context=in_export_context, + limit_context=limit_context, refresh_requested=refresh_requested, ) diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 211c685a068b1..9be449596fdf0 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -9,6 +9,7 @@ from posthog import celery, redis from posthog.celery import process_query_task from posthog.clickhouse.query_tagging import tag_queries +from posthog.hogql.constants import LimitContext from posthog.schema import QueryStatus logger = structlog.get_logger(__name__) @@ -69,7 +70,7 @@ def execute_process_query( team_id, query_id, query_json, - in_export_context, + limit_context, refresh_requested, ): manager = 
QueryStatusManager(query_id, team_id) @@ -90,7 +91,7 @@ def execute_process_query( try: tag_queries(client_query_id=query_id, team_id=team_id) results = process_query( - team=team, query_json=query_json, in_export_context=in_export_context, refresh_requested=refresh_requested + team=team, query_json=query_json, limit_context=limit_context, refresh_requested=refresh_requested ) logger.info("Got results for team %s query %s", team_id, query_id) query_status.complete = True @@ -135,10 +136,12 @@ def enqueue_process_query_task( if bypass_celery: # Call directly ( for testing ) - process_query_task(team_id, query_id, query_json, in_export_context=True, refresh_requested=refresh_requested) + process_query_task( + team_id, query_id, query_json, limit_context=LimitContext.EXPORT, refresh_requested=refresh_requested + ) else: task = process_query_task.delay( - team_id, query_id, query_json, in_export_context=True, refresh_requested=refresh_requested + team_id, query_id, query_json, limit_context=LimitContext.EXPORT, refresh_requested=refresh_requested ) query_status.task_id = task.id manager.store_query_status(query_status) diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 0a2806ca99878..8ea5670321267 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -1,4 +1,5 @@ from datetime import date, datetime +from enum import Enum from typing import Optional, Literal, TypeAlias, Tuple, List from uuid import UUID from pydantic import ConfigDict, BaseModel @@ -32,6 +33,11 @@ MAX_SELECT_RETURNED_ROWS = 10000 # sync with CSV_EXPORT_LIMIT +class LimitContext(Enum): + QUERY = "query" + EXPORT = "export" + + # Settings applied at the SELECT level class HogQLQuerySettings(BaseModel): model_config = ConfigDict(extra="forbid") diff --git a/posthog/hogql/modifiers.py b/posthog/hogql/modifiers.py index 8884f197afcf6..fd49ba2bc270c 100644 --- a/posthog/hogql/modifiers.py +++ b/posthog/hogql/modifiers.py @@ -1,12 +1,14 @@ -from typing import Optional +from typing import Optional, TYPE_CHECKING -from posthog.models import Team from posthog.schema import HogQLQueryModifiers, MaterializationMode from posthog.utils import PersonOnEventsMode +if TYPE_CHECKING: + from posthog.models import Team + def create_default_modifiers_for_team( - team: Team, modifiers: Optional[HogQLQueryModifiers] = None + team: "Team", modifiers: Optional[HogQLQueryModifiers] = None ) -> HogQLQueryModifiers: if modifiers is None: modifiers = HogQLQueryModifiers() diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py index 751b9fb46b860..8ca5f5b582ab1 100644 --- a/posthog/hogql/query.py +++ b/posthog/hogql/query.py @@ -3,7 +3,7 @@ from posthog.clickhouse.client.connection import Workload from posthog.errors import ExposedCHQueryError from posthog.hogql import ast -from posthog.hogql.constants import HogQLGlobalSettings +from posthog.hogql.constants import HogQLGlobalSettings, LimitContext from posthog.hogql.errors import HogQLException from posthog.hogql.hogql import HogQLContext from posthog.hogql.modifiers import create_default_modifiers_for_team @@ -34,7 +34,7 @@ def execute_hogql_query( workload: Workload = Workload.ONLINE, settings: Optional[HogQLGlobalSettings] = None, modifiers: Optional[HogQLQueryModifiers] = None, - in_export_context: Optional[bool] = False, + limit_context: Optional[LimitContext] = None, timings: Optional[HogQLTimings] = None, explain: Optional[bool] = False, ) -> HogQLQueryResponse: @@ -80,7 +80,7 @@ def execute_hogql_query( if one_query.limit is None: # One more "max" 
of MAX_SELECT_RETURNED_ROWS (10k) is applied in the query printer. one_query.limit = ast.Constant( - value=MAX_SELECT_RETURNED_ROWS if in_export_context else DEFAULT_RETURNED_ROWS + value=MAX_SELECT_RETURNED_ROWS if limit_context == LimitContext.EXPORT else DEFAULT_RETURNED_ROWS ) # Get printed HogQL query, and returned columns. Using a cloned query. @@ -122,7 +122,7 @@ def execute_hogql_query( ) settings = settings or HogQLGlobalSettings() - if in_export_context: + if limit_context == LimitContext.EXPORT: settings.max_execution_time = EXPORT_CONTEXT_MAX_EXECUTION_TIME # Print the ClickHouse SQL query diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py index e7ec26a441ded..f9ee10c648f25 100644 --- a/posthog/hogql_queries/events_query_runner.py +++ b/posthog/hogql_queries/events_query_runner.py @@ -12,7 +12,7 @@ from posthog.hogql import ast from posthog.hogql.parser import parse_expr, parse_order_expr from posthog.hogql.property import action_to_expr, has_aggregation, property_to_expr -from posthog.hogql.query import execute_hogql_query +from posthog.hogql.query import execute_hogql_query, LimitContext from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.query_runner import QueryRunner from posthog.models import Action, Person @@ -187,7 +187,7 @@ def calculate(self) -> EventsQueryResponse: query_type="EventsQuery", timings=self.timings, modifiers=self.modifiers, - in_export_context=self.in_export_context, + limit_context=self.limit_context, ) # Convert star field from tuple to dict in each result @@ -265,7 +265,7 @@ def limit(self) -> int: return ( min( MAX_SELECT_RETURNED_ROWS, - (MAX_SELECT_RETURNED_ROWS if self.in_export_context else DEFAULT_RETURNED_ROWS) + (MAX_SELECT_RETURNED_ROWS if self.limit_context == LimitContext.EXPORT else DEFAULT_RETURNED_ROWS) if self.query.limit is None else self.query.limit, ) diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py index a79e875d14a73..1a6bcc89c730c 100644 --- a/posthog/hogql_queries/hogql_query_runner.py +++ b/posthog/hogql_queries/hogql_query_runner.py @@ -49,7 +49,7 @@ def calculate(self) -> HogQLQueryResponse: team=self.team, workload=Workload.ONLINE, timings=self.timings, - in_export_context=self.in_export_context, + limit_context=self.limit_context, explain=bool(self.query.explain), ) diff --git a/posthog/hogql_queries/insights/insight_persons_query_runner.py b/posthog/hogql_queries/insights/insight_persons_query_runner.py index 51cf792346992..e0681bc5af08a 100644 --- a/posthog/hogql_queries/insights/insight_persons_query_runner.py +++ b/posthog/hogql_queries/insights/insight_persons_query_runner.py @@ -16,7 +16,7 @@ class InsightPersonsQueryRunner(QueryRunner): @cached_property def source_runner(self) -> QueryRunner: - return get_query_runner(self.query.source, self.team, self.timings, self.in_export_context) + return get_query_runner(self.query.source, self.team, self.timings, self.limit_context) def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: if isinstance(self.source_runner, LifecycleQueryRunner): diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index 76f204c8a310f..eb450338a446a 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -51,9 +51,9 @@ def __init__( team: Team, timings: Optional[HogQLTimings] = None, modifiers:
Optional[HogQLQueryModifiers] = None, - in_export_context: Optional[bool] = None, + limit_context: Optional[bool] = None, ): - super().__init__(query, team=team, timings=timings, modifiers=modifiers, in_export_context=in_export_context) + super().__init__(query, team=team, timings=timings, modifiers=modifiers, limit_context=limit_context) self.series = self.setup_series() def _is_stale(self, cached_result_package): diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 1f2b0d43ad743..ed08a9fcbb34e 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -9,6 +9,7 @@ from posthog.clickhouse.query_tagging import tag_queries from posthog.hogql import ast +from posthog.hogql.constants import LimitContext from posthog.hogql.context import HogQLContext from posthog.hogql.printer import print_ast from posthog.hogql.query import create_default_modifiers_for_team @@ -88,7 +89,7 @@ def get_query_runner( query: Dict[str, Any] | RunnableQueryNode | BaseModel, team: Team, timings: Optional[HogQLTimings] = None, - in_export_context: Optional[bool] = False, + limit_context: Optional[LimitContext] = None, modifiers: Optional[HogQLQueryModifiers] = None, ) -> "QueryRunner": kind = None @@ -106,7 +107,7 @@ def get_query_runner( query=cast(LifecycleQuery | Dict[str, Any], query), team=team, timings=timings, - in_export_context=in_export_context, + limit_context=limit_context, modifiers=modifiers, ) if kind == "TrendsQuery": @@ -116,7 +117,7 @@ def get_query_runner( query=cast(TrendsQuery | Dict[str, Any], query), team=team, timings=timings, - in_export_context=in_export_context, + limit_context=limit_context, modifiers=modifiers, ) if kind == "EventsQuery": @@ -126,7 +127,7 @@ def get_query_runner( query=cast(EventsQuery | Dict[str, Any], query), team=team, timings=timings, - in_export_context=in_export_context, + limit_context=limit_context, modifiers=modifiers, ) if kind == "PersonsQuery": @@ -136,7 +137,7 @@ def get_query_runner( query=cast(PersonsQuery | Dict[str, Any], query), team=team, timings=timings, - in_export_context=in_export_context, + limit_context=limit_context, modifiers=modifiers, ) if kind == "InsightPersonsQuery": @@ -146,7 +147,7 @@ def get_query_runner( query=cast(InsightPersonsQuery | Dict[str, Any], query), team=team, timings=timings, - in_export_context=in_export_context, + limit_context=limit_context, modifiers=modifiers, ) if kind == "HogQLQuery": @@ -156,7 +157,7 @@ def get_query_runner( query=cast(HogQLQuery | Dict[str, Any], query), team=team, timings=timings, - in_export_context=in_export_context, + limit_context=limit_context, modifiers=modifiers, ) if kind == "SessionsTimelineQuery": @@ -190,7 +191,7 @@ class QueryRunner(ABC): team: Team timings: HogQLTimings modifiers: HogQLQueryModifiers - in_export_context: bool + limit_context: LimitContext def __init__( self, @@ -198,11 +199,11 @@ def __init__( team: Team, timings: Optional[HogQLTimings] = None, modifiers: Optional[HogQLQueryModifiers] = None, - in_export_context: Optional[bool] = False, + limit_context: Optional[LimitContext] = None, ): self.team = team self.timings = timings or HogQLTimings() - self.in_export_context = in_export_context or False + self.limit_context = limit_context or LimitContext.QUERY self.modifiers = create_default_modifiers_for_team(team, modifiers) if isinstance(query, self.query_type): self.query = query # type: ignore @@ -216,7 +217,7 @@ def calculate(self) -> BaseModel: raise NotImplementedError() def run(self, 
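        # LimitContext also feeds the cache key below, so exported results never collide with
        # interactive ones. A sketch of how a caller opts into export limits (QUERY is the default):
        #
        #   runner = get_query_runner(query_json, team, limit_context=LimitContext.EXPORT)
        #   response = runner.run()  # cache key gains an "_export" suffix, limit rises to 10k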
refresh_requested: Optional[bool] = None) -> CachedQueryResponse: - cache_key = self._cache_key() + ("_export" if self.in_export_context else "") + cache_key = self._cache_key() + ("_export" if self.limit_context == LimitContext.EXPORT else "") tag_queries(cache_key=cache_key) if not refresh_requested: diff --git a/posthog/migrations/0370_externaldatajob_workflow_id.py b/posthog/migrations/0370_externaldatajob_workflow_id.py new file mode 100644 index 0000000000000..e81ce145b4f6a --- /dev/null +++ b/posthog/migrations/0370_externaldatajob_workflow_id.py @@ -0,0 +1,17 @@ +# Generated by Django 3.2.19 on 2023-12-04 02:18 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0369_user_theme_mode"), + ] + + operations = [ + migrations.AddField( + model_name="externaldatajob", + name="workflow_id", + field=models.CharField(blank=True, max_length=400, null=True), + ), + ] diff --git a/posthog/migrations/0370_cohort_query.py b/posthog/migrations/0371_cohort_query.py similarity index 85% rename from posthog/migrations/0370_cohort_query.py rename to posthog/migrations/0371_cohort_query.py index 0105655e4365a..3beba9ce4da46 100644 --- a/posthog/migrations/0370_cohort_query.py +++ b/posthog/migrations/0371_cohort_query.py @@ -5,7 +5,7 @@ class Migration(migrations.Migration): dependencies = [ - ("posthog", "0369_user_theme_mode"), + ("posthog", "0370_externaldatajob_workflow_id"), ] operations = [ diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py index 8f6fffd0c9f90..c0dc99ff436fc 100644 --- a/posthog/tasks/exports/csv_exporter.py +++ b/posthog/tasks/exports/csv_exporter.py @@ -19,6 +19,7 @@ EXPORT_TIMER, ) from ...constants import CSV_EXPORT_LIMIT +from ...hogql.query import LimitContext logger = structlog.get_logger(__name__) @@ -184,7 +185,7 @@ def _export_to_csv(exported_asset: ExportedAsset, limit: int = 1000) -> None: if resource.get("source"): query = resource.get("source") - query_response = process_query(team=exported_asset.team, query_json=query, in_export_context=True) + query_response = process_query(team=exported_asset.team, query_json=query, limit_context=LimitContext.EXPORT) all_csv_rows = _convert_response_to_csv_data(query_response) else: diff --git a/posthog/temporal/batch_exports/batch_exports.py b/posthog/temporal/batch_exports/batch_exports.py index 28d188460c584..c29ec1a1f6b49 100644 --- a/posthog/temporal/batch_exports/batch_exports.py +++ b/posthog/temporal/batch_exports/batch_exports.py @@ -21,7 +21,7 @@ update_batch_export_backfill_status, update_batch_export_run_status, ) -from posthog.temporal.batch_exports.logger import bind_batch_exports_logger +from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.batch_exports.metrics import ( get_export_finished_metric, get_export_started_metric, @@ -504,7 +504,7 @@ async def create_export_run(inputs: CreateBatchExportRunInputs) -> str: Intended to be used in all export workflows, usually at the start, to create a model instance to represent them in our database. 
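    # bind_batch_exports_logger is generalised here into a worker-wide helper. A minimal sketch
    # of the binding pattern the activities below all follow (the destination kwarg stays optional):
    #
    #   logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="S3")
    #   logger.info("Exporting batch %s - %s", inputs.data_interval_start, inputs.data_interval_end)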
""" - logger = await bind_batch_exports_logger(team_id=inputs.team_id) + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) logger.info( "Creating batch export for range %s - %s", inputs.data_interval_start, @@ -536,7 +536,7 @@ class UpdateBatchExportRunStatusInputs: @activity.defn async def update_export_run_status(inputs: UpdateBatchExportRunStatusInputs): """Activity that updates the status of an BatchExportRun.""" - logger = await bind_batch_exports_logger(team_id=inputs.team_id) + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) batch_export_run = await sync_to_async(update_batch_export_run_status)( run_id=uuid.UUID(inputs.id), @@ -574,7 +574,7 @@ async def create_batch_export_backfill_model(inputs: CreateBatchExportBackfillIn Intended to be used in all batch export backfill workflows, usually at the start, to create a model instance to represent them in our database. """ - logger = await bind_batch_exports_logger(team_id=inputs.team_id) + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) logger.info( "Creating historical export for batches in range %s - %s", inputs.start_at, @@ -608,7 +608,7 @@ async def update_batch_export_backfill_model_status(inputs: UpdateBatchExportBac backfill = await sync_to_async(update_batch_export_backfill_status)( backfill_id=uuid.UUID(inputs.id), status=inputs.status ) # type: ignore - logger = await bind_batch_exports_logger(team_id=backfill.team_id) + logger = await bind_temporal_worker_logger(team_id=backfill.team_id) if backfill.status == "Failed": logger.error("Historical export failed") diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py index 4550e237bfcfd..b40a13bc83345 100644 --- a/posthog/temporal/batch_exports/bigquery_batch_export.py +++ b/posthog/temporal/batch_exports/bigquery_batch_export.py @@ -23,13 +23,13 @@ get_rows_count, ) from posthog.temporal.batch_exports.clickhouse import get_client -from posthog.temporal.batch_exports.logger import bind_batch_exports_logger +from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, ) from posthog.temporal.common.utils import ( - HeartbeatDetails, + BatchExportHeartbeatDetails, should_resume_from_activity_heartbeat, ) @@ -63,7 +63,7 @@ async def create_table_in_bigquery( @dataclasses.dataclass -class BigQueryHeartbeatDetails(HeartbeatDetails): +class BigQueryHeartbeatDetails(BatchExportHeartbeatDetails): """The BigQuery batch export details included in every heartbeat.""" pass @@ -114,7 +114,7 @@ def bigquery_client(inputs: BigQueryInsertInputs): @activity.defn async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs): """Activity streams data from ClickHouse to BigQuery.""" - logger = await bind_batch_exports_logger(team_id=inputs.team_id, destination="BigQuery") + logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="BigQuery") logger.info( "Exporting batch %s - %s", inputs.data_interval_start, diff --git a/posthog/temporal/batch_exports/postgres_batch_export.py b/posthog/temporal/batch_exports/postgres_batch_export.py index 3ebc51c4ee6a2..3429556694d04 100644 --- a/posthog/temporal/batch_exports/postgres_batch_export.py +++ b/posthog/temporal/batch_exports/postgres_batch_export.py @@ -24,7 +24,7 @@ get_rows_count, ) from posthog.temporal.batch_exports.clickhouse import get_client -from 
posthog.temporal.batch_exports.logger import bind_batch_exports_logger +from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, @@ -155,7 +155,7 @@ class PostgresInsertInputs: @activity.defn async def insert_into_postgres_activity(inputs: PostgresInsertInputs): """Activity streams data from ClickHouse to Postgres.""" - logger = await bind_batch_exports_logger(team_id=inputs.team_id, destination="PostgreSQL") + logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="PostgreSQL") logger.info( "Exporting batch %s - %s", inputs.data_interval_start, diff --git a/posthog/temporal/batch_exports/redshift_batch_export.py b/posthog/temporal/batch_exports/redshift_batch_export.py index 57333351c1c12..9fc3a579d091f 100644 --- a/posthog/temporal/batch_exports/redshift_batch_export.py +++ b/posthog/temporal/batch_exports/redshift_batch_export.py @@ -23,7 +23,7 @@ get_rows_count, ) from posthog.temporal.batch_exports.clickhouse import get_client -from posthog.temporal.batch_exports.logger import bind_batch_exports_logger +from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.batch_exports.metrics import get_rows_exported_metric from posthog.temporal.batch_exports.postgres_batch_export import ( PostgresInsertInputs, @@ -202,7 +202,7 @@ async def insert_into_redshift_activity(inputs: RedshiftInsertInputs): the Redshift-specific properties_data_type to indicate the type of JSON-like fields. """ - logger = await bind_batch_exports_logger(team_id=inputs.team_id, destination="Redshift") + logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="Redshift") logger.info( "Exporting batch %s - %s", inputs.data_interval_start, diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index f813b877ac4b5..d665b9b2a4528 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -25,7 +25,7 @@ get_rows_count, ) from posthog.temporal.batch_exports.clickhouse import get_client -from posthog.temporal.batch_exports.logger import bind_batch_exports_logger +from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, @@ -307,7 +307,7 @@ class S3InsertInputs: async def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3MultiPartUpload, str]: """Initialize an S3MultiPartUpload and resume it from a heartbeat state if available.""" - logger = await bind_batch_exports_logger(team_id=inputs.team_id, destination="S3") + logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="S3") key = get_s3_key(inputs) s3_upload = S3MultiPartUpload( @@ -370,7 +370,7 @@ async def insert_into_s3_activity(inputs: S3InsertInputs): runs, timing out after say 30 seconds or something and upload multiple files.
""" - logger = await bind_batch_exports_logger(team_id=inputs.team_id, destination="S3") + logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="S3") logger.info( "Exporting batch %s - %s", inputs.data_interval_start, diff --git a/posthog/temporal/batch_exports/snowflake_batch_export.py b/posthog/temporal/batch_exports/snowflake_batch_export.py index c71e0907c1fe8..717674b78b4ac 100644 --- a/posthog/temporal/batch_exports/snowflake_batch_export.py +++ b/posthog/temporal/batch_exports/snowflake_batch_export.py @@ -26,13 +26,13 @@ get_rows_count, ) from posthog.temporal.batch_exports.clickhouse import get_client -from posthog.temporal.batch_exports.logger import bind_batch_exports_logger +from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, get_rows_exported_metric, ) from posthog.temporal.common.utils import ( - HeartbeatDetails, + BatchExportHeartbeatDetails, HeartbeatParseError, NotEnoughHeartbeatValuesError, should_resume_from_activity_heartbeat, @@ -58,7 +58,7 @@ def __init__(self, table_name: str, status: str, errors_seen: int, first_error: @dataclasses.dataclass -class SnowflakeHeartbeatDetails(HeartbeatDetails): +class SnowflakeHeartbeatDetails(BatchExportHeartbeatDetails): """The Snowflake batch export details included in every heartbeat. Attributes: @@ -301,7 +301,7 @@ async def insert_into_snowflake_activity(inputs: SnowflakeInsertInputs): TODO: We're using JSON here, it's not the most efficient way to do this. """ - logger = await bind_batch_exports_logger(team_id=inputs.team_id, destination="Snowflake") + logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="Snowflake") logger.info( "Exporting batch %s - %s", inputs.data_interval_start, diff --git a/posthog/temporal/common/heartbeat.py b/posthog/temporal/common/heartbeat.py deleted file mode 100644 index 3a535b4d45fae..0000000000000 --- a/posthog/temporal/common/heartbeat.py +++ /dev/null @@ -1,37 +0,0 @@ -import asyncio -import collections.abc -import datetime as dt -import typing - -import temporalio.activity - - -class AsyncHeartbeatDetails(typing.NamedTuple): - """Details sent over in a Temporal Activity heartbeat.""" - - def make_activity_heartbeat_while_running( - self, function_to_run: collections.abc.Callable, heartbeat_every: dt.timedelta - ) -> collections.abc.Callable[..., collections.abc.Coroutine]: - """Return a callable that returns a coroutine that heartbeats with these HeartbeatDetails. - - The returned callable wraps 'function_to_run' while heartbeating every 'heartbeat_every' - seconds. 
- """ - - async def heartbeat() -> None: - """Heartbeat every 'heartbeat_every' seconds.""" - while True: - await asyncio.sleep(heartbeat_every.total_seconds()) - temporalio.activity.heartbeat(self) - - async def heartbeat_while_running(*args, **kwargs): - """Wrap 'function_to_run' to asynchronously heartbeat while awaiting.""" - heartbeat_task = asyncio.create_task(heartbeat()) - - try: - return await function_to_run(*args, **kwargs) - finally: - heartbeat_task.cancel() - await asyncio.wait([heartbeat_task]) - - return heartbeat_while_running diff --git a/posthog/temporal/batch_exports/logger.py b/posthog/temporal/common/logger.py similarity index 93% rename from posthog/temporal/batch_exports/logger.py rename to posthog/temporal/common/logger.py index 03cf980238078..dbb18789b82bf 100644 --- a/posthog/temporal/batch_exports/logger.py +++ b/posthog/temporal/common/logger.py @@ -16,8 +16,8 @@ BACKGROUND_LOGGER_TASKS = set() -async def bind_batch_exports_logger(team_id: int, destination: str | None = None) -> FilteringBoundLogger: - """Return a bound logger for BatchExports.""" +async def bind_temporal_worker_logger(team_id: int, destination: str | None = None) -> FilteringBoundLogger: + """Return a bound logger for Temporal Workers.""" if not structlog.is_configured(): configure_logger() @@ -67,7 +67,7 @@ def configure_logger( # We save the error to log it later as the logger hasn't yet been configured at this time. log_producer_error = e else: - put_in_queue = PutInBatchExportsLogQueueProcessor(log_queue) + put_in_queue = PutInLogQueueProcessor(log_queue) base_processors.append(put_in_queue) base_processors += [ @@ -118,7 +118,7 @@ def create_logger_background_task(task) -> asyncio.Task: return new_task -class PutInBatchExportsLogQueueProcessor: +class PutInLogQueueProcessor: """A StructLog processor that puts event_dict into a queue. We format event_dict as a message to be sent to Kafka by a queue listener. @@ -156,14 +156,14 @@ def __call__( def get_temporal_context() -> dict[str, str | int]: - """Return batch export context variables from Temporal. + """Return context variables from Temporal. - More specifically, the batch export context variables coming from Temporal are: + More specifically, the context variables coming from Temporal are: * attempt: The current attempt number of the Temporal Workflow. - * log_source: Either "batch_exports" or "batch_exports_backfill". - * log_source_id: The batch export ID. - * workflow_id: The ID of the Temporal Workflow running the batch export. - * workflow_run_id: The ID of the Temporal Workflow Execution running the batch export. + * log_source: Either "batch_exports" or "batch_exports_backfill" or "external_data_jobs". + * log_source_id: The batch export ID or external data source id. + * workflow_id: The ID of the Temporal Workflow running job. + * workflow_run_id: The ID of the Temporal Workflow Execution running the workflow. * workflow_type: The name of the Temporal Workflow. We attempt to fetch the context from the activity information. 
If undefined, an empty dict @@ -182,6 +182,10 @@ def get_temporal_context() -> dict[str, str | int]: # This works because the WorkflowID is made up like f"{batch_export_id}-Backfill-{data_interval_end}" log_source_id = workflow_id.split("-Backfill")[0] log_source = "batch_exports_backfill" + elif workflow_type == "external-data-job": + # This works because the WorkflowID is made up like f"{external_data_source_id}-{data_interval_end}" + log_source_id = workflow_id.rsplit("-", maxsplit=3)[0] + log_source = "external_data_jobs" else: # This works because the WorkflowID is made up like f"{batch_export_id}-{data_interval_end}" # Since 'data_interval_end' is an iso formatted datetime string, it has two '-' to separate the diff --git a/posthog/temporal/common/utils.py b/posthog/temporal/common/utils.py index efc19c9e8ef4a..1b61a356dc898 100644 --- a/posthog/temporal/common/utils.py +++ b/posthog/temporal/common/utils.py @@ -2,6 +2,7 @@ import dataclasses import datetime as dt import typing +import abc class EmptyHeartbeatError(Exception): @@ -29,7 +30,7 @@ def __init__(self, field: str): @dataclasses.dataclass -class HeartbeatDetails: +class HeartbeatDetails(metaclass=abc.ABCMeta): """The batch export details included in every heartbeat. Each batch export destination should subclass this and implement whatever details are specific to that @@ -40,7 +41,6 @@ class HeartbeatDetails: _remaining: Any remaining values in the heartbeat_details tuple that we do not parse. """ - last_inserted_at: dt.datetime _remaining: collections.abc.Sequence[typing.Any] @property @@ -48,6 +48,15 @@ def total_details(self) -> int: """The total number of details that we have parsed + those remaining to parse.""" return (len(dataclasses.fields(self.__class__)) - 1) + len(self._remaining) + @abc.abstractclassmethod + def from_activity(cls, activity): + pass + + +@dataclasses.dataclass +class BatchExportHeartbeatDetails(HeartbeatDetails): + last_inserted_at: dt.datetime + @classmethod def from_activity(cls, activity): """Attempt to initialize HeartbeatDetails from an activity's info.""" @@ -61,7 +70,33 @@ def from_activity(cls, activity): except (TypeError, ValueError) as e: raise HeartbeatParseError("last_inserted_at") from e - return cls(last_inserted_at, _remaining=details[1:]) + return cls(last_inserted_at=last_inserted_at, _remaining=details[1:]) + + +@dataclasses.dataclass +class DataImportHeartbeatDetails(HeartbeatDetails): + """Data import heartbeat details. + + Attributes: + endpoint: The endpoint we are importing data from. + cursor: The cursor we are using to paginate through the endpoint. 
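    # With HeartbeatDetails now abstract, each subclass owns the parsing of the raw heartbeat
    # tuple Temporal hands back on retry. A sketch of resuming a data import, assuming the
    # activity previously heartbeated (endpoint, cursor) in that order:
    #
    #   details = DataImportHeartbeatDetails.from_activity(activity)
    #   resume_stream(details.endpoint, details.cursor)  # resume_stream is hypothetical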
+ """ + + endpoint: str + cursor: str + + @classmethod + def from_activity(cls, activity): + """Attempt to initialize DataImportHeartbeatDetails from an activity's info.""" + details = activity.info().heartbeat_details + + if len(details) == 0: + raise EmptyHeartbeatError() + + if len(details) != 2: + raise NotEnoughHeartbeatValuesError(len(details), 2) + + return cls(endpoint=details[0], cursor=details[1], _remaining=details[2:]) HeartbeatType = typing.TypeVar("HeartbeatType", bound=HeartbeatDetails) diff --git a/posthog/temporal/data_imports/__init__.py b/posthog/temporal/data_imports/__init__.py index 69590c6a5ac29..aad09fae8047c 100644 --- a/posthog/temporal/data_imports/__init__.py +++ b/posthog/temporal/data_imports/__init__.py @@ -6,6 +6,5 @@ create_external_data_job_model, update_external_data_job_model, run_external_data_job, - move_draft_to_production_activity, validate_schema_activity, ] diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index 4eb673b2530de..de627f8612528 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -9,24 +9,18 @@ # TODO: remove dependency from posthog.temporal.batch_exports.base import PostHogWorkflow -from posthog.temporal.common.heartbeat import AsyncHeartbeatDetails -from posthog.warehouse.data_load.pipeline import ( +from posthog.temporal.data_imports.pipelines.stripe.stripe_pipeline import ( PIPELINE_TYPE_INPUTS_MAPPING, PIPELINE_TYPE_RUN_MAPPING, - SourceSchema, - move_draft_to_production, -) -from posthog.warehouse.data_load.sync_table import ( - SchemaValidationError, - is_schema_valid, ) +from posthog.warehouse.data_load.sync_table import SchemaValidationError, validate_schema_and_update_table from posthog.warehouse.external_data_source.jobs import ( create_external_data_job, - get_external_data_source, + get_external_data_job, update_external_job_status, ) from posthog.warehouse.models.external_data_job import ExternalDataJob -from posthog.warehouse.models.external_data_source import ExternalDataSource +from posthog.temporal.common.logger import bind_temporal_worker_logger @dataclasses.dataclass @@ -40,6 +34,12 @@ async def create_external_data_job_model(inputs: CreateExternalDataJobInputs) -> run = await sync_to_async(create_external_data_job)( # type: ignore team_id=inputs.team_id, external_data_source_id=inputs.external_data_source_id, + workflow_id=activity.info().workflow_id, + ) + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) + + logger.info( + f"Created external data job with for external data source {inputs.external_data_source_id}", ) return str(run.id) @@ -48,6 +48,7 @@ async def create_external_data_job_model(inputs: CreateExternalDataJobInputs) -> @dataclasses.dataclass class UpdateExternalDataJobStatusInputs: id: str + team_id: int run_id: str status: str latest_error: str | None @@ -59,73 +60,77 @@ async def update_external_data_job_model(inputs: UpdateExternalDataJobStatusInpu run_id=uuid.UUID(inputs.id), status=inputs.status, latest_error=inputs.latest_error, + team_id=inputs.team_id, + ) + + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) + logger.info( + f"Updated external data job with for external data source {inputs.run_id} to status {inputs.status}", ) @dataclasses.dataclass class ValidateSchemaInputs: - source_schemas: list[SourceSchema] - external_data_source_id: str - create: bool + run_id: str + team_id: int @activity.defn -async def 
validate_schema_activity(inputs: ValidateSchemaInputs) -> bool: - return await sync_to_async(is_schema_valid)( # type: ignore - source_schemas=inputs.source_schemas, - external_data_source_id=inputs.external_data_source_id, - create=inputs.create, +async def validate_schema_activity(inputs: ValidateSchemaInputs) -> None: + await sync_to_async(validate_schema_and_update_table)( # type: ignore + run_id=inputs.run_id, + team_id=inputs.team_id, + ) + + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) + logger.info( + f"Validated schema for external data job {inputs.run_id}", ) @dataclasses.dataclass -class MoveDraftToProductionExternalDataJobInputs: +class ExternalDataWorkflowInputs: team_id: int external_data_source_id: str -@activity.defn -async def move_draft_to_production_activity(inputs: MoveDraftToProductionExternalDataJobInputs) -> None: - await sync_to_async(move_draft_to_production)( # type: ignore - team_id=inputs.team_id, - external_data_source_id=inputs.external_data_source_id, - ) - - @dataclasses.dataclass class ExternalDataJobInputs: team_id: int - external_data_source_id: str + run_id: str @activity.defn -async def run_external_data_job(inputs: ExternalDataJobInputs) -> list[SourceSchema]: - model: ExternalDataSource = await sync_to_async(get_external_data_source)( # type: ignore +async def run_external_data_job(inputs: ExternalDataJobInputs) -> None: + model: ExternalDataJob = await sync_to_async(get_external_data_job)( # type: ignore team_id=inputs.team_id, - external_data_source_id=inputs.external_data_source_id, + run_id=inputs.run_id, ) - job_inputs = PIPELINE_TYPE_INPUTS_MAPPING[model.source_type]( - team_id=inputs.team_id, job_type=model.source_type, dataset_name=model.draft_folder_path, **model.job_inputs + job_inputs = PIPELINE_TYPE_INPUTS_MAPPING[model.pipeline.source_type]( + run_id=inputs.run_id, + team_id=inputs.team_id, + job_type=model.pipeline.source_type, + dataset_name=model.folder_path, + **model.pipeline.job_inputs, ) - job_fn = PIPELINE_TYPE_RUN_MAPPING[model.source_type] - - heartbeat_details = AsyncHeartbeatDetails() - func = heartbeat_details.make_activity_heartbeat_while_running(job_fn, dt.timedelta(seconds=10)) + job_fn = PIPELINE_TYPE_RUN_MAPPING[model.pipeline.source_type] - return await func(job_inputs) + await job_fn(job_inputs) # TODO: update retry policies @workflow.defn(name="external-data-job") class ExternalDataJobWorkflow(PostHogWorkflow): @staticmethod - def parse_inputs(inputs: list[str]) -> ExternalDataJobInputs: + def parse_inputs(inputs: list[str]) -> ExternalDataWorkflowInputs: loaded = json.loads(inputs[0]) - return ExternalDataJobInputs(**loaded) + return ExternalDataWorkflowInputs(**loaded) @workflow.run - async def run(self, inputs: ExternalDataJobInputs): + async def run(self, inputs: ExternalDataWorkflowInputs): + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) + # create external data job and trigger activity create_external_data_job_inputs = CreateExternalDataJobInputs( team_id=inputs.team_id, @@ -145,45 +150,27 @@ async def run(self, inputs: ExternalDataJobInputs): ) update_inputs = UpdateExternalDataJobStatusInputs( - id=run_id, run_id=run_id, status=ExternalDataJob.Status.COMPLETED, latest_error=None + id=run_id, run_id=run_id, status=ExternalDataJob.Status.COMPLETED, latest_error=None, team_id=inputs.team_id ) try: - # TODO: can make this a child workflow for separate worker pool - source_schemas = await workflow.execute_activity( - run_external_data_job, - inputs, - 
start_to_close_timeout=dt.timedelta(minutes=60),
-                retry_policy=RetryPolicy(maximum_attempts=1),
-                heartbeat_timeout=dt.timedelta(minutes=1),
-            )
-
-            # check schema first
-            validate_inputs = ValidateSchemaInputs(
-                source_schemas=source_schemas, external_data_source_id=inputs.external_data_source_id, create=False
-            )
-
-            await workflow.execute_activity(
-                validate_schema_activity,
-                validate_inputs,
-                start_to_close_timeout=dt.timedelta(minutes=2),
-                retry_policy=RetryPolicy(maximum_attempts=2),
-            )
-
-            move_inputs = MoveDraftToProductionExternalDataJobInputs(
+            job_inputs = ExternalDataJobInputs(
                 team_id=inputs.team_id,
-                external_data_source_id=inputs.external_data_source_id,
+                run_id=run_id,
             )
 
+            # TODO: can make this a child workflow for separate worker pool
             await workflow.execute_activity(
-                move_draft_to_production_activity,
-                move_inputs,
-                start_to_close_timeout=dt.timedelta(minutes=1),
-                retry_policy=RetryPolicy(maximum_attempts=2),
+                run_external_data_job,
+                job_inputs,
+                start_to_close_timeout=dt.timedelta(minutes=120),
+                retry_policy=RetryPolicy(maximum_attempts=10),
+                heartbeat_timeout=dt.timedelta(seconds=60),
             )
 
-            # if not errors, then create the schema
-            validate_inputs.create = True
+            # check schema first
+            validate_inputs = ValidateSchemaInputs(run_id=run_id, team_id=inputs.team_id)
+
             await workflow.execute_activity(
                 validate_schema_activity,
                 validate_inputs,
@@ -196,14 +183,20 @@ async def run(self, inputs: ExternalDataJobInputs):
                 update_inputs.status = ExternalDataJob.Status.CANCELLED
             else:
                 update_inputs.status = ExternalDataJob.Status.FAILED
-
+                logger.error(
+                    f"External data job failed for external data source {inputs.external_data_source_id} with error: {e.cause}"
+                )
             update_inputs.latest_error = str(e.cause)
             raise
         except SchemaValidationError as e:
+            logger.error(f"Schema validation failed for external data source {inputs.external_data_source_id}")
             update_inputs.latest_error = str(e)
             update_inputs.status = ExternalDataJob.Status.FAILED
             raise
-        except Exception:
+        except Exception as e:
+            logger.error(
+                f"External data job failed for external data source {inputs.external_data_source_id} with error: {e}"
+            )
             # Catch all
             update_inputs.latest_error = "An unexpected error has occurred"
             update_inputs.status = ExternalDataJob.Status.FAILED
@@ -217,6 +210,6 @@ async def run(self, inputs: ExternalDataJobInputs):
                 initial_interval=dt.timedelta(seconds=10),
                 maximum_interval=dt.timedelta(seconds=60),
                 maximum_attempts=0,
-                non_retryable_error_types=["NotNullViolation", "IntegrityError"],
+                non_retryable_error_types=["NotNullViolation", "IntegrityError", "DoesNotExist"],
             ),
         )
diff --git a/posthog/temporal/data_imports/pipelines/__init__.py b/posthog/temporal/data_imports/pipelines/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/temporal/data_imports/pipelines/stripe/__init__.py b/posthog/temporal/data_imports/pipelines/stripe/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/warehouse/data_load/stripe/helper.py b/posthog/temporal/data_imports/pipelines/stripe/helpers.py
similarity index 68%
rename from posthog/warehouse/data_load/stripe/helper.py
rename to posthog/temporal/data_imports/pipelines/stripe/helpers.py
index 0d47171a2ab23..501aa976fbf2f 100644
--- a/posthog/warehouse/data_load/stripe/helper.py
+++ b/posthog/temporal/data_imports/pipelines/stripe/helpers.py
@@ -1,14 +1,22 @@
 """Stripe analytics source helpers"""
 
-from typing import Any,
Dict, Optional, Union import stripe from dlt.common import pendulum -from dlt.common.typing import TDataItem from pendulum import DateTime +from asgiref.sync import sync_to_async +stripe.api_version = "2022-11-15" -def pagination(endpoint: str, start_date: Optional[Any] = None, end_date: Optional[Any] = None) -> Iterable[TDataItem]: + +async def stripe_pagination( + api_key: str, + endpoint: str, + start_date: Optional[Any] = None, + end_date: Optional[Any] = None, + starting_after: Optional[str] = None, +): """ Retrieves data from an endpoint with pagination. @@ -20,9 +28,9 @@ def pagination(endpoint: str, start_date: Optional[Any] = None, end_date: Option Returns: Iterable[TDataItem]: Data items retrieved from the endpoint. """ - starting_after = None while True: - response = stripe_get_data( + response = await stripe_get_data( + api_key, endpoint, start_date=start_date, end_date=end_date, @@ -31,7 +39,7 @@ def pagination(endpoint: str, start_date: Optional[Any] = None, end_date: Option if len(response["data"]) > 0: starting_after = response["data"][-1]["id"] - yield response["data"] + yield response["data"], starting_after if not response["has_more"]: break @@ -46,7 +54,8 @@ def transform_date(date: Union[str, DateTime, int]) -> int: return date -def stripe_get_data( +async def stripe_get_data( + api_key: str, resource: str, start_date: Optional[Any] = None, end_date: Optional[Any] = None, @@ -60,5 +69,11 @@ def stripe_get_data( if resource == "Subscription": kwargs.update({"status": "all"}) - resource_dict = getattr(stripe, resource).list(created={"gte": start_date, "lt": end_date}, limit=100, **kwargs) + _resource = getattr(stripe, resource) + resource_dict = await sync_to_async(_resource.list)( + api_key=api_key, + created={"gte": start_date, "lt": end_date}, + limit=100, + **kwargs, # type: ignore + ) return dict(resource_dict) diff --git a/posthog/warehouse/data_load/stripe/settings.py b/posthog/temporal/data_imports/pipelines/stripe/settings.py similarity index 100% rename from posthog/warehouse/data_load/stripe/settings.py rename to posthog/temporal/data_imports/pipelines/stripe/settings.py diff --git a/posthog/temporal/data_imports/pipelines/stripe/stripe_pipeline.py b/posthog/temporal/data_imports/pipelines/stripe/stripe_pipeline.py new file mode 100644 index 0000000000000..8f3c1defd56d0 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/stripe/stripe_pipeline.py @@ -0,0 +1,111 @@ +from dataclasses import dataclass +from typing import Dict + +import dlt +from django.conf import settings +from dlt.pipeline.exceptions import PipelineStepFailed + +from posthog.warehouse.models import ExternalDataSource + +from posthog.temporal.data_imports.pipelines.stripe.helpers import stripe_pagination +from posthog.temporal.data_imports.pipelines.stripe.settings import ENDPOINTS +from posthog.temporal.common.logger import bind_temporal_worker_logger + +import os +from temporalio import activity +from posthog.temporal.common.utils import ( + DataImportHeartbeatDetails, + should_resume_from_activity_heartbeat, +) +import asyncio + + +@dataclass +class PipelineInputs: + run_id: str + dataset_name: str + job_type: str + team_id: int + + +@dataclass +class SourceColumnType: + name: str + data_type: str + nullable: bool + + +@dataclass +class SourceSchema: + resource: str + name: str + columns: Dict[str, SourceColumnType] + write_disposition: str + + +@dataclass +class StripeJobInputs(PipelineInputs): + stripe_secret_key: str + + +def create_pipeline(inputs: PipelineInputs): + pipeline_name 
= f"{inputs.job_type}_pipeline_{inputs.team_id}_run_{inputs.run_id}" + pipelines_dir = f"{os.getcwd()}/.dlt/{inputs.team_id}/{inputs.run_id}/{inputs.job_type}" + return dlt.pipeline( + pipeline_name=pipeline_name, + pipelines_dir=pipelines_dir, # workers can be created and destroyed so it doesn't matter where the metadata gets put temporarily + destination="filesystem", + dataset_name=inputs.dataset_name, + credentials={ + "aws_access_key_id": settings.AIRBYTE_BUCKET_KEY, + "aws_secret_access_key": settings.AIRBYTE_BUCKET_SECRET, + }, + ) + + +# a temporal activity +async def run_stripe_pipeline(inputs: StripeJobInputs) -> None: + ordered_endpoints = list(ENDPOINTS) + + # basic logger for now + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) + should_resume, details = await should_resume_from_activity_heartbeat(activity, DataImportHeartbeatDetails, logger) + + if should_resume and details: + ordered_endpoints = ordered_endpoints[ordered_endpoints.index(details.endpoint) :] + logger.info(f"Resuming from {details.endpoint} with cursor {details.cursor}") + + endpoint = ordered_endpoints[0] + cursor = None + + async def worker_shutdown_handler(): + """Handle the Worker shutting down by heart-beating our latest status.""" + await activity.wait_for_worker_shutdown() + activity.heartbeat(endpoint, cursor) + + asyncio.create_task(worker_shutdown_handler()) + + for endpoint in ordered_endpoints: + if should_resume and details and endpoint == details.endpoint: + starting_after = details.cursor + else: + starting_after = None + + async for item, cursor in stripe_pagination(inputs.stripe_secret_key, endpoint, starting_after=starting_after): + try: + # init pipeline and run data import + pipeline = create_pipeline(inputs) + pipeline.run(item, table_name=endpoint.lower(), loader_file_format="parquet") + + # clear everything from pipeline + pipeline.drop() + pipeline.deactivate() + activity.heartbeat(endpoint, cursor) + except PipelineStepFailed: + logger.error(f"Data import failed for endpoint {endpoint} with cursor {cursor}") + raise + + +PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING = {ExternalDataSource.Type.STRIPE: ENDPOINTS} +PIPELINE_TYPE_INPUTS_MAPPING = {ExternalDataSource.Type.STRIPE: StripeJobInputs} +PIPELINE_TYPE_RUN_MAPPING = {ExternalDataSource.Type.STRIPE: run_stripe_pipeline} diff --git a/posthog/temporal/tests/batch_exports/test_logger.py b/posthog/temporal/tests/batch_exports/test_logger.py index 4ed57bcd999fa..b3e2611979363 100644 --- a/posthog/temporal/tests/batch_exports/test_logger.py +++ b/posthog/temporal/tests/batch_exports/test_logger.py @@ -23,9 +23,9 @@ TRUNCATE_LOG_ENTRIES_TABLE_SQL, ) from posthog.kafka_client.topics import KAFKA_LOG_ENTRIES -from posthog.temporal.batch_exports.logger import ( +from posthog.temporal.common.logger import ( BACKGROUND_LOGGER_TASKS, - bind_batch_exports_logger, + bind_temporal_worker_logger, configure_logger, ) @@ -155,7 +155,7 @@ async def configure(log_capture, queue, producer): async def test_batch_exports_logger_binds_context(log_capture): """Test whether we can bind context variables.""" - logger = await bind_batch_exports_logger(team_id=1, destination="Somewhere") + logger = await bind_temporal_worker_logger(team_id=1, destination="Somewhere") logger.info("Hi! This is an info log") logger.error("Hi! 
This is an erro log") @@ -173,7 +173,7 @@ async def test_batch_exports_logger_binds_context(log_capture): async def test_batch_exports_logger_formats_positional_args(log_capture): """Test whether positional arguments are formatted in the message.""" - logger = await bind_batch_exports_logger(team_id=1, destination="Somewhere") + logger = await bind_temporal_worker_logger(team_id=1, destination="Somewhere") logger.info("Hi! This is an %s log", "info") logger.error("Hi! This is an %s log", "error") @@ -234,7 +234,7 @@ async def test_batch_exports_logger_binds_activity_context( @temporalio.activity.defn async def log_activity(): """A simple temporal activity that just logs.""" - logger = await bind_batch_exports_logger(team_id=1, destination="Somewhere") + logger = await bind_temporal_worker_logger(team_id=1, destination="Somewhere") logger.info("Hi! This is an %s log from an activity", "info") @@ -282,7 +282,7 @@ async def test_batch_exports_logger_puts_in_queue(activity_environment, queue): @temporalio.activity.defn async def log_activity(): """A simple temporal activity that just logs.""" - logger = await bind_batch_exports_logger(team_id=2, destination="Somewhere") + logger = await bind_temporal_worker_logger(team_id=2, destination="Somewhere") logger.info("Hi! This is an %s log from an activity", "info") @@ -347,7 +347,7 @@ async def test_batch_exports_logger_produces_to_kafka(activity_environment, prod @temporalio.activity.defn async def log_activity(): """A simple temporal activity that just logs.""" - logger = await bind_batch_exports_logger(team_id=3, destination="Somewhere") + logger = await bind_temporal_worker_logger(team_id=3, destination="Somewhere") logger.info("Hi! This is an %s log from an activity", "info") diff --git a/posthog/temporal/tests/test_external_data_job.py b/posthog/temporal/tests/test_external_data_job.py index 970ca073b3c47..d193da98d9188 100644 --- a/posthog/temporal/tests/test_external_data_job.py +++ b/posthog/temporal/tests/test_external_data_job.py @@ -3,30 +3,22 @@ import pytest from asgiref.sync import sync_to_async -from django.conf import settings from django.test import override_settings -from temporalio.client import Client -from temporalio.worker import UnsandboxedWorkflowRunner, Worker from posthog.temporal.data_imports.external_data_job import ( CreateExternalDataJobInputs, - ExternalDataJobWorkflow, UpdateExternalDataJobStatusInputs, ValidateSchemaInputs, create_external_data_job, create_external_data_job_model, - move_draft_to_production_activity, run_external_data_job, update_external_data_job_model, validate_schema_activity, ) -from posthog.warehouse.data_load.pipeline import ( - SourceColumnType, - SourceSchema, +from posthog.temporal.data_imports.pipelines.stripe.stripe_pipeline import ( StripeJobInputs, ) -from posthog.warehouse.data_load.service import ExternalDataJobInputs -from posthog.warehouse.data_load.stripe import ENDPOINTS +from posthog.temporal.data_imports.external_data_job import ExternalDataJobInputs from posthog.warehouse.models import ( DataWarehouseTable, ExternalDataJob, @@ -42,6 +34,9 @@ @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_create_external_job_activity(activity_environment, team, **kwargs): + """ + Test that the create external job activity creates a new job + """ new_source = await sync_to_async(ExternalDataSource.objects.create)( source_id=uuid.uuid4(), connection_id=uuid.uuid4(), @@ -62,6 +57,9 @@ async def test_create_external_job_activity(activity_environment, team, **kwargs 
diff --git a/posthog/temporal/tests/test_external_data_job.py b/posthog/temporal/tests/test_external_data_job.py
index 970ca073b3c47..d193da98d9188 100644
--- a/posthog/temporal/tests/test_external_data_job.py
+++ b/posthog/temporal/tests/test_external_data_job.py
@@ -3,30 +3,22 @@
 
 import pytest
 from asgiref.sync import sync_to_async
-from django.conf import settings
 from django.test import override_settings
-from temporalio.client import Client
-from temporalio.worker import UnsandboxedWorkflowRunner, Worker
 
 from posthog.temporal.data_imports.external_data_job import (
     CreateExternalDataJobInputs,
-    ExternalDataJobWorkflow,
     UpdateExternalDataJobStatusInputs,
     ValidateSchemaInputs,
     create_external_data_job,
     create_external_data_job_model,
-    move_draft_to_production_activity,
     run_external_data_job,
     update_external_data_job_model,
     validate_schema_activity,
 )
-from posthog.warehouse.data_load.pipeline import (
-    SourceColumnType,
-    SourceSchema,
+from posthog.temporal.data_imports.pipelines.stripe.stripe_pipeline import (
     StripeJobInputs,
 )
-from posthog.warehouse.data_load.service import ExternalDataJobInputs
-from posthog.warehouse.data_load.stripe import ENDPOINTS
+from posthog.temporal.data_imports.external_data_job import ExternalDataJobInputs
 from posthog.warehouse.models import (
     DataWarehouseTable,
     ExternalDataJob,
@@ -42,6 +34,9 @@
 @pytest.mark.django_db(transaction=True)
 @pytest.mark.asyncio
 async def test_create_external_job_activity(activity_environment, team, **kwargs):
+    """
+    Test that the create external job activity creates a new job
+    """
     new_source = await sync_to_async(ExternalDataSource.objects.create)(
         source_id=uuid.uuid4(),
         connection_id=uuid.uuid4(),
@@ -62,6 +57,9 @@ async def test_create_external_job_activity(activity_environment, team, **kwargs
 @pytest.mark.django_db(transaction=True)
 @pytest.mark.asyncio
 async def test_update_external_job_activity(activity_environment, team, **kwargs):
+    """
+    Test that the update external job activity updates the job status
+    """
     new_source = await sync_to_async(ExternalDataSource.objects.create)(
         source_id=uuid.uuid4(),
         connection_id=uuid.uuid4(),
@@ -71,10 +69,16 @@ async def test_update_external_job_activity(activity_environment, team, **kwargs
         source_type="Stripe",
     )  # type: ignore
 
-    new_job = await sync_to_async(create_external_data_job)(team_id=team.id, external_data_source_id=new_source.pk)  # type: ignore
+    new_job = await sync_to_async(create_external_data_job)(
+        team_id=team.id, external_data_source_id=new_source.pk, workflow_id=activity_environment.info.workflow_id
+    )  # type: ignore
 
     inputs = UpdateExternalDataJobStatusInputs(
-        id=str(new_job.id), run_id=str(new_job.id), status=ExternalDataJob.Status.COMPLETED, latest_error=None
+        id=str(new_job.id),
+        run_id=str(new_job.id),
+        status=ExternalDataJob.Status.COMPLETED,
+        latest_error=None,
+        team_id=team.id,
     )
 
     await activity_environment.run(update_external_data_job_model, inputs)
@@ -96,42 +100,45 @@ async def test_run_stripe_job(activity_environment, team, **kwargs):
         job_inputs={"stripe_secret_key": "test-key"},
     )  # type: ignore
 
-    inputs = ExternalDataJobInputs(team_id=team.id, external_data_source_id=new_source.pk)
+    new_job: ExternalDataJob = await sync_to_async(ExternalDataJob.objects.create)(  # type: ignore
+        team_id=team.id,
+        pipeline_id=new_source.pk,
+        status=ExternalDataJob.Status.RUNNING,
+        rows_synced=0,
+    )
+
+    new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)()  # type: ignore
+
+    inputs = ExternalDataJobInputs(team_id=team.id, run_id=new_job.pk)
 
     with mock.patch(
-        "posthog.warehouse.data_load.pipeline.create_pipeline",
+        "posthog.temporal.data_imports.pipelines.stripe.stripe_pipeline.create_pipeline",
     ) as mock_create_pipeline, mock.patch(
-        "posthog.warehouse.data_load.pipeline.stripe_source",
-    ) as mock_run_stripe, mock.patch(
-        "posthog.warehouse.data_load.pipeline.get_schema",
-    ) as mock_data_tables:
-        mock_data_tables.return_value = [
-            SourceSchema(
-                resource="customers",
-                name="customers",
-                columns={
-                    "id": SourceColumnType(name="id", data_type="string", nullable=False),
-                    "name": SourceColumnType(name="name", data_type="string", nullable=True),
-                },
-                write_disposition="overwrite",
-            )
-        ]
-        schemas = await activity_environment.run(run_external_data_job, inputs)
-        mock_create_pipeline.assert_called_once_with(
+        "posthog.temporal.data_imports.pipelines.stripe.helpers.stripe_get_data"
+    ) as mock_stripe_get_data:  # noqa: B015
+        mock_stripe_get_data.return_value = {
+            "data": [{"id": "test-id", "object": "test-object"}],
+            "has_more": False,
+        }
+        await activity_environment.run(run_external_data_job, inputs)
+
+        assert mock_stripe_get_data.call_count == 5
+        assert mock_create_pipeline.call_count == 5
+
+        mock_create_pipeline.assert_called_with(
            StripeJobInputs(
+                run_id=new_job.pk,
                 job_type="Stripe",
                 team_id=team.id,
                 stripe_secret_key="test-key",
-                dataset_name=new_source.draft_folder_path,
+                dataset_name=new_job.folder_path,
             )
         )
-        mock_run_stripe.assert_called_once_with(stripe_secret_key="test-key", endpoints=ENDPOINTS)
-        assert len(schemas) == 1
 
 
 @pytest.mark.django_db(transaction=True)
 @pytest.mark.asyncio
-async def test_is_schema_valid_activity(activity_environment, team, **kwargs):
+async def 
test_validate_schema_and_update_table_activity(activity_environment, team, **kwargs): new_source = await sync_to_async(ExternalDataSource.objects.create)( source_id=uuid.uuid4(), connection_id=uuid.uuid4(), @@ -142,6 +149,13 @@ async def test_is_schema_valid_activity(activity_environment, team, **kwargs): job_inputs={"stripe_secret_key": "test-key"}, ) # type: ignore + new_job = await sync_to_async(ExternalDataJob.objects.create)( # type: ignore + team_id=team.id, + pipeline_id=new_source.pk, + status=ExternalDataJob.Status.RUNNING, + rows_synced=0, + ) + with mock.patch( "posthog.warehouse.models.table.DataWarehouseTable.get_columns" ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): @@ -149,28 +163,17 @@ async def test_is_schema_valid_activity(activity_environment, team, **kwargs): await activity_environment.run( validate_schema_activity, ValidateSchemaInputs( - external_data_source_id=new_source.pk, - source_schemas=[ - SourceSchema( - resource="customers", - name="customers", - columns={ - "id": SourceColumnType(name="id", data_type="string", nullable=False), - "name": SourceColumnType(name="name", data_type="string", nullable=True), - }, - write_disposition="overwrite", - ) - ], - create=False, + run_id=new_job.pk, + team_id=team.id, ), ) - assert mock_get_columns.call_count == 5 + assert mock_get_columns.call_count == 10 @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio -async def test_is_schema_valid_activity_failed(activity_environment, team, **kwargs): +async def test_validate_schema_and_update_table_activity_failed(activity_environment, team, **kwargs): new_source = await sync_to_async(ExternalDataSource.objects.create)( source_id=uuid.uuid4(), connection_id=uuid.uuid4(), @@ -181,6 +184,13 @@ async def test_is_schema_valid_activity_failed(activity_environment, team, **kwa job_inputs={"stripe_secret_key": "test-key"}, ) # type: ignore + new_job = await sync_to_async(ExternalDataJob.objects.create)( # type: ignore + team_id=team.id, + pipeline_id=new_source.pk, + status=ExternalDataJob.Status.RUNNING, + rows_synced=0, + ) + with mock.patch( "posthog.warehouse.models.table.DataWarehouseTable.get_columns" ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): @@ -191,19 +201,8 @@ async def test_is_schema_valid_activity_failed(activity_environment, team, **kwa await activity_environment.run( validate_schema_activity, ValidateSchemaInputs( - external_data_source_id=new_source.pk, - source_schemas=[ - SourceSchema( - resource="customers", - name="customers", - columns={ - "id": SourceColumnType(name="id", data_type="string", nullable=False), - "name": SourceColumnType(name="name", data_type="string", nullable=True), - }, - write_disposition="overwrite", - ) - ], - create=False, + run_id=new_job.pk, + team_id=team.id, ), ) @@ -223,6 +222,13 @@ async def test_create_schema_activity(activity_environment, team, **kwargs): job_inputs={"stripe_secret_key": "test-key"}, ) # type: ignore + new_job = await sync_to_async(ExternalDataJob.objects.create)( # type: ignore + team_id=team.id, + pipeline_id=new_source.pk, + status=ExternalDataJob.Status.RUNNING, + rows_synced=0, + ) + with mock.patch( "posthog.warehouse.models.table.DataWarehouseTable.get_columns" ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): @@ -230,138 +236,12 @@ async def test_create_schema_activity(activity_environment, team, **kwargs): await activity_environment.run( validate_schema_activity, ValidateSchemaInputs( - external_data_source_id=new_source.pk, - 
source_schemas=[ - SourceSchema( - resource="customers", - name="customers", - columns={ - "id": SourceColumnType(name="id", data_type="string", nullable=False), - "name": SourceColumnType(name="name", data_type="string", nullable=True), - }, - write_disposition="overwrite", - ) - ], - create=True, - ), - ) - - assert mock_get_columns.call_count == 5 - all_tables = DataWarehouseTable.objects.all() - table_length = await sync_to_async(len)(all_tables) # type: ignore - assert table_length == 5 - - # Should still have one after - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - external_data_source_id=new_source.pk, - source_schemas=[ - SourceSchema( - resource="customers", - name="customers", - columns={ - "id": SourceColumnType(name="id", data_type="string", nullable=False), - "name": SourceColumnType(name="name", data_type="string", nullable=True), - }, - write_disposition="overwrite", - ) - ], - create=True, + run_id=new_job.pk, + team_id=team.id, ), ) + assert mock_get_columns.call_count == 10 all_tables = DataWarehouseTable.objects.all() table_length = await sync_to_async(len)(all_tables) # type: ignore - assert table_length == 5 - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_external_data_job_workflow(team): - """Test the squash_person_overrides workflow end-to-end with newer overrides.""" - client = await Client.connect( - f"{settings.TEMPORAL_HOST}:{settings.TEMPORAL_PORT}", - namespace=settings.TEMPORAL_NAMESPACE, - ) - - workflow_id = str(uuid.uuid4()) - - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - ) # type: ignore - - inputs = ExternalDataJobInputs(team_id=team.id, external_data_source_id=new_source.pk) - - async with Worker( - client, - task_queue=settings.TEMPORAL_TASK_QUEUE, - workflows=[ExternalDataJobWorkflow], - activities=[ - create_external_data_job_model, - run_external_data_job, - update_external_data_job_model, - move_draft_to_production_activity, - validate_schema_activity, - ], - workflow_runner=UnsandboxedWorkflowRunner(), - ): - # TODO: don't need to test all the activities here, just the workflow - with mock.patch( - "posthog.warehouse.data_load.pipeline.create_pipeline", - ) as mock_create_pipeline, mock.patch( - "posthog.warehouse.data_load.pipeline.stripe_source", - ) as mock_run_stripe, mock.patch( - "posthog.warehouse.data_load.pipeline.get_schema", - ) as mock_data_tables, mock.patch( - "posthog.warehouse.models.table.DataWarehouseTable.get_columns" - ) as mock_get_columns, mock.patch( - "posthog.temporal.data_imports.external_data_job.move_draft_to_production" - ) as mock_move_draft_to_production, override_settings(**AWS_BUCKET_MOCK_SETTINGS): - mock_get_columns.return_value = {"id": "string"} - mock_data_tables.return_value = [ - SourceSchema( - resource="customers", - name="customers", - columns={ - "id": SourceColumnType(name="id", data_type="string", nullable=False), - "name": SourceColumnType(name="name", data_type="string", nullable=True), - }, - write_disposition="overwrite", - ) - ] - - await client.execute_workflow( - ExternalDataJobWorkflow.run, - inputs, - id=workflow_id, - task_queue=settings.TEMPORAL_TASK_QUEUE, - ) - mock_create_pipeline.assert_called_once_with( - StripeJobInputs( - job_type="Stripe", - team_id=team.id, - stripe_secret_key="test-key", - 
dataset_name=new_source.draft_folder_path,
-                )
-            )
-            mock_run_stripe.assert_called_once_with(stripe_secret_key="test-key", endpoints=ENDPOINTS)
-
-            assert mock_get_columns.call_count == 10
-
-            all_tables = DataWarehouseTable.objects.all()
-            table_length = await sync_to_async(len)(all_tables)  # type: ignore
-
-            assert table_length == 5
-
-            assert mock_move_draft_to_production.call_count == 1
-
-            new_job = await sync_to_async(ExternalDataJob.objects.first)()  # type: ignore
-            assert new_job.status == ExternalDataJob.Status.COMPLETED
diff --git a/posthog/test/base.py b/posthog/test/base.py
index 21a2fc17c68b7..a9c941f50b572 100644
--- a/posthog/test/base.py
+++ b/posthog/test/base.py
@@ -10,6 +10,10 @@
 from unittest.mock import patch
 
 import freezegun
+
+# we have to import pendulum for the side effect of importing it
+# freezegun.FakeDateTime and pendulum don't play nicely otherwise
+import pendulum  # noqa: F401
 import pytest
 import sqlparse
 from django.apps import apps
diff --git a/posthog/urls.py b/posthog/urls.py
index c271406c73469..343e699bb8eaa 100644
--- a/posthog/urls.py
+++ b/posthog/urls.py
@@ -54,11 +54,17 @@
 )
 from .year_in_posthog import year_in_posthog
 
+import structlog
+
+logger = structlog.get_logger(__name__)
+
 ee_urlpatterns: List[Any] = []
 try:
     from ee.urls import extend_api_router
     from ee.urls import urlpatterns as ee_urlpatterns
 except ImportError:
+    if settings.DEBUG:
+        logger.warning("Could not import ee.urls", exc_info=True)
     pass
 else:
     extend_api_router(
diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py
index 6358fdefc9348..8bbd390586d53 100644
--- a/posthog/warehouse/api/external_data_source.py
+++ b/posthog/warehouse/api/external_data_source.py
@@ -15,9 +15,12 @@
 from posthog.warehouse.data_load.service import (
     sync_external_data_job_workflow,
     trigger_external_data_workflow,
-    delete_external_data_workflow,
+    delete_external_data_schedule,
+    cancel_external_data_workflow,
+    delete_data_import_folder,
 )
-from posthog.warehouse.models import ExternalDataJob, ExternalDataSource
+from posthog.warehouse.models import ExternalDataSource
+from posthog.warehouse.models import ExternalDataJob
 
 logger = structlog.get_logger(__name__)
 
@@ -25,7 +28,7 @@ class ExternalDataSourceSerializers(serializers.ModelSerializer):
     account_id = serializers.CharField(write_only=True)
     client_secret = serializers.CharField(write_only=True)
-    status = serializers.SerializerMethodField(read_only=True)
+    last_run_at = serializers.SerializerMethodField(read_only=True)
 
     class Meta:
         model = ExternalDataSource
@@ -39,16 +42,18 @@ class Meta:
             "account_id",
             "source_type",
             "prefix",
+            "last_run_at",
         ]
-        read_only_fields = ["id", "source_id", "created_by", "created_at", "status", "source_type"]
+        read_only_fields = ["id", "source_id", "created_by", "created_at", "status", "source_type", "last_run_at"]
 
-    # TODO: temporary just to test
-    def get_status(self, instance: ExternalDataSource) -> str:
-        job = ExternalDataJob.objects.filter(pipeline_id=instance.id).order_by("-created_at").first()
-        if job:
-            return job.status
+    def get_last_run_at(self, instance: ExternalDataSource) -> str:
+        latest_completed_run = (
+            ExternalDataJob.objects.filter(pipeline_id=instance.pk, status="Completed", team_id=instance.team_id)
+            .order_by("-created_at")
+            .first()
+        )
 
-        return instance.status
+        return latest_completed_run.created_at if latest_completed_run else None
 
 
 class ExternalDataSourceViewSet(StructuredViewSetMixin, viewsets.ModelViewSet):
@@ -116,11 +121,37 @@ def 
prefix_exists(self, source_type: str, prefix: str) -> bool: def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: instance = self.get_object() - delete_external_data_workflow(instance) + + latest_running_job = ( + ExternalDataJob.objects.filter(pipeline_id=instance.pk, team_id=instance.team_id) + .order_by("-created_at") + .first() + ) + if latest_running_job and latest_running_job.workflow_id and latest_running_job.status == "Running": + cancel_external_data_workflow(latest_running_job.workflow_id) + + latest_completed_job = ( + ExternalDataJob.objects.filter(pipeline_id=instance.pk, team_id=instance.team_id, status="Completed") + .order_by("-created_at") + .first() + ) + if latest_completed_job: + try: + delete_data_import_folder(latest_completed_job.folder_path) + except Exception as e: + logger.exception( + f"Could not clean up data import folder: {latest_completed_job.folder_path}", exc_info=e + ) + pass + + delete_external_data_schedule(instance) return super().destroy(request, *args, **kwargs) @action(methods=["POST"], detail=True) def reload(self, request: Request, *args: Any, **kwargs: Any): instance = self.get_object() trigger_external_data_workflow(instance) + + instance.status = "Running" + instance.save() return Response(status=status.HTTP_200_OK) diff --git a/posthog/warehouse/api/table.py b/posthog/warehouse/api/table.py index 8b12ce04f1cfe..dde77e344e2f9 100644 --- a/posthog/warehouse/api/table.py +++ b/posthog/warehouse/api/table.py @@ -14,6 +14,7 @@ DataWarehouseSavedQuery, DataWarehouseTable, ) +from posthog.warehouse.api.external_data_source import ExternalDataSourceSerializers class CredentialSerializer(serializers.ModelSerializer): @@ -34,6 +35,7 @@ class TableSerializer(serializers.ModelSerializer): created_by = UserBasicSerializer(read_only=True) credential = CredentialSerializer() columns = serializers.SerializerMethodField(read_only=True) + external_data_source = ExternalDataSourceSerializers(read_only=True) class Meta: model = DataWarehouseTable @@ -47,8 +49,9 @@ class Meta: "url_pattern", "credential", "columns", + "external_data_source", ] - read_only_fields = ["id", "created_by", "created_at", "columns"] + read_only_fields = ["id", "created_by", "created_at", "columns", "external_data_source"] def get_columns(self, table: DataWarehouseTable) -> List[SerializedField]: return serialize_fields(table.hogql_definition().fields) diff --git a/posthog/warehouse/data_load/pipeline.py b/posthog/warehouse/data_load/pipeline.py deleted file mode 100644 index 1c6fd7f199aa5..0000000000000 --- a/posthog/warehouse/data_load/pipeline.py +++ /dev/null @@ -1,145 +0,0 @@ -from dataclasses import dataclass -from typing import Dict, List - -import dlt -import s3fs -from asgiref.sync import sync_to_async -from django.conf import settings -from dlt.pipeline.exceptions import PipelineStepFailed - -from posthog.warehouse.models import ExternalDataSource - -from .stripe import ENDPOINTS, stripe_source -import os - - -@dataclass -class PipelineInputs: - dataset_name: str - job_type: str - team_id: int - - -def create_pipeline(inputs: PipelineInputs): - pipeline_name = f"{inputs.job_type}_pipeline_{inputs.team_id}" - pipelines_dir = f"{os.getcwd()}/.dlt/{inputs.team_id}/{inputs.job_type}" - return dlt.pipeline( - pipeline_name=pipeline_name, - pipelines_dir=pipelines_dir, # workers can be created and destroyed so it doesn't matter where the metadata gets put temporarily - destination="filesystem", - dataset_name=inputs.dataset_name, - credentials={ - 
"aws_access_key_id": settings.AIRBYTE_BUCKET_KEY, - "aws_secret_access_key": settings.AIRBYTE_BUCKET_SECRET, - }, - ) - - -@dataclass -class SourceColumnType: - name: str - data_type: str - nullable: bool - - -@dataclass -class SourceSchema: - resource: str - name: str - columns: Dict[str, SourceColumnType] - write_disposition: str - - -@dataclass -class StripeJobInputs(PipelineInputs): - stripe_secret_key: str - - -PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING = {ExternalDataSource.Type.STRIPE: ENDPOINTS} - - -# Run pipeline on separate thread. No db clients used -@sync_to_async(thread_sensitive=False) -def run_stripe_pipeline(inputs: StripeJobInputs) -> List[SourceSchema]: - pipeline = create_pipeline(inputs) - - # TODO: decouple API calls so they can be incrementally read and sync_rows updated - source = stripe_source( - stripe_secret_key=inputs.stripe_secret_key, - endpoints=PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING[ExternalDataSource.Type.STRIPE], - ) - try: - pipeline.run(source, loader_file_format="parquet") - except PipelineStepFailed: - # TODO: log - raise - - return get_schema(pipeline) - - -def get_schema(pipeline: dlt.pipeline) -> List[SourceSchema]: - schema = pipeline.default_schema - data_tables = schema.data_tables() - schemas = [] - - for resource in data_tables: - columns = {} - try: - for column_name, column_details in resource["columns"].items(): - columns[column_name] = SourceColumnType( - name=column_details["name"], - data_type=column_details["data_type"], - nullable=column_details["nullable"], - ) - - resource_schema = SourceSchema( - resource=resource["resource"], - name=resource["name"], - columns=columns, - write_disposition=resource["write_disposition"], - ) - schemas.append(resource_schema) - except: - pass - - return schemas - - -PIPELINE_TYPE_INPUTS_MAPPING = {ExternalDataSource.Type.STRIPE: StripeJobInputs} -PIPELINE_TYPE_RUN_MAPPING = {ExternalDataSource.Type.STRIPE: run_stripe_pipeline} - - -def get_s3fs(): - return s3fs.S3FileSystem(key=settings.AIRBYTE_BUCKET_KEY, secret=settings.AIRBYTE_BUCKET_SECRET) - - -# TODO: Make this a proper async function with boto3... 
-def move_draft_to_production(team_id: int, external_data_source_id: str): - model = ExternalDataSource.objects.get(team_id=team_id, id=external_data_source_id) - bucket_name = settings.BUCKET_URL - s3 = get_s3fs() - try: - s3.copy( - f"{bucket_name}/{model.draft_folder_path}", - f"{bucket_name}/{model.draft_folder_path}_success", - recursive=True, - ) - except FileNotFoundError: - # TODO: log - pass - - try: - s3.delete(f"{bucket_name}/{model.folder_path}", recursive=True) - except FileNotFoundError: - # This folder won't exist on initial run - pass - - try: - s3.copy( - f"{bucket_name}/{model.draft_folder_path}_success", f"{bucket_name}/{model.folder_path}", recursive=True - ) - except FileNotFoundError: - pass - - s3.delete(f"{bucket_name}/{model.draft_folder_path}_success", recursive=True) - s3.delete(f"{bucket_name}/{model.draft_folder_path}", recursive=True) diff --git a/posthog/warehouse/data_load/service.py b/posthog/warehouse/data_load/service.py index 2377864b295ee..625de7efe721f 100644 --- a/posthog/warehouse/data_load/service.py +++ b/posthog/warehouse/data_load/service.py @@ -21,16 +21,21 @@ delete_schedule, ) from posthog.temporal.data_imports.external_data_job import ( - ExternalDataJobInputs, + ExternalDataWorkflowInputs, ExternalDataJobWorkflow, ) from posthog.warehouse.models import ExternalDataSource import temporalio +from temporalio.client import Client as TemporalClient +from asgiref.sync import async_to_sync + +from django.conf import settings +import s3fs def sync_external_data_job_workflow(external_data_source: ExternalDataSource, create: bool = False) -> str: temporal = sync_connect() - inputs = ExternalDataJobInputs( + inputs = ExternalDataWorkflowInputs( team_id=external_data_source.team.id, external_data_source_id=external_data_source.pk, ) @@ -42,7 +47,14 @@ def sync_external_data_job_workflow(external_data_source: ExternalDataSource, cr id=str(external_data_source.pk), task_queue=DATA_WAREHOUSE_TASK_QUEUE, ), - spec=ScheduleSpec(intervals=[ScheduleIntervalSpec(every=timedelta(hours=24))]), + spec=ScheduleSpec( + intervals=[ + ScheduleIntervalSpec( + every=timedelta(hours=24), offset=timedelta(hours=external_data_source.created_at.hour) + ) + ], + jitter=timedelta(hours=2), + ), state=ScheduleState(note=f"Schedule for external data source: {external_data_source.pk}"), policy=SchedulePolicy(overlap=ScheduleOverlapPolicy.CANCEL_OTHER), ) @@ -65,7 +77,7 @@ def pause_external_data_workflow(external_data_source: ExternalDataSource): pause_schedule(temporal, schedule_id=str(external_data_source.id)) -def delete_external_data_workflow(external_data_source: ExternalDataSource): +def delete_external_data_schedule(external_data_source: ExternalDataSource): temporal = sync_connect() try: delete_schedule(temporal, schedule_id=str(external_data_source.id)) @@ -74,3 +86,23 @@ def delete_external_data_workflow(external_data_source: ExternalDataSource): if e.status == temporalio.service.RPCStatusCode.NOT_FOUND: return raise + + +def cancel_external_data_workflow(workflow_id: str): + temporal = sync_connect() + cancel_workflow(temporal, workflow_id) + + +@async_to_sync +async def cancel_workflow(temporal: TemporalClient, workflow_id: str): + handle = temporal.get_workflow_handle(workflow_id) + await handle.cancel() + + +def delete_data_import_folder(folder_path: str): + s3 = s3fs.S3FileSystem( + key=settings.AIRBYTE_BUCKET_KEY, + secret=settings.AIRBYTE_BUCKET_SECRET, + ) + bucket_name = settings.BUCKET_URL + s3.delete(f"{bucket_name}/{folder_path}", recursive=True) diff --git 
a/posthog/warehouse/data_load/stripe/__init__.py b/posthog/warehouse/data_load/stripe/__init__.py deleted file mode 100644 index d48083af190d8..0000000000000 --- a/posthog/warehouse/data_load/stripe/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -""" This source uses Stripe API and dlt to load data such as Customer, Subscription, Event etc. to the database and to calculate the MRR and churn rate. """ - -from typing import Any, Dict, Generator, Iterable, Optional, Tuple - -import dlt -import stripe -from dlt.extract.source import DltResource -from pendulum import DateTime - -from .helper import pagination -from .settings import ENDPOINTS - - -@dlt.source -def stripe_source( - endpoints: Tuple[str, ...] = ENDPOINTS, - stripe_secret_key: str = dlt.secrets.value, - start_date: Optional[DateTime] = None, - end_date: Optional[DateTime] = None, -) -> Iterable[DltResource]: - """ - Retrieves data from the Stripe API for the specified endpoints. - - For all endpoints, Stripe API responses do not provide the key "updated", - so in most cases, we are forced to load the data in 'replace' mode. - This source is suitable for all types of endpoints, including 'Events', 'Invoice', etc. - but these endpoints can also be loaded in incremental mode (see source incremental_stripe_source). - - Args: - endpoints (Tuple[str, ...]): A tuple of endpoint names to retrieve data from. Defaults to most popular Stripe API endpoints. - stripe_secret_key (str): The API access token for authentication. Defaults to the value in the `dlt.secrets` object. - start_date (Optional[DateTime]): An optional start date to limit the data retrieved. Format: datetime(YYYY, MM, DD). Defaults to None. - end_date (Optional[DateTime]): An optional end date to limit the data retrieved. Format: datetime(YYYY, MM, DD). Defaults to None. - - Returns: - Iterable[DltResource]: Resources with data that was created during the period greater than or equal to 'start_date' and less than 'end_date'. 
- """ - stripe.api_key = stripe_secret_key - stripe.api_version = "2022-11-15" - - def stripe_resource( - endpoint: str, - ) -> Generator[Dict[Any, Any], Any, None]: - for item in pagination(endpoint, start_date, end_date): - yield item - - for endpoint in endpoints: - yield dlt.resource( - stripe_resource, - name=endpoint, - write_disposition="replace", - )(endpoint) diff --git a/posthog/warehouse/data_load/sync_table.py b/posthog/warehouse/data_load/sync_table.py index cc8eee3202e25..cd027a262d783 100644 --- a/posthog/warehouse/data_load/sync_table.py +++ b/posthog/warehouse/data_load/sync_table.py @@ -1,17 +1,14 @@ -from typing import List - -import structlog from django.conf import settings from django.db.models import Q -from posthog.warehouse.data_load.pipeline import ( +from posthog.temporal.data_imports.pipelines.stripe.stripe_pipeline import ( PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING, - SourceSchema, ) from posthog.warehouse.models import DataWarehouseCredential, DataWarehouseTable -from posthog.warehouse.models.external_data_source import ExternalDataSource - -logger = structlog.get_logger(__name__) +from posthog.warehouse.models.external_data_job import ExternalDataJob +from posthog.temporal.common.logger import bind_temporal_worker_logger +import s3fs +from asgiref.sync import async_to_sync class SchemaValidationError(Exception): @@ -19,60 +16,95 @@ def __init__(self): super().__init__(f"Schema validation failed") +def get_latest_run_if_exists(team_id: int, pipeline_id: str) -> ExternalDataJob | None: + job = ( + ExternalDataJob.objects.filter( + team_id=team_id, pipeline_id=pipeline_id, status=ExternalDataJob.Status.COMPLETED + ) + .order_by("-created_at") + .first() + ) + + return job + + +def get_s3_client(): + return s3fs.S3FileSystem( + key=settings.AIRBYTE_BUCKET_KEY, + secret=settings.AIRBYTE_BUCKET_SECRET, + ) + + # TODO: make async -def is_schema_valid(source_schemas: List[SourceSchema], external_data_source_id: str, create: bool = False) -> bool: - resource = ExternalDataSource.objects.get(pk=external_data_source_id) +def validate_schema_and_update_table(run_id: str, team_id: int) -> None: + logger = async_to_sync(bind_temporal_worker_logger)(team_id=team_id) + + job = ExternalDataJob.objects.get(pk=run_id) + last_successful_job = get_latest_run_if_exists(job.team_id, job.pipeline_id) + s3 = get_s3_client() + bucket_name = settings.BUCKET_URL + credential, _ = DataWarehouseCredential.objects.get_or_create( - team_id=resource.team_id, + team_id=job.team_id, access_key=settings.AIRBYTE_BUCKET_KEY, access_secret=settings.AIRBYTE_BUCKET_SECRET, ) - # TODO: currently not using source_schemas - source_schemas = PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING[resource.source_type] + source_schemas = PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING[job.pipeline.source_type] - for schema_name in source_schemas: - table_name = f"{resource.prefix or ''}{resource.source_type}_{schema_name}".lower() + def get_url_pattern(folder_path: str, schema_name: str) -> str: + return f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/dlt/{folder_path}/{schema_name.lower()}/*.parquet" - folder_path = resource.folder_path if create else resource.draft_folder_path - url_pattern = f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/dlt/{folder_path}/{schema_name.lower()}/*.parquet" + for _schema_name in source_schemas: + table_name = f"{job.pipeline.prefix or ''}{job.pipeline.source_type}_{_schema_name}".lower() + new_url_pattern = get_url_pattern(job.folder_path, _schema_name) + # Check data = { "credential": credential, "name": 
table_name, "format": "Parquet", - "url_pattern": url_pattern, - "team_id": resource.team_id, + "url_pattern": new_url_pattern, + "team_id": job.team_id, } - if create: - exists = ( - DataWarehouseTable.objects.filter( - team_id=resource.team_id, external_data_source_id=resource.id, url_pattern=url_pattern - ) - .filter(Q(deleted=False) | Q(deleted__isnull=True)) - .exists() - ) - - if exists: - table = DataWarehouseTable.objects.filter(Q(deleted=False) | Q(deleted__isnull=True)).get( - team_id=resource.team_id, external_data_source_id=resource.id, url_pattern=url_pattern - ) - else: - table = DataWarehouseTable.objects.create(external_data_source_id=resource.id, **data) - else: - table = DataWarehouseTable(**data) + table = DataWarehouseTable(**data) try: table.columns = table.get_columns() except Exception as e: logger.exception( - f"Data Warehouse: Sync Resource failed with an unexpected exception for connection: {resource.pk}", + f"Data Warehouse: Sync Resource failed with an unexpected exception for connection: {job.pipeline.pk}", exc_info=e, ) raise SchemaValidationError() - else: - if create: - table.save() - return True + # create or update + + table_created = None + if last_successful_job: + old_url_pattern = get_url_pattern(last_successful_job.folder_path, _schema_name) + try: + table_created = DataWarehouseTable.objects.filter(Q(deleted=False) | Q(deleted__isnull=True)).get( + team_id=job.team_id, external_data_source_id=job.pipeline.id, url_pattern=old_url_pattern + ) + table_created.url_pattern = new_url_pattern + table_created.save() + except Exception: + table_created = None + + if not table_created: + table_created = DataWarehouseTable.objects.create(external_data_source_id=job.pipeline.id, **data) + + table_created.columns = table_created.get_columns() + table_created.save() + + if last_successful_job: + try: + s3.delete(f"{bucket_name}/{last_successful_job.folder_path}", recursive=True) + except Exception as e: + logger.exception( + f"Data Warehouse: Could not delete deprecated data source {last_successful_job.pk}", + exc_info=e, + ) + pass diff --git a/posthog/warehouse/external_data_source/jobs.py b/posthog/warehouse/external_data_source/jobs.py index 46b2b77014376..12ab1d53b1543 100644 --- a/posthog/warehouse/external_data_source/jobs.py +++ b/posthog/warehouse/external_data_source/jobs.py @@ -8,19 +8,36 @@ def get_external_data_source(team_id: str, external_data_source_id: str) -> Exte return ExternalDataSource.objects.get(team_id=team_id, id=external_data_source_id) -def create_external_data_job(external_data_source_id: str, team_id: str) -> ExternalDataJob: +def get_external_data_job(team_id: str, run_id: str) -> ExternalDataJob: + return ExternalDataJob.objects.prefetch_related("pipeline").get(id=run_id, team_id=team_id) + + +def create_external_data_job( + external_data_source_id: str, + workflow_id: str, + team_id: str, +) -> ExternalDataJob: job = ExternalDataJob.objects.create( - team_id=team_id, pipeline_id=external_data_source_id, status=ExternalDataJob.Status.RUNNING, rows_synced=0 + team_id=team_id, + pipeline_id=external_data_source_id, + status=ExternalDataJob.Status.RUNNING, + rows_synced=0, + workflow_id=workflow_id, ) return job -def update_external_job_status(run_id: UUID, status: str, latest_error: str | None) -> ExternalDataJob: - model = ExternalDataJob.objects.filter(id=run_id) - updated = model.update(status=status, latest_error=latest_error) +def update_external_job_status(run_id: UUID, team_id: str, status: str, latest_error: str | None) -> 
ExternalDataJob: + model = ExternalDataJob.objects.get(id=run_id, team_id=team_id) + model.status = status + model.latest_error = latest_error + model.save() + + pipeline = ExternalDataSource.objects.get(id=model.pipeline_id, team_id=team_id) + pipeline.status = status + pipeline.save() - if not updated: - raise ValueError(f"ExternalDataJob with id {run_id} not found.") + model.refresh_from_db() - return model.get() + return model diff --git a/posthog/warehouse/models/external_data_job.py b/posthog/warehouse/models/external_data_job.py index 207a8171863f5..e15b3e36f3dbc 100644 --- a/posthog/warehouse/models/external_data_job.py +++ b/posthog/warehouse/models/external_data_job.py @@ -19,4 +19,10 @@ class Status(models.TextChoices): null=True, help_text="The latest error that occurred during this run." ) + workflow_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) + __repr__ = sane_repr("id") + + @property + def folder_path(self) -> str: + return f"team_{self.team_id}_{self.pipeline.source_type}_{str(self.pk)}".lower().replace("-", "_") diff --git a/posthog/warehouse/models/external_data_source.py b/posthog/warehouse/models/external_data_source.py index 4c828bf3a93c6..bdb420efbaa67 100644 --- a/posthog/warehouse/models/external_data_source.py +++ b/posthog/warehouse/models/external_data_source.py @@ -22,11 +22,3 @@ class Type(models.TextChoices): prefix: models.CharField = models.CharField(max_length=100, null=True, blank=True) __repr__ = sane_repr("source_id") - - @property - def folder_path(self) -> str: - return f"team_{self.team_id}_{self.source_type}_{str(self.pk)}".lower().replace("-", "_") - - @property - def draft_folder_path(self) -> str: - return f"team_{self.team_id}_{self.source_type}_{str(self.pk)}_draft".lower().replace("-", "_")
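# Illustrative aside (not part of the patch): the per-run storage layout that replaces the
# old draft/production folder pair. Values are hypothetical; the two format strings mirror
# ExternalDataJob.folder_path and get_url_pattern from the hunks above.
team_id = 1
source_type = "Stripe"
run_id = "018d2f0a-4e5f-6789-abcd-ef0123456789"  # the ExternalDataJob primary key
bucket_domain = "objectstorage.example.com"  # stand-in for settings.AIRBYTE_BUCKET_DOMAIN
schema_name = "Customer"  # one of the Stripe ENDPOINTS

folder_path = f"team_{team_id}_{source_type}_{run_id}".lower().replace("-", "_")
url_pattern = f"https://{bucket_domain}/dlt/{folder_path}/{schema_name.lower()}/*.parquet"

assert folder_path == "team_1_stripe_018d2f0a_4e5f_6789_abcd_ef0123456789"
# Each run writes under its own folder_path; on success, validate_schema_and_update_table
# repoints the DataWarehouseTable's url_pattern at the new folder and deletes the previous
# successful run's folder, so no draft-to-production copy step is needed anymore.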