diff --git a/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap b/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap index 849d883b906cf..8e19e415a5dbe 100644 --- a/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap +++ b/ee/frontend/mobile-replay/__snapshots__/transform.test.ts.snap @@ -48,7 +48,7 @@ exports[`replay/transform transform can convert images 1`] = ` "childNodes": [ { "attributes": { - "style": "color: #ffffff;width: 100px;height: 30px;position: absolute;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", + "style": "color: #ffffff;width: 100px;height: 30px;position: fixed;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", }, "childNodes": [ { @@ -65,7 +65,7 @@ exports[`replay/transform transform can convert images 1`] = ` "attributes": { "height": 30, "src": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAApgAAAKYB3X3/OAAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAANCSURBVEiJtZZPbBtFFMZ/M7ubXdtdb1xSFyeilBapySVU8h8OoFaooFSqiihIVIpQBKci6KEg9Q6H9kovIHoCIVQJJCKE1ENFjnAgcaSGC6rEnxBwA04Tx43t2FnvDAfjkNibxgHxnWb2e/u992bee7tCa00YFsffekFY+nUzFtjW0LrvjRXrCDIAaPLlW0nHL0SsZtVoaF98mLrx3pdhOqLtYPHChahZcYYO7KvPFxvRl5XPp1sN3adWiD1ZAqD6XYK1b/dvE5IWryTt2udLFedwc1+9kLp+vbbpoDh+6TklxBeAi9TL0taeWpdmZzQDry0AcO+jQ12RyohqqoYoo8RDwJrU+qXkjWtfi8Xxt58BdQuwQs9qC/afLwCw8tnQbqYAPsgxE1S6F3EAIXux2oQFKm0ihMsOF71dHYx+f3NND68ghCu1YIoePPQN1pGRABkJ6Bus96CutRZMydTl+TvuiRW1m3n0eDl0vRPcEysqdXn+jsQPsrHMquGeXEaY4Yk4wxWcY5V/9scqOMOVUFthatyTy8QyqwZ+kDURKoMWxNKr2EeqVKcTNOajqKoBgOE28U4tdQl5p5bwCw7BWquaZSzAPlwjlithJtp3pTImSqQRrb2Z8PHGigD4RZuNX6JYj6wj7O4TFLbCO/Mn/m8R+h6rYSUb3ekokRY6f/YukArN979jcW+V/S8g0eT/N3VN3kTqWbQ428m9/8k0P/1aIhF36PccEl6EhOcAUCrXKZXXWS3XKd2vc/TRBG9O5ELC17MmWubD2nKhUKZa26Ba2+D3P+4/MNCFwg59oWVeYhkzgN/JDR8deKBoD7Y+ljEjGZ0sosXVTvbc6RHirr2reNy1OXd6pJsQ+gqjk8VWFYmHrwBzW/n+uMPFiRwHB2I7ih8ciHFxIkd/3Omk5tCDV1t+2nNu5sxxpDFNx+huNhVT3/zMDz8usXC3ddaHBj1GHj/As08fwTS7Kt1HBTmyN29vdwAw+/wbwLVOJ3uAD1wi/dUH7Qei66PfyuRj4Ik9is+hglfbkbfR3cnZm7chlUWLdwmprtCohX4HUtlOcQjLYCu+fzGJH2QRKvP3UNz8bWk1qMxjGTOMThZ3kvgLI5AzFfo379UAAAAASUVORK5CYII=", - "style": "width: 100px;height: 30px;position: absolute;left: 25px;top: 42px;", + "style": "width: 100px;height: 30px;position: fixed;left: 25px;top: 42px;", "width": 100, }, "childNodes": [], @@ -147,7 +147,7 @@ exports[`replay/transform transform can convert rect with text 1`] = ` "childNodes": [ { "attributes": { - "style": "width: 100px;height: 30px;position: absolute;left: 11px;top: 12px;", + "style": "width: 100px;height: 30px;position: fixed;left: 11px;top: 12px;", "viewBox": "0 0 100 30", }, "childNodes": [ @@ -157,7 +157,7 @@ exports[`replay/transform transform can convert rect with text 1`] = ` "height": 30, "rx": "10px", "stroke": "#ee3ee4", - "stroke-width": "4", + "stroke-width": "4px", "width": 100, "x": 0, "y": 0, @@ -174,7 +174,7 @@ exports[`replay/transform transform can convert rect with text 1`] = ` }, { "attributes": { - "style": "width: 100px;height: 30px;position: absolute;left: 13px;top: 17px;overflow:hidden;white-space:nowrap;", + "style": "width: 100px;height: 30px;position: fixed;left: 13px;top: 17px;overflow:hidden;white-space:nowrap;", }, "childNodes": [ { @@ -346,7 +346,7 @@ exports[`replay/transform transform can process unknown types without error 1`] "attributes": { "height": 30, "src": 
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAZAAAAGQCAIAAAAP3aGbAAAViUlEQVR4nO3d0XLjOA5GYfTWPuO8Y+Yp98JTGa8lSyJBEvjBcy5nA4mbbn/lKDT7z8/PjxERKfSf6AUQET0NsIhIJsAiIpkAi4hkAiwikum/z7/0r7/+uvhf//777+5LeWaJyFPUK7dp9reGd1jXN2hy5ONSnlki6k5LK2v9kRCziMokodXHbPMzLMwiKpCiVtb30B2ziKQT1cq6f0uIWUSi6Wplnm0Nt2Y9p+doVvcsEV0krZWdgjUQC8+lMItobIpafcyev8PCLKJiJRHHOfv1R0LMIipTHnGcs1fPsDCLqECpxHHO3jx0xywi6bKJ45y9/y0hZhGJllAc5+wJWFOxwCyiNeUUxzl7/g4Ls4ikSyuOc/brj4SYRSRaZnGcs1fPsDCLSK7k4jhnbx66O7EY9ZFDzCJ6Un5xnLP3vyU8ujPw88mYRTQqCXGcJ/k9/fAzZhFlTkIc/7mj/UckYxZRkiTEWX2m+/EemEUUnoQ4MWe6H++EWUSBSYgTeab78X6KZsEWFUhRHM+sjToieeAZoWvMul0GUfIUxXFqZWOPSA4xa97bPaK0KYrj18qGH5G83qzjpTCLaqcozhCtbMYRyZhFNC9FcQbOTjkiGbOIZpREjcDZWUckYxbR2PKoETg78YjkJGaxRYsKlEqNwNm5RyRnMGvgMohCyqZG4Oz0I5IHmpVhGUSLS6hG4OyKI5IHYpFkGURryqlG4OyiI5Ixi6i1tGoEzq47IhmziJ6XWY3A2blHJA+czbkMohklVyNw9tERydc3mzeb4WPSmEWLy69G4OyjDz9HmXU6vn4ZmEXLklAjata6z3Q/3njS7On4+mVgFi1IQo1Arcxzpvvx9tez8z43g1lUIAk1YrUy55nux0U0jWMW0SsJNcK1sr4z3Qe6g1lEEmokmR12RHLI7EeYRXIpqhE1awOPSE5ypgJmkVCKagRqZR6wjrdPcqYCZpFEimrEamXOM92Pi8AsoicpqhGulfnPdD8uxfnznZxZsEWtKaqRQSsbcqb76YL2Met2GUTvKaqRRCu7PdM9yh3MopIpqpFHK8t8RDJmUbEU1UillT38LeFWZs3bJkY7p6hGNq3s+baGfcw6XgqzyJmiGgm1sqZ9WJg1ZBm0W4pq5NTKWjeObm4WW7SoNUU10mplHTvddzZr4DJohxTVyKyV9X00B7OGLINqp6hGcq2s+7OEEmZdfAcxi6amqEZ+rczz4ef8Zp2Ov18nw9s9qpeiGhJamfO0BnWzli0Ds/ZJUQ0VrcwJVtMKMOvhV5JuimoIaWV+sJrWMfaZFGZRqhTV0NLKhoBlce+VMIuSpKiGnFY2CizDLMzaOEU1FLWygWBZ3M93A/egYxa1pqiGqFY2FiwLfa+EWbQ+RTV0tbLhYBlmYdY2KaohrZXNAMswC7M2SFENda1sElgW+kwKs2h2imoU0MrmgWWh75Uwi+alqEYNrWwqWIZZmFUuRTXKaGWzwbLQn+8wi8amqEYlrWwBWK8w6+EsbKVNUY1iWtkysAyzHodZCVNUo55WthIsw6zHYVaqFNUoqZUtBssw63GYlSRFNapqZevBss3MGrstlhanqEZhrSwELNvJrOOlMEslRTVqa2VRYJmIWRd/hJhVO0U1ymtlgWCZglmn40Ou49yeRlNTVGMHrSwWLNvbrKZxzFqWohqbaGXhYBlmYVamFNXYRyvLAJbFPZPCLHpPUY2ttLIkYFnce6WBZmX49SV1p6jGblpZHrBM36ymS2FWqhTV2FArSwWWhZqltRUeswamqMaeWlk2sCz0OTpmbZiiGttqZQnBstDn6Ji1VYpq7KyV5QTLQp9JYdYmKaqxuVaWFiwLfSaFWeVTVAOtLDNYFvpeCbMKp6gGWr1KDZZhFmaNTlENtPotO1iGWZg1LkU10Oo9AbAMszBrRIpqoNVHGmBZ6HN0zCqQohpodUwGrFeY5V/DhimqgVaniYFlmPV4FrZeKaqBVt/SA8sw63GYpagGWl0kCZaJmHXxtwezFqSoBlpdpwqWKZh1Op5qDYVTVAOtbhMGyzBrxBpKpqgGWj1JGyzDrBFrKJaiGmj1MHmwbDOzPq6DWR8pqoFWz6sAlsU9R19v1vE6zi21lVJUA62aKgKWxb1XymBW03hVsxTVQKvW6oBloWYN8QKzulNUA606KgWWhT6TwqyoFNVAq76qgWWYtZlZimqgVXcFwTLM2sYsRTXQylNNsCz0d3+jzIrarqGSohpo5awsWBb6HH2UFxnWkDNFNdDKX2WwLPS9EmbNS1ENtBpScbAMs8qZpagGWo2qPliGWYXMUlQDrQa2BViGWSXMUlQDrca2C1iGWeJmKaqBVsPbCCwL/d0fZnlSVAOtZrQXWK8wy7+GlSmqgVaT2hEsEzFryFZ4dbMU1UCreW0KlimYdTq+eA2xZimqgVZT2xcsw6zHsyFsKaqBVrPbGizDrBFrmJGiGmi1oN3BMswasYaxKaqBVmsCLDPMGrGGUSmqgVbLAqx/wiz/GvwpqoFWKwOsf4t6hu3cztq3hu7Z6zV4UlQDrRYHWP9X4Gt+c7MU1UCr9QHWZ5ubFbJFS1ENtAoJsE7ayqzwbaWKaqBVVIB13j5mzVvD8PEkaqBVYID1Nczyr2HgYBI10Co2wLrK+bu/5zfa0CxFNdAqPMC6yfOadz7DLmyWohpolSHAui/wNV/SLEU10CpJgPUozPKv4eEXXNx3t1k6BlhPK2BW+BoU1UCrVAFWQ+pmxa5BUQ20yhZgtYVZfbOKaqBVwgCrucCfrZxb2Nev4dtFLkqiBlrlDLA6y2/W6fjiNVy7ebuq3WbpNsDqD7PGlkQNtMocYLnCrFElUQOtkgdY3jDLXxI10Cp/gDUgzPKURA20kgiwxoRZfSVRA61UAqxhBZq1+OM7o8xKogZaCQVYIwvcKyBnVhI10EorwBpc4M93sWY1vXo/vl5RHLQKCbDGt6FZrVq9TymKg1ZRAdaUtjKrT6vbNRxLIg5aBQZYs9rHrDUlEQetYgOsiWHWqJKIg1bhAdbcMMtfEnHQKkOANT3P8yDnHqsCZiURB62SBFgrCnyvFG5Wk7nX90UrAqxF7WnW71THeBJx0CpVgLWu3cza/DOGNCPAWpq0Wcv2sntm0ap2gLU6UbOW7WX3zKJV+QArIBWznJ+8WTyLVjsEWDEF7ldo5dIjzrLQapMAKzIJs/KHVvsEWMFhljO02irAig+zukOr3QKsFEmb5dzLrr4PnlYGWFkSNWvIXnbdffC0OMBKVKBZzk/exM6i1T4BVq7W77HqGD/9+t32wVNIgJWulXusrq/WfammKcV98BQVYGVsvVkdamy7D54C+/Pz8/Pxn/jjSdLzF9Xxj0xie/qy0Eq69z8v3mHlLXy/Qo3QqlKAlTrMcoZWxQKs7GFWd2hVL8ASKGSP1agK7IOnPAGWRs7zQsN3eOrug6dUAZZMi/eFvk9p7YP/CK0qBVhKefaFto5/fL3Q
Pnjr3SOGVvkDLLG09rJ7Zj374K39rFS0kgiw9JLYy/7rRZSSTaGVSux0V4297KPiL3zy2OleIV5mQ+LbqBVgCceLzRnfQLkAS7vwfaGxJdk5QcsCrArpvvb8e9nX7xGjwACrSOF72T2zzr3szj1iJBRg1SlqL3vfu6SovewkHWCVKnAvu2d2/R6x1hFKEmCVauUuzZB/ReLjIp7ZvkGKDbDqtP5c9sX/isTYMEsxdroXiZdfX/xtzx873auFVt3xrdMKsOTjJeeMb6BQgKVd+Rfbmj1inuf3tDLAEq78a2zIHrHnI+W/nwUCLNWWnU6X4V+RWLa/DLOSB1iSdWjl/Myd0L8icXu16zArc4ClV/d7q8C97J7ZIXvZMatGgCWW5ydBlb3so/bBf/wXzCoQYCm1Uqv3qZBZ/2/uMKtegCXTeq1iZ4fk/Keww9dPHwGWRlFaFcj5T2Fv/t3LFmAJhFbOMKtMgJU9tLrIiQ5myQVYqUOri1r3l2FWgQArb8u0CtzLvngf/OlvHjFLKMBK2kqtTi+ycta/o9W5XwGzVAKsjK3XqqPYvey317wOs0QDrHRFaSW0l/3bCGaVD7BypaLV+1TIme6YtWeAlSgtrWJnL8Yxq3CAlSVFrcLDrN0CrBShVXcXZnncwaycAVZ8aOXs4v8IZhULsIJDq4uGkIFZlQKsyCS0it3Ljln0HmCFpaLV6UWWzTaNY1b5ACsmLa068sx6roZZtQOsgBS1Ct/LrmUWbE0KsFYnIU74PvjTwSFkrDHrdJz8AdbSJMRJsg/+dHwIGZilG2CtS0KcbLsfMIveA6xFSYiTTatXmEW/AdaKJMTJqdUrzKJXgDU9CXGitAonI3wB1BRgzU1CnMB98Me7X5TTLLZorQywJqYijn82cC/7EDKcV8CsZQHWrLTE8c86CycjfAH0JMCakqI4i595Dd/Ljlk7BFjjk1AjwxN6zKLWAGtwEmpk0OrbeDgZzg8AYdbUAGtkEmrk0erbRcLJcP7yEbPmBVjDklAjm1bfLhVORvgC6DTAGpOEGjm1+nbBcDLCF0DHAGtAEmokmb0Is+g2wPKmqMbiWczyLIDeAyxXQmrEzmKWZwH0G2D1J6dG4Ozpf/lWTrPWrB+zrgOszhTVCH9uFfiZwSFkhC+AAKsnRTXCtXryv97eNJyM8AVsHmA1p6hGEq2ef83FrZeRMePjO5jlDLDaUlQjlVatXxlo1re7L1sAZh0DrIYU1UioVevXD//8jZZZsPUeYD1NUY20WnVMbWvWxQI2DLAepahGcq06ZjGLAOs+RTUktOq4AmZtHmDdpKiGkFYd18GsnQOsqxTVkNOq42rDzVr58R3M8gRYX1NUQ1SrjmsO3y6AWRIB1nmKakhr9XvlwC1O4WaxRes2wDpJUY0CWnXcophZQxZQO8D6TFGNSlq13giztgqw/i9FNepp1Xq74VvJMSttgPVvimpU1ar1psMfY2NWzgDrnxTVqK1V660xa4cAy0xTjR20al0AZpUPsCTV2EerV/uYNWMBldodLEU1dtPq1SZmTVpAmbYGS1GNPbV6hVmeBdRoX7AU1dhZq1eBW8kxK0ObgqWoBlq9CnyM7Xwa1XQF/wKe30ioHcFSVAOt3pMwy3935wJKmrUdWIpqoNUxzOq+u3R7gaWoBlp9S92s2Adqom0ElqIaaHWdtFnLFlDJrF3AUlQDrZ4UuC0Ts9a3BViKaqDV8wIfCWmZVYCt+mApqoFWrcWatfjjO92zFwtQqThYimqgVV+xv3rDrDVVBktRDbTyhFnOBeSvLFiKaqCVP8xyLiB5NcFSVAOtRoVZzgVkriBYimqg1dhit2Vi1ryqgaWoBlpNKnC7QLhZVbdolQJLUQ20mlrgW5VYs4YsIGF1wFJUA60WhFmeBWSrCFiKaqDVsjDLs4BUVQBLUQ20WhxmeRaQJ3mwFNVAq5Awy7OAJGmDpagGWgWGWZ4FZEgYLEU10Cq8wO0Czq0SrVcYPp4hVbAU1UCrJAW+7P27Ojc3SxIsRTXQKlX7mDXjh9PA9MBSVAOtEraJWc4FZDNLDCxFNdAqbZjVffeolMBSVAOtkodZ3XcPSQYsRTXQSiLM6r77+jTAUlQDrYQKfIzt3CrRdPdvC/DcfXECYCmqgVZyxb7sMeth2cFSVAOtRFM3a9mm1odfOaPUYCmqgVbSSZvlvIKEWXnBUlQDrQqEWQ9nQ9hKCpaiGmhVJsxyLmBeGcFSVAOtioVZzgVMKh1YimqgVcliH2Nj1mm5wFJUA61qF/hICLOOJQJLUQ202qHAlz1mfZQFLEU10GqfMMuzgIGlAEtRDbTarZ3NyrNFKx4sRTXQas+2NWvIAoYUDJaiGmi1cypmTfr0j2d8SJFgKaqBViRh1ul40xVymhUGlqIaaEWvAh8JbW5WDFiKaqAVvRf4st/ZrACwFNVAKzqGWd3j3a0GS1ENtKJvYVb3eF9LwVJUA63oOszqHu9oHViKaqAVPSnWrJV3n7FBrKlFYCmqgVb0vNi3KsvGT6+w0qwVYCmqgVbUWuxblU3Mmg6WohpoRX3Fvux3MGsuWIpqoBV5wqzuuz9pIliKaqAV+cOs7rvfNgssRTXQikaFWd13v24KWIpqoBWNTd2sNRvEWs0aD5aiGmhFM5I2y3kF5+9MvzUYLEU10IrmtfKtyox3OrEb8Y+NBEtRDbSiBQW+7IuZNQwsRTXQipaFWZ4F/DYGLEU10IoWh1meBbwaAJaiGmhFIWGWZwHmB0tRDbSiwFTMGv6bxyFmucBSVAOtKDwJs07Hm65wenfnG71+sBTVQCtKUuDLPtYs53gnWIpqoBWlKvBlr2tWD1iKaqAVJQyzWmsGS1ENtKK0YVZTbWApqoFWlDzMel4DWIpqoBVJFGtW4N2bxu05WIpqoBUJpaJG7PgjsBTVQCuSS0WNwPF7sBTVQCsSzbkZXQIdz/gNWIpqoBVJF/uZweTjV2ApqoFWVKB9zGpd/FewFNVAKyrTJmadXuFi/BwsRTXQioqFWcdOwFJUA62oZJj1ket4mSRqoBUVbqVZgeSdXuE43g9WEjXQisrn3Iyu8jbtyd07wUqiBlrRPm1r1ns9YCVRA61ot6TNcl7hVTNYSdRAK9ozFbNmPMW3VrCSqIFWtHMSZp2ON13hdLwBrCRqoBXRtmb9+fn5eTKZRA20Ivrt+V/p07/PnnH/q6npCr89eoeVRA20InovcF/oyvdZ792DlUQNtCI6tptZN2AlUQOtiL61lVlXYCVRA62Iros1a+W20q9gJVEDrYiepPI5Z+f4OVhJ1EArouepoOMZPwEriRpoRdSaBDqecc50RysqVX50POOc6U5UreToeMY5052oYM4PKqc1izPdiWq28mwGz92bxjnTnahs9cza/Ux3otoVM2vrM92JdmilWbPJ2/dMd6J9WmaWc/z2Fb3pme5Eu1XDrB3PdCfaM+cHlWN/tHy13ZnuRJsnYdbpuO12pjsRmbJZT8FKIg5aEQ1J1KxdznQnoo+cZi0j770tznQnotMC94X2vUjrn+lORBdpmVX8THc
iui3WrKbXbOUz3YnoYdk+M/itsme6E1FTEmbVPNOdiDrKb1bBM92JqLvkZlU7052InGU2q9SZ7kQ0pLRm1TnTnYgGltOsPz8/Pw+vexpaERXO+ap8Pn76iu7/8POTe6AVUbFiz+07jsuf6U5EU0tllvaZ7kS0oFiz3hM+052IlpXhfGTTPdOdiBYXfj6yiZ7pTkQhhZuld6Y7EQW28qxR+TPdiShDUWYpnelORHniTPerWSLKFme6n88SUc5Wno9sEme6E1Hmln1M2vKf6U5E+Vtmlve0BiKiZblOayAiWhlgEZFMgEVEMv0PU/uJezostYUAAAAASUVORK5CYII=", - "style": "width: 100px;height: 30px;position: absolute;left: 25px;top: 42px;", + "style": "width: 100px;height: 30px;position: fixed;left: 25px;top: 42px;", "width": 100, }, "childNodes": [], @@ -451,17 +451,17 @@ exports[`replay/transform transform child wireframes are processed 1`] = ` "childNodes": [ { "attributes": { - "style": "overflow:hidden;white-space:nowrap;", + "style": "position: fixed;left: 0px;top: 0px;overflow:hidden;white-space:nowrap;", }, "childNodes": [ { "attributes": { - "style": "overflow:hidden;white-space:nowrap;", + "style": "position: fixed;left: 0px;top: 0px;overflow:hidden;white-space:nowrap;", }, "childNodes": [ { "attributes": { - "style": "color: #ffffff;background-color: #000000;border-width: 4px;border-radius: 10px;border-color: #000ddd;border-style: solid;width: 100px;height: 30px;position: absolute;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", + "style": "color: #ffffff;background-color: #000000;border-width: 4px;border-radius: 10px;border-color: #000ddd;border-style: solid;width: 100px;height: 30px;position: fixed;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", }, "childNodes": [ { @@ -476,7 +476,7 @@ exports[`replay/transform transform child wireframes are processed 1`] = ` }, { "attributes": { - "style": "color: #ffffff;background-color: #000000;border-width: 4px;border-radius: 10px;border-color: #000ddd;border-style: solid;width: 100px;height: 30px;position: absolute;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", + "style": "color: #ffffff;background-color: #000000;border-width: 4px;border-radius: 10px;border-color: #000ddd;border-style: solid;width: 100px;height: 30px;position: fixed;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", }, "childNodes": [ { @@ -496,7 +496,7 @@ exports[`replay/transform transform child wireframes are processed 1`] = ` }, { "attributes": { - "style": "color: #ffffff;background-color: #000000;border-width: 4px;border-radius: 10px;border-color: #000ddd;border-style: solid;width: 100px;height: 30px;position: absolute;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", + "style": "color: #ffffff;background-color: #000000;border-width: 4px;border-radius: 10px;border-color: #000ddd;border-style: solid;width: 100px;height: 30px;position: fixed;left: 11px;top: 12px;overflow:hidden;white-space:nowrap;", }, "childNodes": [ { @@ -540,6 +540,81 @@ exports[`replay/transform transform child wireframes are processed 1`] = ` ] `; +exports[`replay/transform transform omitting x and y is equivalent to setting them to 0 1`] = ` +[ + { + "data": { + "initialOffset": { + "left": 0, + "top": 0, + }, + "node": { + "childNodes": [ + { + "id": 2, + "name": "html", + "publicId": "", + "systemId": "", + "type": 1, + }, + { + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": {}, + "childNodes": [], + "id": 4, + "tagName": "head", + "type": 2, + }, + { + "attributes": { + "style": "height: 100vh; width: 100vw;", + }, + "childNodes": [ + { + "attributes": {}, + "childNodes": [ + { + "attributes": { + "height": 30, + "src": 
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAZAAAAGQCAIAAAAP3aGbAAAViUlEQVR4nO3d0XLjOA5GYfTWPuO8Y+Yp98JTGa8lSyJBEvjBcy5nA4mbbn/lKDT7z8/PjxERKfSf6AUQET0NsIhIJsAiIpkAi4hkAiwikum/z7/0r7/+uvhf//777+5LeWaJyFPUK7dp9reGd1jXN2hy5ONSnlki6k5LK2v9kRCziMokodXHbPMzLMwiKpCiVtb30B2ziKQT1cq6f0uIWUSi6Wplnm0Nt2Y9p+doVvcsEV0krZWdgjUQC8+lMItobIpafcyev8PCLKJiJRHHOfv1R0LMIipTHnGcs1fPsDCLqECpxHHO3jx0xywi6bKJ45y9/y0hZhGJllAc5+wJWFOxwCyiNeUUxzl7/g4Ls4ikSyuOc/brj4SYRSRaZnGcs1fPsDCLSK7k4jhnbx66O7EY9ZFDzCJ6Un5xnLP3vyU8ujPw88mYRTQqCXGcJ/k9/fAzZhFlTkIc/7mj/UckYxZRkiTEWX2m+/EemEUUnoQ4MWe6H++EWUSBSYgTeab78X6KZsEWFUhRHM+sjToieeAZoWvMul0GUfIUxXFqZWOPSA4xa97bPaK0KYrj18qGH5G83qzjpTCLaqcozhCtbMYRyZhFNC9FcQbOTjkiGbOIZpREjcDZWUckYxbR2PKoETg78YjkJGaxRYsKlEqNwNm5RyRnMGvgMohCyqZG4Oz0I5IHmpVhGUSLS6hG4OyKI5IHYpFkGURryqlG4OyiI5Ixi6i1tGoEzq47IhmziJ6XWY3A2blHJA+czbkMohklVyNw9tERydc3mzeb4WPSmEWLy69G4OyjDz9HmXU6vn4ZmEXLklAjata6z3Q/3njS7On4+mVgFi1IQo1Arcxzpvvx9tez8z43g1lUIAk1YrUy55nux0U0jWMW0SsJNcK1sr4z3Qe6g1lEEmokmR12RHLI7EeYRXIpqhE1awOPSE5ypgJmkVCKagRqZR6wjrdPcqYCZpFEimrEamXOM92Pi8AsoicpqhGulfnPdD8uxfnznZxZsEWtKaqRQSsbcqb76YL2Met2GUTvKaqRRCu7PdM9yh3MopIpqpFHK8t8RDJmUbEU1UillT38LeFWZs3bJkY7p6hGNq3s+baGfcw6XgqzyJmiGgm1sqZ9WJg1ZBm0W4pq5NTKWjeObm4WW7SoNUU10mplHTvddzZr4DJohxTVyKyV9X00B7OGLINqp6hGcq2s+7OEEmZdfAcxi6amqEZ+rczz4ef8Zp2Ov18nw9s9qpeiGhJamfO0BnWzli0Ds/ZJUQ0VrcwJVtMKMOvhV5JuimoIaWV+sJrWMfaZFGZRqhTV0NLKhoBlce+VMIuSpKiGnFY2CizDLMzaOEU1FLWygWBZ3M93A/egYxa1pqiGqFY2FiwLfa+EWbQ+RTV0tbLhYBlmYdY2KaohrZXNAMswC7M2SFENda1sElgW+kwKs2h2imoU0MrmgWWh75Uwi+alqEYNrWwqWIZZmFUuRTXKaGWzwbLQn+8wi8amqEYlrWwBWK8w6+EsbKVNUY1iWtkysAyzHodZCVNUo55WthIsw6zHYVaqFNUoqZUtBssw63GYlSRFNapqZevBss3MGrstlhanqEZhrSwELNvJrOOlMEslRTVqa2VRYJmIWRd/hJhVO0U1ymtlgWCZglmn40Ou49yeRlNTVGMHrSwWLNvbrKZxzFqWohqbaGXhYBlmYVamFNXYRyvLAJbFPZPCLHpPUY2ttLIkYFnce6WBZmX49SV1p6jGblpZHrBM36ymS2FWqhTV2FArSwWWhZqltRUeswamqMaeWlk2sCz0OTpmbZiiGttqZQnBstDn6Ji1VYpq7KyV5QTLQp9JYdYmKaqxuVaWFiwLfSaFWeVTVAOtLDNYFvpeCbMKp6gGWr1KDZZhFmaNTlENtPotO1iGWZg1LkU10Oo9AbAMszBrRIpqoNVHGmBZ6HN0zCqQohpodUwGrFeY5V/DhimqgVaniYFlmPV4FrZeKaqBVt/SA8sw63GYpagGWl0kCZaJmHXxtwezFqSoBlpdpwqWKZh1Op5qDYVTVAOtbhMGyzBrxBpKpqgGWj1JGyzDrBFrKJaiGmj1MHmwbDOzPq6DWR8pqoFWz6sAlsU9R19v1vE6zi21lVJUA62aKgKWxb1XymBW03hVsxTVQKvW6oBloWYN8QKzulNUA606KgWWhT6TwqyoFNVAq76qgWWYtZlZimqgVXcFwTLM2sYsRTXQylNNsCz0d3+jzIrarqGSohpo5awsWBb6HH2UFxnWkDNFNdDKX2WwLPS9EmbNS1ENtBpScbAMs8qZpagGWo2qPliGWYXMUlQDrQa2BViGWSXMUlQDrca2C1iGWeJmKaqBVsPbCCwL/d0fZnlSVAOtZrQXWK8wy7+GlSmqgVaT2hEsEzFryFZ4dbMU1UCreW0KlimYdTq+eA2xZimqgVZT2xcsw6zHsyFsKaqBVrPbGizDrBFrmJGiGmi1oN3BMswasYaxKaqBVmsCLDPMGrGGUSmqgVbLAqx/wiz/GvwpqoFWKwOsf4t6hu3cztq3hu7Z6zV4UlQDrRYHWP9X4Gt+c7MU1UCr9QHWZ5ubFbJFS1ENtAoJsE7ayqzwbaWKaqBVVIB13j5mzVvD8PEkaqBVYID1Nczyr2HgYBI10Co2wLrK+bu/5zfa0CxFNdAqPMC6yfOadz7DLmyWohpolSHAui/wNV/SLEU10CpJgPUozPKv4eEXXNx3t1k6BlhPK2BW+BoU1UCrVAFWQ+pmxa5BUQ20yhZgtYVZfbOKaqBVwgCrucCfrZxb2Nev4dtFLkqiBlrlDLA6y2/W6fjiNVy7ebuq3WbpNsDqD7PGlkQNtMocYLnCrFElUQOtkgdY3jDLXxI10Cp/gDUgzPKURA20kgiwxoRZfSVRA61UAqxhBZq1+OM7o8xKogZaCQVYIwvcKyBnVhI10EorwBpc4M93sWY1vXo/vl5RHLQKCbDGt6FZrVq9TymKg1ZRAdaUtjKrT6vbNRxLIg5aBQZYs9rHrDUlEQetYgOsiWHWqJKIg1bhAdbcMMtfEnHQKkOANT3P8yDnHqsCZiURB62SBFgrCnyvFG5Wk7nX90UrAqxF7WnW71THeBJx0CpVgLWu3cza/DOGNCPAWpq0Wcv2sntm0ap2gLU6UbOW7WX3zKJV+QArIBWznJ+8WTyLVjsEWDEF7ldo5dIjzrLQapMAKzIJs/KHVvsEWMFhljO02irAig+zukOr3QKsFEmb5dzLrr4PnlYGWFkSNWvIXnbdffC0OMBKVKBZzk/exM6i1T4BVq7W77HqGD/9+t32wVNIgJWulXusrq/WfammKcV98BQVYGVsvVkdamy7D54C+/Pz8/Pxn/jjSdLzF9Xxj0xie/qy0Eq69z8v3mHlLXy/Qo3QqlKAlTrMcoZWxQKs7GFWd2hVL8ASKGSP1agK7IOnPAGWRs7zQsN3eOrug6dUAZZMi/eFvk9p7YP/CK0qBVhKefaFto5/fL3Q
Pnjr3SOGVvkDLLG09rJ7Zj374K39rFS0kgiw9JLYy/7rRZSSTaGVSux0V4297KPiL3zy2OleIV5mQ+LbqBVgCceLzRnfQLkAS7vwfaGxJdk5QcsCrArpvvb8e9nX7xGjwACrSOF72T2zzr3szj1iJBRg1SlqL3vfu6SovewkHWCVKnAvu2d2/R6x1hFKEmCVauUuzZB/ReLjIp7ZvkGKDbDqtP5c9sX/isTYMEsxdroXiZdfX/xtzx873auFVt3xrdMKsOTjJeeMb6BQgKVd+Rfbmj1inuf3tDLAEq78a2zIHrHnI+W/nwUCLNWWnU6X4V+RWLa/DLOSB1iSdWjl/Myd0L8icXu16zArc4ClV/d7q8C97J7ZIXvZMatGgCWW5ydBlb3so/bBf/wXzCoQYCm1Uqv3qZBZ/2/uMKtegCXTeq1iZ4fk/Keww9dPHwGWRlFaFcj5T2Fv/t3LFmAJhFbOMKtMgJU9tLrIiQ5myQVYqUOri1r3l2FWgQArb8u0CtzLvngf/OlvHjFLKMBK2kqtTi+ycta/o9W5XwGzVAKsjK3XqqPYvey317wOs0QDrHRFaSW0l/3bCGaVD7BypaLV+1TIme6YtWeAlSgtrWJnL8Yxq3CAlSVFrcLDrN0CrBShVXcXZnncwaycAVZ8aOXs4v8IZhULsIJDq4uGkIFZlQKsyCS0it3Ljln0HmCFpaLV6UWWzTaNY1b5ACsmLa068sx6roZZtQOsgBS1Ct/LrmUWbE0KsFYnIU74PvjTwSFkrDHrdJz8AdbSJMRJsg/+dHwIGZilG2CtS0KcbLsfMIveA6xFSYiTTatXmEW/AdaKJMTJqdUrzKJXgDU9CXGitAonI3wB1BRgzU1CnMB98Me7X5TTLLZorQywJqYijn82cC/7EDKcV8CsZQHWrLTE8c86CycjfAH0JMCakqI4i595Dd/Ljlk7BFjjk1AjwxN6zKLWAGtwEmpk0OrbeDgZzg8AYdbUAGtkEmrk0erbRcLJcP7yEbPmBVjDklAjm1bfLhVORvgC6DTAGpOEGjm1+nbBcDLCF0DHAGtAEmokmb0Is+g2wPKmqMbiWczyLIDeAyxXQmrEzmKWZwH0G2D1J6dG4Ozpf/lWTrPWrB+zrgOszhTVCH9uFfiZwSFkhC+AAKsnRTXCtXryv97eNJyM8AVsHmA1p6hGEq2ef83FrZeRMePjO5jlDLDaUlQjlVatXxlo1re7L1sAZh0DrIYU1UioVevXD//8jZZZsPUeYD1NUY20WnVMbWvWxQI2DLAepahGcq06ZjGLAOs+RTUktOq4AmZtHmDdpKiGkFYd18GsnQOsqxTVkNOq42rDzVr58R3M8gRYX1NUQ1SrjmsO3y6AWRIB1nmKakhr9XvlwC1O4WaxRes2wDpJUY0CWnXcophZQxZQO8D6TFGNSlq13giztgqw/i9FNepp1Xq74VvJMSttgPVvimpU1ar1psMfY2NWzgDrnxTVqK1V660xa4cAy0xTjR20al0AZpUPsCTV2EerV/uYNWMBldodLEU1dtPq1SZmTVpAmbYGS1GNPbV6hVmeBdRoX7AU1dhZq1eBW8kxK0ObgqWoBlq9CnyM7Xwa1XQF/wKe30ioHcFSVAOt3pMwy3935wJKmrUdWIpqoNUxzOq+u3R7gaWoBlp9S92s2Adqom0ElqIaaHWdtFnLFlDJrF3AUlQDrZ4UuC0Ts9a3BViKaqDV8wIfCWmZVYCt+mApqoFWrcWatfjjO92zFwtQqThYimqgVV+xv3rDrDVVBktRDbTyhFnOBeSvLFiKaqCVP8xyLiB5NcFSVAOtRoVZzgVkriBYimqg1dhit2Vi1ryqgaWoBlpNKnC7QLhZVbdolQJLUQ20mlrgW5VYs4YsIGF1wFJUA60WhFmeBWSrCFiKaqDVsjDLs4BUVQBLUQ20WhxmeRaQJ3mwFNVAq5Awy7OAJGmDpagGWgWGWZ4FZEgYLEU10Cq8wO0Czq0SrVcYPp4hVbAU1UCrJAW+7P27Ojc3SxIsRTXQKlX7mDXjh9PA9MBSVAOtEraJWc4FZDNLDCxFNdAqbZjVffeolMBSVAOtkodZ3XcPSQYsRTXQSiLM6r77+jTAUlQDrYQKfIzt3CrRdPdvC/DcfXECYCmqgVZyxb7sMeth2cFSVAOtRFM3a9mm1odfOaPUYCmqgVbSSZvlvIKEWXnBUlQDrQqEWQ9nQ9hKCpaiGmhVJsxyLmBeGcFSVAOtioVZzgVMKh1YimqgVcliH2Nj1mm5wFJUA61qF/hICLOOJQJLUQ202qHAlz1mfZQFLEU10GqfMMuzgIGlAEtRDbTarZ3NyrNFKx4sRTXQas+2NWvIAoYUDJaiGmi1cypmTfr0j2d8SJFgKaqBViRh1ul40xVymhUGlqIaaEWvAh8JbW5WDFiKaqAVvRf4st/ZrACwFNVAKzqGWd3j3a0GS1ENtKJvYVb3eF9LwVJUA63oOszqHu9oHViKaqAVPSnWrJV3n7FBrKlFYCmqgVb0vNi3KsvGT6+w0qwVYCmqgVbUWuxblU3Mmg6WohpoRX3Fvux3MGsuWIpqoBV5wqzuuz9pIliKaqAV+cOs7rvfNgssRTXQikaFWd13v24KWIpqoBWNTd2sNRvEWs0aD5aiGmhFM5I2y3kF5+9MvzUYLEU10IrmtfKtyox3OrEb8Y+NBEtRDbSiBQW+7IuZNQwsRTXQipaFWZ4F/DYGLEU10IoWh1meBbwaAJaiGmhFIWGWZwHmB0tRDbSiwFTMGv6bxyFmucBSVAOtKDwJs07Hm65wenfnG71+sBTVQCtKUuDLPtYs53gnWIpqoBWlKvBlr2tWD1iKaqAVJQyzWmsGS1ENtKK0YVZTbWApqoFWlDzMel4DWIpqoBVJFGtW4N2bxu05WIpqoBUJpaJG7PgjsBTVQCuSS0WNwPF7sBTVQCsSzbkZXQIdz/gNWIpqoBVJF/uZweTjV2ApqoFWVKB9zGpd/FewFNVAKyrTJmadXuFi/BwsRTXQioqFWcdOwFJUA62oZJj1ket4mSRqoBUVbqVZgeSdXuE43g9WEjXQisrn3Iyu8jbtyd07wUqiBlrRPm1r1ns9YCVRA61ot6TNcl7hVTNYSdRAK9ozFbNmPMW3VrCSqIFWtHMSZp2ON13hdLwBrCRqoBXRtmb9+fn5eTKZRA20Ivrt+V/p07/PnnH/q6npCr89eoeVRA20InovcF/oyvdZ792DlUQNtCI6tptZN2AlUQOtiL61lVlXYCVRA62Iros1a+W20q9gJVEDrYiepPI5Z+f4OVhJ1EArouepoOMZPwEriRpoRdSaBDqecc50RysqVX50POOc6U5UreToeMY5052oYM4PKqc1izPdiWq28mwGz92bxjnTnahs9cza/Ux3otoVM2vrM92JdmilWbPJ2/dMd6J9WmaWc/z2Fb3pme5Eu1XDrB3PdCfaM+cHlWN/tHy13ZnuRJsnYdbpuO12pjsRmbJZT8FKIg5aEQ1J1KxdznQnoo+cZi0j770tznQnotMC94X2vUjrn+lORBdpmVX8THc
iui3WrKbXbOUz3YnoYdk+M/itsme6E1FTEmbVPNOdiDrKb1bBM92JqLvkZlU7052InGU2q9SZ7kQ0pLRm1TnTnYgGltOsPz8/Pw+vexpaERXO+ap8Pn76iu7/8POTe6AVUbFiz+07jsuf6U5EU0tllvaZ7kS0oFiz3hM+052IlpXhfGTTPdOdiBYXfj6yiZ7pTkQhhZuld6Y7EQW28qxR+TPdiShDUWYpnelORHniTPerWSLKFme6n88SUc5Wno9sEme6E1Hmln1M2vKf6U5E+Vtmlve0BiKiZblOayAiWhlgEZFMgEVEMv0PU/uJezostYUAAAAASUVORK5CYII=", + "style": "width: 100px;height: 30px;position: fixed;left: 0px;top: 0px;", + "width": 100, + }, + "childNodes": [], + "id": 12345, + "tagName": "img", + "type": 2, + }, + ], + "id": 111, + "tagName": "div", + "type": 2, + }, + ], + "id": 5, + "tagName": "body", + "type": 2, + }, + ], + "id": 3, + "tagName": "html", + "type": 2, + }, + ], + "id": 1, + "type": 0, + }, + }, + "timestamp": 1, + "type": 2, + }, +] +`; + exports[`replay/transform transform respect incremental ids, replace with body otherwise 1`] = ` [ { diff --git a/ee/frontend/mobile-replay/transform.test.ts b/ee/frontend/mobile-replay/transform.test.ts index 002767ea2ccf1..584366e9cc535 100644 --- a/ee/frontend/mobile-replay/transform.test.ts +++ b/ee/frontend/mobile-replay/transform.test.ts @@ -346,5 +346,25 @@ describe('replay/transform', () => { ]) expect(textEvent).toMatchSnapshot() }) + test('omitting x and y is equivalent to setting them to 0', () => { + expect( + posthogEEModule.mobileReplay?.transformToWeb([ + { + type: 2, + data: { + wireframes: [ + { + id: 12345, + width: 100, + height: 30, + type: 'image', + }, + ], + }, + timestamp: 1, + }, + ]) + ).toMatchSnapshot() + }) }) }) diff --git a/ee/frontend/mobile-replay/wireframeStyle.ts b/ee/frontend/mobile-replay/wireframeStyle.ts index 4a7b3fd662a3b..eb6d8e5469ab6 100644 --- a/ee/frontend/mobile-replay/wireframeStyle.ts +++ b/ee/frontend/mobile-replay/wireframeStyle.ts @@ -1,17 +1,33 @@ import { MobileStyles, wireframe } from './mobile.types' +function isNumber(candidate: unknown): candidate is number { + return typeof candidate === 'number' +} + +function isString(candidate: unknown): candidate is string { + return typeof candidate === 'string' +} + +function isUnitLike(candidate: unknown): candidate is string | number { + return isNumber(candidate) || (isString(candidate) && candidate.length > 0) +} + function ensureUnit(value: string | number): string { - return typeof value === 'number' ? `${value}px` : value.replace(/px$/g, '') + 'px' + return isNumber(value) ? 
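// NOTE (annotation, not part of the upstream diff): a sketch of what these guards
// change, assuming the definitions above. Truthiness checks like
// `if (wireframe.style?.borderWidth)` silently dropped the number 0; `isUnitLike`
// keeps it while still skipping undefined and '':
//   isUnitLike(0)       // true  (0 is a real width)
//   isUnitLike('')      // false
//   ensureUnit(4)       // '4px'
//   ensureUnit('4px')   // '4px'  (normalized; hence "stroke-width": "4" -> "4px" in the snapshot above)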
`${value}px` : value.replace(/px$/g, '') + 'px' } function makeBorderStyles(wireframe: wireframe): string { let styles = '' - if (wireframe.style?.borderWidth) { + if (!wireframe.style) { + return styles + } + + if (isUnitLike(wireframe.style.borderWidth)) { const borderWidth = ensureUnit(wireframe.style.borderWidth) styles += `border-width: ${borderWidth};` } - if (wireframe.style?.borderRadius) { + if (isUnitLike(wireframe.style.borderRadius)) { const borderRadius = ensureUnit(wireframe.style.borderRadius) styles += `border-radius: ${borderRadius};` } @@ -29,13 +45,17 @@ function makeBorderStyles(wireframe: wireframe): string { export function makeSvgBorder(style: MobileStyles | undefined): Record { const svgBorderStyles: Record = {} - if (style?.borderWidth) { - svgBorderStyles['stroke-width'] = style.borderWidth.toString() + if (!style) { + return svgBorderStyles + } + + if (isUnitLike(style.borderWidth)) { + svgBorderStyles['stroke-width'] = ensureUnit(style.borderWidth) } - if (style?.borderColor) { + if (style.borderColor) { svgBorderStyles.stroke = style.borderColor } - if (style?.borderRadius) { + if (isUnitLike(style.borderRadius)) { svgBorderStyles.rx = ensureUnit(style.borderRadius) } @@ -44,19 +64,22 @@ export function makeSvgBorder(style: MobileStyles | undefined): Record([ listeners(({ actions, values }) => ({ openSettingsPanel: ({ settingsLogicProps }) => { - if (!values.featureFlags[FEATURE_FLAGS.POSTHOG_3000]) { + if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'control') { LemonDialog.open({ title: 'Settings', content: , diff --git a/frontend/src/layout/navigation-3000/themeLogic.ts b/frontend/src/layout/navigation-3000/themeLogic.ts index 8d238bf5631f3..22b482d34662c 100644 --- a/frontend/src/layout/navigation-3000/themeLogic.ts +++ b/frontend/src/layout/navigation-3000/themeLogic.ts @@ -36,7 +36,7 @@ export const themeLogic = kea([ } // Dark mode is a PostHog 3000 feature // User-saved preference is used when set, oterwise we fall back to the system value - return featureFlags[FEATURE_FLAGS.POSTHOG_3000] + return featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test' ? user?.theme_mode ? user.theme_mode === 'dark' : darkModeSystemPreference diff --git a/frontend/src/layout/navigation/TopBar/NotebookButton.tsx b/frontend/src/layout/navigation/TopBar/NotebookButton.tsx index 87e08761ffd06..215616a66faf2 100644 --- a/frontend/src/layout/navigation/TopBar/NotebookButton.tsx +++ b/frontend/src/layout/navigation/TopBar/NotebookButton.tsx @@ -7,7 +7,7 @@ import { notebookPanelLogic } from 'scenes/notebooks/NotebookPanel/notebookPanel export function NotebookButton(): JSX.Element { const { visibility } = useValues(notebookPanelLogic) const { toggleVisibility } = useActions(notebookPanelLogic) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const overrides3000: Partial = is3000 ? 
{ diff --git a/frontend/src/layout/navigation/TopBar/SitePopover.tsx b/frontend/src/layout/navigation/TopBar/SitePopover.tsx index 930497e3403e4..8c7e5060145ee 100644 --- a/frontend/src/layout/navigation/TopBar/SitePopover.tsx +++ b/frontend/src/layout/navigation/TopBar/SitePopover.tsx @@ -300,7 +300,7 @@ export function SitePopoverOverlay(): JSX.Element { )} - + diff --git a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx index f0762c235657d..f6e25140bea89 100644 --- a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx +++ b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx @@ -251,7 +251,7 @@ export const commandPaletteLogic = kea([ selectors({ isUsingCmdKSearch: [ (selectors) => [selectors.featureFlags], - (featureFlags) => featureFlags[FEATURE_FLAGS.POSTHOG_3000], + (featureFlags) => featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test', ], isSqueak: [ (selectors) => [selectors.input], @@ -1008,7 +1008,7 @@ export const commandPaletteLogic = kea([ actions.registerCommand(createDashboard) actions.registerCommand(shareFeedback) actions.registerCommand(debugCopySessionRecordingURL) - if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000]) { + if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test') { actions.registerCommand(toggleTheme) actions.registerCommand(toggleHedgehogMode) actions.registerCommand(shortcuts) diff --git a/frontend/src/lib/components/NotFound/index.tsx b/frontend/src/lib/components/NotFound/index.tsx index 3af20f36fb23d..502a73809a747 100644 --- a/frontend/src/lib/components/NotFound/index.tsx +++ b/frontend/src/lib/components/NotFound/index.tsx @@ -2,6 +2,7 @@ import './NotFound.scss' import { LemonButton } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { Link } from 'lib/lemon-ui/Link' import { capitalizeFirstLetter } from 'lib/utils' import { useNotebookNode } from 'scenes/notebooks/Nodes/NotebookNodeContext' @@ -17,6 +18,7 @@ interface NotFoundProps { export function NotFound({ object, caption }: NotFoundProps): JSX.Element { const { preflight } = useValues(preflightLogic) const { openSupportForm } = useActions(supportLogic) + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const nodeLogic = useNotebookNode() @@ -47,7 +49,11 @@ export function NotFound({ object, caption }: NotFoundProps): JSX.Element {
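{/* NOTE (annotation, not part of the upstream diff): POSTHOG_3000 is read as a
    multivariate experiment flag throughout this PR, so former boolean checks
    become strict comparisons: `=== 'test'` enables the 3000 UI (themeLogic and
    commandPaletteLogic above) while `=== 'control'` keeps the legacy path
    (sidePanelSettingsLogic above). */}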

{nodeLogic && ( - nodeLogic.actions.deleteNode()}> + nodeLogic.actions.deleteNode()} + > Remove from Notebook )} diff --git a/frontend/src/lib/components/PageHeader.tsx b/frontend/src/lib/components/PageHeader.tsx index b5be77f167ce5..27b4773d25947 100644 --- a/frontend/src/lib/components/PageHeader.tsx +++ b/frontend/src/lib/components/PageHeader.tsx @@ -27,12 +27,13 @@ export function PageHeader({ delimited, notebookProps, }: PageHeaderProps): JSX.Element | null { - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const { actionsContainer } = useValues(breadcrumbsLogic) return ( <> {!is3000 && ( + // eslint-disable-next-line react/forbid-dom-props
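// NOTE (annotation, not part of the upstream diff): calls like
// `useFeatureFlag('POSTHOG_3000', 'test')` rely on the optional `match` parameter
// added in useFeatureFlag.ts below:
//   useFeatureFlag('POSTHOG_3000')          // legacy: !!featureFlags[flag]
//   useFeatureFlag('POSTHOG_3000', 'test')  // featureFlags[flag] === 'test'
// i.e. with `match` the hook does a strict variant comparison instead of coercing
// the flag value to a boolean.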
{!is3000 && diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index cae978a3c3a6e..e5be094824d64 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -336,7 +336,7 @@ export const supportLogic = kea([ actionToUrl(({ values }) => { return { closeSupportForm: () => { - if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000]) { + if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test') { return } diff --git a/frontend/src/lib/hooks/use3000Body.ts b/frontend/src/lib/hooks/use3000Body.ts index 2b7ba96747d70..6ada43cb822c3 100644 --- a/frontend/src/lib/hooks/use3000Body.ts +++ b/frontend/src/lib/hooks/use3000Body.ts @@ -6,7 +6,7 @@ import { themeLogic } from '~/layout/navigation-3000/themeLogic' import { useFeatureFlag } from './useFeatureFlag' export function use3000Body(): void { - const is3000 = !!useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const { isDarkModeOn } = useValues(themeLogic) useEffect(() => { diff --git a/frontend/src/lib/hooks/useFeatureFlag.ts b/frontend/src/lib/hooks/useFeatureFlag.ts index 721805a362395..df0e8bb99341c 100644 --- a/frontend/src/lib/hooks/useFeatureFlag.ts +++ b/frontend/src/lib/hooks/useFeatureFlag.ts @@ -2,8 +2,12 @@ import { useValues } from 'kea' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -export const useFeatureFlag = (flag: keyof typeof FEATURE_FLAGS): boolean => { +export const useFeatureFlag = (flag: keyof typeof FEATURE_FLAGS, match?: string): boolean => { const { featureFlags } = useValues(featureFlagLogic) + if (match) { + return featureFlags[FEATURE_FLAGS[flag]] === match + } + return !!featureFlags[FEATURE_FLAGS[flag]] } diff --git a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx index e3b251ad78e49..5e4d4def77a1c 100644 --- a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx +++ b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx @@ -131,7 +131,7 @@ export function LemonMenuOverlay({ items, tooltipPlacement, itemsRef }: LemonMen const { featureFlags } = useValues(featureFlagLogic) const sectionsOrItems = useMemo(() => normalizeItems(items), [items]) - const buttonSize = featureFlags[FEATURE_FLAGS.POSTHOG_3000] ? 'small' : 'medium' + const buttonSize = featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test' ? 'small' : 'medium' return sectionsOrItems.length > 0 && isLemonMenuSection(sectionsOrItems[0]) ? 
( ({ HTMLDivElement, HTMLLIElement >(value, 200) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') let buttonProps = {} diff --git a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss index 371bb05d23964..3696f6b12c088 100644 --- a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss +++ b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.scss @@ -1,4 +1,8 @@ .LemonTabs { + --lemon-tabs-margin-bottom: 1rem; + --lemon-tabs-margin-right: 2rem; + --lemon-tabs-content-padding: 0.75rem 0; + position: relative; display: flex; flex-direction: column; @@ -8,83 +12,95 @@ .Navigation3000__scene > :first-child > &:first-child { margin-top: -0.75rem; } -} -.LemonTabs__bar { - position: relative; - display: flex; - flex-direction: row; - flex-shrink: 0; - align-items: stretch; - margin-bottom: 1rem; - overflow-x: auto; - list-style: none; - - &::before { - position: absolute; - bottom: 0; - left: 0; - width: 100%; - height: 1px; - - // The bottom border - content: ''; - background: var(--border); + &.LemonTabs--inline { + --lemon-tabs-margin-bottom: 0; + --lemon-tabs-margin-right: 1rem; + --lemon-tabs-content-padding: 0.25rem 0rem; + } + + &.LemonTabs--borderless { + .LemonTabs__bar::before { + content: none; + } } - &::after { - position: absolute; - bottom: 0; - left: 0; - width: var(--lemon-tabs-slider-width); - height: 0.125rem; + .LemonTabs__bar { + position: relative; + display: flex; + flex-direction: row; + flex-shrink: 0; + align-items: stretch; + margin-bottom: var(--lemon-tabs-margin-bottom); + overflow-x: auto; + list-style: none; - // The active tab slider - content: ''; - background: var(--link); - transform: translateX(var(--lemon-tabs-slider-offset)); + &::before { + position: absolute; + bottom: 0; + left: 0; + width: 100%; + height: 1px; - .LemonTabs--transitioning & { - transition: width 200ms ease, transform 200ms ease; + // The bottom border + content: ''; + background: var(--border); } - } -} -.LemonTabs__tab { - .LemonTabs--transitioning & { - transition: color 200ms ease; - } + &::after { + position: absolute; + bottom: 0; + left: 0; + width: var(--lemon-tabs-slider-width); + height: 0.125rem; - &:not(:last-child) { - margin-right: 2rem; - } + // The active tab slider + content: ''; + background: var(--link); + transform: translateX(var(--lemon-tabs-slider-offset)); - &:hover { - color: var(--link); - } + .LemonTabs--transitioning & { + transition: width 200ms ease, transform 200ms ease; + } + } - &:active { - color: var(--primary-3000-active); - } + .LemonTabs__tab { + .LemonTabs--transitioning & { + transition: color 200ms ease; + } - &.LemonTabs__tab--active { - color: var(--link); - text-shadow: 0 0 0.25px currentColor; // Simulate increased weight without affecting width - } + &:not(:last-child) { + margin-right: var(--lemon-tabs-margin-right); + } - a { - color: inherit; + &:hover { + color: var(--link); + } - // Make tab labels that are links the same colors as regular tab labels - text-decoration: none; - transition: none; - } -} + &:active { + color: var(--primary-3000-active); + } -.LemonTabs__tab-content { - display: flex; - align-items: center; - padding: 0.75rem 0; - white-space: nowrap; - cursor: pointer; + &.LemonTabs__tab--active { + color: var(--link); + text-shadow: 0 0 0.25px currentColor; // Simulate increased weight without affecting width + } + + a { + color: inherit; + + // Make tab labels that are links the same colors as regular tab labels + 
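// NOTE (annotation, not part of the upstream diff): the custom properties
// introduced at the top of .LemonTabs are the hook for the new `inline` and
// `borderless` props in LemonTabs.tsx below, presumably applied as the
// .LemonTabs--inline / .LemonTabs--borderless modifier classes, e.g.
// (hypothetical usage):
//   <LemonTabs inline borderless activeKey={tab} onChange={setTab} tabs={tabs} />
// --inline tightens the margins and content padding via the custom properties;
// --borderless suppresses the bar's ::before bottom border.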
text-decoration: none; + transition: none; + } + + .LemonTabs__tab-content { + display: flex; + align-items: center; + padding: var(--lemon-tabs-content-padding); + white-space: nowrap; + cursor: pointer; + } + } + } } diff --git a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.tsx b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.tsx index d94f9732623cd..44820a6523698 100644 --- a/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.tsx +++ b/frontend/src/lib/lemon-ui/LemonTabs/LemonTabs.tsx @@ -29,6 +29,8 @@ export interface LemonTabsProps { onChange?: (key: T) => void /** List of tabs. Falsy entries are ignored - they're there to make conditional tabs convenient. */ tabs: (LemonTab | null | false)[] + inline?: boolean + borderless?: boolean 'data-attr'?: string } @@ -53,6 +55,8 @@ export function LemonTabs({ activeKey, onChange, tabs, + inline = false, + borderless = false, 'data-attr': dataAttr, }: LemonTabsProps): JSX.Element { const { containerRef, selectionRef, sliderWidth, sliderOffset, transitioning } = useSliderPositioning< @@ -66,7 +70,12 @@ export function LemonTabs({ return (
> = Reac href: typeof to === 'string' ? to : undefined, }) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const { openDocsPage } = useActions(sidePanelDocsLogic) const onClick = (event: React.MouseEvent): void => { diff --git a/frontend/src/lib/logic/featureFlagLogic.ts b/frontend/src/lib/logic/featureFlagLogic.ts index 592ea0f6646d7..ac1d3f160c13c 100644 --- a/frontend/src/lib/logic/featureFlagLogic.ts +++ b/frontend/src/lib/logic/featureFlagLogic.ts @@ -1,4 +1,5 @@ import { actions, afterMount, kea, path, reducers } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' import { getAppContext } from 'lib/utils/getAppContext' import posthog from 'posthog-js' @@ -22,7 +23,24 @@ function notifyFlagIfNeeded(flag: string, flagState: string | boolean | undefine function getPersistedFeatureFlags(appContext: AppContext | undefined = getAppContext()): FeatureFlagsSet { const persistedFeatureFlags = appContext?.persisted_feature_flags || [] - return Object.fromEntries(persistedFeatureFlags.map((f) => [f, true])) + /** :HACKY: Handle experiment (non-boolean) feature flag for 3000. */ + let has3000Flag = false + const flags = Object.fromEntries( + persistedFeatureFlags.map((f) => { + if (f === FEATURE_FLAGS.POSTHOG_3000) { + has3000Flag = true + return [f, 'test'] + } else { + return [f, true] + } + }) + ) + + if (!has3000Flag) { + flags[FEATURE_FLAGS.POSTHOG_3000] = 'control' + } + + return flags } function spyOnFeatureFlags(featureFlags: FeatureFlagsSet): FeatureFlagsSet { diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index 03a177c28908e..15401da60b193 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -443,6 +443,8 @@ export const eventUsageLogic = kea([ reportAutocaptureToggled: (autocapture_opt_out: boolean) => ({ autocapture_opt_out }), reportAutocaptureExceptionsToggled: (autocapture_opt_in: boolean) => ({ autocapture_opt_in }), reportFailedToCreateFeatureFlagWithCohort: (code: string, detail: string) => ({ code, detail }), + reportFeatureFlagCopySuccess: true, + reportFeatureFlagCopyFailure: (error) => ({ error }), reportInviteMembersButtonClicked: true, reportDashboardLoadingTime: (loadingMilliseconds: number, dashboardId: number) => ({ loadingMilliseconds, @@ -1048,6 +1050,12 @@ export const eventUsageLogic = kea([ reportFailedToCreateFeatureFlagWithCohort: ({ detail, code }) => { posthog.capture('failed to create feature flag with cohort', { detail, code }) }, + reportFeatureFlagCopySuccess: () => { + posthog.capture('feature flag copied') + }, + reportFeatureFlagCopyFailure: ({ error }) => { + posthog.capture('feature flag copy failure', { error }) + }, reportInviteMembersButtonClicked: () => { posthog.capture('invite members button clicked') }, diff --git a/frontend/src/mocks/fixtures/_billing_unsubscribed.json b/frontend/src/mocks/fixtures/_billing_unsubscribed.json new file mode 100644 index 0000000000000..d640aac7c7ea8 --- /dev/null +++ b/frontend/src/mocks/fixtures/_billing_unsubscribed.json @@ -0,0 +1,1923 @@ +{ + "available_features": [ + "surveys_unlimited_surveys", + "surveys_all_question_types", + "surveys_user_targeting", + "surveys_user_sampling", + "surveys_api_mode", + "surveys_results_analysis", + "surveys_templates", + "surveys_data_retention", + "zapier", + "slack_integration", + "microsoft_teams_integration", + "discord_integration", + "apps", + "boolean_flags", + "persist_flags_cross_authentication", + 
"feature_flag_payloads", + "multiple_release_conditions", + "release_condition_overrides", + "targeting_by_group", + "local_evaluation_and_bootstrapping", + "flag_usage_stats", + "feature_flags_data_retention", + "console_logs", + "recordings_playlists", + "session_replay_data_retention", + "dashboards", + "funnels", + "graphs_trends", + "paths", + "product_analytics_data_retention", + "tracked_users", + "team_members", + "organizations_projects", + "api_access", + "social_sso", + "community_support", + "terms_and_conditions" + ], + "license": { + "plan": "dev" + }, + "customer_id": "cus_OfAfHKf4QiD0rs", + "deactivated": false, + "has_active_subscription": false, + "billing_period": { + "current_period_start": "2023-11-17T23:19:00.444Z", + "current_period_end": "2023-12-17T23:19:00.444Z", + "interval": "month" + }, + "available_product_features": [ + { + "key": "surveys_unlimited_surveys", + "name": "Unlimited surveys", + "description": "Create as many surveys as you want.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_all_question_types", + "name": "All question types", + "description": "Rating scale (for NPS and the like), multiple choice, single choice, emoji rating, link, free text.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_user_targeting", + "name": "User property targeting", + "description": "Target users based on any of their user properties.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_user_sampling", + "name": "User sampling", + "description": "Sample users to only survey a portion of the users who match the criteria.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_api_mode", + "name": "API mode", + "description": "Create surveys via the API.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_results_analysis", + "name": "Results analysis", + "description": "Analyze your survey results including completion rates and drop offs.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_templates", + "name": "Templates", + "description": "Use our templates to get started quickly with NPS, customer satisfaction surveys, user interviews, and more.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "year", + "limit": 1, + "note": null + }, + { + "key": "zapier", + "name": "Zapier", + "description": "Zapier lets you connect PostHog with thousands of the most popular apps, so you can automate your work and have more time for what matters most—no code required.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "slack_integration", + "name": "Slack", + "description": "Get notified about new actions in Slack.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "microsoft_teams_integration", + "name": "Microsoft Teams", + "description": "Get notified about new actions in Microsoft Teams.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "discord_integration", + "name": "Discord", + "description": "Get notified about new actions in Discord.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "apps", + "name": "CDP + Apps library", + "description": "Connect your data with 50+ apps including BigQuery, Redshift, and more.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "boolean_flags", + "name": "Boolean 
feature flags", + "description": "Turn features on and off for specific users.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "persist_flags_cross_authentication", + "name": "Persist flags across authentication", + "description": "Persist feature flags across authentication events so that flag values don't change when an anonymous user logs in and becomes identified.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "feature_flag_payloads", + "name": "Payloads", + "description": "Send additional pieces of information (any valid JSON) to your app when a flag is matched for a user.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "multiple_release_conditions", + "name": "Multiple release conditions", + "description": "Target multiple groups of users with different release conditions for the same feature flag.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "release_condition_overrides", + "name": "Release condition overrides", + "description": "For any release condition, specify which flag value the users or groups in that condition should receive.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "targeting_by_group", + "name": "Flag targeting by groups", + "description": "Target feature flag release conditions by group properties, not just user properties.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "local_evaluation_and_bootstrapping", + "name": "Local evaluation & bootstrapping", + "description": "Bootstrap flags on initialization so all flags are available immediately, without having to make extra network requests.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "flag_usage_stats", + "name": "Flag usage stats", + "description": "See how many times a flag has been evaluated, how many times each variant has been returned, and what values users received.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "feature_flags_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "year", + "limit": 1, + "note": null + }, + { + "key": "console_logs", + "name": "Console logs", + "description": "Diagnose issues by inspecting errors in the user's network console", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "recordings_playlists", + "name": "Recording playlists", + "description": "Create playlists of certain session recordings to easily find and watch them again in the future.", + "unit": "playlists", + "limit": 5, + "note": null + }, + { + "key": "session_replay_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "month", + "limit": 1, + "note": null + }, + { + "key": "dashboards", + "name": "Dashboards", + "description": "Save trends, funnels, and other insights for easy reference by your whole team.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "funnels", + "name": "Funnels", + "description": "Visualize user dropoff between a sequence of events.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "graphs_trends", + "name": "Graphs & trends", + "description": "Plot any number of events or actions over time.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "paths", + "name": "Paths", + "description": "Limited paths excludes: customizing path insights by setting the maximum number of paths, number of people on each path, how path names appear", + 
"unit": null, + "limit": null, + "note": "Limited" + }, + { + "key": "product_analytics_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "year", + "limit": 1, + "note": null + }, + { + "key": "tracked_users", + "name": "Tracked users", + "description": "Track users across devices and sessions.", + "unit": null, + "limit": null, + "note": "Unlimited" + }, + { + "key": "team_members", + "name": "Team members", + "description": "PostHog doesn't charge per seat add your entire team!", + "unit": null, + "limit": null, + "note": "Unlimited" + }, + { + "key": "organizations_projects", + "name": "Projects", + "description": "Create silos of data within PostHog. All data belongs to a single project and all queries are project-specific.", + "unit": "project", + "limit": 1, + "note": null + }, + { + "key": "api_access", + "name": "API access", + "description": "Access your data via our developer-friendly API.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "social_sso", + "name": "SSO via Google, Github, or Gitlab", + "description": "Log in to PostHog with your Google, Github, or Gitlab account.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "community_support", + "name": "Community support", + "description": "Get help from other users and PostHog team members in our Community forums.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "terms_and_conditions", + "name": "Terms and conditions", + "description": "Terms and conditions", + "unit": null, + "limit": null, + "note": "Standard" + } + ], + "current_total_amount_usd": null, + "current_total_amount_usd_after_discount": null, + "products": [ + { + "name": "Product analytics + data stack", + "description": "Trends, funnels, path analysis, CDP + more.", + "price_description": null, + "usage_key": "events", + "image_url": "https://posthog.com/images/product/product-icons/product-analytics.svg", + "icon_key": "IconGraph", + "docs_url": "https://posthog.com/docs/product-analytics", + "subscribed": false, + "plans": [ + { + "plan_key": "free-20230117", + "product_key": "product_analytics", + "name": "Product analytics + data stack", + "description": "Trends, funnels, path analysis, CDP + more.", + "image_url": "https://posthog.com/images/product/product-icons/product-analytics.svg", + "docs_url": "https://posthog.com/docs/product-analytics", + "note": null, + "unit": "event", + "free_allocation": 1000000, + "features": [ + { + "key": "dashboards", + "name": "Dashboards", + "description": "Save trends, funnels, and other insights for easy reference by your whole team.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "funnels", + "name": "Funnels", + "description": "Visualize user dropoff between a sequence of events.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "graphs_trends", + "name": "Graphs & trends", + "description": "Plot any number of events or actions over time.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "paths", + "name": "Paths", + "description": "Limited paths excludes: customizing path insights by setting the maximum number of paths, number of people on each path, how path names appear", + "unit": null, + "limit": null, + "note": "Limited" + }, + { + "key": "product_analytics_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "year", + "limit": 1, + "note": null + } + ], + "tiers": null, + 
"current_plan": true, + "included_if": null + }, + { + "plan_key": "paid-20230509", + "product_key": "product_analytics", + "name": "Product analytics + data stack", + "description": "Trends, funnels, path analysis, CDP + more.", + "image_url": "https://posthog.com/images/product/product-icons/product-analytics.svg", + "docs_url": "https://posthog.com/docs/product-analytics", + "note": null, + "unit": "event", + "free_allocation": null, + "features": [ + { + "key": "dashboards", + "name": "Dashboards", + "description": "Save trends, funnels, and other insights for easy reference by your whole team.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "funnels", + "name": "Funnels", + "description": "Visualize user dropoff between a sequence of events.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "graphs_trends", + "name": "Graphs & trends", + "description": "Plot any number of events or actions over time.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "paths", + "name": "Paths", + "description": "Limited paths excludes: customizing path insights by setting the maximum number of paths, number of people on each path, how path names appear", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "subscriptions", + "name": "Insight & dashboard subscriptions", + "description": "Create a subscription for any insight or dashboard in PostHog to receive regular reports with their updates.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "paths_advanced", + "name": "Advanced paths", + "description": "Customize your path insights by setting the maximum number of paths, number of people on each path, and how path names should appear.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "dashboard_permissioning", + "name": "Dashboard permissions", + "description": "Restrict access to dashboards within the organization to only those who need it.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "dashboard_collaboration", + "name": "Tags & text cards", + "description": "Keep organized by adding tags to your dashboards, cohorts and more. 
Add text cards and descriptions to your dashboards to provide context to your team.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "ingestion_taxonomy", + "name": "Ingestion taxonomy", + "description": "Ingestion taxonomy", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "correlation_analysis", + "name": "Correlation analysis", + "description": "Automatically highlight significant factors that affect the conversion rate of users within a funnel.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "tagging", + "name": "Dashboard tags", + "description": "Organize dashboards with tags.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "behavioral_cohort_filtering", + "name": "Lifecycle cohorts", + "description": "Group users based on their long term behavior, such as whether they frequently performed an event, or have recently stopped performing an event.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "product_analytics_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "years", + "limit": 7, + "note": null + } + ], + "tiers": [ + { + "flat_amount_usd": "0", + "unit_amount_usd": "0", + "up_to": 1000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0003068", + "up_to": 2000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.00013", + "up_to": 15000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000819", + "up_to": 50000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000455", + "up_to": 100000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000234", + "up_to": 250000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000052", + "up_to": null, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + } + ], + "current_plan": false, + "included_if": null + } + ], + "type": "product_analytics", + "free_allocation": 1000000, + "tiers": null, + "tiered": true, + "unit_amount_usd": null, + "current_amount_usd_before_addons": null, + "current_amount_usd": null, + "current_usage": 0, + "usage_limit": 1000000, + "has_exceeded_limit": false, + "percentage_usage": 0.0, + "projected_usage": 0, + "projected_amount_usd": null, + "unit": "event", + "addons": [ + { + "name": "Group analytics", + "description": "Associate events with a group or entity - such as a company, community, or project. Analyze these events as if they were sent by that entity itself. 
Great for B2B, marketplaces, and more.", + "price_description": null, + "image_url": "https://posthog.com/images/product/product-icons/group-analytics.svg", + "icon_key": "IconPeople", + "docs_url": "https://posthog.com/docs/product-analytics/group-analytics", + "type": "group_analytics", + "tiers": [ + { + "flat_amount_usd": "0", + "unit_amount_usd": "0", + "up_to": 1000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000708", + "up_to": 2000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.00003", + "up_to": 15000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000189", + "up_to": 50000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000105", + "up_to": 100000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000054", + "up_to": 250000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000012", + "up_to": null, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + } + ], + "tiered": true, + "included_with_main_product": false, + "subscribed": false, + "unit": "event", + "unit_amount_usd": null, + "current_amount_usd": null, + "current_usage": 0, + "projected_usage": 0, + "projected_amount_usd": null, + "plans": [ + { + "plan_key": "addon-20230509", + "product_key": "group_analytics", + "name": "Group analytics", + "description": "Associate events with a group or entity - such as a company, community, or project. Analyze these events as if they were sent by that entity itself. 
Great for B2B, marketplaces, and more.", + "image_url": "https://posthog.com/images/product/product-icons/group-analytics.svg", + "docs_url": "https://posthog.com/docs/product-analytics/group-analytics", + "note": null, + "unit": "event", + "free_allocation": null, + "features": [ + { + "key": "group_analytics", + "name": "Group analytics", + "description": "Associate events with a group - such as a company, community, or project - and analyze them in that context.", + "unit": null, + "limit": null, + "note": null + } + ], + "tiers": [ + { + "flat_amount_usd": "0", + "unit_amount_usd": "0", + "up_to": 1000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000708", + "up_to": 2000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.00003", + "up_to": 15000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000189", + "up_to": 50000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000105", + "up_to": 100000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000054", + "up_to": 250000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0000012", + "up_to": null, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + } + ], + "current_plan": false, + "included_if": null + } + ], + "contact_support": false + } + ], + "contact_support": false, + "inclusion_only": false + }, + { + "name": "Session replay", + "description": "Searchable recordings of people using your app or website with console logs and behavioral bucketing.", + "price_description": null, + "usage_key": "recordings", + "image_url": "https://posthog.com/images/product/product-icons/session-replay.svg", + "icon_key": "IconRewindPlay", + "docs_url": "https://posthog.com/docs/session-replay", + "subscribed": false, + "plans": [ + { + "plan_key": "free-20230117", + "product_key": "session_replay", + "name": "Session replay", + "description": "Searchable recordings of people using your app or website with console logs and behavioral bucketing.", + "image_url": "https://posthog.com/images/product/product-icons/session-replay.svg", + "docs_url": "https://posthog.com/docs/session-replay", + "note": null, + "unit": "recording", + "free_allocation": 15000, + "features": [ + { + "key": "console_logs", + "name": "Console logs", + "description": "Diagnose issues by inspecting errors in the user's network console", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "recordings_playlists", + "name": "Recording playlists", + "description": "Create playlists of certain session recordings to easily find and watch them again in the future.", + "unit": "playlists", + "limit": 5, + "note": null + }, + { + "key": "session_replay_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": 
"month", + "limit": 1, + "note": null + } + ], + "tiers": null, + "current_plan": true, + "included_if": null + }, + { + "plan_key": "paid-20230117", + "product_key": "session_replay", + "name": "Session replay", + "description": "Searchable recordings of people using your app or website with console logs and behavioral bucketing.", + "image_url": "https://posthog.com/images/product/product-icons/session-replay.svg", + "docs_url": "https://posthog.com/docs/session-replay", + "note": null, + "unit": "recording", + "free_allocation": null, + "features": [ + { + "key": "console_logs", + "name": "Console logs", + "description": "Diagnose issues by inspecting errors in the user's network console", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "recordings_playlists", + "name": "Recording playlists", + "description": "Create playlists of certain session recordings to easily find and watch them again in the future.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "recordings_performance", + "name": "Network performance on recordings", + "description": "See your end-user's network performance and information alongside session recordings.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "recordings_file_export", + "name": "Recordings file export", + "description": "Save session recordings as a file to your local filesystem.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "session_replay_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "months", + "limit": 3, + "note": null + } + ], + "tiers": [ + { + "flat_amount_usd": "0", + "unit_amount_usd": "0", + "up_to": 15000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.005", + "up_to": 50000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0045", + "up_to": 150000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.004", + "up_to": 500000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0035", + "up_to": null, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + } + ], + "current_plan": false, + "included_if": null + } + ], + "type": "session_replay", + "free_allocation": 15000, + "tiers": null, + "tiered": true, + "unit_amount_usd": null, + "current_amount_usd_before_addons": null, + "current_amount_usd": null, + "current_usage": 0, + "usage_limit": 15000, + "has_exceeded_limit": false, + "percentage_usage": 0.0, + "projected_usage": 0, + "projected_amount_usd": null, + "unit": "recording", + "addons": [], + "contact_support": false, + "inclusion_only": false + }, + { + "name": "Feature flags & A/B testing", + "description": "Safely roll out new features and run experiments on changes.", + "price_description": null, + "usage_key": "feature_flag_requests", + "image_url": "https://posthog.com/images/product/product-icons/feature-flags.svg", + "icon_key": "IconToggle", + "docs_url": "https://posthog.com/docs/feature-flags", + "subscribed": false, + "plans": [ + { + "plan_key": 
"free-20230117", + "product_key": "feature_flags", + "name": "Feature flags & A/B testing", + "description": "Safely roll out new features and run experiments on changes.", + "image_url": "https://posthog.com/images/product/product-icons/feature-flags.svg", + "docs_url": "https://posthog.com/docs/feature-flags", + "note": null, + "unit": "request", + "free_allocation": 1000000, + "features": [ + { + "key": "boolean_flags", + "name": "Boolean feature flags", + "description": "Turn features on and off for specific users.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "persist_flags_cross_authentication", + "name": "Persist flags across authentication", + "description": "Persist feature flags across authentication events so that flag values don't change when an anonymous user logs in and becomes identified.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "feature_flag_payloads", + "name": "Payloads", + "description": "Send additional pieces of information (any valid JSON) to your app when a flag is matched for a user.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "multiple_release_conditions", + "name": "Multiple release conditions", + "description": "Target multiple groups of users with different release conditions for the same feature flag.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "release_condition_overrides", + "name": "Release condition overrides", + "description": "For any release condition, specify which flag value the users or groups in that condition should receive.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "targeting_by_group", + "name": "Flag targeting by groups", + "description": "Target feature flag release conditions by group properties, not just user properties.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "local_evaluation_and_bootstrapping", + "name": "Local evaluation & bootstrapping", + "description": "Bootstrap flags on initialization so all flags are available immediately, without having to make extra network requests.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "flag_usage_stats", + "name": "Flag usage stats", + "description": "See how many times a flag has been evaluated, how many times each variant has been returned, and what values users received.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "feature_flags_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "year", + "limit": 1, + "note": null + } + ], + "tiers": null, + "current_plan": true, + "included_if": null + }, + { + "plan_key": "paid-20230623", + "product_key": "feature_flags", + "name": "Feature flags & A/B testing", + "description": "Safely roll out new features and run experiments on changes.", + "image_url": "https://posthog.com/images/product/product-icons/feature-flags.svg", + "docs_url": "https://posthog.com/docs/feature-flags", + "note": null, + "unit": "request", + "free_allocation": null, + "features": [ + { + "key": "boolean_flags", + "name": "Boolean feature flags", + "description": "Turn features on and off for specific users.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "multivariate_flags", + "name": "Multivariate feature flags & experiments", + "description": "Create three or more variants of a feature flag to test or release different versions of a feature.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": 
"persist_flags_cross_authentication", + "name": "Persist flags across authentication", + "description": "Persist feature flags across authentication events so that flag values don't change when an anonymous user logs in and becomes identified.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "feature_flag_payloads", + "name": "Payloads", + "description": "Send additional pieces of information (any valid JSON) to your app when a flag is matched for a user.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "multiple_release_conditions", + "name": "Multiple release conditions", + "description": "Target multiple groups of users with different release conditions for the same feature flag.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "release_condition_overrides", + "name": "Release condition overrides", + "description": "For any release condition, specify which flag value the users or groups in that condition should receive.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "targeting_by_group", + "name": "Flag targeting by groups", + "description": "Target feature flag release conditions by group properties, not just user properties.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "local_evaluation_and_bootstrapping", + "name": "Local evaluation & bootstrapping", + "description": "Bootstrap flags on initialization so all flags are available immediately, without having to make extra network requests.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "flag_usage_stats", + "name": "Flag usage stats", + "description": "See how many times a flag has been evaluated, how many times each variant has been returned, and what values users received.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "experimentation", + "name": "A/B testing", + "description": "Test changes to your product and evaluate the impacts those changes make.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "group_experiments", + "name": "Group experiments", + "description": "Target experiments to specific groups of users so everyone in the same group gets the same variant.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "funnel_experiments", + "name": "Funnel & trend experiments", + "description": "Measure the impact of a change on a aggregate values or a series of events, like a signup flow.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "secondary_metrics", + "name": "Secondary experiment metrics", + "description": "Track additional metrics to see how your experiment affects other parts of your app or different flows.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "statistical_analysis", + "name": "Statistical analysis", + "description": "Get a statistical analysis of your experiment results to see if the results are significant, or if they're likely just due to chance.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "feature_flags_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "years", + "limit": 7, + "note": null + } + ], + "tiers": [ + { + "flat_amount_usd": "0", + "unit_amount_usd": "0", + "up_to": 1000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.0001", + "up_to": 2000000, + "current_amount_usd": 
"0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.000045", + "up_to": 10000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.000025", + "up_to": 50000000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.00001", + "up_to": null, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + } + ], + "current_plan": false, + "included_if": null + } + ], + "type": "feature_flags", + "free_allocation": 1000000, + "tiers": null, + "tiered": true, + "unit_amount_usd": null, + "current_amount_usd_before_addons": null, + "current_amount_usd": null, + "current_usage": 0, + "usage_limit": 1000000, + "has_exceeded_limit": false, + "percentage_usage": 0.0, + "projected_usage": 0, + "projected_amount_usd": null, + "unit": "request", + "addons": [], + "contact_support": false, + "inclusion_only": false + }, + { + "name": "Surveys", + "description": "Collect feedback from your users. Multiple choice, rating, open text, and more.", + "price_description": null, + "usage_key": "survey_responses", + "image_url": "https://posthog.com/images/product/product-icons/surveys.svg", + "icon_key": "IconMessage", + "docs_url": "https://posthog.com/docs/surveys", + "subscribed": false, + "plans": [ + { + "plan_key": "free-20230928", + "product_key": "surveys", + "name": "Surveys", + "description": "Collect feedback from your users. Multiple choice, rating, open text, and more.", + "image_url": "https://posthog.com/images/product/product-icons/surveys.svg", + "docs_url": "https://posthog.com/docs/surveys", + "note": null, + "unit": "survey response", + "free_allocation": 250, + "features": [ + { + "key": "surveys_unlimited_surveys", + "name": "Unlimited surveys", + "description": "Create as many surveys as you want.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_all_question_types", + "name": "All question types", + "description": "Rating scale (for NPS and the like), multiple choice, single choice, emoji rating, link, free text.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_user_targeting", + "name": "User property targeting", + "description": "Target users based on any of their user properties.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_user_sampling", + "name": "User sampling", + "description": "Sample users to only survey a portion of the users who match the criteria.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_api_mode", + "name": "API mode", + "description": "Create surveys via the API.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_results_analysis", + "name": "Results analysis", + "description": "Analyze your survey results including completion rates and drop offs.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_templates", + "name": "Templates", + "description": "Use our templates to get started quickly with NPS, customer satisfaction surveys, user interviews, and more.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_data_retention", + "name": "Data retention", + "description": "Keep a historical record of 
your data.", + "unit": "year", + "limit": 1, + "note": null + } + ], + "tiers": null, + "current_plan": true, + "included_if": null + }, + { + "plan_key": "paid-20230928", + "product_key": "surveys", + "name": "Surveys", + "description": "Collect feedback from your users. Multiple choice, rating, open text, and more.", + "image_url": "https://posthog.com/images/product/product-icons/surveys.svg", + "docs_url": "https://posthog.com/docs/surveys", + "note": null, + "unit": "survey response", + "free_allocation": null, + "features": [ + { + "key": "surveys_unlimited_surveys", + "name": "Unlimited surveys", + "description": "Create as many surveys as you want.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_all_question_types", + "name": "All question types", + "description": "Rating scale (for NPS and the like), multiple choice, single choice, emoji rating, link, free text.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_multiple_questions", + "name": "Multiple questions", + "description": "Create multiple questions in a single survey.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_user_targeting", + "name": "User property targeting", + "description": "Target users based on any of their user properties.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_user_sampling", + "name": "User sampling", + "description": "Sample users to only survey a portion of the users who match the criteria.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_styling", + "name": "Custom colors & positioning", + "description": "Customize the colors of your surveys to match your brand and set survey position.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_text_html", + "name": "Custom HTML text", + "description": "Add custom HTML to your survey text.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_api_mode", + "name": "API mode", + "description": "Create surveys via the API.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_results_analysis", + "name": "Results analysis", + "description": "Analyze your survey results including completion rates and drop offs.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_templates", + "name": "Templates", + "description": "Use our templates to get started quickly with NPS, customer satisfaction surveys, user interviews, and more.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "surveys_data_retention", + "name": "Data retention", + "description": "Keep a historical record of your data.", + "unit": "years", + "limit": 7, + "note": null + } + ], + "tiers": [ + { + "flat_amount_usd": "0", + "unit_amount_usd": "0", + "up_to": 250, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.2", + "up_to": 500, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.1", + "up_to": 1000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.035", + "up_to": 10000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { 
+ "flat_amount_usd": "0", + "unit_amount_usd": "0.015", + "up_to": 20000, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + }, + { + "flat_amount_usd": "0", + "unit_amount_usd": "0.01", + "up_to": null, + "current_amount_usd": "0.00", + "current_usage": 0, + "projected_usage": null, + "projected_amount_usd": null + } + ], + "current_plan": false, + "included_if": null + } + ], + "type": "surveys", + "free_allocation": 250, + "tiers": null, + "tiered": true, + "unit_amount_usd": null, + "current_amount_usd_before_addons": null, + "current_amount_usd": null, + "current_usage": 0, + "usage_limit": 250, + "has_exceeded_limit": false, + "percentage_usage": 0.0, + "projected_usage": 0, + "projected_amount_usd": null, + "unit": "survey response", + "addons": [], + "contact_support": false, + "inclusion_only": false + }, + { + "name": "Integrations + CDP", + "description": "Connect PostHog to your favorite tools.", + "price_description": null, + "usage_key": null, + "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", + "icon_key": "IconBolt", + "docs_url": "https://posthog.com/docs/apps", + "subscribed": null, + "plans": [ + { + "plan_key": "free-20230117", + "product_key": "integrations", + "name": "Integrations + CDP", + "description": "Connect PostHog to your favorite tools.", + "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", + "docs_url": "https://posthog.com/docs/apps", + "note": null, + "unit": null, + "free_allocation": null, + "features": [ + { + "key": "zapier", + "name": "Zapier", + "description": "Zapier lets you connect PostHog with thousands of the most popular apps, so you can automate your work and have more time for what matters most—no code required.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "slack_integration", + "name": "Slack", + "description": "Get notified about new actions in Slack.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "microsoft_teams_integration", + "name": "Microsoft Teams", + "description": "Get notified about new actions in Microsoft Teams.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "discord_integration", + "name": "Discord", + "description": "Get notified about new actions in Discord.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "apps", + "name": "CDP + Apps library", + "description": "Connect your data with 50+ apps including BigQuery, Redshift, and more.", + "unit": null, + "limit": null, + "note": null + } + ], + "tiers": null, + "current_plan": true, + "included_if": "no_active_subscription" + }, + { + "plan_key": "paid-20230117", + "product_key": "integrations", + "name": "Integrations + CDP", + "description": "Connect PostHog to your favorite tools.", + "image_url": "https://posthog.com/images/product/product-icons/integrations.svg", + "docs_url": "https://posthog.com/docs/apps", + "note": null, + "unit": null, + "free_allocation": null, + "features": [ + { + "key": "zapier", + "name": "Zapier", + "description": "Zapier lets you connect PostHog with thousands of the most popular apps, so you can automate your work and have more time for what matters most—no code required.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "slack_integration", + "name": "Slack", + "description": "Get notified about new actions in Slack.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "microsoft_teams_integration", + 
"name": "Microsoft Teams", + "description": "Get notified about new actions in Microsoft Teams.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "discord_integration", + "name": "Discord", + "description": "Get notified about new actions in Discord.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "apps", + "name": "CDP + Apps library", + "description": "Connect your data with 50+ apps including BigQuery, Redshift, and more.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "app_metrics", + "name": "App metrics", + "description": "Get metrics on your apps to see their usage, reliability, and more.", + "unit": null, + "limit": null, + "note": null + } + ], + "tiers": null, + "current_plan": false, + "included_if": "has_subscription" + } + ], + "type": "integrations", + "free_allocation": 0, + "tiers": null, + "tiered": false, + "unit_amount_usd": null, + "current_amount_usd_before_addons": null, + "current_amount_usd": null, + "current_usage": 0, + "usage_limit": 0, + "has_exceeded_limit": false, + "percentage_usage": 0, + "projected_usage": 0, + "projected_amount_usd": null, + "unit": null, + "addons": [], + "contact_support": false, + "inclusion_only": true + }, + { + "name": "Platform and support", + "description": "SSO, permission management, and support.", + "price_description": null, + "usage_key": null, + "image_url": "https://posthog.com/images/product/product-icons/platform.svg", + "icon_key": "IconStack", + "docs_url": "https://posthog.com/docs", + "subscribed": null, + "plans": [ + { + "plan_key": "free-20230117", + "product_key": "platform_and_support", + "name": "Platform and support", + "description": "SSO, permission management, and support.", + "image_url": "https://posthog.com/images/product/product-icons/platform.svg", + "docs_url": "https://posthog.com/docs", + "note": null, + "unit": null, + "free_allocation": null, + "features": [ + { + "key": "tracked_users", + "name": "Tracked users", + "description": "Track users across devices and sessions.", + "unit": null, + "limit": null, + "note": "Unlimited" + }, + { + "key": "team_members", + "name": "Team members", + "description": "PostHog doesn't charge per seat add your entire team!", + "unit": null, + "limit": null, + "note": "Unlimited" + }, + { + "key": "organizations_projects", + "name": "Projects", + "description": "Create silos of data within PostHog. 
All data belongs to a single project and all queries are project-specific.", + "unit": "project", + "limit": 1, + "note": null + }, + { + "key": "api_access", + "name": "API access", + "description": "Access your data via our developer-friendly API.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "social_sso", + "name": "SSO via Google, Github, or Gitlab", + "description": "Log in to PostHog with your Google, Github, or Gitlab account.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "community_support", + "name": "Community support", + "description": "Get help from other users and PostHog team members in our Community forums.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "terms_and_conditions", + "name": "Terms and conditions", + "description": "Terms and conditions", + "unit": null, + "limit": null, + "note": "Standard" + } + ], + "tiers": null, + "current_plan": true, + "included_if": "no_active_subscription" + }, + { + "plan_key": "paid-20230926", + "product_key": "platform_and_support", + "name": "Platform and support", + "description": "SSO, permission management, and support.", + "image_url": "https://posthog.com/images/product/product-icons/platform.svg", + "docs_url": "https://posthog.com/docs", + "note": null, + "unit": null, + "free_allocation": null, + "features": [ + { + "key": "tracked_users", + "name": "Tracked users", + "description": "Track users across devices and sessions.", + "unit": null, + "limit": null, + "note": "Unlimited" + }, + { + "key": "team_members", + "name": "Team members", + "description": "PostHog doesn't charge per seat; add your entire team!", + "unit": null, + "limit": null, + "note": "Unlimited" + }, + { + "key": "organizations_projects", + "name": "Projects", + "description": "Create silos of data within PostHog. All data belongs to a single project and all queries are project-specific.", + "unit": null, + "limit": null, + "note": "Unlimited" + }, + { + "key": "api_access", + "name": "API access", + "description": "Access your data via our developer-friendly API.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "social_sso", + "name": "SSO via Google, Github, or Gitlab", + "description": "Log in to PostHog with your Google, Github, or Gitlab account.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "project_based_permissioning", + "name": "Project permissions", + "description": "Restrict access to data within the organization to only those who need it.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "white_labelling", + "name": "White labeling", + "description": "Use your own branding in your PostHog organization.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "community_support", + "name": "Community support", + "description": "Get help from other users and PostHog team members in our Community forums.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "dedicated_support", + "name": "Slack (dedicated channel)", + "description": "Get help directly from our support team in a dedicated Slack channel shared between you and the PostHog team.", + "unit": null, + "limit": null, + "note": "$2k/month spend or above" + }, + { + "key": "email_support", + "name": "Direct access to engineers", + "description": "Get help directly from our product engineers via email. 
No wading through multiple support people before you get help.", + "unit": null, + "limit": null, + "note": null + }, + { + "key": "terms_and_conditions", + "name": "Terms and conditions", + "description": "Terms and conditions", + "unit": null, + "limit": null, + "note": "Standard" + }, + { + "key": "security_assessment", + "name": "Security assessment", + "description": "Security assessment", + "unit": null, + "limit": null, + "note": null + } + ], + "tiers": null, + "current_plan": false, + "included_if": "has_subscription" + } + ], + "type": "platform_and_support", + "free_allocation": 0, + "tiers": null, + "tiered": false, + "unit_amount_usd": null, + "current_amount_usd_before_addons": null, + "current_amount_usd": null, + "current_usage": 0, + "usage_limit": 0, + "has_exceeded_limit": false, + "percentage_usage": 0, + "projected_usage": 0, + "projected_amount_usd": null, + "unit": null, + "addons": [], + "contact_support": true, + "inclusion_only": true + } + ], + "custom_limits_usd": {}, + "usage_summary": { + "events": { + "usage": 0, + "limit": 1000000 + }, + "recordings": { + "usage": 0, + "limit": 15000 + }, + "feature_flag_requests": { + "usage": 0, + "limit": 1000000 + }, + "survey_responses": { + "usage": 0, + "limit": 250 + } + }, + "free_trial_until": null, + "discount_percent": null, + "discount_amount_usd": null, + "amount_off_expires_at": null, + "never_drop_data": null, + "stripe_portal_url": "https://billing.stripe.com/p/session/test_YWNjdF8xSElNRERFdUlhdFJYU2R6LF9QN0ltZVQ3RmpLbTZacXgzYWo3Q0FFbFpITHZydlpK0100iKmkfAZi" +} diff --git a/frontend/src/scenes/App.tsx b/frontend/src/scenes/App.tsx index a12a690e5fb53..e403138c07f86 100644 --- a/frontend/src/scenes/App.tsx +++ b/frontend/src/scenes/App.tsx @@ -155,7 +155,7 @@ function AppScene(): JSX.Element | null { ) : null } - const Navigation = featureFlags[FEATURE_FLAGS.POSTHOG_3000] ? Navigation3000 : NavigationClassic + const Navigation = featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test' ? Navigation3000 : NavigationClassic return ( <> diff --git a/frontend/src/scenes/actions/ActionEdit.tsx b/frontend/src/scenes/actions/ActionEdit.tsx index 7c38dd21066be..eaea5b848bd51 100644 --- a/frontend/src/scenes/actions/ActionEdit.tsx +++ b/frontend/src/scenes/actions/ActionEdit.tsx @@ -170,6 +170,7 @@ export function ActionEdit({ action: loadedAction, id }: ActionEditLogicProps): type="primary" htmlType="submit" loading={actionLoading} + onClick={submitAction} > Save diff --git a/frontend/src/scenes/authentication/InviteSignup.tsx b/frontend/src/scenes/authentication/InviteSignup.tsx index 0128e51c01a68..aa87fca3c9d9d 100644 --- a/frontend/src/scenes/authentication/InviteSignup.tsx +++ b/frontend/src/scenes/authentication/InviteSignup.tsx @@ -193,7 +193,7 @@ function AuthenticatedAcceptInvite({ invite }: { invite: PrevalidatedInvite }): function UnauthenticatedAcceptInvite({ invite }: { invite: PrevalidatedInvite }): JSX.Element { const { signup, isSignupSubmitting } = useValues(inviteSignupLogic) const { preflight } = useValues(preflightLogic) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') return ( { - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') return is3000 ? 
{ diff --git a/frontend/src/scenes/billing/BillingLimitInput.tsx b/frontend/src/scenes/billing/BillingLimitInput.tsx index db519a096265a..d7ce9d6398437 100644 --- a/frontend/src/scenes/billing/BillingLimitInput.tsx +++ b/frontend/src/scenes/billing/BillingLimitInput.tsx @@ -140,7 +140,6 @@ export const BillingLimitInput = ({ product }: { product: BillingProductV2Type } {customLimitUsd ? ( } status="danger" size="small" tooltip="Remove billing limit" diff --git a/frontend/src/scenes/billing/PlanComparison.tsx b/frontend/src/scenes/billing/PlanComparison.tsx index 3b341f534f9ec..6c54369d984ca 100644 --- a/frontend/src/scenes/billing/PlanComparison.tsx +++ b/frontend/src/scenes/billing/PlanComparison.tsx @@ -63,7 +63,7 @@ const getProductTiers = ( {tiers ? ( tiers?.map((tier, i) => (
@@ -79,7 +79,7 @@ const getProductTiers = ( )) ) : product?.free_allocation ? (
@@ -111,7 +111,7 @@ export const PlanComparison = ({ const upgradeButtons = plans?.map((plan) => { return ( - + - {!feature ? ( - <> - - - ) : feature.limit ? ( - <> - - {feature.limit && - `${convertLargeNumberToWords(feature.limit, null)} ${feature.unit && feature.unit}${ - timeDenominator ? `/${timeDenominator}` : '' - }`} - {feature.note} - - ) : ( - <> - - {feature.note} - - )} -
- ) -} - -const getPlanBasePrice = (plan: BillingV2PlanType): number | string => { - const basePlan = plan.products.find((product) => product.type === 'enterprise' || product.type === 'base') - if (basePlan?.unit_amount_usd) { - return `$${parseInt(basePlan.unit_amount_usd)}/mo` - } - if (plan.is_free) { - return 'Free forever' - } - return '$0/mo' -} - -const convertLargeNumberToWords = ( - // The number to convert - num: number | null, - // The previous tier's number - previousNum: number | null, - // Whether we will be showing multiple tiers (to denote the first tier with 'first') - multipleTiers: boolean = false, - // The product type (to denote the unit) - productType: BillingProductV2Type['type'] | null = null -): string => { - if (num === null && previousNum) { - return `${convertLargeNumberToWords(previousNum, null)} +` - } - if (num === null) { - return '' - } - - let denominator = 1 - - if (num >= 1000000) { - denominator = 1000000 - } else if (num >= 1000) { - denominator = 1000 - } - - return `${previousNum ? `${(previousNum / denominator).toFixed(0)}-` : multipleTiers ? 'First ' : ''}${( - num / denominator - ).toFixed(0)}${denominator === 1000000 ? ' million' : denominator === 1000 ? 'k' : ''}${ - !previousNum && multipleTiers ? ` ${productType}/mo` : '' - }` -} - -const getProductTiers = (plan: BillingV2PlanType, productType: BillingProductV2Type['type']): JSX.Element => { - const product = plan.products.find((planProduct) => planProduct.type === productType) - const tiers = product?.tiers - return ( - <> - {tiers ? ( - tiers?.map((tier, i) => ( -
- - {convertLargeNumberToWords(tier.up_to, tiers[i - 1]?.up_to, true, productType)} - - - {i === 0 && parseFloat(tier.unit_amount_usd) === 0 - ? // if the base product has a price, then the first tier is included, otherwise it's free - plan.products.filter((p) => p.type === 'base')?.[0]?.unit_amount_usd - ? 'Included' - : 'Free' - : `$${parseFloat(tier.unit_amount_usd).toFixed(6)}`} - -
- )) - ) : product?.free_allocation ? ( -
- - Up to {convertLargeNumberToWords(product?.free_allocation, null)} {product?.type}/mo - - Free -
- ) : null} - - ) -} - -export function PlanTable({ redirectPath }: { redirectPath: string }): JSX.Element { - const { billing } = useValues(billingLogic) - const { reportBillingUpgradeClicked } = useActions(eventUsageLogic) - - const plans = billing?.available_plans?.filter((plan) => plan.name !== 'Enterprise') - - const excludedFeatures: string[] = [AvailableFeature.DASHBOARD_COLLABORATION] - - const upgradeButtons = plans?.map((plan) => ( - - { - if (!plan.is_free) { - reportBillingUpgradeClicked(plan.name) - } - }} - > - {!billing?.has_active_subscription && plan.is_free ? 'Current plan' : 'Upgrade'} - - - )) - - return !plans?.length ? ( - - ) : ( -
- - - - - ))} - - - - - - - - - {plans?.map((plan) => ( - - ))} - - {plans - ? plans[plans.length - 1].products - .filter((product) => product.type !== 'base') - .map((product, i) => ( - - - {plans?.map((plan) => ( - - ))} - - )) - : null} - - - - - - - {plans?.length > 0 - ? plans[plans.length - 1].products.map((product) => - product.feature_groups?.map((feature_group) => ( - <> - - - {(product.type === 'events' || product.type === 'recordings') && - plans?.map((plan) => ( - - ))} - - {feature_group.features.map((feature: BillingV2FeatureType, j: number) => { - return excludedFeatures.includes(feature.key) ? ( - <> - ) : ( - - - {plans?.map((plan) => ( - - ))} - - ) - })} - - )) - ) - : null} - - - -
- {plans?.map((plan) => ( - -

{plan.name}

-

{plan.description}

-
- Pricing -
Monthly base price - {getPlanBasePrice(plan)} -
- {product.name} -

- Priced per {product.type === 'events' ? 'event' : 'recording'} -

-
- {getProductTiers(plan, product.type)} -
- {upgradeButtons} -
- Features -
{feature_group.name} - p.type === product.type) - ?.free_allocation, - }} - timeDenominator="mo" - className={'text-base'} - /> -
- {feature.name} - - p.type === product.type) - ?.feature_groups?.find( - (fg) => fg.name === feature_group.name - ) - ?.features?.find((f) => f.key === feature.key)} - className={'text-base'} - /> -
- {upgradeButtons} -
-
- ) -} diff --git a/frontend/src/scenes/billing/billingLogic.ts b/frontend/src/scenes/billing/billingLogic.ts index 63c5c5222d9e8..ec549af6fe264 100644 --- a/frontend/src/scenes/billing/billingLogic.ts +++ b/frontend/src/scenes/billing/billingLogic.ts @@ -200,7 +200,7 @@ export const billingLogic = kea([ } const productOverLimit = billing.products?.find((x: BillingProductV2Type) => { - return x.percentage_usage > 1 + return x.percentage_usage > 1 && x.usage_key }) if (productOverLimit) { @@ -223,7 +223,9 @@ export const billingLogic = kea([ title: 'You will soon hit your usage limit', message: `You have currently used ${parseFloat( (productApproachingLimit.percentage_usage * 100).toFixed(2) - )}% of your ${productApproachingLimit.usage_key.toLowerCase()} allocation.`, + )}% of your ${ + productApproachingLimit.usage_key && productApproachingLimit.usage_key.toLowerCase() + } allocation.`, } } diff --git a/frontend/src/scenes/billing/billingProductLogic.ts b/frontend/src/scenes/billing/billingProductLogic.ts index 5d78ef5ac7e81..56e279ac2a66b 100644 --- a/frontend/src/scenes/billing/billingProductLogic.ts +++ b/frontend/src/scenes/billing/billingProductLogic.ts @@ -104,7 +104,10 @@ export const billingProductLogic = kea([ customLimitUsd: [ (s, p) => [s.billing, p.product], (billing, product) => { - return billing?.custom_limits_usd?.[product.type] || billing?.custom_limits_usd?.[product.usage_key] + return ( + billing?.custom_limits_usd?.[product.type] || + (product.usage_key ? billing?.custom_limits_usd?.[product.usage_key] : '') + ) }, ], currentAndUpgradePlans: [ diff --git a/frontend/src/scenes/cohorts/CohortEdit.tsx b/frontend/src/scenes/cohorts/CohortEdit.tsx index c86cabb5137a0..585f2f5f36d1d 100644 --- a/frontend/src/scenes/cohorts/CohortEdit.tsx +++ b/frontend/src/scenes/cohorts/CohortEdit.tsx @@ -94,9 +94,7 @@ export function CohortEdit({ id }: CohortLogicProps): JSX.Element { data-attr="delete-cohort" fullWidth status="danger" - onClick={() => { - deleteCohort() - }} + onClick={deleteCohort} > Delete cohort diff --git a/frontend/src/scenes/dashboard/Dashboard.tsx b/frontend/src/scenes/dashboard/Dashboard.tsx index 6ff62dd3dea72..edf3a90b7a73e 100644 --- a/frontend/src/scenes/dashboard/Dashboard.tsx +++ b/frontend/src/scenes/dashboard/Dashboard.tsx @@ -170,9 +170,8 @@ function DashboardScene(): JSX.Element {
)}
- {placement !== DashboardPlacement.Export && !featureFlags[FEATURE_FLAGS.POSTHOG_3000] && ( - - )} + {placement !== DashboardPlacement.Export && + featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'control' && }
)} diff --git a/frontend/src/scenes/dashboard/EmptyDashboardComponent.tsx b/frontend/src/scenes/dashboard/EmptyDashboardComponent.tsx index cb2db4be65f5f..3cd01c1de2c4d 100644 --- a/frontend/src/scenes/dashboard/EmptyDashboardComponent.tsx +++ b/frontend/src/scenes/dashboard/EmptyDashboardComponent.tsx @@ -12,7 +12,7 @@ import { DASHBOARD_CANNOT_EDIT_MESSAGE } from './DashboardHeader' import { dashboardLogic } from './dashboardLogic' function SkeletonCard({ children, active }: { children: React.ReactNode; active: boolean }): JSX.Element { - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const rounded = is3000 ? 'rounded-md' : 'rounded' return ( diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 742eff1b44cb8..c7542d156b1bd 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -795,10 +795,11 @@ export const featureFlagLogic = kea([ ? 'updated' : 'copied' lemonToast.success(`Feature flag ${operation} successfully!`) + eventUsageLogic.actions.reportFeatureFlagCopySuccess() } else { - lemonToast.error( - `Error while saving feature flag: ${JSON.stringify(featureFlagCopy?.failed) || featureFlagCopy}` - ) + const errorMessage = JSON.stringify(featureFlagCopy?.failed) || featureFlagCopy + lemonToast.error(`Error while saving feature flag: ${errorMessage}`) + eventUsageLogic.actions.reportFeatureFlagCopyFailure(errorMessage) } actions.loadProjectsWithCurrentFlag() diff --git a/frontend/src/scenes/instance/SystemStatus/InstanceConfigSaveModal.tsx b/frontend/src/scenes/instance/SystemStatus/InstanceConfigSaveModal.tsx index 79d221898a27a..a0db3a7e7851d 100644 --- a/frontend/src/scenes/instance/SystemStatus/InstanceConfigSaveModal.tsx +++ b/frontend/src/scenes/instance/SystemStatus/InstanceConfigSaveModal.tsx @@ -1,5 +1,6 @@ import { LemonButton, LemonModal } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { pluralize } from 'lib/utils' @@ -56,6 +57,7 @@ export function InstanceConfigSaveModal({ onClose, isOpen }: { onClose: () => vo useValues(systemStatusLogic) const { saveInstanceConfig } = useActions(systemStatusLogic) const loading = updatedInstanceConfigCount !== null + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const isChangingEnabledEmailSettings = instanceConfigEditingState.EMAIL_ENABLED !== false && @@ -79,7 +81,12 @@ export function InstanceConfigSaveModal({ onClose, isOpen }: { onClose: () => vo > Cancel - + Apply {changeNoun} diff --git a/frontend/src/scenes/instance/SystemStatus/StaffUsersTab.tsx b/frontend/src/scenes/instance/SystemStatus/StaffUsersTab.tsx index c9f0274320c1b..4609d5241d0ef 100644 --- a/frontend/src/scenes/instance/SystemStatus/StaffUsersTab.tsx +++ b/frontend/src/scenes/instance/SystemStatus/StaffUsersTab.tsx @@ -54,7 +54,7 @@ export function StaffUsersTab(): JSX.Element { data-attr="invite-delete" icon={} status="danger" - disabled={staffUsers.length < 2} + disabledReason={staffUsers.length < 2 && 'At least one staff user must remain'} title={ staffUsers.length < 2 ? 'You should always have at least one staff user.' 
diff --git a/frontend/src/scenes/notebooks/IconNotebook.tsx b/frontend/src/scenes/notebooks/IconNotebook.tsx index bdc4a14becc46..d18e92347d946 100644 --- a/frontend/src/scenes/notebooks/IconNotebook.tsx +++ b/frontend/src/scenes/notebooks/IconNotebook.tsx @@ -3,7 +3,7 @@ import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { IconNotebook as IconNotebookLegacy, LemonIconProps } from 'lib/lemon-ui/icons' export function IconNotebook(props: LemonIconProps): JSX.Element { - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') return is3000 ? : } diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx index c8e1a1e48613c..20e38265582d7 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx @@ -86,7 +86,8 @@ const Component = ({ icon: , onClick: () => { if (activeSessionRecording.id) { - insertReplayCommentByTimestamp(0, activeSessionRecording.id) + const time = getReplayLogic(activeSessionRecording.id)?.values.currentPlayerTime + insertReplayCommentByTimestamp(time ?? 0, activeSessionRecording.id) } }, }, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx index 5b35259410c9d..2edfad098c9d1 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx @@ -142,7 +142,7 @@ type NotebookNodeRecordingAttributes = { export const NotebookNodeRecording = createPostHogWidgetNode({ nodeType: NotebookNodeType.Recording, - titlePlaceholder: 'Session replay', + titlePlaceholder: 'Session recording', Component, heightEstimate: HEIGHT, minHeight: MIN_HEIGHT, diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx index 25ac0088e1ad5..94757955f30d1 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx @@ -16,7 +16,7 @@ export type NotebookListMiniProps = { export function NotebookListMini({ selectedNotebookId }: NotebookListMiniProps): JSX.Element { const { notebooks, notebookTemplates } = useValues(notebooksModel) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const selectedTitle = selectedNotebookId === 'scratchpad' diff --git a/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx b/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx index e6db7608942d4..b0a9b56f8fa4d 100644 --- a/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx +++ b/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx @@ -25,7 +25,7 @@ export function NotebookCanvas(): JSX.Element { const { duplicateNotebook } = useActions(notebookLogic(logicProps)) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') if (!is3000) { return Canvas mode requires PostHog 3000} /> diff --git a/frontend/src/scenes/notebooks/NotebookPanel/notebookPanelLogic.ts b/frontend/src/scenes/notebooks/NotebookPanel/notebookPanelLogic.ts index 7f77123b42c53..98bc881454e75 100644 --- a/frontend/src/scenes/notebooks/NotebookPanel/notebookPanelLogic.ts +++ b/frontend/src/scenes/notebooks/NotebookPanel/notebookPanelLogic.ts @@ -67,7 +67,7 @@ export const notebookPanelLogic = kea([ })), selectors(({ cache, 
actions }) => ({ - is3000: [(s) => [s.featureFlags], (featureFlags) => featureFlags[FEATURE_FLAGS.POSTHOG_3000]], + is3000: [(s) => [s.featureFlags], (featureFlags) => featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test'], visibility: [ (s) => [s.selectedTab, s.sidePanelOpen, s.popoverVisibility, s.is3000], diff --git a/frontend/src/scenes/notebooks/NotebookScene.tsx b/frontend/src/scenes/notebooks/NotebookScene.tsx index 0d0b2baa69f5e..e97761af1b97b 100644 --- a/frontend/src/scenes/notebooks/NotebookScene.tsx +++ b/frontend/src/scenes/notebooks/NotebookScene.tsx @@ -41,7 +41,7 @@ export function NotebookScene(): JSX.Element { const { selectedNotebook, visibility } = useValues(notebookPanelLogic) const { featureFlags } = useValues(featureFlagLogic) - const buttonSize = featureFlags[FEATURE_FLAGS.POSTHOG_3000] ? 'small' : 'medium' + const buttonSize = featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test' ? 'small' : 'medium' if (!notebook && !loading && !conflictWarningVisible) { return diff --git a/frontend/src/scenes/onboarding/Onboarding.stories.tsx b/frontend/src/scenes/onboarding/Onboarding.stories.tsx new file mode 100644 index 0000000000000..971a3651ebc47 --- /dev/null +++ b/frontend/src/scenes/onboarding/Onboarding.stories.tsx @@ -0,0 +1,93 @@ +import { Meta } from '@storybook/react' +import { useActions, useMountedLogic } from 'kea' +import { router } from 'kea-router' +import { useEffect } from 'react' +import { App } from 'scenes/App' +import { urls } from 'scenes/urls' + +import { mswDecorator, useStorybookMocks } from '~/mocks/browser' +import billingUnsubscribedJson from '~/mocks/fixtures/_billing_unsubscribed.json' +import billingJson from '~/mocks/fixtures/_billing_v2.json' +import preflightJson from '~/mocks/fixtures/_preflight.json' +import { BillingProductV2Type, ProductKey } from '~/types' + +import { onboardingLogic, OnboardingStepKey } from './onboardingLogic' + +const meta: Meta = { + title: 'Scenes-Other/Onboarding', + parameters: { + layout: 'fullscreen', + viewMode: 'story', + mockDate: '2023-05-25', + }, + decorators: [ + mswDecorator({ + get: { + '/_preflight': { + ...preflightJson, + cloud: true, + realm: 'cloud', + }, + }, + }), + ], +} +export default meta +export const _OnboardingSDKs = (): JSX.Element => { + useStorybookMocks({ + get: { + '/api/billing-v2/': { + ...billingJson, + }, + }, + }) + useMountedLogic(onboardingLogic) + const { setProduct } = useActions(onboardingLogic) + + useEffect(() => { + const product: BillingProductV2Type = billingJson.products[1] as BillingProductV2Type + setProduct(product) + router.actions.push(urls.onboarding(ProductKey.SESSION_REPLAY) + '?step=sdks') + }, []) + return +} + +export const _OnboardingBilling = (): JSX.Element => { + useStorybookMocks({ + get: { + '/api/billing-v2/': { + ...billingUnsubscribedJson, + }, + }, + }) + useMountedLogic(onboardingLogic) + const { setProduct, setStepKey } = useActions(onboardingLogic) + + useEffect(() => { + const product: BillingProductV2Type = billingUnsubscribedJson.products[1] as BillingProductV2Type + setProduct(product) + router.actions.push(urls.onboarding(ProductKey.SESSION_REPLAY)) + setStepKey(OnboardingStepKey.BILLING) + }, []) + return +} + +export const _OnboardingOtherProducts = (): JSX.Element => { + useStorybookMocks({ + get: { + '/api/billing-v2/': { + ...billingJson, + }, + }, + }) + useMountedLogic(onboardingLogic) + const { setProduct, setStepKey } = useActions(onboardingLogic) + + useEffect(() => { + const product: BillingProductV2Type = billingJson.products[1] as 
BillingProductV2Type + setProduct(product) + router.actions.push(urls.onboarding(ProductKey.SESSION_REPLAY)) + setStepKey(OnboardingStepKey.OTHER_PRODUCTS) + }, []) + return +} diff --git a/frontend/src/scenes/onboarding/sdks/SDKs.tsx b/frontend/src/scenes/onboarding/sdks/SDKs.tsx index 3c51ba77b0a72..0627a11bac8ea 100644 --- a/frontend/src/scenes/onboarding/sdks/SDKs.tsx +++ b/frontend/src/scenes/onboarding/sdks/SDKs.tsx @@ -73,7 +73,14 @@ export function SDKs({ onClick={selectedSDK?.key !== sdk.key ? () => setSelectedSDK(sdk) : undefined} fullWidth icon={ - typeof sdk.image === 'string' ? : sdk.image + typeof sdk.image === 'string' ? ( + + ) : // storybook handles require() differently and returns an object, from which we can use the url in .default + typeof sdk.image === 'object' && 'default' in sdk.image ? ( + + ) : ( + sdk.image + ) } > {sdk.name} diff --git a/frontend/src/scenes/persons/PersonDeleteModal.tsx b/frontend/src/scenes/persons/PersonDeleteModal.tsx index d695c33850aac..06d85fbaec6cd 100644 --- a/frontend/src/scenes/persons/PersonDeleteModal.tsx +++ b/frontend/src/scenes/persons/PersonDeleteModal.tsx @@ -1,5 +1,6 @@ import { LemonButton, LemonModal, Link } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { personDeleteModalLogic } from 'scenes/persons/personDeleteModalLogic' import { PersonType } from '~/types' @@ -9,6 +10,7 @@ import { asDisplay } from './person-utils' export function PersonDeleteModal(): JSX.Element | null { const { personDeleteModal } = useValues(personDeleteModalLogic) const { deletePerson, showPersonDeleteModal } = useActions(personDeleteModalLogic) + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') return ( { deletePerson(personDeleteModal as PersonType, true) }} @@ -51,7 +53,7 @@ export function PersonDeleteModal(): JSX.Element | null { Cancel { deletePerson(personDeleteModal as PersonType, false) diff --git a/frontend/src/scenes/persons/personsLogic.tsx b/frontend/src/scenes/persons/personsLogic.tsx index e87d7f3d749f3..a8c68f528ea4b 100644 --- a/frontend/src/scenes/persons/personsLogic.tsx +++ b/frontend/src/scenes/persons/personsLogic.tsx @@ -79,7 +79,7 @@ export const personsLogic = kea([ ...(values.listFilters.properties || []), ...values.hiddenListProperties, ] - if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000]) { + if (values.featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test') { newFilters.include_total = true // The total count is slow, but needed for infinite loading } if (props.cohort) { diff --git a/frontend/src/scenes/pipeline/AppsManagement.tsx b/frontend/src/scenes/pipeline/AppsManagement.tsx index 5bcd34af347a4..890bac9fdbf37 100644 --- a/frontend/src/scenes/pipeline/AppsManagement.tsx +++ b/frontend/src/scenes/pipeline/AppsManagement.tsx @@ -1,6 +1,7 @@ import { LemonBanner, LemonDivider, LemonTable, Tooltip } from '@posthog/lemon-ui' import { Popconfirm } from 'antd' import { useActions, useValues } from 'kea' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { IconDelete, IconLock, IconLockOpen } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonInput } from 'lib/lemon-ui/LemonInput' @@ -74,6 +75,7 @@ type RenderAppsTable = { function AppsTable({ plugins }: RenderAppsTable): JSX.Element { const { unusedPlugins } = useValues(appsManagementLogic) const { uninstallPlugin, patchPlugin } = useActions(appsManagementLogic) + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') // TODO: 
row expansion to show the source code and allow updating source apps @@ -174,7 +176,7 @@ function AppsTable({ plugins }: RenderAppsTable): JSX.Element { className="Plugins__Popconfirm" > } diff --git a/frontend/src/scenes/plugins/tabs/apps/AppManagementView.tsx b/frontend/src/scenes/plugins/tabs/apps/AppManagementView.tsx index 4beb473cb45f8..cb31dc432ad49 100644 --- a/frontend/src/scenes/plugins/tabs/apps/AppManagementView.tsx +++ b/frontend/src/scenes/plugins/tabs/apps/AppManagementView.tsx @@ -1,6 +1,7 @@ import { LemonButton, Link } from '@posthog/lemon-ui' import { Popconfirm } from 'antd' import { useActions, useValues } from 'kea' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { IconCheckmark, IconCloudDownload, IconDelete, IconReplay, IconWeb } from 'lib/lemon-ui/icons' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { canGloballyManagePlugins } from 'scenes/plugins/access' @@ -19,6 +20,7 @@ export function AppManagementView({ plugin: PluginTypeWithConfig | PluginType | PluginRepositoryEntry }): JSX.Element { const { user } = useValues(userLogic) + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') if (!canGloballyManagePlugins(user?.organization)) { return <> @@ -69,7 +71,7 @@ export function AppManagementView({ className="Plugins__Popconfirm" > } diff --git a/frontend/src/scenes/products/Products.stories.tsx b/frontend/src/scenes/products/Products.stories.tsx new file mode 100644 index 0000000000000..cea22b96b3f97 --- /dev/null +++ b/frontend/src/scenes/products/Products.stories.tsx @@ -0,0 +1,44 @@ +import { Meta } from '@storybook/react' +import { router } from 'kea-router' +import { useEffect } from 'react' +import { App } from 'scenes/App' +import { urls } from 'scenes/urls' + +import { mswDecorator, useStorybookMocks } from '~/mocks/browser' +import billingJson from '~/mocks/fixtures/_billing_v2.json' +import preflightJson from '~/mocks/fixtures/_preflight.json' + +const meta: Meta = { + title: 'Scenes-Other/Products', + parameters: { + layout: 'fullscreen', + viewMode: 'story', + mockDate: '2023-05-25', + }, + decorators: [ + mswDecorator({ + get: { + '/_preflight': { + ...preflightJson, + cloud: true, + realm: 'cloud', + }, + }, + }), + ], +} +export default meta +export const _Products = (): JSX.Element => { + useStorybookMocks({ + get: { + '/api/billing-v2/': { + ...billingJson, + }, + }, + }) + + useEffect(() => { + router.actions.push(urls.products()) + }, []) + return +} diff --git a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx index 7c94b4e570f52..88672c5a5008a 100644 --- a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx +++ b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx @@ -35,7 +35,7 @@ export function ProjectHomepage(): JSX.Element { sceneDashboardChoiceModalLogic({ scene: Scene.ProjectHomepage }) ) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const headerButtons = ( <> diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index 239a089cbabbf..7d1cfb8cf913d 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -320,12 +320,13 @@ export function InsightIcon({ insight }: { insight: InsightModel }): JSX.Element export function NewInsightButton({ dataAttr }: NewInsightButtonProps): JSX.Element { const { featureFlags } = useValues(featureFlagLogic) - 
const overrides3000: Partial = featureFlags[FEATURE_FLAGS.POSTHOG_3000] - ? { - size: 'small', - icon: , - } - : {} + const overrides3000: Partial = + featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test' + ? { + size: 'small', + icon: , + } + : {} return ( predicate: (x: string) => boolean }): JSX.Element { + const gatheredProperties = gatherIconProperties(props.iconProperties) + return ( -
- - - {!props.fullScreen ? props.iconProperties['$browser'] : null} - - - - {!props.fullScreen - ? props.iconProperties['$device_type'] || props.iconProperties['$initial_device_type'] - : null} - - - - {!props.fullScreen ? props.iconProperties['$os'] : null} - - {props.iconProperties['$geoip_country_code'] && ( - - (props.fullScreen ? key === '$geoip_country_code' : key !== '$geoip_country_code')} + /> + ) +} + +function URLOrScreen({ lastUrl }: { lastUrl: string | undefined }): JSX.Element | null { + if (!lastUrl) { + return null + } + + // re-using the rrweb web schema means that this might be a mobile replay screen name + let isValidUrl = false + try { + new URL(lastUrl || '') + isValidUrl = true + } catch (_e) { + // no valid url + } + + return ( + + · + + {isValidUrl ? ( + + + {lastUrl} + + + ) : ( + lastUrl + )} + + - { - props.fullScreen && - [ - props.iconProperties['$geoip_city_name'], - props.iconProperties['$geoip_subdivision_1_code'], - ] - .filter(props.predicate) - .join(', ') /* [city, state] */ - } - )} -
+ + ) } @@ -224,25 +230,7 @@ export function PlayerMeta(): JSX.Element { - {lastUrl && ( - - · - - - - {lastUrl} - - - - - - - - )} + {lastPageviewEvent?.properties?.['$screen_name'] && ( · diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx index 1f4ae73477d30..31a4bff6525a6 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx @@ -37,7 +37,7 @@ export function PlayerInspectorControls(): JSX.Element { const { showOnlyMatching, timestampMode, miniFilters, syncScroll, searchQuery } = useValues(playerSettingsLogic) const { setShowOnlyMatching, setTimestampMode, setMiniFilter, setSyncScroll, setSearchQuery } = useActions(playerSettingsLogic) - const is3000 = useFeatureFlag('POSTHOG_3000') + const is3000 = useFeatureFlag('POSTHOG_3000', 'test') const mode = logicProps.mode ?? SessionRecordingPlayerMode.Standard diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.stories.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.stories.tsx index 9a7d1a03d1c5e..b9c6c76c40165 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.stories.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.stories.tsx @@ -27,10 +27,15 @@ WebRecording.args = { loading: false, onPropertyClick: () => {}, recordingProperties: [ - { value: 'Mac OS X', property: '$os', tooltipValue: 'Mac OS X' }, - { value: 'Chrome', property: '$browser', tooltipValue: 'Chrome' }, - { value: 'United States', property: '$geoip_country_code', tooltipValue: 'United States' }, - { value: 'Desktop', property: '$device_type', tooltipValue: 'Desktop' }, + { label: 'Mac OS X', value: 'Mac OS X', property: '$os', tooltipValue: 'Mac OS X' }, + { label: 'Chrome', value: 'Chrome', property: '$browser', tooltipValue: 'Chrome' }, + { + label: 'United States', + value: 'United States', + property: '$geoip_country_code', + tooltipValue: 'United States', + }, + { label: 'Desktop', value: 'Desktop', property: '$device_type', tooltipValue: 'Desktop' }, ], } @@ -40,10 +45,15 @@ AndroidRecording.args = { loading: false, onPropertyClick: () => {}, recordingProperties: [ - { value: 'Android', property: '$os_name', tooltipValue: 'Android' }, - { value: 'Awesome Fun App', property: '$app_name', tooltipValue: 'Awesome Fun App' }, - { value: 'United States', property: '$geoip_country_code', tooltipValue: 'United States' }, - { value: 'Mobile', property: '$device_type', tooltipValue: 'Mobile' }, + { label: 'Android', value: 'Android', property: '$os_name', tooltipValue: 'Android' }, + { label: 'Awesome Fun App', value: 'Awesome Fun App', property: '$app_name', tooltipValue: 'Awesome Fun App' }, + { + label: 'United States', + value: 'United States', + property: '$geoip_country_code', + tooltipValue: 'United States', + }, + { label: 'Mobile', value: 'Mobile', property: '$device_type', tooltipValue: 'Mobile' }, ], } diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx index 29729ea29d6bd..7c74657f483ba 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx @@ -58,26 
+58,28 @@ function RecordingDuration({ interface GatheredProperty { property: string value: string | undefined + label: string | undefined tooltipValue: string } const browserIconPropertyKeys = ['$geoip_country_code', '$browser', '$device_type', '$os'] const mobileIconPropertyKeys = ['$geoip_country_code', '$device_type', '$os_name'] -function gatherIconProperties( +export function gatherIconProperties( recordingProperties: Record | undefined, - recording: SessionRecordingType + recording?: SessionRecordingType ): GatheredProperty[] { const iconProperties = recordingProperties && Object.keys(recordingProperties).length > 0 ? recordingProperties - : recording.person?.properties || {} + : recording?.person?.properties || {} const deviceType = iconProperties['$device_type'] || iconProperties['$initial_device_type'] const iconPropertyKeys = deviceType === 'Mobile' ? mobileIconPropertyKeys : browserIconPropertyKeys - return iconPropertyKeys.map((property) => { + return iconPropertyKeys.flatMap((property) => { let value = iconProperties?.[property] + let label = value if (property === '$device_type') { value = iconProperties?.['$device_type'] || iconProperties?.['$initial_device_type'] } @@ -85,16 +87,21 @@ function gatherIconProperties( let tooltipValue = value if (property === '$geoip_country_code') { tooltipValue = `${iconProperties?.['$geoip_country_name']} (${value})` + label = [iconProperties?.['$geoip_city_name'], iconProperties?.['$geoip_subdivision_1_code']] + .filter(Boolean) + .join(', ') } - return { property, value, tooltipValue } + return { property, value, tooltipValue, label } }) } export interface PropertyIconsProps { recordingProperties: GatheredProperty[] - loading: boolean + loading?: boolean onPropertyClick?: (property: string, value?: string) => void - iconClassnames: string + iconClassnames?: string + showTooltip?: boolean + showLabel?: (key: string) => boolean } export function PropertyIcons({ @@ -102,35 +109,41 @@ export function PropertyIcons({ loading, onPropertyClick, iconClassnames, + showTooltip = true, + showLabel = undefined, }: PropertyIconsProps): JSX.Element { return ( -
- {!loading ? ( - recordingProperties.map(({ property, value, tooltipValue }) => { +
+ {loading ? ( + + ) : ( + recordingProperties.map(({ property, value, tooltipValue, label }) => { return ( - { - if (e.altKey) { - e.stopPropagation() - onPropertyClick?.(property, value) - } - }} - className={iconClassnames} - property={property} - value={value} - tooltipTitle={() => ( -
- Alt + Click to filter for -
- {tooltipValue ?? 'N/A'} -
- )} - /> + <> + { + if (e.altKey) { + e.stopPropagation() + onPropertyClick?.(property, value) + } + }} + className={iconClassnames} + property={property} + value={value} + noTooltip={!showTooltip} + tooltipTitle={() => ( +
+ Alt + Click to filter for +
+ {tooltipValue ?? 'N/A'} +
+ )} + /> + {showLabel?.(property) && {label || value}} + ) }) - ) : ( - )}
) diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx index 2eacd63fd34db..dc1ddfce992b9 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx @@ -335,7 +335,7 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr
{activeSessionRecordingId ? ( loadKeys(), []) @@ -121,8 +123,8 @@ function PersonalAPIKeysTable(): JSX.Element { return ( { LemonDialog.open({ title: `Permanently delete key "${key.label}"?`, diff --git a/frontend/src/scenes/web-analytics/WebDashboard.tsx b/frontend/src/scenes/web-analytics/WebDashboard.tsx index feca55ba5b8a6..f63905f29efde 100644 --- a/frontend/src/scenes/web-analytics/WebDashboard.tsx +++ b/frontend/src/scenes/web-analytics/WebDashboard.tsx @@ -26,7 +26,7 @@ const Filters = (): JSX.Element => { } = useValues(webAnalyticsLogic) const { setWebAnalyticsFilters, setDates } = useActions(webAnalyticsLogic) const { featureFlags } = useValues(featureFlagLogic) - const hasPosthog3000 = featureFlags[FEATURE_FLAGS.POSTHOG_3000] + const hasPosthog3000 = featureFlags[FEATURE_FLAGS.POSTHOG_3000] === 'test' return (
void }): JSX.Element => { const activeTab = tabs.find((t) => t.id === activeTabId) - const { containerRef, selectionRef, sliderWidth, sliderOffset, transitioning } = useSliderPositioning< - HTMLUListElement, - HTMLLIElement - >(activeTabId, TRANSITION_MS) return (
{

{activeTab?.title}

} -
- {tabs.length > 1 && ( - // TODO switch to a select if more than 3 -
    - {tabs.map(({ id, linkText }) => ( -
- -
- ))} -
- )} -
-
-
-
+ ({ key: id, label: linkText }))} + />
{activeTab?.content}
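Reviewer note on the WebTabs hunk above: the hand-rolled tab strip (and its `useSliderPositioning` bookkeeping) is swapped for the shared tabs component, which only needs plain `{ key, label }` items and draws its own active-tab indicator. A minimal sketch of the mapping this hunk introduces — everything beyond the `{ key: id, label: linkText }` shape visible in the diff is an assumption:

```ts
// Hypothetical local type, for illustration only.
interface WebTabDefinition {
    id: string
    linkText: string
}

// Project the dashboard's richer tab definitions down to the plain
// { key, label } items the shared tabs component expects.
function toTabItems(tabs: WebTabDefinition[]): { key: string; label: string }[] {
    return tabs.map(({ id, linkText }) => ({ key: id, label: linkText }))
}
```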
diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 71c748ba01325..309fb338084dd 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -92,15 +92,6 @@ export enum AvailableFeature { SURVEYS_MULTIPLE_QUESTIONS = 'surveys_multiple_questions', } -export type AvailableProductFeature = { - key: AvailableFeature - name: string - description?: string | null - limit?: number | null - note?: string | null - unit?: string | null -} - export enum ProductKey { COHORTS = 'cohorts', ACTIONS = 'actions', @@ -234,7 +225,7 @@ export interface OrganizationType extends OrganizationBasicType { plugins_access_level: PluginsAccessLevel teams: TeamBasicType[] | null available_features: AvailableFeature[] - available_product_features: AvailableProductFeature[] + available_product_features: BillingV2FeatureType[] is_member_join_email_enabled: boolean customer_id: string | null enforce_2fa: boolean | null @@ -1203,14 +1194,13 @@ export interface CurrentBillCycleType { current_period_end: number } -export interface BillingV2FeatureType { - key: string +export type BillingV2FeatureType = { + key: AvailableFeature name: string - description?: string - unit?: string - limit?: number - note?: string - group?: AvailableFeature + description?: string | null + limit?: number | null + note?: string | null + unit?: string | null } export interface BillingV2TierType { @@ -1225,36 +1215,30 @@ export interface BillingV2TierType { export interface BillingProductV2Type { type: string - usage_key: string + usage_key: string | null name: string description: string price_description?: string | null icon_key?: string | null image_url?: string | null docs_url: string | null - free_allocation?: number - subscribed: boolean + free_allocation?: number | null + subscribed: boolean | null tiers?: BillingV2TierType[] | null tiered: boolean current_usage?: number - projected_amount_usd?: string + projected_amount_usd?: string | null projected_usage?: number percentage_usage: number current_amount_usd_before_addons: string | null current_amount_usd: string | null usage_limit: number | null has_exceeded_limit: boolean - unit: string + unit: string | null unit_amount_usd: string | null plans: BillingV2PlanType[] contact_support: boolean inclusion_only: any - feature_groups: { - // deprecated, remove after removing the billing plans table - group: string - name: string - features: BillingV2FeatureType[] - }[] addons: BillingProductV2AddonType[] // addons-only: if this addon is included with the base product and not subscribed individually. for backwards compatibility. 
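Editor's note on the billing type changes above: widening the optional fields to `| null` means call sites can no longer treat `undefined` as the only "missing" signal, and `BillingV2FeatureType.key` is now the `AvailableFeature` enum rather than a free-form string. A hedged sketch of the kind of lookup consumers might write against the new shape — the helper itself is illustrative and not part of this diff:

```ts
import { AvailableFeature, BillingV2FeatureType } from '~/types'

// `limit?: number | null` means both a missing key and an explicit null can
// signal "no limit", so normalise the two cases together with `??`.
export function featureLimit(features: BillingV2FeatureType[], key: AvailableFeature): number | null {
    const feature = features.find((f) => f.key === key)
    return feature?.limit ?? null
}
```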
@@ -1283,7 +1267,7 @@ export interface BillingProductV2AddonType { projected_amount_usd: string | null plans: BillingV2PlanType[] usage_key: string - free_allocation?: number + free_allocation?: number | null percentage_usage?: number } export interface BillingV2Type { @@ -1314,16 +1298,14 @@ export interface BillingV2Type { } export interface BillingV2PlanType { - free_allocation?: number + free_allocation?: number | null features: BillingV2FeatureType[] - key: string name: string description: string is_free?: boolean - products: BillingProductV2Type[] plan_key?: string current_plan?: any - tiers?: BillingV2TierType[] + tiers?: BillingV2TierType[] | null included_if?: 'no_active_subscription' | 'has_subscription' | null initial_billing_limit?: number } @@ -3433,7 +3415,13 @@ export type SDK = { key: string recommended?: boolean tags: string[] - image: string | JSX.Element + image: + | string + | JSX.Element + // storybook handles require() differently, so we need to support both + | { + default: string + } docsLink: string } diff --git a/plugin-server/functional_tests/exports-v1.test.ts b/plugin-server/functional_tests/exports-v1.test.ts index bf2dedd865e05..37bed7e22b694 100644 --- a/plugin-server/functional_tests/exports-v1.test.ts +++ b/plugin-server/functional_tests/exports-v1.test.ts @@ -3,7 +3,6 @@ import { createServer, Server } from 'http' import { UUIDT } from '../src/utils/utils' import { capture, createAndReloadPluginConfig, createOrganization, createPlugin, createTeam } from './api' import { waitForExpect } from './expectations' -import { produce } from './kafka' let organizationId: string let server: Server @@ -43,10 +42,10 @@ test.concurrent(`exports: exporting events on ingestion`, async () => { plugin_type: 'source', is_global: false, source__index_ts: ` - export const exportEvents = async (events, { global, config }) => { + export const onEvent = async (event, { global, config }) => { await fetch( "http://localhost:${server.address()?.port}/${teamId}", - {method: "POST", body: JSON.stringify(events)} + {method: "POST", body: JSON.stringify(event)} ) } `, @@ -67,14 +66,11 @@ test.concurrent(`exports: exporting events on ingestion`, async () => { }, }) - // Then check that the exportEvents function was called + // Then check that the onEvent function was called await waitForExpect( () => { - const exportEvents = webHookCalledWith[`/${teamId}`] - expect(exportEvents.length).toBeGreaterThan(0) - const exportedEvents = exportEvents[0] - - expect(exportedEvents).toEqual([ + const onEvents = webHookCalledWith[`/${teamId}`] + expect(onEvents).toEqual([ expect.objectContaining({ distinct_id: distinctId, team_id: teamId, @@ -102,10 +98,10 @@ test.concurrent(`exports: exporting $autocapture events on ingestion`, async () plugin_type: 'source', is_global: false, source__index_ts: ` - export const exportEvents = async (events, { global, config }) => { + export const onEvent = async (event, { global, config }) => { await fetch( "http://localhost:${server.address()?.port}/${teamId}", - {method: "POST", body: JSON.stringify(events)} + {method: "POST", body: JSON.stringify(event)} ) } `, @@ -128,13 +124,11 @@ test.concurrent(`exports: exporting $autocapture events on ingestion`, async () }, }) - // Then check that the exportEvents function was called + // Then check that the onEvent function was called await waitForExpect( () => { - const exportEvents = webHookCalledWith[`/${teamId}`] - expect(exportEvents.length).toBeGreaterThan(0) - const exportedEvents = exportEvents[0] - 
expect(exportedEvents).toEqual([ + const onEvents = webHookCalledWith[`/${teamId}`] + expect(onEvents).toEqual([ expect.objectContaining({ distinct_id: distinctId, team_id: teamId, @@ -163,99 +157,3 @@ test.concurrent(`exports: exporting $autocapture events on ingestion`, async () 1_000 ) }) - -test.concurrent(`exports: historical exports`, async () => { - const teamId = await createTeam(organizationId) - const distinctId = new UUIDT().toString() - const uuid = new UUIDT().toString() - - const plugin = await createPlugin({ - organization_id: organizationId, - name: 'export plugin', - plugin_type: 'source', - is_global: false, - source__index_ts: ` - export const exportEvents = async (events, { global, config }) => { - await fetch( - "http://localhost:${server.address()?.port}/${teamId}", - {method: "POST", body: JSON.stringify(events)} - ) - } - `, - }) - const pluginConfig = await createAndReloadPluginConfig(teamId, plugin.id) - - // First let's capture an event and wait for it to be ingested so - // so we can check that the historical event is the same as the one - // passed to processEvent on initial ingestion. - await capture({ - teamId, - distinctId, - uuid, - event: '$autocapture', - properties: { - name: 'hehe', - uuid: new UUIDT().toString(), - $elements: [{ tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' }], - }, - }) - - // Then check that the exportEvents function was called - const [exportedEvent] = await waitForExpect( - () => { - const exportEvents = webHookCalledWith[`/${teamId}`] - expect(exportEvents.length).toBeGreaterThan(0) - return exportEvents[0] - }, - 60_000, - 1_000 - ) - - // NOTE: the frontend doesn't actually push to this queue but rather - // adds directly to PostgreSQL using the graphile-worker stored - // procedure `add_job`. I'd rather keep these tests graphile - // unaware. - await produce({ - topic: 'jobs', - message: Buffer.from( - JSON.stringify({ - type: 'Export historical events', - pluginConfigId: pluginConfig.id, - pluginConfigTeam: teamId, - payload: { - dateFrom: new Date(Date.now() - 60000).toISOString(), - dateTo: new Date(Date.now()).toISOString(), - }, - }) - ), - key: teamId.toString(), - }) - - // Then check that the exportEvents function was called with the - // same data that was used with the non-historical export, with the - // additions of details related to the historical export. - await waitForExpect( - () => { - const historicallyExportedEvents = webHookCalledWith[`/${teamId}`].filter((events) => - events.some((event) => event.properties['$$is_historical_export_event']) - ) - expect(historicallyExportedEvents.length).toBeGreaterThan(0) - - const historicallyExportedEvent = historicallyExportedEvents[0] - expect(historicallyExportedEvent).toEqual([ - expect.objectContaining({ - ...exportedEvent, - ip: '', // NOTE: for some reason this is "" when exported historically, but null otherwise. 
- properties: { - ...exportedEvent.properties, - $$is_historical_export_event: true, - $$historical_export_timestamp: expect.any(String), - $$historical_export_source_db: 'clickhouse', - }, - }), - ]) - }, - 60_000, - 1_000 - ) -}) diff --git a/plugin-server/package.json b/plugin-server/package.json index ace8141928fa8..9c81a46063c96 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -47,7 +47,6 @@ "@google-cloud/storage": "^5.8.5", "@maxmind/geoip2-node": "^3.4.0", "@posthog/clickhouse": "^1.7.0", - "@posthog/plugin-contrib": "^0.0.5", "@posthog/plugin-scaffold": "1.4.4", "@sentry/node": "^7.49.0", "@sentry/profiling-node": "^0.3.0", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index 27a6fc26af075..6cfe55d201995 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -46,9 +46,6 @@ dependencies: '@posthog/clickhouse': specifier: ^1.7.0 version: 1.7.0 - '@posthog/plugin-contrib': - specifier: ^0.0.5 - version: 0.0.5 '@posthog/plugin-scaffold': specifier: 1.4.4 version: 1.4.4 @@ -3115,10 +3112,6 @@ packages: engines: {node: '>=12'} dev: false - /@posthog/plugin-contrib@0.0.5: - resolution: {integrity: sha512-ic2JsfFUdLGF+fGYJPatWEB6gEFNoD89qz92FN1RE2QfLpr6YdyPNuMowzahya3hfC/jaLZ8QdPG/j5pSOgT7A==} - dev: false - /@posthog/plugin-scaffold@1.4.4: resolution: {integrity: sha512-3z1ENm1Ys5lEQil0H7TVOqHvD24+ydiZFk5hggpbHRx1iOxAK+Eu5qFyAROwPUcCo7NOYjmH2xL1C4B1vaHilg==} dependencies: diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index aa2ada4a10e49..434cb21eb2685 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -124,10 +124,6 @@ export function getDefaultConfig(): PluginsServerConfig { PLUGIN_SERVER_MODE: null, PLUGIN_LOAD_SEQUENTIALLY: false, KAFKAJS_LOG_LEVEL: 'WARN', - HISTORICAL_EXPORTS_ENABLED: true, - HISTORICAL_EXPORTS_MAX_RETRY_COUNT: 15, - HISTORICAL_EXPORTS_INITIAL_FETCH_TIME_WINDOW: 10 * 60 * 1000, - HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER: 1.5, APP_METRICS_GATHERED_FOR_ALL: isDevEnv() ? true : false, MAX_TEAM_ID_TO_BUFFER_ANONYMOUS_EVENTS_FOR: 0, USE_KAFKA_FOR_SCHEDULED_TASKS: true, diff --git a/plugin-server/src/main/graphile-worker/graphile-worker.ts b/plugin-server/src/main/graphile-worker/graphile-worker.ts index 041819a8fa86b..02a4d028f75a9 100644 --- a/plugin-server/src/main/graphile-worker/graphile-worker.ts +++ b/plugin-server/src/main/graphile-worker/graphile-worker.ts @@ -13,7 +13,6 @@ import { Pool } from 'pg' import { EnqueuedJob, Hub } from '../../types' import { instrument } from '../../utils/metrics' -import { runRetriableFunction } from '../../utils/retries' import { status } from '../../utils/status' import { createPostgresPool } from '../../utils/utils' import { graphileEnqueueJobCounter } from './metrics' @@ -60,12 +59,7 @@ export class GraphileWorker { await this.migrate() } - async enqueue( - jobName: string, - job: EnqueuedJob, - instrumentationContext?: InstrumentationContext, - retryOnFailure = false - ): Promise { + async enqueue(jobName: string, job: EnqueuedJob, instrumentationContext?: InstrumentationContext): Promise { const jobType = 'type' in job ? job.type : 'buffer' let jobPayload: Record = {} @@ -73,23 +67,7 @@ export class GraphileWorker { jobPayload = job.payload } - let enqueueFn = () => this._enqueue(jobName, job) - - // This branch will be removed once we implement a Kafka queue for all jobs - // as we've done for buffer events (see e.g. 
anonymous-event-buffer-consumer.ts) - if (retryOnFailure) { - enqueueFn = () => - runRetriableFunction({ - hub: this.hub, - metricName: `job_queues_enqueue_${jobName}`, - maxAttempts: 10, - retryBaseMs: 6000, - retryMultiplier: 2, - tryFn: async () => this._enqueue(jobName, job), - catchFn: () => status.error('🔴', 'Exhausted attempts to enqueue job.'), - payload: job, - }) - } + const enqueueFn = () => this._enqueue(jobName, job) await instrument( this.hub.statsd, diff --git a/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts b/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts index 66e5385baae60..80f4347bcb383 100644 --- a/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts +++ b/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts @@ -24,9 +24,7 @@ export const startAsyncOnEventHandlerConsumer = async ({ }) => { /* Consumes analytics events from the Kafka topic `clickhouse_events_json` - and processes any onEvent plugin handlers configured for the team. This - also includes `exportEvents` handlers defined in plugins as these are - also handled via modifying `onEvent` to call `exportEvents`. + and processes any onEvent plugin handlers configured for the team. At the moment this is just a wrapper around `IngestionConsumer`. We may want to further remove that abstraction in the future. @@ -61,9 +59,7 @@ export const startAsyncWebhooksHandlerConsumer = async ({ }) => { /* Consumes analytics events from the Kafka topic `clickhouse_events_json` - and processes any onEvent plugin handlers configured for the team. This - also includes `exportEvents` handlers defined in plugins as these are - also handled via modifying `onEvent` to call `exportEvents`. + and processes any onEvent plugin handlers configured for the team. At the moment this is just a wrapper around `IngestionConsumer`. We may want to further remove that abstraction in the future. diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index 786c4d7decc8b..b29ed86d68ba0 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -2,7 +2,6 @@ import { ReaderModel } from '@maxmind/geoip2-node' import ClickHouse from '@posthog/clickhouse' import { Element, - Meta, PluginAttachment, PluginConfigSchema, PluginEvent, @@ -32,7 +31,6 @@ import { TeamManager } from './worker/ingestion/team-manager' import { PluginsApiKeyManager } from './worker/vm/extensions/helpers/api-key-manager' import { RootAccessManager } from './worker/vm/extensions/helpers/root-acess-manager' import { LazyPluginVM } from './worker/vm/lazy' -import { PromiseManager } from './worker/vm/promise-manager' export { Element } from '@posthog/plugin-scaffold' // Re-export Element from scaffolding, for backwards compat. 
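Editor's note for plugin authors following the `exportEvents` removal: the functional-test changes earlier in this diff show the intended migration — a buffered, batched `exportEvents(events)` handler becomes a per-event `onEvent(event)` handler, with batching (where still needed) now the destination's concern rather than the plugin VM's. A rough sketch under that assumption; the endpoint URL is a placeholder:

```ts
import { ProcessedPluginEvent } from '@posthog/plugin-scaffold'

// Before: export const exportEvents = async (events) => { ...send the whole batch... }
// After: invoked once per ingested event.
export async function onEvent(event: ProcessedPluginEvent): Promise<void> {
    await fetch('https://example.com/ingest', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(event),
    })
}
```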
@@ -193,10 +191,6 @@ export interface PluginsServerConfig { PLUGIN_SERVER_MODE: PluginServerMode | null PLUGIN_LOAD_SEQUENTIALLY: boolean // could help with reducing memory usage spikes on startup KAFKAJS_LOG_LEVEL: 'NOTHING' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' - HISTORICAL_EXPORTS_ENABLED: boolean // enables historical exports for export apps - HISTORICAL_EXPORTS_MAX_RETRY_COUNT: number - HISTORICAL_EXPORTS_INITIAL_FETCH_TIME_WINDOW: number - HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER: number APP_METRICS_GATHERED_FOR_ALL: boolean // whether to gather app metrics for all teams MAX_TEAM_ID_TO_BUFFER_ANONYMOUS_EVENTS_FOR: number USE_KAFKA_FOR_SCHEDULED_TASKS: boolean // distribute scheduled tasks across the scheduler workers @@ -268,7 +262,6 @@ export interface Hub extends PluginsServerConfig { organizationManager: OrganizationManager pluginsApiKeyManager: PluginsApiKeyManager rootAccessManager: RootAccessManager - promiseManager: PromiseManager eventsProcessor: EventsProcessor appMetrics: AppMetrics // geoip database, setup in workers @@ -490,7 +483,6 @@ export type VMMethods = { teardownPlugin?: () => Promise getSettings?: () => PluginSettings onEvent?: (event: ProcessedPluginEvent) => Promise - exportEvents?: (events: PluginEvent[]) => Promise composeWebhook?: (event: PostHogEvent) => Webhook | null processEvent?: (event: PluginEvent) => Promise } @@ -526,12 +518,6 @@ export interface PluginConfigVMResponse { usedImports: Set } -export interface PluginConfigVMInternalResponse { - methods: VMMethods - tasks: Record> - meta: M -} - export interface EventUsage { event: string usage_count: number | null diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts index a2212628c0855..b161de1873fc9 100644 --- a/plugin-server/src/utils/db/hub.ts +++ b/plugin-server/src/utils/db/hub.ts @@ -33,7 +33,6 @@ import { status } from '../status' import { createRedisPool, UUIDT } from '../utils' import { PluginsApiKeyManager } from './../../worker/vm/extensions/helpers/api-key-manager' import { RootAccessManager } from './../../worker/vm/extensions/helpers/root-acess-manager' -import { PromiseManager } from './../../worker/vm/promise-manager' import { DB } from './db' import { KafkaProducerWrapper } from './kafka-producer-wrapper' import { PostgresRouter } from './postgres' @@ -135,8 +134,6 @@ export async function createHub( status.warn('🪣', `Object storage could not be created`) } - const promiseManager = new PromiseManager(serverConfig) - const db = new DB( postgres, redisPool, @@ -195,7 +192,6 @@ export async function createHub( organizationManager, pluginsApiKeyManager, rootAccessManager, - promiseManager, conversionBufferEnabledTeams, pluginConfigsToSkipElementsParsing: buildIntegerMatcher(process.env.SKIP_ELEMENTS_PARSING_PLUGINS, true), poeEmbraceJoinForTeams: buildIntegerMatcher(process.env.POE_EMBRACE_JOIN_FOR_TEAMS, true), diff --git a/plugin-server/src/utils/db/postgres.ts b/plugin-server/src/utils/db/postgres.ts index caf7d13dd201a..7e859a3bc225e 100644 --- a/plugin-server/src/utils/db/postgres.ts +++ b/plugin-server/src/utils/db/postgres.ts @@ -105,7 +105,7 @@ export class PostgresRouter { transaction: (client: TransactionClient) => Promise ): Promise { const wrappedTag = `${PostgresUse[usage]}:Tx<${tag}>` - return instrumentQuery(this.statsd, 'query.postgres_transation', wrappedTag, async () => { + return instrumentQuery(this.statsd, 'query.postgres_transaction', wrappedTag, async () => { const timeout = timeoutGuard(`Postgres slow transaction warning after 30 
sec!`) const client = await this.pools.get(usage)!.connect() try { diff --git a/plugin-server/src/utils/event.ts b/plugin-server/src/utils/event.ts index ca6ff219880c3..1b97c1baa7bf4 100644 --- a/plugin-server/src/utils/event.ts +++ b/plugin-server/src/utils/event.ts @@ -2,8 +2,7 @@ import { PluginEvent, PostHogEvent, ProcessedPluginEvent } from '@posthog/plugin import { DateTime } from 'luxon' import { Message } from 'node-rdkafka' -import { ClickHouseEvent, PipelineEvent, PostIngestionEvent, RawClickHouseEvent } from '../types' -import { convertDatabaseElementsToRawElements } from '../worker/vm/upgrades/utils/fetchEventsForInterval' +import { ClickHouseEvent, Element, PipelineEvent, PostIngestionEvent, RawClickHouseEvent } from '../types' import { chainToElements } from './db/elements-chain' import { personInitialAndUTMProperties } from './db/utils' import { @@ -12,6 +11,22 @@ import { clickHouseTimestampToISO, } from './utils' +interface RawElement extends Element { + $el_text?: string +} + +const convertDatabaseElementsToRawElements = (elements: RawElement[]): RawElement[] => { + for (const element of elements) { + if (element.attributes && element.attributes.attr__class) { + element.attr_class = element.attributes.attr__class + } + if (element.text) { + element.$el_text = element.text + } + } + return elements +} + export function convertToProcessedPluginEvent(event: PostIngestionEvent): ProcessedPluginEvent { return { distinct_id: event.distinctId, diff --git a/plugin-server/src/utils/retries.ts b/plugin-server/src/utils/retries.ts index fe7fe22f932a4..8107f5af0f075 100644 --- a/plugin-server/src/utils/retries.ts +++ b/plugin-server/src/utils/retries.ts @@ -1,9 +1,4 @@ -import { RetryError } from '@posthog/plugin-scaffold' - -import { runInTransaction } from '../sentry' -import { Hub } from '../types' import { status } from '../utils/status' -import { AppMetricIdentifier, ErrorWithContext } from '../worker/ingestion/app-metrics' import { sleep } from './utils' // Simple retries in our code @@ -39,116 +34,6 @@ export function getNextRetryMs(baseMs: number, multiplier: number, attempt: numb return baseMs * multiplier ** (attempt - 1) } -export interface RetriableFunctionDefinition { - payload: Record - tryFn: () => void | Promise - catchFn?: (error: Error | RetryError) => void | Promise - finallyFn?: (attempts: number) => void | Promise -} - -export interface RetryParams { - maxAttempts: number - retryBaseMs: number - retryMultiplier: number -} - -export interface MetricsDefinition { - metricName: string - appMetric?: AppMetricIdentifier - appMetricErrorContext?: Omit -} - -export type RetriableFunctionPayload = RetriableFunctionDefinition & - Partial & - MetricsDefinition & { hub: Hub } - -function iterateRetryLoop(retriableFunctionPayload: RetriableFunctionPayload, attempt = 1): Promise { - const { - metricName, - hub, - payload, - tryFn, - catchFn, - finallyFn, - maxAttempts = process.env.PLUGINS_RETRY_ATTEMPTS ? parseInt(process.env.PLUGINS_RETRY_ATTEMPTS) : 3, - retryBaseMs = 3000, - retryMultiplier = 2, - appMetric, - appMetricErrorContext, - } = retriableFunctionPayload - return runInTransaction( - { - name: 'retryLoop', - op: metricName, - description: '?', - data: { - metricName, - payload, - attempt, - }, - }, - async () => { - let nextIterationPromise: Promise | undefined - try { - await tryFn() - if (appMetric) { - await hub.appMetrics.queueMetric({ - ...appMetric, - successes: attempt == 1 ? 1 : 0, - successesOnRetry: attempt == 1 ? 
0 : 1, - }) - } - } catch (error) { - if (error instanceof RetryError) { - error._attempt = attempt - error._maxAttempts = maxAttempts - } - if (error instanceof RetryError && attempt < maxAttempts) { - const nextRetryMs = getNextRetryMs(retryBaseMs, retryMultiplier, attempt) - nextIterationPromise = new Promise((resolve, reject) => - setTimeout(() => { - // This is not awaited directly so that attempts beyond the first one don't stall the payload queue - iterateRetryLoop(retriableFunctionPayload, attempt + 1) - .then(resolve) - .catch(reject) - }, nextRetryMs) - ) - hub.promiseManager.trackPromise(nextIterationPromise, 'retries') - await hub.promiseManager.awaitPromisesIfNeeded() - } else { - await catchFn?.(error) - if (appMetric) { - await hub.appMetrics.queueError( - { - ...appMetric, - failures: 1, - }, - { - error, - ...appMetricErrorContext, - } - ) - } - } - } - if (!nextIterationPromise) { - await finallyFn?.(attempt) - } - } - ) -} - -/** Run function with `RetryError` handling. */ -export async function runRetriableFunction(retriableFunctionPayload: RetriableFunctionPayload): Promise { - const { finallyFn } = retriableFunctionPayload - await iterateRetryLoop({ - ...retriableFunctionPayload, - finallyFn: async (attempts) => { - await finallyFn?.(attempts) - }, - }) -} - /** * Retry a function, respecting `error.isRetriable`. */ diff --git a/plugin-server/src/worker/ingestion/app-metrics.ts b/plugin-server/src/worker/ingestion/app-metrics.ts index 36791e235b242..d8f52a7401150 100644 --- a/plugin-server/src/worker/ingestion/app-metrics.ts +++ b/plugin-server/src/worker/ingestion/app-metrics.ts @@ -15,7 +15,7 @@ export interface AppMetricIdentifier { pluginConfigId: number jobId?: string // Keep in sync with posthog/queries/app_metrics/serializers.py - category: 'processEvent' | 'onEvent' | 'exportEvents' | 'scheduledTask' | 'webhook' | 'composeWebhook' + category: 'processEvent' | 'onEvent' | 'scheduledTask' | 'webhook' | 'composeWebhook' } export interface AppMetric extends AppMetricIdentifier { diff --git a/plugin-server/src/worker/plugins/loadPluginsFromDB.ts b/plugin-server/src/worker/plugins/loadPluginsFromDB.ts index 282a20389882c..3f556b7e6b160 100644 --- a/plugin-server/src/worker/plugins/loadPluginsFromDB.ts +++ b/plugin-server/src/worker/plugins/loadPluginsFromDB.ts @@ -70,7 +70,7 @@ export async function loadPluginsFromDB( let method = undefined if (plugin.capabilities?.methods) { const methods = plugin.capabilities.methods - if (methods?.some((method) => [PluginMethod.onEvent.toString(), 'exportEvents'].includes(method))) { + if (methods?.some((method) => [PluginMethod.onEvent.toString()].includes(method))) { method = PluginMethod.onEvent } else if (methods?.some((method) => [PluginMethod.composeWebhook.toString()].includes(method))) { method = PluginMethod.composeWebhook diff --git a/plugin-server/src/worker/plugins/run.ts b/plugin-server/src/worker/plugins/run.ts index b9f4e45a26af0..d07e33dc67bd9 100644 --- a/plugin-server/src/worker/plugins/run.ts +++ b/plugin-server/src/worker/plugins/run.ts @@ -36,7 +36,7 @@ async function runSingleTeamPluginOnEvent( try { await onEvent!(event) pluginActionMsSummary - .labels(pluginConfig.id.toString(), 'onEvent', 'success') + .labels(pluginConfig.plugin?.id.toString() ?? 
'?', 'onEvent', 'success') .observe(new Date().getTime() - timer.getTime()) await hub.appMetrics.queueMetric({ teamId: event.team_id, @@ -47,7 +47,7 @@ async function runSingleTeamPluginOnEvent( } catch (error) { hub.statsd?.increment(`${metricName}.ERROR`, metricTags) pluginActionMsSummary - .labels(pluginConfig.id.toString(), 'onEvent', 'error') + .labels(pluginConfig.plugin?.id.toString() ?? '?', 'onEvent', 'error') .observe(new Date().getTime() - timer.getTime()) await processError(hub, pluginConfig, error, event) await hub.appMetrics.queueError( @@ -130,7 +130,7 @@ async function runSingleTeamPluginComposeWebhook( }) if (request.ok) { pluginActionMsSummary - .labels(pluginConfig.id.toString(), 'composeWebhook', 'success') + .labels(pluginConfig.plugin?.id.toString() ?? '?', 'composeWebhook', 'success') .observe(new Date().getTime() - timer.getTime()) await hub.appMetrics.queueMetric({ teamId: event.team_id, @@ -141,7 +141,7 @@ async function runSingleTeamPluginComposeWebhook( } else { hub.statsd?.increment(`${metricName}.ERROR`, metricTags) pluginActionMsSummary - .labels(pluginConfig.id.toString(), 'composeWebhook', 'error') + .labels(pluginConfig.plugin?.id.toString() ?? '?', 'composeWebhook', 'error') .observe(new Date().getTime() - timer.getTime()) const error = `Fetch to ${webhook.url} failed with ${request.statusText}` await processError(hub, pluginConfig, error, event) @@ -161,7 +161,7 @@ async function runSingleTeamPluginComposeWebhook( } catch (error) { hub.statsd?.increment(`${metricName}.ERROR`, metricTags) pluginActionMsSummary - .labels(pluginConfig.id.toString(), 'composeWebhook', 'error') + .labels(pluginConfig.plugin?.id.toString() ?? '?', 'composeWebhook', 'error') .observe(new Date().getTime() - timer.getTime()) await processError(hub, pluginConfig, error, event) await hub.appMetrics.queueError( @@ -239,7 +239,7 @@ export async function runProcessEvent(hub: Hub, event: PluginEvent): Promise 0 } if (serverCapability === 'processAsyncOnEventHandlers') { - return pluginCapabilities.methods?.some((method) => - ['onEvent', 'exportEvents', 'composeWebhook'].includes(method) - ) + return pluginCapabilities.methods?.some((method) => ['onEvent', 'composeWebhook'].includes(method)) } return false diff --git a/plugin-server/src/worker/vm/imports.ts b/plugin-server/src/worker/vm/imports.ts index a8c0ab00923d3..e3d5c0d506583 100644 --- a/plugin-server/src/worker/vm/imports.ts +++ b/plugin-server/src/worker/vm/imports.ts @@ -1,6 +1,5 @@ import * as pubsub from '@google-cloud/pubsub' import * as gcs from '@google-cloud/storage' -import * as contrib from '@posthog/plugin-contrib' import * as scaffold from '@posthog/plugin-scaffold' import * as AWS from 'aws-sdk' import crypto from 'crypto' @@ -21,7 +20,6 @@ export const AVAILABLE_IMPORTS = { : {}), '@google-cloud/pubsub': pubsub, '@google-cloud/storage': gcs, - '@posthog/plugin-contrib': contrib, '@posthog/plugin-scaffold': scaffold, 'aws-sdk': AWS, 'generic-pool': genericPool, diff --git a/plugin-server/src/worker/vm/lazy.ts b/plugin-server/src/worker/vm/lazy.ts index 85cfde7ccf64a..459edd0fcc3a9 100644 --- a/plugin-server/src/worker/vm/lazy.ts +++ b/plugin-server/src/worker/vm/lazy.ts @@ -68,10 +68,6 @@ export class LazyPluginVM { this.initVm() } - public async getExportEvents(): Promise { - return await this.getVmMethod('exportEvents') - } - public async getOnEvent(): Promise { return await this.getVmMethod('onEvent') } diff --git a/plugin-server/src/worker/vm/promise-manager.ts b/plugin-server/src/worker/vm/promise-manager.ts 
deleted file mode 100644 index d6825b9efb326..0000000000000 --- a/plugin-server/src/worker/vm/promise-manager.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { PluginsServerConfig } from '../../types' -import { status } from '../../utils/status' - -export class PromiseManager { - pendingPromises: Set> - config: PluginsServerConfig - - constructor(config: PluginsServerConfig) { - this.pendingPromises = new Set() - this.config = config - } - - public trackPromise(promise: Promise, key: string): void { - if (typeof promise === 'undefined') { - return - } - - status.info('🤝', `Tracking promise ${key} count = ${this.pendingPromises.size}`) - this.pendingPromises.add(promise) - - promise.finally(() => { - this.pendingPromises.delete(promise) - }) - status.info('✅', `Tracking promise finished ${key}`) - } - - public async awaitPromisesIfNeeded(): Promise { - const startTime = performance.now() - while (this.pendingPromises.size > this.config.MAX_PENDING_PROMISES_PER_WORKER) { - status.info('🤝', `looping in awaitPromise since ${startTime} count = ${this.pendingPromises.size}`) - await Promise.race(this.pendingPromises) - } - status.info('🕐', `Finished awaiting promises ${performance.now() - startTime}`) - } -} diff --git a/plugin-server/src/worker/vm/upgrades/export-events.ts b/plugin-server/src/worker/vm/upgrades/export-events.ts deleted file mode 100644 index c41d417362963..0000000000000 --- a/plugin-server/src/worker/vm/upgrades/export-events.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { Plugin, PluginEvent, PluginMeta, ProcessedPluginEvent } from '@posthog/plugin-scaffold' -import { Counter } from 'prom-client' - -import { Hub, PluginConfig, PluginConfigVMInternalResponse, PluginTaskType } from '../../../types' -import { isTestEnv } from '../../../utils/env-utils' -import { stringClamp } from '../../../utils/utils' -import { ExportEventsBuffer } from './utils/export-events-buffer' - -export const MAXIMUM_RETRIES = 3 -const EXPORT_BUFFER_BYTES_MINIMUM = 1 -const EXPORT_BUFFER_BYTES_DEFAULT = 900 * 1024 // 900 KiB -const EXPORT_BUFFER_BYTES_MAXIMUM = 100 * 1024 * 1024 -const EXPORT_BUFFER_SECONDS_MINIMUM = 1 -const EXPORT_BUFFER_SECONDS_MAXIMUM = 600 -const EXPORT_BUFFER_SECONDS_DEFAULT = isTestEnv() ? 0 : 10 - -export const appRetriesCounter = new Counter({ - name: 'export_app_retries', - help: 'Count of events retries processing onEvent apps, by team and plugin.', - labelNames: ['team_id', 'plugin_id'], -}) - -type ExportEventsUpgrade = Plugin<{ - global: { - exportEventsBuffer: ExportEventsBuffer - exportEventsToIgnore: Set - exportEventsWithRetry: (payload: ExportEventsJobPayload, meta: PluginMeta) => Promise - } - config: { - exportEventsBufferBytes: string - exportEventsBufferSeconds: string - exportEventsToIgnore: string - } - jobs: { - exportEventsWithRetry: ExportEventsJobPayload - } -}> - -interface ExportEventsJobPayload extends Record { - batch: PluginEvent[] - batchId: number - retriesPerformedSoFar: number -} - -/** - * Inject export abstraction code into plugin VM if it has method `exportEvents`: - * - add `global`/`config`/`jobs` stuff specified in the `ExportEventsUpgrade` type above, - * - patch `onEvent` with code to add the event to a buffer. 
- */ -export function upgradeExportEvents( - hub: Hub, - pluginConfig: PluginConfig, - response: PluginConfigVMInternalResponse> -): void { - const { methods, tasks, meta } = response - - const uploadBytes = stringClamp( - meta.config.exportEventsBufferBytes, - EXPORT_BUFFER_BYTES_DEFAULT, - EXPORT_BUFFER_BYTES_MINIMUM, - EXPORT_BUFFER_BYTES_MAXIMUM - ) - const uploadSeconds = stringClamp( - meta.config.exportEventsBufferSeconds, - EXPORT_BUFFER_SECONDS_DEFAULT, - EXPORT_BUFFER_SECONDS_MINIMUM, - EXPORT_BUFFER_SECONDS_MAXIMUM - ) - - meta.global.exportEventsToIgnore = new Set( - meta.config.exportEventsToIgnore - ? meta.config.exportEventsToIgnore.split(',').map((event: string) => event.trim()) - : null - ) - - meta.global.exportEventsBuffer = new ExportEventsBuffer(hub, pluginConfig, { - limit: uploadBytes, - timeoutSeconds: uploadSeconds, - onFlush: async (batch) => { - const jobPayload = { - batch, - batchId: Math.floor(Math.random() * 1000000), - retriesPerformedSoFar: 0, - } - // Running the first export code directly, without a job in between - await meta.global.exportEventsWithRetry(jobPayload, meta) - }, - }) - - meta.global.exportEventsWithRetry = async ( - payload: ExportEventsJobPayload, - // eslint-disable-next-line @typescript-eslint/no-unused-vars - meta: PluginMeta - ) => { - try { - await methods.exportEvents?.(payload.batch) - await hub.appMetrics.queueMetric({ - teamId: pluginConfig.team_id, - pluginConfigId: pluginConfig.id, - category: 'exportEvents', - successes: payload.batch.length, - }) - } catch (err) { - // We've disabled all retries as we move exportEvents to a new system - await hub.appMetrics.queueError( - { - teamId: pluginConfig.team_id, - pluginConfigId: pluginConfig.id, - category: 'exportEvents', - failures: payload.batch.length, - }, - { - error: err, - eventCount: payload.batch.length, - } - ) - } - } - - tasks.job['exportEventsWithRetry'] = { - name: 'exportEventsWithRetry', - type: PluginTaskType.Job, - exec: (payload) => meta.global.exportEventsWithRetry(payload as ExportEventsJobPayload, meta), - } - - const oldOnEvent = methods.onEvent - methods.onEvent = async (event: ProcessedPluginEvent) => { - if (!meta.global.exportEventsToIgnore.has(event.event)) { - await meta.global.exportEventsBuffer.add(event, JSON.stringify(event).length) - } - await oldOnEvent?.(event) - } - - const oldTeardownPlugin = methods.teardownPlugin - methods.teardownPlugin = async () => { - await Promise.all([meta.global.exportEventsBuffer.flush(), oldTeardownPlugin?.()]) - } -} diff --git a/plugin-server/src/worker/vm/upgrades/historical-export/export-historical-events-v2.ts b/plugin-server/src/worker/vm/upgrades/historical-export/export-historical-events-v2.ts deleted file mode 100644 index 2a5cc2816cf2b..0000000000000 --- a/plugin-server/src/worker/vm/upgrades/historical-export/export-historical-events-v2.ts +++ /dev/null @@ -1,772 +0,0 @@ -/* -Historical exports (v2) work the following way: - -- User triggers a `Export historical events V2` job from the UI. - This saves the time range as the running export with parallelism options. -- `runEveryMinute` acts as a coordinator: It takes the time range job runs on, splits it into chunks, - ensures that enough pieces are running, reports progress and finalizes the export. - - If a certain running chunk hasn't reported progress in a while, it is also restarted. 
-- `exportHistoricalEvents` job is responsible for exporting data between particular start and end points (chunk) - - It tracks its progress under `statusKey` - - It dynamically resizes the time window we fetch data to minimize jobs that need to be scheduled and clickhouse queries - - It calls plugins `exportEvents` with each batch of events it finds - - It handles retries by retrying RetryErrors up to 15 times - -Error handling: -- Failing to fetch events from clickhouse stops the export outright -- For every batch of events fetched, `exportEvents` RetryError is retried up to 15 times -- Unknown errors raised by `exportEvents` cause export to fail -- We periodically check whether a running chunk has made progress. If not, the chunk is restarted - -Note: -- parallelism is only settable by superusers to avoid abuse. -- Double-processing might be possible if a task is queued in graphile worker for a long time -*/ - -import { Plugin, PluginEvent, PluginMeta, RetryError } from '@posthog/plugin-scaffold' -import * as Sentry from '@sentry/node' -import { DateTime } from 'luxon' - -import { - Hub, - ISOTimestamp, - JobSpec, - PluginConfig, - PluginConfigVMInternalResponse, - PluginLogEntry, - PluginLogEntrySource, - PluginLogEntryType, - PluginTask, - PluginTaskType, -} from '../../../../types' -import { createPluginActivityLog } from '../../../../utils/db/activity-log' -import { processError } from '../../../../utils/db/error' -import { isTestEnv } from '../../../../utils/env-utils' -import { status } from '../../../../utils/status' -import { fetchEventsForInterval } from '../utils/fetchEventsForInterval' - -const TEN_MINUTES = 1000 * 60 * 10 -const TWELVE_HOURS = 1000 * 60 * 60 * 12 -export const EVENTS_PER_RUN_SMALL = 500 -export const EVENTS_PER_RUN_BIG = 10000 - -export const EXPORT_PARAMETERS_KEY = 'EXPORT_PARAMETERS' -export const EXPORT_COORDINATION_KEY = 'EXPORT_COORDINATION' - -export const INTERFACE_JOB_NAME = 'Export historical events V2' - -export const JOB_SPEC: JobSpec = { - payload: { - dateRange: { - title: 'Export date range', - type: 'daterange', - required: true, - }, - parallelism: { - title: 'Parallelism', - type: 'number', - default: 1, - staff_only: true, - }, - }, -} - -export interface TestFunctions { - exportHistoricalEvents: (payload: ExportHistoricalEventsJobPayload) => Promise - getTimestampBoundaries: (payload: ExportHistoricalEventsUIPayload) => [ISOTimestamp, ISOTimestamp] - nextCursor: (payload: ExportHistoricalEventsJobPayload, eventCount: number) => OffsetParams - coordinateHistoricalExport: (update?: CoordinationUpdate) => Promise - calculateCoordination: ( - params: ExportParams, - done: Array, - running: Array - ) => Promise - getExportDateRange: (params: ExportParams) => Array<[ISOTimestamp, ISOTimestamp]> - progressBar: (progress: number, length?: number) => string - stopExport: (params: ExportParams, message: string, status: 'success' | 'fail') => Promise - shouldResume: (status: ExportChunkStatus, now: number) => void -} - -export type ExportHistoricalEventsUpgradeV2 = Plugin<{ - global: { - _testFunctions: TestFunctions - } -}> - -export interface ExportHistoricalEventsJobPayload { - // Current cursor to what's being exported - timestampCursor: number - - // The lower and upper bound of the timestamp interval to be processed - startTime: number - endTime: number - - // The offset *within* a given timestamp interval - offset: number - - // how many retries a payload has had (max = 15) - retriesPerformedSoFar: number - - // used for ensuring only one 
"export task" is running if the server restarts - exportId: string | number - - // Time frame to fetch events for. - fetchTimeInterval: number - - // Key to report export status to - statusKey: string -} - -type OffsetParams = Pick - -export interface ExportHistoricalEventsUIPayload { - dateRange: [string, string] - parallelism?: number - // API-generated token - $job_id?: string -} - -export interface ExportParams { - id: string | number - parallelism: number - dateFrom: ISOTimestamp - dateTo: ISOTimestamp - abortMessage?: string -} - -interface CoordinationPayload { - running?: Array - done?: Array - progress?: number -} - -interface CoordinationUpdate { - hasChanges: boolean - done: Array - running: Array - toStartRunning: Array<[ISOTimestamp, ISOTimestamp]> - toResume: Array - progress: number - exportIsDone: boolean -} - -export interface ExportChunkStatus extends ExportHistoricalEventsJobPayload { - done: boolean - progress: number - // When was this status recorded - statusTime: number -} - -export function addHistoricalEventsExportCapabilityV2( - hub: Hub, - pluginConfig: PluginConfig, - response: PluginConfigVMInternalResponse> -) { - const { methods, tasks, meta } = response - - const currentPublicJobs = pluginConfig.plugin?.public_jobs || {} - - // Set the number of events to fetch per chunk, defaulting to 500 unless - // the plugin indicates bigger batches are preferable (notably plugins writing - // to blob storage with a fixed cost per batch), in which case we use 10000. - // - // It also has the other benefit of using fewer requests to ClickHouse. In - // its current implementation the querying logic for pulling pages of - // events from ClickHouse will read a much larger amount of data from disk - // than is required, due to us trying to order the dataset by `timestamp` - // and this not being included in the `sharded_events` table sort key. - let eventsPerRun = EVENTS_PER_RUN_SMALL - if (methods.getSettings && methods.getSettings()?.handlesLargeBatches) { - eventsPerRun = EVENTS_PER_RUN_BIG - } - - // If public job hasn't been registered or has changed, update it! - if ( - Object.keys(currentPublicJobs[INTERFACE_JOB_NAME]?.payload || {}).length != - Object.keys(JOB_SPEC.payload!).length - ) { - hub.promiseManager.trackPromise( - hub.db.addOrUpdatePublicJob(pluginConfig.plugin_id, INTERFACE_JOB_NAME, JOB_SPEC), - 'exports v2 addOrUpdatePublicJob' - ) - } - const oldRunEveryMinute = tasks.schedule.runEveryMinute - - tasks.job[INTERFACE_JOB_NAME] = { - name: INTERFACE_JOB_NAME, - type: PluginTaskType.Job, - exec: async (payload: ExportHistoricalEventsUIPayload) => { - const id = payload.$job_id || String(Math.floor(Math.random() * 10000 + 1)) - const parallelism = Number(payload.parallelism ?? 1) - const [dateFrom, dateTo] = getTimestampBoundaries(payload) - const params: ExportParams = { - id, - parallelism, - dateFrom, - dateTo, - } - - // only let one export run at a time - const alreadyRunningExport = await getExportParameters() - if (!!alreadyRunningExport) { - await stopExport(params, 'Export already running, not starting another.', 'fail', { keepEntry: true }) - return - } - - // Clear old (conflicting) storage - await meta.storage.del(EXPORT_COORDINATION_KEY) - await meta.storage.set(EXPORT_PARAMETERS_KEY, params) - - createLog(`Starting export ${dateFrom} - ${dateTo}. 
id=${id}, parallelism=${parallelism}`, { - type: PluginLogEntryType.Info, - }) - - await coordinateHistoricalExport() - }, - } as unknown as PluginTask // :KLUDGE: Work around typing limitations - - tasks.job['exportHistoricalEventsV2'] = { - name: 'exportHistoricalEventsV2', - type: PluginTaskType.Job, - exec: (payload) => exportHistoricalEvents(payload as ExportHistoricalEventsJobPayload), - } - - tasks.schedule.runEveryMinute = { - name: 'runEveryMinute', - type: PluginTaskType.Schedule, - exec: async () => { - await oldRunEveryMinute?.exec?.() - await coordinateHistoricalExport() - }, - // :TRICKY: We don't want to track app metrics for runEveryMinute for historical exports _unless_ plugin also has `runEveryMinute` - __ignoreForAppMetrics: !oldRunEveryMinute || !!oldRunEveryMinute.__ignoreForAppMetrics, - } - - async function coordinateHistoricalExport(update?: CoordinationUpdate) { - const params = await getExportParameters() - - if (!params) { - // No export running! - return - } - - if (params.abortMessage) { - // For manually triggering the export to abort - await stopExport(params, `Export aborted: ${params.abortMessage}`, 'fail') - return - } - - const { done, running } = (await meta.storage.get(EXPORT_COORDINATION_KEY, {})) as CoordinationPayload - update = update || (await calculateCoordination(params, done || [], running || [])) - - createLog(`Export progress: ${progressBar(update.progress)} (${Math.round(1000 * update.progress) / 10}%)`, { - type: PluginLogEntryType.Info, - }) - - if (update.exportIsDone) { - await stopExport(params, 'Export has finished! 💯', 'success') - return - } - - if (update.hasChanges) { - await Promise.all( - update.toStartRunning.map(async ([startDate, endDate]) => { - createLog(`Starting job to export ${startDate} to ${endDate}`, { type: PluginLogEntryType.Debug }) - - const payload: ExportHistoricalEventsJobPayload = { - timestampCursor: new Date(startDate).getTime(), - startTime: new Date(startDate).getTime(), - endTime: new Date(endDate).getTime(), - offset: 0, - retriesPerformedSoFar: 0, - exportId: params.id, - fetchTimeInterval: hub.HISTORICAL_EXPORTS_INITIAL_FETCH_TIME_WINDOW, - statusKey: `EXPORT_DATE_STATUS_${startDate}`, - } - await startChunk(payload, 0) - }) - ) - - await Promise.all( - update.toResume.map(async (payload: ExportChunkStatus) => { - createLog( - `Export chunk from ${dateRange( - payload.startTime, - payload.endTime - )} seems inactive, restarting!`, - { type: PluginLogEntryType.Debug } - ) - await startChunk(payload, payload.progress) - }) - ) - } - - await meta.storage.set(EXPORT_COORDINATION_KEY, { - done: update.done, - running: update.running, - progress: update.progress, - }) - } - - async function calculateCoordination( - params: ExportParams, - done: Array, - running: Array - ): Promise { - const now = Date.now() - const allDates = getExportDateRange(params) - - let hasChanges = false - const doneDates = new Set(done) - const runningDates = new Set(running) - const progressPerDay = 1.0 / allDates.length - - let progress = progressPerDay * done.length - const toResume: Array = [] - - for (const date of running || []) { - const dateStatus = (await meta.storage.get(`EXPORT_DATE_STATUS_${date}`, null)) as ExportChunkStatus | null - - if (dateStatus?.done) { - hasChanges = true - doneDates.add(date) - runningDates.delete(date) - progress += progressPerDay - continue - } else { - progress += progressPerDay * (dateStatus?.progress ?? 
0) - } - - if (dateStatus && shouldResume(dateStatus, now)) { - // :TODO: Temporary debugging code - createLog(`toResume found: now=${now}, dateStatus=${JSON.stringify(dateStatus)}`, { - type: PluginLogEntryType.Debug, - }) - hasChanges = true - toResume.push(dateStatus) - } - } - - const toStartRunning: Array<[ISOTimestamp, ISOTimestamp]> = [] - - if (runningDates.size < params.parallelism && doneDates.size + runningDates.size < allDates.length) { - for (const [startDate, endDate] of allDates) { - if (!doneDates.has(startDate) && !runningDates.has(startDate)) { - runningDates.add(startDate) - toStartRunning.push([startDate, endDate]) - hasChanges = true - - if (runningDates.size === params.parallelism) { - break - } - } - } - } - - return { - hasChanges, - done: Array.from(doneDates.values()), - running: Array.from(runningDates.values()), - toStartRunning, - toResume, - progress, - exportIsDone: doneDates.size === allDates.length, - } - } - - async function startChunk(payload: ExportHistoricalEventsJobPayload, progress: number): Promise { - // Save for detecting retries - await meta.storage.set(payload.statusKey, { - ...payload, - done: false, - progress, - statusTime: Date.now(), - } as ExportChunkStatus) - - // Start the job - await meta.jobs.exportHistoricalEventsV2(payload).runNow() - } - - async function exportHistoricalEvents(payload: ExportHistoricalEventsJobPayload): Promise { - status.info('ℹ️', 'Running export historical events', { - pluginConfigId: pluginConfig.id, - payload, - }) - - const activeExportParameters = await getExportParameters() - if (activeExportParameters?.id != payload.exportId) { - // This export has finished or has been stopped - return - } - - if (activeExportParameters.abortMessage) { - // For manually triggering the export to abort - createLog(`Export manually aborted ${activeExportParameters.abortMessage}`, { - type: PluginLogEntryType.Info, - }) - return - } - - if (payload.timestampCursor >= payload.endTime) { - createLog(`Finished exporting chunk from ${dateRange(payload.startTime, payload.endTime)}`, { - type: PluginLogEntryType.Debug, - }) - await meta.storage.set(payload.statusKey, { - ...payload, - done: true, - progress: 1, - statusTime: Date.now(), - } as ExportChunkStatus) - - return - } - - const progress = (payload.timestampCursor - payload.startTime) / (payload.endTime - payload.startTime) - - await meta.storage.set(payload.statusKey, { - ...payload, - done: false, - progress: progress, - statusTime: Date.now(), - } as ExportChunkStatus) - - let events: PluginEvent[] = [] - - try { - events = await fetchEventsForInterval( - hub.db, - pluginConfig.team_id, - new Date(payload.timestampCursor), - payload.offset, - payload.fetchTimeInterval, - eventsPerRun - ) - } catch (error) { - Sentry.captureException(error, { tags: { team_id: pluginConfig.team_id } }) - - await handleFetchError(error, activeExportParameters, payload) - return - } - - // We bump the statusTime every minute to let the coordinator know we are still - // alive and we don't need to be resumed. 
- const interval = setInterval(async () => { - const now = Date.now() - createLog(`Still running, updating ${payload.statusKey} statusTime for plugin ${pluginConfig.id} to ${now}`) - await meta.storage.set(payload.statusKey, { - ...payload, - done: false, - progress: progress, - statusTime: now, - } as ExportChunkStatus) - }, 60 * 1000) - - if (events.length > 0) { - try { - await methods.exportEvents!(events) - - createLog( - `Successfully processed events ${payload.offset}-${payload.offset + events.length} from ${dateRange( - payload.timestampCursor, - payload.timestampCursor + payload.fetchTimeInterval - )}.`, - { type: PluginLogEntryType.Debug } - ) - await hub.appMetrics.queueMetric({ - teamId: pluginConfig.team_id, - pluginConfigId: pluginConfig.id, - jobId: payload.exportId.toString(), - category: 'exportEvents', - successes: payload.retriesPerformedSoFar == 0 ? events.length : 0, - successesOnRetry: payload.retriesPerformedSoFar == 0 ? 0 : events.length, - }) - } catch (error) { - clearInterval(interval) - - await handleExportError(error, activeExportParameters, payload, events.length) - return - } - } - - clearInterval(interval) - - const { timestampCursor, fetchTimeInterval, offset } = nextCursor(payload, events.length) - - await meta.jobs - .exportHistoricalEventsV2({ - ...payload, - retriesPerformedSoFar: 0, - timestampCursor, - offset, - fetchTimeInterval, - } as ExportHistoricalEventsJobPayload) - .runIn(1, 'seconds') - } - - async function handleExportError( - error: Error, - params: ExportParams, - payload: ExportHistoricalEventsJobPayload, - eventCount: number - ): Promise<void> { - if (error instanceof RetryError && payload.retriesPerformedSoFar + 1 < hub.HISTORICAL_EXPORTS_MAX_RETRY_COUNT) { - const nextRetrySeconds = retryDelaySeconds(payload.retriesPerformedSoFar) - - createLog( - `Failed processing events ${payload.offset}-${payload.offset + eventCount} from ${dateRange( - payload.timestampCursor, - payload.timestampCursor + payload.fetchTimeInterval - )}. Retrying in ${nextRetrySeconds}s`, - { - type: PluginLogEntryType.Warn, - } - ) - - await meta.jobs - .exportHistoricalEventsV2({ - ...payload, - retriesPerformedSoFar: payload.retriesPerformedSoFar + 1, - } as ExportHistoricalEventsJobPayload) - .runIn(nextRetrySeconds, 'seconds') - } else { - if (error instanceof RetryError) { - const message = `Exporting chunk ${dateRange(payload.startTime, payload.endTime)} failed after ${ - hub.HISTORICAL_EXPORTS_MAX_RETRY_COUNT - } retries. Stopping export.` - await stopExport(params, message, 'fail') - await processError(hub, pluginConfig, message) - } else { - await stopExport(params, `exportEvents returned unknown error, stopping export. error=${error}`, 'fail') - await processError(hub, pluginConfig, error) - } - await hub.appMetrics.queueError( - { - teamId: pluginConfig.team_id, - pluginConfigId: pluginConfig.id, - jobId: payload.exportId.toString(), - category: 'exportEvents', - failures: eventCount, - }, - { - error, - eventCount, - } - ) - } - } - - async function handleFetchError( - error: Error, - params: ExportParams, - payload: ExportHistoricalEventsJobPayload - ): Promise<void> { - if (error instanceof RetryError && payload.retriesPerformedSoFar + 1 < hub.HISTORICAL_EXPORTS_MAX_RETRY_COUNT) { - const nextRetrySeconds = retryDelaySeconds(payload.retriesPerformedSoFar) - - createLog( - `Failed to fetch events from ${dateRange( - payload.timestampCursor, - payload.timestampCursor + payload.fetchTimeInterval - )}.
Retrying in ${nextRetrySeconds}s`, - { - type: PluginLogEntryType.Warn, - } - ) - - await meta.jobs - .exportHistoricalEventsV2({ - ...payload, - retriesPerformedSoFar: payload.retriesPerformedSoFar + 1, - } as ExportHistoricalEventsJobPayload) - .runIn(nextRetrySeconds, 'seconds') - } else { - if (error instanceof RetryError) { - const message = `Fetching chunk ${dateRange(payload.startTime, payload.endTime)} failed after ${ - hub.HISTORICAL_EXPORTS_MAX_RETRY_COUNT - } retries. Stopping export.` - await stopExport(params, message, 'fail') - await processError(hub, pluginConfig, message) - } else { - await processError(hub, pluginConfig, error) - await stopExport(params, 'Failed fetching events. Stopping export - please try again later.', 'fail') - } - await hub.appMetrics.queueError( - { - teamId: pluginConfig.team_id, - pluginConfigId: pluginConfig.id, - jobId: payload.exportId.toString(), - category: 'exportEvents', - failures: 1, - }, - { - error, - eventCount: 1, - } - ) - } - } - - async function stopExport( - params: ExportParams, - message: string, - status: 'success' | 'fail', - options: { keepEntry?: boolean } = {} - ) { - if (!options.keepEntry) { - await meta.storage.del(EXPORT_PARAMETERS_KEY) - } - - const payload = status == 'success' ? params : { ...params, failure_reason: message } - await createPluginActivityLog( - hub, - pluginConfig.team_id, - pluginConfig.id, - status === 'success' ? 'export_success' : 'export_fail', - { - trigger: { - job_id: params.id.toString(), - job_type: INTERFACE_JOB_NAME, - payload, - }, - } - ) - - createLog(message, { - type: status === 'success' ? PluginLogEntryType.Info : PluginLogEntryType.Error, - }) - } - - function getTimestampBoundaries(payload: ExportHistoricalEventsUIPayload): [ISOTimestamp, ISOTimestamp] { - const min = DateTime.fromISO(payload.dateRange[0], { zone: 'UTC' }) - // :TRICKY: UI shows the end date to be inclusive - const max = DateTime.fromISO(payload.dateRange[1], { zone: 'UTC' }).plus({ days: 1 }) - - if (!min.isValid || !max.isValid) { - createLog(`'dateRange' should be two dates in ISO string format.`, { - type: PluginLogEntryType.Error, - }) - throw new Error(`'dateRange' should be two dates in ISO string format.`) - } - return [min.toISO(), max.toISO()] as [ISOTimestamp, ISOTimestamp] - } - - function retryDelaySeconds(retriesPerformedSoFar: number): number { - return 2 ** retriesPerformedSoFar * 3 - } - - function shouldResume(status: ExportChunkStatus, now: number): boolean { - // When an export hasn't updated in 10 minutes plus whatever time is spent on retries, it's likely already timed out or died - // Note that status updates happen every time the export makes _any_ progress - // NOTE from the future: we discovered that 10 minutes was not enough time as we have exports running for longer - // without failing, and this logic was triggering multiple simultaneous resumes. Simultaneous resumes start to fight to update - // the status, and cause duplicate data to be exported. Overall, a nightmare. - // To mitigate this, we have exportHistoricalEvents update the status as it waits.
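// A worked example with illustrative numbers: for a chunk whose last heartbeat
// (statusTime) landed at 00:00:00 with retriesPerformedSoFar = 1, the threshold below
// evaluates to statusTime + 600_000 + retryDelaySeconds(2) * 1000 = statusTime + 612_000 ms,
// so the chunk is only resumed once `now` is past 00:10:12.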
- return now >= status.statusTime + TEN_MINUTES + retryDelaySeconds(status.retriesPerformedSoFar + 1) * 1000 - } - - function nextCursor(payload: ExportHistoricalEventsJobPayload, eventCount: number): OffsetParams { - // More on the same time window - if (eventCount === eventsPerRun) { - return { - timestampCursor: payload.timestampCursor, - fetchTimeInterval: payload.fetchTimeInterval, - offset: payload.offset + eventsPerRun, - } - } - - const nextCursor = payload.timestampCursor + payload.fetchTimeInterval - let nextFetchInterval = payload.fetchTimeInterval - // If we're fetching too small of a window at a time, increase window to fetch - if (payload.offset === 0 && eventCount < eventsPerRun * 0.5) { - nextFetchInterval = Math.min( - Math.floor(payload.fetchTimeInterval * hub.HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER), - TWELVE_HOURS - ) - } - // If time window seems too large, reduce it - if (payload.offset > 2 * eventsPerRun) { - nextFetchInterval = Math.max( - Math.floor(payload.fetchTimeInterval / hub.HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER), - TEN_MINUTES - ) - } - - // If we would end up fetching too many events next time, reduce fetch interval - if (nextCursor + nextFetchInterval > payload.endTime) { - nextFetchInterval = payload.endTime - nextCursor - } - - return { - timestampCursor: nextCursor, - fetchTimeInterval: nextFetchInterval, - offset: 0, - } - } - - function getExportDateRange({ dateFrom, dateTo }: ExportParams): Array<[ISOTimestamp, ISOTimestamp]> { - const result: Array<[ISOTimestamp, ISOTimestamp]> = [] - let date = dateFrom - while (date < dateTo) { - let nextDate = DateTime.fromISO(date).toUTC().plus({ days: 1 }).startOf('day').toISO() as ISOTimestamp - if (nextDate > dateTo) { - nextDate = dateTo - } - result.push([date, nextDate]) - date = nextDate - } - - return result - } - - function progressBar(progress: number, length = 20): string { - const filledBar = Math.round(progress * length) - - const progressBarCompleted = Array.from({ length: filledBar }) - .map(() => '■') - .join('') - const progressBarRemaining = Array.from({ length: length - filledBar }) - .map(() => '□') - .join('') - - return progressBarCompleted + progressBarRemaining - } - - function dateRange(startTime: number, endTime: number): string { - return `${new Date(startTime).toISOString()} to ${new Date(endTime).toISOString()}` - } - - async function getExportParameters(): Promise<ExportParams | null> { - return (await meta.storage.get(EXPORT_PARAMETERS_KEY, null)) as ExportParams | null - } - - function createLog(message: string, overrides: Partial<PluginLogEntry> = {}) { - hub.promiseManager.trackPromise( - hub.db.queuePluginLogEntry({ - pluginConfig, - message: message, - source: PluginLogEntrySource.System, - type: PluginLogEntryType.Log, - instanceId: hub.instanceId, - ...overrides, - }), - 'exports v2 - createLog' - ) - } - - if (isTestEnv()) { - meta.global._testFunctions = { - exportHistoricalEvents, - getTimestampBoundaries, - nextCursor, - coordinateHistoricalExport, - calculateCoordination, - getExportDateRange, - progressBar, - stopExport, - shouldResume, - } - } - - // NOTE: we return the eventsPerRun, purely for testing purposes - return { eventsPerRun } -} diff --git a/plugin-server/src/worker/vm/upgrades/historical-export/export-historical-events.ts b/plugin-server/src/worker/vm/upgrades/historical-export/export-historical-events.ts deleted file mode 100644 index 0c87e61c6cc56..0000000000000 --- a/plugin-server/src/worker/vm/upgrades/historical-export/export-historical-events.ts +++ /dev/null @@ -1,372 +0,0 @@
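A minimal sketch of the window-sizing rules that nextCursor above implements, before the older v1 exporter that follows; the multiplier and batch size here are illustrative stand-ins for hub.HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER and the configured events-per-run, not the shipped defaults:

```ts
// Illustrative restatement of nextCursor()'s window sizing, not the shipped code.
const TEN_MINUTES = 1000 * 60 * 10
const TWELVE_HOURS = 1000 * 60 * 60 * 12
const FETCH_WINDOW_MULTIPLIER = 1.5 // stand-in for hub.HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER
const eventsPerRun = 500 // stand-in for the configured batch size

function nextWindowMs(offset: number, eventCount: number, fetchTimeInterval: number): number {
    // Sparse window: a fresh scan returned under half a batch, so widen it (capped at 12h).
    if (offset === 0 && eventCount < eventsPerRun * 0.5) {
        return Math.min(Math.floor(fetchTimeInterval * FETCH_WINDOW_MULTIPLIER), TWELVE_HOURS)
    }
    // Dense window: we had to page past two full batches, so shrink it (floored at 10min).
    if (offset > 2 * eventsPerRun) {
        return Math.max(Math.floor(fetchTimeInterval / FETCH_WINDOW_MULTIPLIER), TEN_MINUTES)
    }
    // Otherwise keep the current window size.
    return fetchTimeInterval
}
```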
-import { PluginEvent, PluginMeta, RetryError } from '@posthog/plugin-scaffold' -import * as Sentry from '@sentry/node' - -import { - Hub, - JobSpec, - PluginConfig, - PluginConfigVMInternalResponse, - PluginLogEntrySource, - PluginLogEntryType, - PluginTask, - PluginTaskType, -} from '../../../../types' -import { fetchEventsForInterval } from '../utils/fetchEventsForInterval' -import { - ExportHistoricalEventsJobPayload, - ExportHistoricalEventsUpgrade, - fetchTimestampBoundariesForTeam, -} from '../utils/utils' - -const TEN_MINUTES = 1000 * 60 * 10 -const EVENTS_TIME_INTERVAL = TEN_MINUTES -const EVENTS_PER_RUN = 500 - -const TIMESTAMP_CURSOR_KEY = 'timestamp_cursor' -const MAX_UNIX_TIMESTAMP_KEY = 'max_timestamp' -const MIN_UNIX_TIMESTAMP_KEY = 'min_timestamp' -const EXPORT_RUNNING_KEY = 'is_export_running' -const RUN_EVERY_MINUTE_LAST_RUN_KEY = 'run_every_minute_last' -const BATCH_ID_CURSOR_KEY = 'batch_id' -const OLD_TIMESTAMP_CURSOR_KEY = 'old_timestamp_cursor' - -const INTERFACE_JOB_NAME = 'Export historical events' - -const JOB_SPEC: JobSpec = { - payload: { - dateFrom: { - title: 'Export start date', - type: 'date', - required: true, - }, - dateTo: { - title: 'Export end date', - type: 'date', - required: true, - }, - }, -} - -export function addHistoricalEventsExportCapability( - hub: Hub, - pluginConfig: PluginConfig, - response: PluginConfigVMInternalResponse<PluginMeta<ExportHistoricalEventsUpgrade>> -): void { - const { methods, tasks, meta } = response - - const currentPublicJobs = pluginConfig.plugin?.public_jobs || {} - - // If public job hasn't been registered or has changed, update it! - if ( - Object.keys(currentPublicJobs[INTERFACE_JOB_NAME]?.payload || {}).length !== - Object.keys(JOB_SPEC.payload!).length - ) { - hub.promiseManager.trackPromise( - hub.db.addOrUpdatePublicJob(pluginConfig.plugin_id, INTERFACE_JOB_NAME, JOB_SPEC), - 'exports addOrUpdatePublicJob' - ) - } - - const oldSetupPlugin = methods.setupPlugin - - const oldRunEveryMinute = tasks.schedule.runEveryMinute - - methods.setupPlugin = async () => { - await meta.utils.cursor.init(BATCH_ID_CURSOR_KEY) - - const storedTimestampCursor = await meta.storage.get(TIMESTAMP_CURSOR_KEY, null) - await meta.storage.set(OLD_TIMESTAMP_CURSOR_KEY, storedTimestampCursor || 0) - await meta.storage.set(RUN_EVERY_MINUTE_LAST_RUN_KEY, Date.now() + TEN_MINUTES) - - await oldSetupPlugin?.() - } - - tasks.schedule.runEveryMinute = { - name: 'runEveryMinute', - type: PluginTaskType.Schedule, - exec: async () => { - await oldRunEveryMinute?.exec?.() - - const lastRun = await meta.storage.get(RUN_EVERY_MINUTE_LAST_RUN_KEY, 0) - const exportShouldBeRunning = await meta.storage.get(EXPORT_RUNNING_KEY, false) - - const have10MinutesPassed = Date.now() - Number(lastRun) > TEN_MINUTES - - // only run every 10 minutes _if_ an export is in progress - if (!exportShouldBeRunning || !have10MinutesPassed) { - return - } - - const oldTimestampCursor = await meta.storage.get(OLD_TIMESTAMP_CURSOR_KEY, 0) - const currentTimestampCursor = await meta.storage.get(TIMESTAMP_CURSOR_KEY, 0) - - // if the cursor hasn't been incremented after 10 minutes that means we didn't pick up from - // where we left off automatically after a restart, or something else has gone wrong - // thus, kick off a new export chain with a new batchId - if (exportShouldBeRunning && oldTimestampCursor === currentTimestampCursor) { - const batchId = await meta.utils.cursor.increment(BATCH_ID_CURSOR_KEY) - createLog(`Restarting export after noticing inactivity.
Batch ID: ${batchId}`) - await meta.jobs - .exportHistoricalEvents({ retriesPerformedSoFar: 0, incrementTimestampCursor: true, batchId }) - .runNow() - } - - // set the old timestamp cursor to the current one so we can see if it changed in 10 minutes - await meta.storage.set(OLD_TIMESTAMP_CURSOR_KEY, currentTimestampCursor) - - await meta.storage.set(RUN_EVERY_MINUTE_LAST_RUN_KEY, Date.now()) - }, - - // :TRICKY: We don't want to track app metrics for runEveryMinute for historical exports _unless_ plugin also has `runEveryMinute` - __ignoreForAppMetrics: !oldRunEveryMinute || !!oldRunEveryMinute.__ignoreForAppMetrics, - } - - tasks.job['exportHistoricalEvents'] = { - name: 'exportHistoricalEvents', - type: PluginTaskType.Job, - exec: (payload) => meta.global.exportHistoricalEvents(payload as ExportHistoricalEventsJobPayload), - } - - tasks.job[INTERFACE_JOB_NAME] = { - name: INTERFACE_JOB_NAME, - type: PluginTaskType.Job, - // TODO: Accept timestamp as payload - exec: async (payload: ExportHistoricalEventsJobPayload) => { - // only let one export run at a time - const exportAlreadyRunning = await meta.storage.get(EXPORT_RUNNING_KEY, false) - if (exportAlreadyRunning) { - return - } - - await meta.storage.set(RUN_EVERY_MINUTE_LAST_RUN_KEY, Date.now() + TEN_MINUTES) - await meta.storage.set(EXPORT_RUNNING_KEY, true) - - // get rid of all state pertaining to a previous run - await meta.storage.del(TIMESTAMP_CURSOR_KEY) - await meta.storage.del(MAX_UNIX_TIMESTAMP_KEY) - await meta.storage.del(MIN_UNIX_TIMESTAMP_KEY) - meta.global.maxTimestamp = null - meta.global.minTimestamp = null - - await meta.global.initTimestampsAndCursor(payload) - - const batchId = await meta.utils.cursor.increment(BATCH_ID_CURSOR_KEY) - - await meta.jobs - .exportHistoricalEvents({ retriesPerformedSoFar: 0, incrementTimestampCursor: true, batchId: batchId }) - .runNow() - }, - } as unknown as PluginTask // :KLUDGE: Work around typing limitations - - meta.global.exportHistoricalEvents = async (payload: ExportHistoricalEventsJobPayload): Promise<void> => { - if (payload.retriesPerformedSoFar >= 15) { - // create some log error here - return - } - - // this is handling for duplicates when the plugin server restarts - const currentBatchId = await meta.storage.get(BATCH_ID_CURSOR_KEY, 0) - if (currentBatchId !== payload.batchId) { - return - } - - let timestampCursor = payload.timestampCursor - let intraIntervalOffset = payload.intraIntervalOffset ?? 0 - - // this ensures minTimestamp and timestampLimit are not null - // each thread will set them the first time they run this job - // we do this to prevent us from doing 2 additional queries - // to postgres each time the job runs - await meta.global.setTimestampBoundaries() - - // This is the first run OR we're done with an interval - if (payload.incrementTimestampCursor || !timestampCursor) { - // Done with a timestamp interval, reset offset - intraIntervalOffset = 0 - - // This ensures we never process an interval twice - const incrementedCursor = await meta.utils.cursor.increment(TIMESTAMP_CURSOR_KEY, EVENTS_TIME_INTERVAL) - - meta.global.updateProgressBar(incrementedCursor) - - timestampCursor = Number(incrementedCursor) - } - - if (timestampCursor > meta.global.maxTimestamp!)
{ - await meta.storage.del(EXPORT_RUNNING_KEY) - createLog(`Done exporting all events`) - return - } - - let events: PluginEvent[] = [] - - let fetchEventsError: Error | unknown | null = null - try { - events = await fetchEventsForInterval( - hub.db, - pluginConfig.team_id, - new Date(timestampCursor), - intraIntervalOffset, - EVENTS_TIME_INTERVAL, - EVENTS_PER_RUN - ) - } catch (error) { - fetchEventsError = error - Sentry.captureException(error, { tags: { team_id: pluginConfig.team_id } }) - } - - let exportEventsError: Error | unknown | null = null - - if (fetchEventsError) { - await meta.storage.del(EXPORT_RUNNING_KEY) - createLog(`Failed fetching events. Stopping export - please try again later.`) - return - } else { - if (events.length > 0) { - try { - await methods.exportEvents!(events) - } catch (error) { - exportEventsError = error - } - } - } - - if (exportEventsError instanceof RetryError) { - const nextRetrySeconds = 2 ** payload.retriesPerformedSoFar * 3 - - // "Failed processing events 0-100 from 2021-08-19T12:34:26.061Z to 2021-08-19T12:44:26.061Z. Retrying in 3s" - createLog( - `Failed processing events ${intraIntervalOffset}-${intraIntervalOffset + events.length} from ${new Date( - timestampCursor - ).toISOString()} to ${new Date( - timestampCursor + EVENTS_TIME_INTERVAL - ).toISOString()}. Retrying in ${nextRetrySeconds}s` - ) - - await meta.jobs - .exportHistoricalEvents({ - intraIntervalOffset, - timestampCursor, - retriesPerformedSoFar: payload.retriesPerformedSoFar + 1, - }) - .runIn(nextRetrySeconds, 'seconds') - } else if (!exportEventsError) { - const incrementTimestampCursor = events.length === 0 - - await meta.jobs - .exportHistoricalEvents({ - timestampCursor, - incrementTimestampCursor, - retriesPerformedSoFar: 0, - intraIntervalOffset: intraIntervalOffset + EVENTS_PER_RUN, - batchId: payload.batchId, - }) - .runIn(1, 'seconds') - } - - if (events.length > 0) { - createLog( - `Successfully processed events ${intraIntervalOffset}-${ - intraIntervalOffset + events.length - } from ${new Date(timestampCursor).toISOString()} to ${new Date( - timestampCursor + EVENTS_TIME_INTERVAL - ).toISOString()}.` - ) - } - } - - // initTimestampsAndCursor decides what timestamp boundaries to use before - // the export starts. if a payload is passed with boundaries, we use that, - // but if no payload is specified, we use the boundaries determined at setupPlugin - meta.global.initTimestampsAndCursor = async (payload?: ExportHistoricalEventsJobPayload) => { - // initTimestampsAndCursor will only run on **one** thread, because of our guard against - // multiple exports. as a result, we need to set the boundaries on postgres, and - // only set them in global when the job runs, so all threads have global state in sync - - // Fetch the max and min timestamps for a team's events - const timestampBoundaries = await fetchTimestampBoundariesForTeam(hub.db, pluginConfig.team_id, '_timestamp') - - if (payload && payload.dateFrom) { - try { - const dateFrom = new Date(payload.dateFrom).getTime() - await meta.utils.cursor.init(TIMESTAMP_CURSOR_KEY, dateFrom - EVENTS_TIME_INTERVAL) - await meta.storage.set(MIN_UNIX_TIMESTAMP_KEY, dateFrom) - } catch (error) { - createLog(`'dateFrom' should be a timestamp in ISO string format.`) - throw error - } - } else { - // no timestamp override specified via the payload, default to the first event ever ingested - if (!timestampBoundaries) { - throw new Error( - `Unable to determine the lower timestamp bound for the export automatically.
Please specify a 'dateFrom' value.` - ) - } - - const dateFrom = timestampBoundaries.min.getTime() - await meta.utils.cursor.init(TIMESTAMP_CURSOR_KEY, dateFrom - EVENTS_TIME_INTERVAL) - await meta.storage.set(MIN_UNIX_TIMESTAMP_KEY, dateFrom) - } - - if (payload && payload.dateTo) { - try { - await meta.storage.set(MAX_UNIX_TIMESTAMP_KEY, new Date(payload.dateTo).getTime()) - } catch (error) { - createLog(`'dateTo' should be a timestamp in ISO string format.`) - throw error - } - } else { - // no timestamp override specified via the payload, default to the last event before the plugin was enabled - if (!timestampBoundaries) { - throw new Error( - `Unable to determine the upper timestamp bound for the export automatically. Please specify a 'dateTo' value.` - ) - } - await meta.storage.set(MAX_UNIX_TIMESTAMP_KEY, timestampBoundaries.max.getTime()) - } - } - - // this ensures we have the global object correctly set on every thread - // without having to always do a postgres query when an export job for an - // interval is triggered - meta.global.setTimestampBoundaries = async () => { - if (!meta.global.maxTimestamp) { - const storedTimestampLimit = await meta.storage.get(MAX_UNIX_TIMESTAMP_KEY, null) - meta.global.maxTimestamp = Number(storedTimestampLimit) - } - - if (!meta.global.minTimestamp) { - const storedMinTimestamp = await meta.storage.get(MIN_UNIX_TIMESTAMP_KEY, null) - meta.global.minTimestamp = Number(storedMinTimestamp) - } - } - - meta.global.updateProgressBar = (incrementedCursor) => { - const progressNumerator = incrementedCursor - meta.global.minTimestamp! - const progressDenominator = meta.global.maxTimestamp! - meta.global.minTimestamp! - - const progress = progressDenominator === 0 ? 20 : Math.round(progressNumerator / progressDenominator) * 20 - const percentage = Math.round((1000 * progressNumerator) / progressDenominator) / 10 - - const progressBarCompleted = Array.from({ length: progress }) - .map(() => '■') - .join('') - const progressBarRemaining = Array.from({ length: 20 - progress }) - .map(() => '□') - .join('') - createLog(`Export progress: ${progressBarCompleted}${progressBarRemaining} (${percentage}%)`) - } - - function createLog(message: string, type: PluginLogEntryType = PluginLogEntryType.Log) { - hub.promiseManager.trackPromise( - hub.db.queuePluginLogEntry({ - pluginConfig, - message: `(${hub.instanceId}) ${message}`, - source: PluginLogEntrySource.System, - type: type, - instanceId: hub.instanceId, - }), - 'exports createLog' - ) - } -} diff --git a/plugin-server/src/worker/vm/upgrades/utils/export-events-buffer.ts b/plugin-server/src/worker/vm/upgrades/utils/export-events-buffer.ts deleted file mode 100644 index 281b4b5dab460..0000000000000 --- a/plugin-server/src/worker/vm/upgrades/utils/export-events-buffer.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { runInTransaction } from '../../../../sentry' -import { Hub, PluginConfig } from '../../../../types' -import { timeoutGuard } from '../../../../utils/db/utils' - -export type BufferOptions = { - limit: number - timeoutSeconds: number - onFlush?: (objects: any[], points: number) => void | Promise<void> -} - -export class ExportEventsBuffer { - buffer: any[] - timeout: NodeJS.Timeout | null - points: number - options: BufferOptions - pluginConfig: PluginConfig - hub: Hub - - constructor(hub: Hub, pluginConfig: PluginConfig, opts?: Partial<BufferOptions>) { - this.buffer = [] - this.timeout = null - this.points = 0 - this.options = { - limit: 10, - timeoutSeconds: 60, - ...opts, - } - this.pluginConfig = pluginConfig - this.hub =
hub - } - - public async add(object: Record<string, any>, points = 1): Promise<void> { - // flush existing if adding would make us go over the limit - if (this.points && this.points + points > this.options.limit) { - await this.flush() - } - - // add the object to the buffer - this.points += points - this.buffer.push(object) - - if (this.points > this.options.limit) { - // flush (again?) if we are now over the limit - await this.flush() - } else if (!this.timeout) { - // if not, make sure there's a flush timeout - this.timeout = setTimeout(async () => await this.flush(), this.options.timeoutSeconds * 1000) - } - } - - public async flush(): Promise<void> { - const oldBuffer = this.buffer - const oldPoints = this.points - this.buffer = [] - this.points = 0 - - this.hub.promiseManager.trackPromise( - this._flush(oldBuffer, oldPoints, new Date()), - 'ExportEventsBuffer flush logs' - ) - await this.hub.promiseManager.awaitPromisesIfNeeded() - } - - public async _flush(oldBuffer: any[], oldPoints: number, _: Date): Promise<void> { - if (this.timeout) { - clearTimeout(this.timeout) - this.timeout = null - } - - const slowTimeout = timeoutGuard( - `ExportEventsBuffer flush promise running for more than 5 minutes`, - { - plugin_id: this.pluginConfig.plugin_id, - team_id: this.pluginConfig.team_id, - plugin_config_id: this.pluginConfig.id, - }, - 300_000 - ) - try { - await runInTransaction( - { - name: 'export-events-buffer', - op: 'ExportEventsBuffer.flush', - }, - async () => { - await this.options.onFlush?.(oldBuffer, oldPoints) - } - ) - } finally { - clearTimeout(slowTimeout) - } - } -} diff --git a/plugin-server/src/worker/vm/upgrades/utils/fetchEventsForInterval.ts b/plugin-server/src/worker/vm/upgrades/utils/fetchEventsForInterval.ts deleted file mode 100644 index 16353c424a5c6..0000000000000 --- a/plugin-server/src/worker/vm/upgrades/utils/fetchEventsForInterval.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { RetryError } from '@posthog/plugin-scaffold' -import { DateTime } from 'luxon' - -import { Element, RawClickHouseEvent, TimestampFormat } from '../../../../types' -import { DB } from '../../../../utils/db/db' -import { parseRawClickHouseEvent } from '../../../../utils/event' -import { status } from '../../../../utils/status' -import { castTimestampToClickhouseFormat } from '../../../../utils/utils' -import { HistoricalExportEvent } from './utils' - -export interface RawElement extends Element { - $el_text?: string -} - -export const fetchEventsForInterval = async ( - db: DB, - teamId: number, - timestampLowerBound: Date, - offset: number, - eventsTimeInterval: number, - eventsPerRun: number -): Promise<HistoricalExportEvent[]> => { - const timestampUpperBound = new Date(timestampLowerBound.getTime() + eventsTimeInterval) - - const chTimestampLower = castTimestampToClickhouseFormat( - DateTime.fromISO(timestampLowerBound.toISOString()), - TimestampFormat.ClickHouseSecondPrecision - ) - const chTimestampHigher = castTimestampToClickhouseFormat( - DateTime.fromISO(timestampUpperBound.toISOString()), - TimestampFormat.ClickHouseSecondPrecision - ) - - // :TODO: Adding tag messes up the return value?
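// (How the paging below fits together, as a sketch: rows are ordered by timestamp and
// fetched LIMIT eventsPerRun at a time, with OFFSET advancing within a single
// [chTimestampLower, chTimestampHigher) window; once a run returns fewer than
// eventsPerRun rows, the v2 exporter's nextCursor() moves the window itself forward.)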
- const fetchEventsQuery = ` - SELECT - event, - uuid, - team_id, - distinct_id, - properties, - timestamp, - created_at, - elements_chain - FROM events - WHERE team_id = ${teamId} - AND timestamp >= '${chTimestampLower}' - AND timestamp < '${chTimestampHigher}' - ORDER BY timestamp - LIMIT ${eventsPerRun} - OFFSET ${offset}` - - let clickhouseFetchEventsResult: { data: RawClickHouseEvent[] } - - try { - clickhouseFetchEventsResult = await db.clickhouseQuery(fetchEventsQuery) - } catch (error) { - // TODO: add more specific error handling based on the error from - // `clickhouseQuery` (e.g. if it's a timeout, we should retry, if it's a - // query syntax error, we should not retry) - status.error('🔥', 'clickhouse_export_fetch_failure', { error }) - throw new RetryError("Couldn't fetch events from ClickHouse") - } - - return clickhouseFetchEventsResult.data.map(convertClickhouseEventToPluginEvent) -} - -const convertClickhouseEventToPluginEvent = (event: RawClickHouseEvent): HistoricalExportEvent => { - const clickhouseEvent = parseRawClickHouseEvent(event) - const parsedEvent = { - uuid: clickhouseEvent.uuid, - team_id: clickhouseEvent.team_id, - distinct_id: clickhouseEvent.distinct_id, - properties: clickhouseEvent.properties, - elements: - clickhouseEvent.event === '$autocapture' && clickhouseEvent.elements_chain - ? convertDatabaseElementsToRawElements(clickhouseEvent.elements_chain) - : undefined, - timestamp: clickhouseEvent.timestamp.toISO(), - now: DateTime.now().toISO(), - event: clickhouseEvent.event || '', - ip: clickhouseEvent.properties['$ip'] || '', - site_url: '', - } - return addHistoricalExportEventProperties(parsedEvent) -} - -const addHistoricalExportEventProperties = (event: HistoricalExportEvent): HistoricalExportEvent => { - event.properties['$$historical_export_source_db'] = 'clickhouse' - event.properties['$$is_historical_export_event'] = true - event.properties['$$historical_export_timestamp'] = new Date().toISOString() - return event -} - -export const convertDatabaseElementsToRawElements = (elements: RawElement[]): RawElement[] => { - for (const element of elements) { - if (element.attributes && element.attributes.attr__class) { - element.attr_class = element.attributes.attr__class - } - if (element.text) { - element.$el_text = element.text - } - } - return elements -} diff --git a/plugin-server/src/worker/vm/upgrades/utils/utils.ts b/plugin-server/src/worker/vm/upgrades/utils/utils.ts deleted file mode 100644 index 7109288dd4de2..0000000000000 --- a/plugin-server/src/worker/vm/upgrades/utils/utils.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { PluginEvent, Properties } from '@posthog/plugin-scaffold' -import { Plugin } from '@posthog/plugin-scaffold' -import * as Sentry from '@sentry/node' -import { DateTime } from 'luxon' -import { Client } from 'pg' - -import { DB } from '../../../../utils/db/db' - -export interface TimestampBoundaries { - min: Date - max: Date -} - -export interface ExportHistoricalEventsJobPayload extends Record<string, any> { - // The lower bound of the timestamp interval to be processed - timestampCursor?: number - - // The offset *within* a given timestamp interval - intraIntervalOffset?: number - - // how many retries a payload has had (max = 15) - retriesPerformedSoFar: number - - // tells us we're ready to pick up a new interval - incrementTimestampCursor: boolean - - // used for ensuring only one "export task" is running if the server restarts - batchId: number -} - -export interface HistoricalExportEvent extends PluginEvent { - properties: Properties
// can't be undefined -} - -export type ExportHistoricalEventsUpgrade = Plugin<{ - global: { - pgClient: Client - eventsToIgnore: Set<string> - sanitizedTableName: string - exportHistoricalEvents: (payload: ExportHistoricalEventsJobPayload) => Promise<void> - initTimestampsAndCursor: (payload: ExportHistoricalEventsJobPayload | undefined) => Promise<void> - setTimestampBoundaries: () => Promise<void> - updateProgressBar: (incrementedCursor: number) => void - timestampBoundariesForTeam: TimestampBoundaries - maxTimestamp: number | null - minTimestamp: number | null - } -}> - -export const clickhouseEventTimestampToDate = (timestamp: string): Date => { - return new Date(DateTime.fromFormat(timestamp, 'yyyy-MM-dd HH:mm:ss').toISO()) -} - -export const fetchTimestampBoundariesForTeam = async ( - db: DB, - teamId: number, - column: 'timestamp' | '_timestamp' -): Promise<TimestampBoundaries | null> => { - try { - const clickhouseFetchTimestampsResult = await db.clickhouseQuery(` - SELECT min(${column}) as min, max(${column}) as max - FROM events - WHERE team_id = ${teamId}`) - - const min = clickhouseFetchTimestampsResult.data[0].min - const max = clickhouseFetchTimestampsResult.data[0].max - - const minDate = new Date(clickhouseEventTimestampToDate(min)) - const maxDate = new Date(clickhouseEventTimestampToDate(max)) - - const isValidMin = minDate.getTime() !== new Date(0).getTime() - const isValidMax = maxDate.getTime() !== new Date(0).getTime() - - return isValidMin && isValidMax ? { min: minDate, max: maxDate } : null - } catch (e) { - Sentry.captureException(e, { tags: { team_id: teamId } }) - return null - } -} diff --git a/plugin-server/src/worker/vm/vm.ts b/plugin-server/src/worker/vm/vm.ts index ef790d33e26db..c3b304049046f 100644 --- a/plugin-server/src/worker/vm/vm.ts +++ b/plugin-server/src/worker/vm/vm.ts @@ -13,9 +13,6 @@ import { createStorage } from './extensions/storage' import { createUtils } from './extensions/utilities' import { AVAILABLE_IMPORTS } from './imports' import { transformCode } from './transforms' -import { upgradeExportEvents } from './upgrades/export-events' -import { addHistoricalEventsExportCapability } from './upgrades/historical-export/export-historical-events' -import { addHistoricalEventsExportCapabilityV2 } from './upgrades/historical-export/export-historical-events-v2' export class TimeoutError extends RetryError { name = 'TimeoutError' @@ -191,7 +188,6 @@ export function createPluginConfigVM( const __methods = { setupPlugin: __asyncFunctionGuard(__bindMeta('setupPlugin'), 'setupPlugin'), teardownPlugin: __asyncFunctionGuard(__bindMeta('teardownPlugin'), 'teardownPlugin'), - exportEvents: __asyncFunctionGuard(__bindMeta('exportEvents'), 'exportEvents'), onEvent: __asyncFunctionGuard(__bindMeta('onEvent'), 'onEvent'), processEvent: __asyncFunctionGuard(__bindMeta('processEvent'), 'processEvent'), composeWebhook: __bindMeta('composeWebhook'), @@ -235,19 +231,6 @@ export function createPluginConfigVM( const vmResponse = vm.run(responseVar) const { methods, tasks } = vmResponse - const exportEventsExists = !!methods.exportEvents - - if (exportEventsExists) { - upgradeExportEvents(hub, pluginConfig, vmResponse) - statsdTiming('vm_setup_sync_section') - - if (hub.HISTORICAL_EXPORTS_ENABLED) { - addHistoricalEventsExportCapability(hub, pluginConfig, vmResponse) - addHistoricalEventsExportCapabilityV2(hub, pluginConfig, vmResponse) - } - } else { - statsdTiming('vm_setup_sync_section') - } statsdTiming('vm_setup_full') vmSetupMsSummary.labels(String(pluginConfig.plugin?.id)).observe(new Date().getTime() -
timer.getTime()) diff --git a/plugin-server/tests/historical-export-e2e.test.ts b/plugin-server/tests/historical-export-e2e.test.ts deleted file mode 100644 index aabaab77886f1..0000000000000 --- a/plugin-server/tests/historical-export-e2e.test.ts +++ /dev/null @@ -1,148 +0,0 @@ -import { PluginEvent } from '@posthog/plugin-scaffold' - -import { defaultConfig } from '../src/config/config' -import { startPluginsServer } from '../src/main/pluginsServer' -import { EnqueuedPluginJob, Hub, LogLevel, PluginsServerConfig } from '../src/types' -import { UUIDT } from '../src/utils/utils' -import { EventPipelineRunner } from '../src/worker/ingestion/event-pipeline/runner' -import Piscina, { makePiscina } from '../src/worker/piscina' -import { writeToFile } from '../src/worker/vm/extensions/test-utils' -import { delayUntilEventIngested, resetTestDatabaseClickhouse } from './helpers/clickhouse' -import { resetGraphileWorkerSchema } from './helpers/graphile-worker' -import { resetKafka } from './helpers/kafka' -import { pluginConfig39 } from './helpers/plugins' -import { resetTestDatabase } from './helpers/sql' - -jest.mock('../src/utils/status') -jest.setTimeout(60000) // 60 sec timeout - -const { console: testConsole } = writeToFile - -const extraServerConfig: Partial<PluginsServerConfig> = { - WORKER_CONCURRENCY: 2, - LOG_LEVEL: LogLevel.Log, - CONVERSION_BUFFER_ENABLED: false, - HISTORICAL_EXPORTS_ENABLED: true, - HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER: 2, - HISTORICAL_EXPORTS_INITIAL_FETCH_TIME_WINDOW: 8 * 60 * 60 * 1000, // 8 hours -} - -const indexJs = ` -import { console as testConsole } from 'test-utils/write-to-file' - -export async function exportEvents(events) { - for (const event of events) { - if (event.properties && event.properties['$$is_historical_export_event']) { - testConsole.log('exported historical event', event) - } - } -} -` - -describe('Historical Export (v2)', () => { - let hub: Hub - let stopServer: () => Promise<void> - let piscina: Piscina - - beforeAll(async () => { - await resetKafka(extraServerConfig) - }) - - beforeEach(async () => { - console.info = jest.fn() - - testConsole.reset() - await Promise.all([ - await resetTestDatabase(indexJs), - await resetTestDatabaseClickhouse(extraServerConfig), - await resetGraphileWorkerSchema(defaultConfig), - ]) - - const startResponse = await startPluginsServer(extraServerConfig, makePiscina, undefined) - hub = startResponse.hub! - piscina = startResponse.piscina! - stopServer = startResponse.stop!
- }) - - afterEach(async () => { - await stopServer() - }) - - afterAll(async () => { - await resetGraphileWorkerSchema(defaultConfig) - }) - - async function ingestEvent(timestamp: string, overrides: Partial<PluginEvent> = {}) { - const pluginEvent: PluginEvent = { - event: 'some_event', - distinct_id: 'some_user', - site_url: '', - team_id: 2, - timestamp: timestamp, - now: timestamp, - ip: '', - uuid: new UUIDT().toString(), - ...overrides, - } as any as PluginEvent - - const runner = new EventPipelineRunner(hub, pluginEvent) - await runner.runEventPipeline(pluginEvent) - } - - it('exports a batch of events in a time range', async () => { - await ingestEvent('2021-07-28T00:00:00.000Z') // To avoid parallel person processing which we don't handle - await Promise.all([ - ingestEvent('2021-08-01T00:00:00.000Z', { properties: { foo: 'bar' } }), - ingestEvent('2021-08-02T02:00:00.000Z'), - ingestEvent('2021-08-03T09:00:00.000Z'), - ingestEvent('2021-08-03T15:00:00.000Z'), - ingestEvent('2021-08-04T23:00:00.000Z'), - ingestEvent('2021-08-04T23:59:59.000Z'), - ingestEvent('2021-08-05T00:00:00.000Z'), - ingestEvent('2021-08-05T01:00:00.000Z'), - ]) - - await hub.kafkaProducer.flush() - await delayUntilEventIngested(() => hub.db.fetchEvents(), 9) - - await piscina.run({ - task: 'runPluginJob', - args: { - job: { - type: 'Export historical events V2', - payload: { - dateRange: ['2021-08-01', '2021-08-04'], - parallelism: 5, - $operation: 'start', - }, - pluginConfigId: pluginConfig39.id, - pluginConfigTeam: pluginConfig39.team_id, - timestamp: 0, - } as EnqueuedPluginJob, - }, - }) - - await delayUntilEventIngested(() => Promise.resolve(testConsole.read()), 6, 1000, 50) - - const exportedEventLogs = testConsole.read() as Array<[string, any]> - exportedEventLogs.sort((e1, e2) => e1[1].timestamp.localeCompare(e2[1].timestamp)) - - const timestamps = exportedEventLogs.map(([, event]) => event.timestamp) - expect(timestamps).toEqual([ - '2021-08-01T00:00:00.000Z', - '2021-08-02T02:00:00.000Z', - '2021-08-03T09:00:00.000Z', - '2021-08-03T15:00:00.000Z', - '2021-08-04T23:00:00.000Z', - '2021-08-04T23:59:59.000Z', - ]) - expect(exportedEventLogs[0][1].properties).toEqual( - expect.objectContaining({ - foo: 'bar', - $$historical_export_source_db: 'clickhouse', - $$is_historical_export_event: true, - $$historical_export_timestamp: expect.any(String), - }) - ) - }) -}) diff --git a/plugin-server/tests/main/jobs/graphile-worker.test.ts b/plugin-server/tests/main/jobs/graphile-worker.test.ts index 081faeac01cdd..f46817606a012 100644 --- a/plugin-server/tests/main/jobs/graphile-worker.test.ts +++ b/plugin-server/tests/main/jobs/graphile-worker.test.ts @@ -1,8 +1,6 @@ import { GraphileWorker } from '../../../src/main/graphile-worker/graphile-worker' import { EnqueuedJob, Hub, JobName } from '../../../src/types' -import { runRetriableFunction } from '../../../src/utils/retries' import { UUID } from '../../../src/utils/utils' -import { PromiseManager } from '../../../src/worker/vm/promise-manager' jest.mock('../../../src/utils/retries') jest.mock('../../../src/utils/status') @@ -20,7 +18,6 @@ jest.mock('graphile-worker', () => { const mockHub: Hub = { instanceId: new UUID('F8B2F832-6639-4596-ABFC-F9664BC88E84'), - promiseManager: new PromiseManager({ MAX_PENDING_PROMISES_PER_WORKER: 1 } as any), JOB_QUEUES: 'fs', } as Hub @@ -36,22 +33,8 @@ describe('graphileWorker', () => { jest.spyOn(graphileWorker, '_enqueue').mockImplementation(() => Promise.resolve()) await graphileWorker.enqueue(JobName.PLUGIN_JOB, { type: 'foo' } as
EnqueuedJob) - expect(runRetriableFunction).not.toHaveBeenCalled() expect(graphileWorker._enqueue).toHaveBeenCalledWith(JobName.PLUGIN_JOB, { type: 'foo' }) }) - - it('calls runRetriableFunction with the correct parameters if retryOnFailure=true', async () => { - jest.spyOn(graphileWorker, '_enqueue').mockImplementation(() => Promise.resolve()) - await graphileWorker.enqueue(JobName.PLUGIN_JOB, { type: 'foo' } as EnqueuedJob, undefined, true) - expect(runRetriableFunction).toHaveBeenCalled() - const runRetriableFunctionArgs = jest.mocked(runRetriableFunction).mock.calls[0][0] - - expect(runRetriableFunctionArgs.metricName).toEqual('job_queues_enqueue_pluginJob') - expect(runRetriableFunctionArgs.payload).toEqual({ type: 'foo' }) - expect(runRetriableFunctionArgs.tryFn).not.toBeUndefined() - expect(runRetriableFunctionArgs.catchFn).not.toBeUndefined() - expect(runRetriableFunctionArgs.finallyFn).toBeUndefined() - }) }) describe('syncState()', () => { diff --git a/plugin-server/tests/main/jobs/schedule.test.ts b/plugin-server/tests/main/jobs/schedule.test.ts index 6c280d5e96be0..150d171f97d3b 100644 --- a/plugin-server/tests/main/jobs/schedule.test.ts +++ b/plugin-server/tests/main/jobs/schedule.test.ts @@ -3,11 +3,9 @@ import { runScheduledTasks } from '../../../src/main/graphile-worker/schedule' import { Hub } from '../../../src/types' import { KafkaProducerWrapper } from '../../../src/utils/db/kafka-producer-wrapper' import { UUID } from '../../../src/utils/utils' -import { PromiseManager } from '../../../src/worker/vm/promise-manager' const mockHub: Hub = { instanceId: new UUID('F8B2F832-6639-4596-ABFC-F9664BC88E84'), - promiseManager: new PromiseManager({ MAX_PENDING_PROMISES_PER_WORKER: 1 } as any), JOB_QUEUES: 'fs', } as Hub diff --git a/plugin-server/tests/utils/retries.test.ts b/plugin-server/tests/utils/retries.test.ts index 15193e8ad825c..6bd6e8b40be82 100644 --- a/plugin-server/tests/utils/retries.test.ts +++ b/plugin-server/tests/utils/retries.test.ts @@ -1,39 +1,8 @@ -import { ProcessedPluginEvent, RetryError } from '@posthog/plugin-scaffold' - -import { Hub } from '../../src/types' -import { getNextRetryMs, runRetriableFunction } from '../../src/utils/retries' -import { UUID } from '../../src/utils/utils' -import { AppMetricIdentifier } from '../../src/worker/ingestion/app-metrics' -import { PromiseManager } from '../../src/worker/vm/promise-manager' +import { getNextRetryMs } from '../../src/utils/retries' jest.useFakeTimers() jest.spyOn(global, 'setTimeout') -const mockHub: Hub = { - instanceId: new UUID('F8B2F832-6639-4596-ABFC-F9664BC88E84'), - promiseManager: new PromiseManager({ MAX_PENDING_PROMISES_PER_WORKER: 1 } as any), - appMetrics: { - queueMetric: jest.fn(), - queueError: jest.fn(), - }, -} as unknown as Hub - -const testEvent: ProcessedPluginEvent = { - uuid: '4CCCB5FD-BD27-4D6C-8737-88EB7294C437', - distinct_id: 'my_id', - ip: '127.0.0.1', - team_id: 3, - timestamp: '2023-04-01T00:00:00.000Z', - event: 'default event', - properties: {}, -} - -const appMetric: AppMetricIdentifier = { - teamId: 2, - pluginConfigId: 3, - category: 'processEvent', -} - describe('getNextRetryMs', () => { it('returns the correct number of milliseconds with a multiplier of 1', () => { expect(getNextRetryMs(500, 1, 1)).toBe(500) @@ -56,175 +25,3 @@ describe('getNextRetryMs', () => { expect(() => getNextRetryMs(4000, 2, -1)).toThrowError('Attempts are indexed starting with 1') }) }) - -describe('runRetriableFunction', () => { - it('runs the function once if it resolves on first try', 
async () => { - const tryFn = jest.fn().mockResolvedValue('Guten Abend') - const catchFn = jest.fn() - const finallyFn = jest.fn() - - const promise = new Promise((resolve) => { - finallyFn.mockImplementation((attempt: number) => resolve(attempt)) - void runRetriableFunction({ - metricName: 'plugin.on_foo', - hub: mockHub, - payload: testEvent, - tryFn, - catchFn, - finallyFn, - appMetric, - }) - }) - jest.runAllTimers() - - await expect(promise).resolves.toEqual(1) - expect(tryFn).toHaveBeenCalledTimes(1) - expect(catchFn).toHaveBeenCalledTimes(0) - expect(finallyFn).toHaveBeenCalledTimes(1) - expect(setTimeout).not.toHaveBeenCalled() - expect(mockHub.appMetrics.queueMetric).toHaveBeenCalledWith({ - ...appMetric, - successes: 1, - successesOnRetry: 0, - }) - }) - - it('catches non-RetryError error', async () => { - const tryFn = jest.fn().mockImplementation(() => { - // Faulty plugin code might look like this - let bar - bar.baz = 123 - }) - const catchFn = jest.fn() - const finallyFn = jest.fn() - - const promise = new Promise((resolve) => { - finallyFn.mockImplementation((attempt: number) => resolve(attempt)) - void runRetriableFunction({ - metricName: 'plugin.on_foo', - hub: mockHub, - payload: testEvent, - tryFn, - catchFn, - finallyFn, - appMetric, - appMetricErrorContext: { - event: testEvent, - }, - }) - }) - jest.runAllTimers() - - await expect(promise).resolves.toEqual(1) - expect(tryFn).toHaveBeenCalledTimes(1) - expect(catchFn).toHaveBeenCalledTimes(1) - expect(catchFn).toHaveBeenCalledWith(expect.any(TypeError)) - expect(finallyFn).toHaveBeenCalledTimes(1) - expect(setTimeout).not.toHaveBeenCalled() - expect(mockHub.appMetrics.queueError).toHaveBeenCalledWith( - { - ...appMetric, - failures: 1, - }, - { - error: expect.any(TypeError), - event: testEvent, - } - ) - }) - - it('catches RetryError error and retries up to 3 times', async () => { - const tryFn = jest.fn().mockImplementation(() => { - throw new RetryError() - }) - const catchFn = jest.fn() - const finallyFn = jest.fn() - - const promise = new Promise((resolve) => { - finallyFn.mockImplementation((attempt: number) => resolve(attempt)) - void runRetriableFunction({ - metricName: 'plugin.on_foo', - hub: mockHub, - payload: testEvent, - tryFn, - catchFn, - finallyFn, - appMetric, - appMetricErrorContext: { - event: testEvent, - }, - }) - }) - - expect(tryFn).toHaveBeenCalledTimes(1) - expect(finallyFn).toHaveBeenCalledTimes(0) - expect(setTimeout).toHaveBeenCalledTimes(1) - - jest.runAllTimers() - - await expect(promise).resolves.toEqual(3) - expect(tryFn).toHaveBeenCalledTimes(3) - expect(catchFn).toHaveBeenCalledTimes(1) - expect(catchFn).toHaveBeenCalledWith(expect.any(RetryError)) - expect(finallyFn).toHaveBeenCalledTimes(1) - expect(setTimeout).toHaveBeenCalledTimes(2) - expect(setTimeout).toHaveBeenNthCalledWith(1, expect.any(Function), 3_000) - expect(setTimeout).toHaveBeenNthCalledWith(2, expect.any(Function), 6_000) - expect(mockHub.appMetrics.queueError).toHaveBeenCalledWith( - { - ...appMetric, - failures: 1, - }, - { - error: expect.any(RetryError), - event: testEvent, - } - ) - }) - - it('catches RetryError error and allow the function to succeed on 3rd attempt', async () => { - const tryFn = jest - .fn() - .mockImplementationOnce(() => { - throw new RetryError() - }) - .mockImplementationOnce(() => { - throw new RetryError() - }) - .mockResolvedValue('Gute Nacht') - const catchFn = jest.fn() - const finallyFn = jest.fn() - - const promise = new Promise((resolve) => { - finallyFn.mockImplementation((attempt: 
number) => resolve(attempt)) - void runRetriableFunction({ - metricName: 'plugin.on_foo', - hub: mockHub, - payload: testEvent, - tryFn, - catchFn, - finallyFn, - appMetric, - }) - }) - - expect(tryFn).toHaveBeenCalledTimes(1) - expect(finallyFn).toHaveBeenCalledTimes(0) - expect(setTimeout).toHaveBeenCalledTimes(1) - - jest.runAllTimers() - - await expect(promise).resolves.toEqual(3) - expect(tryFn).toHaveBeenCalledTimes(3) - expect(catchFn).toHaveBeenCalledTimes(0) - expect(finallyFn).toHaveBeenCalledTimes(1) - expect(setTimeout).toHaveBeenCalledTimes(2) - expect(setTimeout).toHaveBeenNthCalledWith(1, expect.any(Function), 3_000) - expect(setTimeout).toHaveBeenNthCalledWith(2, expect.any(Function), 6_000) - expect(mockHub.appMetrics.queueMetric).toHaveBeenCalledWith({ - ...appMetric, - successes: 0, - successesOnRetry: 1, - }) - }) -}) diff --git a/plugin-server/tests/worker/buffer.test.ts b/plugin-server/tests/worker/buffer.test.ts deleted file mode 100644 index 4a38a4fe591e7..0000000000000 --- a/plugin-server/tests/worker/buffer.test.ts +++ /dev/null @@ -1,112 +0,0 @@ -import { delay } from '../../src/utils/utils' -import { PromiseManager } from '../../src/worker/vm/promise-manager' -import { pluginConfig39 } from '../helpers/plugins' -import { Hub } from './../../src/types' -import { ExportEventsBuffer } from './../../src/worker/vm/upgrades/utils/export-events-buffer' - -jest.setTimeout(100000) - -describe('PromiseManager', () => { - let promiseManager: PromiseManager - - beforeEach(() => { - promiseManager = new PromiseManager({ MAX_PENDING_PROMISES_PER_WORKER: 1 } as any) - }) - - afterEach(async () => { - await Promise.all(promiseManager.pendingPromises) - }) - - test('promise manager awaits promises if above limit', async () => { - const hello = jest.fn() - const promise = async () => { - await delay(3000) - hello() - } - - // we track the promise but don't await it - promiseManager.trackPromise(promise()) - expect(promiseManager.pendingPromises.size).toEqual(1) - expect(hello).not.toHaveBeenCalled() - - // we add another promise above the limit - promiseManager.trackPromise(promise()) - expect(promiseManager.pendingPromises.size).toEqual(2) - expect(hello).not.toHaveBeenCalled() - - // we chop one promise off by awaiting it - await promiseManager.awaitPromisesIfNeeded() - expect(hello).toHaveBeenCalled() - expect(promiseManager.pendingPromises.size).toEqual(1) - }) -}) - -describe('ExportEventsBuffer', () => { - let promiseManager: PromiseManager - let mockHub: Hub - let exportEventsBuffer: ExportEventsBuffer - - beforeEach(() => { - promiseManager = new PromiseManager({ MAX_PENDING_PROMISES_PER_WORKER: 1 } as any) - mockHub = { promiseManager } as any - exportEventsBuffer = new ExportEventsBuffer(mockHub, pluginConfig39, { limit: 2 }) - }) - - afterEach(async () => { - await Promise.all(promiseManager.pendingPromises) - }) - - test('add and flush work as expected', async () => { - jest.spyOn(promiseManager, 'trackPromise') - jest.spyOn(exportEventsBuffer, 'flush') - - exportEventsBuffer._flush = jest.fn(async () => { - await delay(3000) - }) - - await exportEventsBuffer.add({ event: 'event1' }, 1) - expect(exportEventsBuffer.points).toEqual(1) - expect(exportEventsBuffer.buffer.length).toEqual(1) - expect(exportEventsBuffer.flush).not.toHaveBeenCalled() - - await exportEventsBuffer.add({ event: 'event2' }, 1) - expect(exportEventsBuffer.points).toEqual(2) - expect(exportEventsBuffer.buffer.length).toEqual(2) - expect(exportEventsBuffer.flush).not.toHaveBeenCalled() - - await 
exportEventsBuffer.add({ event: 'event3' }, 1) - expect(exportEventsBuffer.points).toEqual(1) - expect(exportEventsBuffer.buffer.length).toEqual(1) - expect(exportEventsBuffer.buffer).toEqual([{ event: 'event3' }]) - expect(exportEventsBuffer._flush).toHaveBeenCalledWith( - [{ event: 'event1' }, { event: 'event2' }], - 2, - expect.any(Date) - ) - }) - - test('flush works correctly with promise manager', async () => { - jest.spyOn(promiseManager, 'trackPromise') - jest.spyOn(exportEventsBuffer, 'flush') - - exportEventsBuffer._flush = jest.fn(async () => { - await delay(3000) - }) - - // add a promise - promiseManager.trackPromise(delay(3000)) - expect(promiseManager.pendingPromises.size).toEqual(1) - - await exportEventsBuffer.add({ event: 'event1' }, 1) - expect(exportEventsBuffer.points).toEqual(1) - expect(exportEventsBuffer.buffer.length).toEqual(1) - expect(exportEventsBuffer.flush).not.toHaveBeenCalled() - expect(promiseManager.trackPromise).toHaveBeenCalledTimes(1) - expect(promiseManager.pendingPromises.size).toEqual(1) - - await exportEventsBuffer.add({ event: 'event2' }, 2) - expect(exportEventsBuffer.flush).toHaveBeenCalled() - expect(promiseManager.trackPromise).toHaveBeenCalledTimes(2) - expect(promiseManager.pendingPromises.size).toEqual(1) - }) -}) diff --git a/plugin-server/tests/worker/capabilities.test.ts b/plugin-server/tests/worker/capabilities.test.ts index 7ba3d3b840178..6dadbef2e88da 100644 --- a/plugin-server/tests/worker/capabilities.test.ts +++ b/plugin-server/tests/worker/capabilities.test.ts @@ -100,7 +100,7 @@ describe('capabilities', () => { const shouldSetupPlugin = shouldSetupPluginInServer( { ingestion: true }, { - methods: ['onEvent', 'exportEvents'], + methods: ['onEvent'], scheduled_tasks: ['runEveryMinute'], jobs: ['someJob'], } @@ -122,7 +122,7 @@ describe('capabilities', () => { const shouldSetupPlugin = shouldSetupPluginInServer( { ingestionOverflow: true }, { - methods: ['onEvent', 'exportEvents'], + methods: ['onEvent'], scheduled_tasks: ['runEveryMinute'], jobs: ['someJob'], } @@ -144,7 +144,7 @@ describe('capabilities', () => { const shouldSetupPlugin = shouldSetupPluginInServer( { ingestionHistorical: true }, { - methods: ['onEvent', 'exportEvents'], + methods: ['onEvent'], scheduled_tasks: ['runEveryMinute'], jobs: ['someJob'], } @@ -184,7 +184,7 @@ describe('capabilities', () => { }) describe('processAsyncOnEventHandlers', () => { - it.each(['onEvent', 'exportEvents'])( + it.each(['onEvent'])( 'returns true if plugin has %s and the server has processAsyncOnEventHandlers capability', (method) => { const shouldSetupPlugin = shouldSetupPluginInServer( @@ -195,7 +195,7 @@ describe('capabilities', () => { } ) - it('returns false if plugin has none of onEvent or exportEvents and the server has only processAsyncOnEventHandlers capability', () => { + it('returns false if plugin has none of onEvent and the server has only processAsyncOnEventHandlers capability', () => { const shouldSetupPlugin = shouldSetupPluginInServer( { processAsyncOnEventHandlers: true }, { methods: [] } @@ -203,8 +203,8 @@ describe('capabilities', () => { expect(shouldSetupPlugin).toEqual(false) }) - it.each(['onEvent', 'exportEvents'])( - 'returns true if plugin has %s and the server has processAsyncOnEventHandlers capability', + it.each(['onEvent'])( + 'onEvent returns true if plugin has %s and the server has processAsyncOnEventHandlers capability', (method) => { const shouldSetupPlugin = shouldSetupPluginInServer( { processAsyncOnEventHandlers: true }, @@ -214,7 +214,7 @@ 
describe('capabilities', () => { } ) - it('returns false if plugin has none of onEvent or exportEvents and the server has only processAsyncOnEventHandlers capability', () => { + it('returns false if plugin has none of onEvent and the server has only processAsyncOnEventHandlers capability', () => { const shouldSetupPlugin = shouldSetupPluginInServer( { processAsyncOnEventHandlers: true }, { methods: [] } diff --git a/plugin-server/tests/worker/ingestion/utils.test.ts b/plugin-server/tests/worker/ingestion/utils.test.ts index 3e65b0964595d..6144c68fa24fd 100644 --- a/plugin-server/tests/worker/ingestion/utils.test.ts +++ b/plugin-server/tests/worker/ingestion/utils.test.ts @@ -25,7 +25,6 @@ describe('captureIngestionWarning()', () => { it('can read own writes', async () => { await captureIngestionWarning(hub.db, 2, 'some_type', { foo: 'bar' }) - await hub.promiseManager.awaitPromisesIfNeeded() const warnings = await delayUntilEventIngested(fetchWarnings) expect(warnings).toEqual([ diff --git a/plugin-server/tests/worker/plugins.test.ts b/plugin-server/tests/worker/plugins.test.ts index 4b169eb9f33a6..47f0596228a85 100644 --- a/plugin-server/tests/worker/plugins.test.ts +++ b/plugin-server/tests/worker/plugins.test.ts @@ -82,7 +82,6 @@ describe('plugins', () => { const vm = await pluginConfig.vm!.resolveInternalVm expect(Object.keys(vm!.methods).sort()).toEqual([ 'composeWebhook', - 'exportEvents', 'getSettings', 'onEvent', 'processEvent', @@ -732,27 +731,6 @@ describe('plugins', () => { expect(newPluginConfig.plugin!.capabilities).toEqual(pluginConfig.plugin!.capabilities) }) - test.skip('exportEvents automatically sets metrics', async () => { - getPluginRows.mockReturnValueOnce([ - mockPluginWithSourceFiles(` - export function exportEvents() {} - `), - ]) - getPluginConfigRows.mockReturnValueOnce([pluginConfig39]) - getPluginAttachmentRows.mockReturnValueOnce([pluginAttachment1]) - - await setupPlugins(hub) - const pluginConfig = hub.pluginConfigs.get(39)! 
- - expect(pluginConfig.plugin!.metrics).toEqual({ - events_delivered_successfully: 'sum', - events_seen: 'sum', - other_errors: 'sum', - retry_errors: 'sum', - undelivered_events: 'sum', - }) - }) - describe('loadSchedule()', () => { const mockConfig = (tasks: any) => ({ vm: { getScheduledTasks: () => Promise.resolve(tasks) } }) diff --git a/plugin-server/tests/worker/vm.test.ts b/plugin-server/tests/worker/vm.test.ts index 273523f65744b..7e3769de61328 100644 --- a/plugin-server/tests/worker/vm.test.ts +++ b/plugin-server/tests/worker/vm.test.ts @@ -59,7 +59,6 @@ describe('vm tests', () => { expect(Object.keys(vm).sort()).toEqual(['methods', 'tasks', 'usedImports', 'vm', 'vmResponseVariable']) expect(Object.keys(vm.methods).sort()).toEqual([ 'composeWebhook', - 'exportEvents', 'getSettings', 'onEvent', 'processEvent', @@ -1055,229 +1054,6 @@ describe('vm tests', () => { expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=onEvent', undefined) }) - describe('exportEvents', () => { - beforeEach(() => { - jest.spyOn(hub.appMetrics, 'queueMetric') - }) - - test('normal operation', async () => { - const indexJs = ` - async function exportEvents (events, meta) { - await fetch('https://export.com/results.json?query=' + events[0].event + '&events=' + events.length) - } - ` - await resetTestDatabase(indexJs) - const vm = await createReadyPluginConfigVm( - hub, - { - ...pluginConfig39, - config: { - ...pluginConfig39.config, - exportEventsBufferBytes: '10000', - exportEventsBufferSeconds: '1', - exportEventsToIgnore: `${defaultEvent.event},otherEvent`, - }, - }, - indexJs - ) - - await vm.methods.onEvent!(defaultEvent) - await vm.methods.onEvent!({ ...defaultEvent, event: 'otherEvent' }) - await vm.methods.onEvent!({ ...defaultEvent, event: 'otherEvent2' }) - await vm.methods.onEvent!({ ...defaultEvent, event: 'otherEvent3' }) - await delay(1010) - expect(fetch).toHaveBeenCalledWith('https://export.com/results.json?query=otherEvent2&events=2', undefined) - expect(hub.appMetrics.queueMetric).toHaveBeenCalledWith({ - teamId: pluginConfig39.team_id, - pluginConfigId: pluginConfig39.id, - category: 'exportEvents', - successes: 2, - }) - - // adds exportEventsWithRetry job and onEvent function - expect(Object.keys(vm.tasks.job)).toEqual(expect.arrayContaining(['exportEventsWithRetry'])) - expect(Object.keys(vm.tasks.schedule)).toEqual(['runEveryMinute']) - expect( - Object.keys(vm.methods) - .filter((m) => !!vm.methods[m as keyof typeof vm.methods]) - .sort() - ).toEqual(expect.arrayContaining(['exportEvents', 'onEvent', 'teardownPlugin'])) - }) - - test('works with onEvent', async () => { - // the exportEvents upgrade patches onEvent, testing that the old one still works - const indexJs = ` - async function exportEvents (events, meta) { - await fetch('https://export.com/results.json?query=' + events[0].event + '&events=' + events.length) - } - async function onEvent (event, meta) { - await fetch('https://onevent.com/') - } - ` - await resetTestDatabase(indexJs) - const vm = await createReadyPluginConfigVm( - hub, - { - ...pluginConfig39, - config: { - ...pluginConfig39.config, - exportEventsBufferBytes: '10000', - exportEventsBufferSeconds: '1', - exportEventsToIgnore: defaultEvent.event, - }, - }, - indexJs - ) - const event: ProcessedPluginEvent = { - ...defaultEvent, - event: 'exported', - } - await vm.methods.onEvent!(event) - await vm.methods.onEvent!(defaultEvent) - await vm.methods.onEvent!(event) - await delay(1010) - expect(fetch).toHaveBeenCalledTimes(4) - 
expect(fetch).toHaveBeenCalledWith('https://onevent.com/', undefined) - expect(fetch).toHaveBeenCalledWith('https://export.com/results.json?query=exported&events=2', undefined) - }) - - test('buffers bytes with exportEventsBufferBytes', async () => { - const indexJs = ` - async function exportEvents (events, meta) { - // console.log(meta.config) - await fetch('https://export.com/?length=' + JSON.stringify(events).length + '&count=' + events.length) - } - ` - await resetTestDatabase(indexJs) - const vm = await createReadyPluginConfigVm( - hub, - { - ...pluginConfig39, - config: { - ...pluginConfig39.config, - exportEventsBufferBytes: '1000', - exportEventsBufferSeconds: '1', - exportEventsToIgnore: defaultEvent.event, - }, - }, - indexJs - ) - const event: ProcessedPluginEvent = { - uuid: new UUIDT().toString(), - distinct_id: 'my_id', - ip: '127.0.0.1', - team_id: 3, - timestamp: new Date().toISOString(), - event: 'exported', - properties: {}, - } - for (let i = 0; i < 100; i++) { - await vm.methods.onEvent!(event) - } - await delay(1010) - - // This tests that the requests were broken up correctly according to the exportEventsBufferBytes config - // If you add data to the event above you should see more requests, and vice versa - expect(fetch).toHaveBeenCalledTimes(20) - expect((fetch as any).mock.calls).toEqual([ - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ['https://export.com/?length=866&count=5'], - ]) - }) - - test('buffers bytes with very tiny exportEventsBufferBytes', async () => { - const indexJs = ` - async function exportEvents (events, meta) { - // console.log(meta.config) - await fetch('https://export.com/?length=' + JSON.stringify(events).length + '&count=' + events.length) - } - ` - await resetTestDatabase(indexJs) - const vm = await createReadyPluginConfigVm( - hub, - { - ...pluginConfig39, - config: { - ...pluginConfig39.config, - exportEventsBufferBytes: '1', - exportEventsBufferSeconds: '1', - exportEventsToIgnore: defaultEvent.event, - }, - }, - indexJs - ) - const event: ProcessedPluginEvent = { - uuid: new UUIDT().toString(), - distinct_id: 'my_id', - ip: '127.0.0.1', - team_id: 3, - timestamp: new Date().toISOString(), - event: 'exported', - properties: {}, - } - for (let i = 0; i < 100; i++) { - await vm.methods.onEvent!(event) - } - await delay(1010) - - expect(fetch).toHaveBeenCalledTimes(100) - expect((fetch as any).mock.calls).toEqual( - Array.from(Array(100)).map(() => ['https://export.com/?length=174&count=1']) - ) - }) - - test('flushes on teardown', async () => { - const indexJs = ` - async function exportEvents (events, meta) { - await fetch('https://export.com/results.json?query=' + events[0].event + '&events=' + events.length) - } - ` - 
await resetTestDatabase(indexJs) - const vm = await createReadyPluginConfigVm( - hub, - { - ...pluginConfig39, - config: { - ...pluginConfig39.config, - exportEventsBufferBytes: '10000', - exportEventsBufferSeconds: '1000', - exportEventsToIgnore: '', - }, - }, - indexJs - ) - await vm.methods.onEvent!(defaultEvent) - expect(fetch).not.toHaveBeenCalledWith( - 'https://export.com/results.json?query=default event&events=1', - undefined - ) - - await vm.methods.teardownPlugin!() - expect(fetch).toHaveBeenCalledWith( - 'https://export.com/results.json?query=default event&events=1', - undefined - ) - }) - }) - test('imports', async () => { const indexJs = ` const urlImport = require('url'); diff --git a/plugin-server/tests/worker/vm/upgrades/historical-export/__snapshots__/export-historical-events-v2.test.ts.snap b/plugin-server/tests/worker/vm/upgrades/historical-export/__snapshots__/export-historical-events-v2.test.ts.snap deleted file mode 100644 index be76415898682..0000000000000 --- a/plugin-server/tests/worker/vm/upgrades/historical-export/__snapshots__/export-historical-events-v2.test.ts.snap +++ /dev/null @@ -1,52 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`addHistoricalEventsExportCapabilityV2() exportHistoricalEvents() calls exportEvents and logs with fetched events 1`] = ` -Array [ - Array [ - Object { - "category": "exportEvents", - "jobId": "1", - "pluginConfigId": 39, - "successes": 3, - "successesOnRetry": 0, - "teamId": 2, - }, - ], -] -`; - -exports[`addHistoricalEventsExportCapabilityV2() exportHistoricalEvents() stops processing after HISTORICAL_EXPORTS_MAX_RETRY_COUNT retries 1`] = ` -Array [ - Array [ - Object { - "category": "exportEvents", - "failures": 3, - "jobId": "1", - "pluginConfigId": 39, - "teamId": 2, - }, - Object { - "error": [RetryError: Retry error], - "eventCount": 3, - }, - ], -] -`; - -exports[`addHistoricalEventsExportCapabilityV2() exportHistoricalEvents() stops processing date if an unknown error was raised in exportEvents 1`] = ` -Array [ - Array [ - Object { - "category": "exportEvents", - "failures": 3, - "jobId": "1", - "pluginConfigId": 39, - "teamId": 2, - }, - Object { - "error": [Error: Unknown error], - "eventCount": 3, - }, - ], -] -`; diff --git a/plugin-server/tests/worker/vm/upgrades/historical-export/export-historical-events-v2.test.ts b/plugin-server/tests/worker/vm/upgrades/historical-export/export-historical-events-v2.test.ts deleted file mode 100644 index 10ed56848df53..0000000000000 --- a/plugin-server/tests/worker/vm/upgrades/historical-export/export-historical-events-v2.test.ts +++ /dev/null @@ -1,1221 +0,0 @@ -import { PluginMeta, RetryError } from '@posthog/plugin-scaffold' - -import { - Hub, - ISOTimestamp, - PluginConfig, - PluginConfigVMInternalResponse, - PluginTaskType, -} from '../../../../../src/types' -import { createPluginActivityLog } from '../../../../../src/utils/db/activity-log' -import { createHub } from '../../../../../src/utils/db/hub' -import { createStorage } from '../../../../../src/worker/vm/extensions/storage' -import { createUtils } from '../../../../../src/worker/vm/extensions/utilities' -import { - addHistoricalEventsExportCapabilityV2, - EVENTS_PER_RUN_SMALL, - EXPORT_COORDINATION_KEY, - EXPORT_PARAMETERS_KEY, - ExportHistoricalEventsJobPayload, - ExportHistoricalEventsUpgradeV2, - INTERFACE_JOB_NAME, - JOB_SPEC, - TestFunctions, -} from '../../../../../src/worker/vm/upgrades/historical-export/export-historical-events-v2' -import { fetchEventsForInterval } from 
'../../../../../src/worker/vm/upgrades/utils/fetchEventsForInterval' -import { plugin60, pluginConfig39 } from '../../../../helpers/plugins' -import { resetTestDatabase } from '../../../../helpers/sql' - -jest.mock('../../../../../src/utils/status') -jest.mock('../../../../../src/worker/vm/upgrades/utils/fetchEventsForInterval') -jest.mock('../../../../../src/utils/db/activity-log') - -const ONE_HOUR = 1000 * 60 * 60 - -describe('addHistoricalEventsExportCapabilityV2()', () => { - let hub: Hub - let closeHub: () => Promise - let vm: PluginConfigVMInternalResponse> - let runNow: jest.Mock, runIn: jest.Mock - - beforeEach(() => { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-expect-error - vm = undefined - }) - - beforeAll(async () => { - ;[hub, closeHub] = await createHub() - hub.kafkaProducer.queueMessage = jest.fn() - hub.kafkaProducer.flush = jest.fn() - jest.spyOn(hub.db, 'queuePluginLogEntry') - jest.spyOn(hub.appMetrics, 'queueMetric') - jest.spyOn(hub.appMetrics, 'queueError') - - jest.spyOn(Date, 'now').mockReturnValue(1_000_000_000) - }) - - afterAll(async () => { - await hub.promiseManager.awaitPromisesIfNeeded() - await closeHub() - }) - - function storage() { - return createStorage(hub, pluginConfig39) - } - - function createVM(pluginConfig: PluginConfig = pluginConfig39, schedule = {}) { - runIn = jest.fn() - runNow = jest.fn() - - const mockVM = { - methods: { - exportEvents: jest.fn(), - }, - tasks: { - schedule, - job: {}, - }, - meta: { - storage: storage(), - utils: createUtils(hub, pluginConfig.id), - jobs: { - exportHistoricalEventsV2: jest.fn().mockReturnValue({ runNow, runIn }), - }, - global: {}, - }, - } as unknown as PluginConfigVMInternalResponse> - - addHistoricalEventsExportCapabilityV2(hub, pluginConfig, mockVM) - - vm = mockVM - } - - function getTestMethod(name: T): TestFunctions[T] { - // @ts-expect-error testing-related schenanigans - return (...args: any[]) => { - if (!vm) { - createVM() - } - // @ts-expect-error testing-related schenanigans - return vm.meta.global._testFunctions[name](...args) - } - } - - describe('exportHistoricalEvents()', () => { - const exportHistoricalEvents = getTestMethod('exportHistoricalEvents') - const exportParams = { - id: 1, - parallelism: 3, - dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - dateTo: '2021-11-01T05:00:00.000Z' as ISOTimestamp, - } - - const defaultPayload: ExportHistoricalEventsJobPayload = { - timestampCursor: 1635724800000, - startTime: 1635724800000, - endTime: 1635742800000, - exportId: 1, - fetchTimeInterval: ONE_HOUR, - offset: 0, - retriesPerformedSoFar: 0, - statusKey: 'statusKey', - } - - beforeEach(async () => { - await resetTestDatabase() - await storage().set(EXPORT_PARAMETERS_KEY, exportParams) - }) - - afterEach(() => { - jest.clearAllTimers() - jest.useRealTimers() - }) - - it('stores current progress in storage under `statusKey`', async () => { - jest.mocked(fetchEventsForInterval).mockResolvedValue([]) - - await exportHistoricalEvents({ ...defaultPayload, timestampCursor: 1635730000000 }) - expect(await storage().get('statusKey', null)).toEqual({ - ...defaultPayload, - timestampCursor: 1635730000000, - done: false, - progress: expect.closeTo(0.28888), - statusTime: Date.now(), - }) - }) - - it('logs and marks part of export done if reached the end', async () => { - await exportHistoricalEvents({ ...defaultPayload, timestampCursor: defaultPayload.endTime }) - - expect(fetchEventsForInterval).not.toHaveBeenCalled() - 
expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - 'Finished exporting chunk from 2021-11-01T00:00:00.000Z to 2021-11-01T05:00:00.000Z' - ), - }) - ) - expect(await storage().get('statusKey', null)).toEqual({ - ...defaultPayload, - timestampCursor: defaultPayload.endTime, - done: true, - progress: 1, - statusTime: Date.now(), - }) - }) - - it('calls exportEvents and logs with fetched events', async () => { - createVM() - - jest.useFakeTimers({ - // These are required otherwise queries and other things were breaking. - doNotFake: ['setImmediate', 'clearImmediate', 'clearInterval', 'nextTick', 'Date'], - }) - - jest.spyOn(vm.meta.storage, 'set') - jest.spyOn(global, 'clearInterval') - - const defaultProgress = - (defaultPayload.timestampCursor - defaultPayload.startTime) / - (defaultPayload.endTime - defaultPayload.startTime) - - jest.mocked(vm.methods.exportEvents).mockImplementationOnce(async () => { - let advanced = 0 - while (advanced < 3) { - // The +1 accounts for the first status update that happens once at the beginning of - // exportHistoricalEvents. - expect(vm.meta.storage.set).toHaveBeenCalledTimes(advanced + 1) - - expect(await storage().get('statusKey', null)).toEqual({ - ...defaultPayload, - timestampCursor: defaultPayload.startTime, - done: false, - progress: defaultProgress, - statusTime: Date.now(), - }) - - advanced = advanced + 1 - jest.advanceTimersByTime(60 * 1000) - } - return - }) - jest.mocked(fetchEventsForInterval).mockResolvedValue([1, 2, 3]) - - await exportHistoricalEvents(defaultPayload) - - expect(clearInterval).toHaveBeenCalledTimes(1) - expect(vm.methods.exportEvents).toHaveBeenCalledWith([1, 2, 3]) - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - 'Successfully processed events 0-3 from 2021-11-01T00:00:00.000Z to 2021-11-01T01:00:00.000Z.' - ), - }) - ) - expect(jest.mocked(hub.appMetrics.queueMetric).mock.calls).toMatchSnapshot() - }) - - it('does not call exportEvents or log if no events in time range', async () => { - jest.mocked(fetchEventsForInterval).mockResolvedValue([]) - jest.spyOn(global, 'clearInterval') - - await exportHistoricalEvents(defaultPayload) - - expect(clearInterval).toHaveBeenCalledTimes(1) - expect(vm.methods.exportEvents).not.toHaveBeenCalled() - expect(hub.db.queuePluginLogEntry).not.toHaveBeenCalled() - }) - - it('stops export if events fetch fails', async () => { - jest.mocked(fetchEventsForInterval).mockRejectedValue(new Error('Fetch failed')) - await storage().set(EXPORT_PARAMETERS_KEY, { - id: 1, - parallelism: 3, - dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - dateTo: '2021-11-01T05:00:00.000Z' as ISOTimestamp, - }) - - await exportHistoricalEvents(defaultPayload) - - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - 'Failed fetching events. Stopping export - please try again later.' 
- ), - }) - ) - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null) - }) - - it('schedules a retry if exportEvents raises a RetryError', async () => { - createVM() - - jest.spyOn(global, 'clearInterval') - jest.mocked(fetchEventsForInterval).mockResolvedValue([1, 2, 3]) - jest.mocked(vm.methods.exportEvents).mockRejectedValue(new RetryError('Retry error')) - - await exportHistoricalEvents(defaultPayload) - - expect(clearInterval).toHaveBeenCalledTimes(1) - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - 'Failed processing events 0-3 from 2021-11-01T00:00:00.000Z to 2021-11-01T01:00:00.000Z.' - ), - }) - ) - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - ...defaultPayload, - retriesPerformedSoFar: 1, - }) - expect(runIn).toHaveBeenCalledWith(3, 'seconds') - }) - - it('schedules a retry with exponential backoff for exportEvents RetryError', async () => { - createVM() - - jest.mocked(fetchEventsForInterval).mockResolvedValue([1, 2, 3]) - jest.mocked(vm.methods.exportEvents).mockRejectedValue(new RetryError('Retry error')) - - await exportHistoricalEvents({ ...defaultPayload, retriesPerformedSoFar: 5 }) - - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - 'Failed processing events 0-3 from 2021-11-01T00:00:00.000Z to 2021-11-01T01:00:00.000Z.' - ), - }) - ) - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - ...defaultPayload, - retriesPerformedSoFar: 6, - }) - expect(runIn).toHaveBeenCalledWith(96, 'seconds') - }) - - it('schedules a retry with exponential backoff for fetchEventsForInterval RetryError', async () => { - createVM() - - jest.mocked(fetchEventsForInterval).mockRejectedValue(new RetryError('Retry error')) - - await exportHistoricalEvents({ ...defaultPayload, retriesPerformedSoFar: 5 }) - - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - 'Failed to fetch events from 2021-11-01T00:00:00.000Z to 2021-11-01T01:00:00.000Z.' - ), - }) - ) - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - ...defaultPayload, - retriesPerformedSoFar: 6, - }) - expect(runIn).toHaveBeenCalledWith(96, 'seconds') - }) - - it('stops processing date if an unknown error was raised in exportEvents', async () => { - createVM() - - jest.spyOn(global, 'clearInterval') - jest.mocked(fetchEventsForInterval).mockResolvedValue([1, 2, 3]) - jest.mocked(vm.methods.exportEvents).mockRejectedValue(new Error('Unknown error')) - - await exportHistoricalEvents(defaultPayload) - - expect(clearInterval).toHaveBeenCalledTimes(1) - expect(vm.meta.jobs.exportHistoricalEventsV2).not.toHaveBeenCalled() - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - 'exportEvents returned unknown error, stopping export. 
error=Error: Unknown error' - ), - }) - ) - expect(jest.mocked(hub.appMetrics.queueError).mock.calls).toMatchSnapshot() - - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null) - }) - - it('stops processing after HISTORICAL_EXPORTS_MAX_RETRY_COUNT retries', async () => { - createVM() - - jest.mocked(fetchEventsForInterval).mockResolvedValue([1, 2, 3]) - jest.mocked(vm.methods.exportEvents).mockRejectedValue(new RetryError('Retry error')) - - await exportHistoricalEvents({ - ...defaultPayload, - retriesPerformedSoFar: hub.HISTORICAL_EXPORTS_MAX_RETRY_COUNT - 1, - }) - - expect(vm.meta.jobs.exportHistoricalEventsV2).not.toHaveBeenCalled() - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining( - `Exporting chunk 2021-11-01T00:00:00.000Z to 2021-11-01T05:00:00.000Z failed after ${hub.HISTORICAL_EXPORTS_MAX_RETRY_COUNT} retries. Stopping export.` - ), - }) - ) - expect(jest.mocked(hub.appMetrics.queueError).mock.calls).toMatchSnapshot() - - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null) - }) - - it('does nothing if no export is running', async () => { - await storage().del(EXPORT_PARAMETERS_KEY) - - await exportHistoricalEvents(defaultPayload) - - expect(fetchEventsForInterval).not.toHaveBeenCalled() - expect(hub.db.queuePluginLogEntry).not.toHaveBeenCalled() - }) - - it('stops export if abortMessage is set', async () => { - await storage().set(EXPORT_PARAMETERS_KEY, { ...exportParams, abortMessage: 'test ABORT' }) - - await exportHistoricalEvents(defaultPayload) - - expect(fetchEventsForInterval).not.toHaveBeenCalled() - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining('test ABORT'), - }) - ) - }) - - it('does nothing if a different export is running', async () => { - await exportHistoricalEvents({ ...defaultPayload, exportId: 779 }) - - expect(fetchEventsForInterval).not.toHaveBeenCalled() - expect(hub.db.queuePluginLogEntry).not.toHaveBeenCalled() - }) - - describe('calling next time window', () => { - it('calls next time range if this range was empty', async () => { - jest.mocked(fetchEventsForInterval).mockResolvedValue([]) - - await exportHistoricalEvents(defaultPayload) - - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - ...defaultPayload, - timestampCursor: defaultPayload.timestampCursor + defaultPayload.fetchTimeInterval, - offset: 0, - fetchTimeInterval: - defaultPayload.fetchTimeInterval * hub.HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER, - }) - }) - - it('calls next time range if this range had some events', async () => { - jest.mocked(fetchEventsForInterval).mockResolvedValue(new Array(400)) - - await exportHistoricalEvents(defaultPayload) - - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - ...defaultPayload, - timestampCursor: defaultPayload.timestampCursor + defaultPayload.fetchTimeInterval, - offset: 0, - fetchTimeInterval: defaultPayload.fetchTimeInterval, - }) - }) - - it('increases offset if this range had full page of events', async () => { - jest.mocked(fetchEventsForInterval).mockResolvedValue(new Array(500)) - - await exportHistoricalEvents(defaultPayload) - - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - ...defaultPayload, - timestampCursor: defaultPayload.timestampCursor, - offset: 500, - }) - }) - - it('resets `retriesPerformedSoFar` and `offset` when page increases', async () => { - 
jest.mocked(fetchEventsForInterval).mockResolvedValue(new Array(300)) - - await exportHistoricalEvents({ - ...defaultPayload, - offset: 1000, - retriesPerformedSoFar: 10, - }) - - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - ...defaultPayload, - timestampCursor: defaultPayload.timestampCursor + defaultPayload.fetchTimeInterval, - offset: 0, - retriesPerformedSoFar: 0, - }) - }) - }) - }) - - describe('coordinateHistoricalExport()', () => { - const coordinateHistoricalExport = getTestMethod('coordinateHistoricalExport') - - beforeEach(async () => { - await resetTestDatabase() - }) - - it('does nothing if export isnt running / is done', async () => { - await coordinateHistoricalExport() - - expect(await storage().get(EXPORT_COORDINATION_KEY, null)).toEqual(null) - expect(hub.db.queuePluginLogEntry).not.toHaveBeenCalled() - }) - - describe('export is running', () => { - const params = { - id: 1, - parallelism: 3, - dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - dateTo: '2021-11-01T05:00:00.000Z' as ISOTimestamp, - } - - beforeEach(async () => { - await storage().set(EXPORT_PARAMETERS_KEY, params) - }) - - it('logs progress of the export and does not start excessive jobs', async () => { - await coordinateHistoricalExport({ - hasChanges: false, - exportIsDone: false, - progress: 0.7553, - done: [], - running: [], - toStartRunning: [], - toResume: [], - }) - - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledTimes(1) - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining('Export progress: ■■■■■■■■■■■■■■■□□□□□ (75.5%)'), - }) - ) - - expect(vm.meta.jobs.exportHistoricalEventsV2).not.toHaveBeenCalled() - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(params) - }) - - it('starts up new jobs and updates coordination data if needed', async () => { - await coordinateHistoricalExport({ - hasChanges: true, - exportIsDone: false, - progress: 0.7553, - done: [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[], - running: ['2021-11-01T00:00:00.000Z'] as ISOTimestamp[], - toStartRunning: [['2021-11-01T00:00:00.000Z', '2021-11-01T05:00:00.000Z']] as Array< - [ISOTimestamp, ISOTimestamp] - >, - toResume: [], - }) - - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith({ - endTime: 1635742800000, - exportId: 1, - fetchTimeInterval: hub.HISTORICAL_EXPORTS_INITIAL_FETCH_TIME_WINDOW, - offset: 0, - retriesPerformedSoFar: 0, - startTime: 1635724800000, - timestampCursor: 1635724800000, - statusKey: 'EXPORT_DATE_STATUS_2021-11-01T00:00:00.000Z', - }) - - expect(await storage().get('EXPORT_DATE_STATUS_2021-11-01T00:00:00.000Z', null)).toEqual( - expect.objectContaining({ - done: false, - progress: 0, - statusTime: Date.now(), - }) - ) - expect(await storage().get(EXPORT_COORDINATION_KEY, null)).toEqual({ - done: ['2021-10-29T00:00:00.000Z', '2021-10-30T00:00:00.000Z', '2021-10-31T00:00:00.000Z'], - running: ['2021-11-01T00:00:00.000Z'], - progress: 0.7553, - }) - }) - - it('resumes tasks and updates coordination if needed', async () => { - const toResumePayload = { - done: false, - progress: 0.5, - statusTime: 5_000_000_000, - endTime: 1635742800000, - exportId: 1, - fetchTimeInterval: hub.HISTORICAL_EXPORTS_INITIAL_FETCH_TIME_WINDOW, - offset: 0, - retriesPerformedSoFar: 0, - startTime: 1635724800000, - timestampCursor: 1635724800000, - statusKey: 'EXPORT_DATE_STATUS_2021-11-01T00:00:00.000Z', - } - - await 
coordinateHistoricalExport({ - hasChanges: true, - exportIsDone: false, - progress: 0.7553, - done: [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[], - running: ['2021-11-01T00:00:00.000Z'] as ISOTimestamp[], - toStartRunning: [], - toResume: [toResumePayload], - }) - - expect(vm.meta.jobs.exportHistoricalEventsV2).toHaveBeenCalledWith(toResumePayload) - expect(await storage().get('EXPORT_DATE_STATUS_2021-11-01T00:00:00.000Z', null)).toEqual( - expect.objectContaining({ - done: false, - progress: 0.5, - statusTime: Date.now(), - }) - ) - }) - - it('handles export being completed', async () => { - await coordinateHistoricalExport({ - hasChanges: false, - exportIsDone: true, - progress: 1, - done: [], - running: [], - toStartRunning: [], - toResume: [], - }) - - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining('Export has finished!'), - }) - ) - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null) - }) - - it('stops export if abortMessage is set', async () => { - await storage().set(EXPORT_PARAMETERS_KEY, { ...params, abortMessage: 'test aborting' }) - - await coordinateHistoricalExport({ - hasChanges: true, - exportIsDone: false, - progress: 0.7553, - done: [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[], - running: ['2021-11-01T00:00:00.000Z'] as ISOTimestamp[], - toStartRunning: [['2021-11-01T00:00:00.000Z', '2021-11-01T05:00:00.000Z']] as Array< - [ISOTimestamp, ISOTimestamp] - >, - toResume: [], - }) - - expect(vm.meta.jobs.exportHistoricalEventsV2).not.toHaveBeenCalled() - expect(hub.db.queuePluginLogEntry).toHaveBeenCalledWith( - expect.objectContaining({ - message: expect.stringContaining('test aborting'), - }) - ) - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null) - - // verify second call also nothing happens - await coordinateHistoricalExport({ - hasChanges: true, - exportIsDone: false, - progress: 0.7553, - done: [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[], - running: ['2021-11-01T00:00:00.000Z'] as ISOTimestamp[], - toStartRunning: [['2021-11-01T00:00:00.000Z', '2021-11-01T05:00:00.000Z']] as Array< - [ISOTimestamp, ISOTimestamp] - >, - toResume: [], - }) - - expect(vm.meta.jobs.exportHistoricalEventsV2).not.toHaveBeenCalled() - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null) - }) - }) - }) - - describe('calculateCoordination()', () => { - const calculateCoordination = getTestMethod('calculateCoordination') - - const params = { - id: 1, - parallelism: 3, - dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - dateTo: '2021-11-01T05:00:00.000Z' as ISOTimestamp, - } - - beforeEach(async () => { - await resetTestDatabase() - }) - - it('does nothing if enough tasks running', async () => { - const result = await calculateCoordination(params, [], [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[]) - - expect(result).toEqual({ - hasChanges: false, - done: [], - running: ['2021-10-29T00:00:00.000Z', '2021-10-30T00:00:00.000Z', '2021-10-31T00:00:00.000Z'], - toStartRunning: [], - toResume: [], - progress: 0, - exportIsDone: false, - }) - }) - - it('kicks off new tasks if theres room', async () => { - const result = await calculateCoordination(params, [], []) - - expect(result).toEqual({ - 
hasChanges: true, - done: [], - running: ['2021-10-29T00:00:00.000Z', '2021-10-30T00:00:00.000Z', '2021-10-31T00:00:00.000Z'], - toStartRunning: [ - ['2021-10-29T00:00:00.000Z', '2021-10-30T00:00:00.000Z'], - ['2021-10-30T00:00:00.000Z', '2021-10-31T00:00:00.000Z'], - ['2021-10-31T00:00:00.000Z', '2021-11-01T00:00:00.000Z'], - ], - toResume: [], - progress: 0, - exportIsDone: false, - }) - }) - - it('marks running tasks as done and counts progress', async () => { - await storage().set('EXPORT_DATE_STATUS_2021-10-29T00:00:00.000Z', { - done: false, - progress: 0.5, - statusTime: Date.now() - 60_000, - }) - await storage().set('EXPORT_DATE_STATUS_2021-10-30T00:00:00.000Z', { - done: true, - progress: 1, - statusTime: Date.now() - 60_000, - }) - - const result = await calculateCoordination(params, [], [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[]) - - expect(result).toEqual({ - hasChanges: true, - done: ['2021-10-30T00:00:00.000Z'], - running: ['2021-10-29T00:00:00.000Z', '2021-10-31T00:00:00.000Z', '2021-11-01T00:00:00.000Z'], - toStartRunning: [['2021-11-01T00:00:00.000Z', '2021-11-01T05:00:00.000Z']], - toResume: [], - progress: 0.375, - exportIsDone: false, - }) - }) - - it('notifies if export is done after marking running tasks as done', async () => { - await storage().set('EXPORT_DATE_STATUS_2021-10-30T00:00:00.000Z', { - done: true, - progress: 1, - }) - - const result = await calculateCoordination( - params, - ['2021-10-29T00:00:00.000Z', '2021-10-31T00:00:00.000Z', '2021-11-01T00:00:00.000Z'] as ISOTimestamp[], - ['2021-10-30T00:00:00.000Z'] as ISOTimestamp[] - ) - - expect(result).toEqual({ - hasChanges: true, - done: expect.arrayContaining([ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - '2021-11-01T00:00:00.000Z', - ]), - running: [], - toStartRunning: [], - toResume: [], - progress: 1, - exportIsDone: true, - }) - }) - - it('resumes running task after a long enough of a delay', async () => { - const dateStatus = { - done: false, - progress: 0.5, - statusTime: Date.now() - 70 * 60 * 1000, - retriesPerformedSoFar: 0, - } - await storage().set('EXPORT_DATE_STATUS_2021-10-29T00:00:00.000Z', dateStatus) - - const result = await calculateCoordination(params, [], [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[]) - - expect(result).toEqual({ - hasChanges: true, - done: [], - running: ['2021-10-29T00:00:00.000Z', '2021-10-30T00:00:00.000Z', '2021-10-31T00:00:00.000Z'], - toStartRunning: [], - toResume: [dateStatus], - progress: 0.125, - exportIsDone: false, - }) - }) - - it('does not resume tasks that are done', async () => { - const dateStatus = { - done: true, - progress: 1, - statusTime: Date.now() - 70 * 60 * 1000, - retriesPerformedSoFar: 0, - } - await storage().set('EXPORT_DATE_STATUS_2021-10-29T00:00:00.000Z', dateStatus) - - const result = await calculateCoordination(params, [], [ - '2021-10-29T00:00:00.000Z', - '2021-10-30T00:00:00.000Z', - '2021-10-31T00:00:00.000Z', - ] as ISOTimestamp[]) - - expect(result).toEqual({ - hasChanges: true, - done: ['2021-10-29T00:00:00.000Z'], - running: ['2021-10-30T00:00:00.000Z', '2021-10-31T00:00:00.000Z', '2021-11-01T00:00:00.000Z'], - toStartRunning: [['2021-11-01T00:00:00.000Z', '2021-11-01T05:00:00.000Z']], - toResume: [], - progress: 0.25, - exportIsDone: false, - }) - }) - }) - - describe('nextCursor()', () => { - const nextCursor = getTestMethod('nextCursor') - 
- const defaultPayload: ExportHistoricalEventsJobPayload = { - timestampCursor: 0, - startTime: 0, - endTime: 1_000_000_000, - offset: 0, - retriesPerformedSoFar: 0, - exportId: 0, - fetchTimeInterval: ONE_HOUR, - statusKey: 'abc', - } - - it('increases only offset if more in current time range', () => { - expect(nextCursor(defaultPayload, EVENTS_PER_RUN_SMALL)).toEqual({ - timestampCursor: defaultPayload.timestampCursor, - fetchTimeInterval: ONE_HOUR, - offset: EVENTS_PER_RUN_SMALL, - }) - }) - it('increases only offset if more in current time range on a late page', () => { - expect(nextCursor({ ...defaultPayload, offset: 5 * EVENTS_PER_RUN_SMALL }, EVENTS_PER_RUN_SMALL)).toEqual({ - timestampCursor: defaultPayload.timestampCursor, - fetchTimeInterval: ONE_HOUR, - offset: 6 * EVENTS_PER_RUN_SMALL, - }) - }) - - it('returns existing fetchTimeInterval if time range mostly full', () => { - expect(nextCursor(defaultPayload, EVENTS_PER_RUN_SMALL * 0.9)).toEqual({ - timestampCursor: defaultPayload.timestampCursor + defaultPayload.fetchTimeInterval, - fetchTimeInterval: ONE_HOUR, - offset: 0, - }) - }) - - it('increases fetchTimeInterval if time range mostly empty', () => { - expect(nextCursor(defaultPayload, EVENTS_PER_RUN_SMALL * 0.1)).toEqual({ - timestampCursor: defaultPayload.timestampCursor + defaultPayload.fetchTimeInterval, - fetchTimeInterval: ONE_HOUR * hub.HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER, - offset: 0, - }) - }) - - it('does not increase fetchTimeInterval beyond 12 hours', () => { - const payload = { - ...defaultPayload, - fetchTimeInterval: 11.5 * 60 * 60 * 1000, // 11.5 hours - } - expect(nextCursor(payload, EVENTS_PER_RUN_SMALL * 0.1)).toEqual({ - timestampCursor: payload.timestampCursor + payload.fetchTimeInterval, - fetchTimeInterval: 12 * 60 * 60 * 1000, - offset: 0, - }) - }) - - it('decreases fetchTimeInterval if on a late page and no more to fetch', () => { - expect(nextCursor({ ...defaultPayload, offset: 5 * EVENTS_PER_RUN_SMALL }, 10)).toEqual({ - timestampCursor: defaultPayload.timestampCursor + defaultPayload.fetchTimeInterval, - fetchTimeInterval: ONE_HOUR / hub.HISTORICAL_EXPORTS_FETCH_WINDOW_MULTIPLIER, - offset: 0, - }) - }) - - it('does not decrease fetchTimeInterval below 10 minutes', () => { - const payload = { - ...defaultPayload, - offset: 5 * EVENTS_PER_RUN_SMALL, - fetchTimeInterval: 10.5 * 60 * 1000, // 10.5 minutes - } - - expect(nextCursor(payload, 10)).toEqual({ - timestampCursor: payload.timestampCursor + payload.fetchTimeInterval, - fetchTimeInterval: 10 * 60 * 1000, - offset: 0, - }) - }) - - it('reduces fetchTimeInterval if it would result going beyond endTime', () => { - const payload = { - ...defaultPayload, - endTime: 6_500_000, - timestampCursor: 5_000_000, - fetchTimeInterval: 1_000_000, - offset: 0, - } - - expect(nextCursor(payload, 10)).toEqual({ - timestampCursor: 6_000_000, - fetchTimeInterval: 500_000, - offset: 0, - }) - }) - - it('make sure to use a larger batch size if the plugin recommends it', () => { - // NOTE: this doesn't actually check that this value is used in the - // requests to ClickHouse, but :fingercrossed: it's good enough. 
- createVM() - - // When no settings are returned, the default small batch size is used - let eventsPerRun = addHistoricalEventsExportCapabilityV2( - hub, - { plugin: { name: 'S3 Export Plugin' } } as any, - vm - ).eventsPerRun - expect(eventsPerRun).toEqual(500) - - // Set the handlesLargeBatches flag to true and expect a big batch size - vm.methods.getSettings = jest.fn().mockReturnValue({ - handlesLargeBatches: true, - }) - eventsPerRun = addHistoricalEventsExportCapabilityV2( - hub, - { plugin: { name: 'S3 Export Plugin' } } as any, - vm - ).eventsPerRun - expect(eventsPerRun).toEqual(10000) - - // Keep the default of 500 if the flag is false - vm.methods.getSettings = jest.fn().mockReturnValue({ - handlesLargeBatches: false, - }) - eventsPerRun = addHistoricalEventsExportCapabilityV2( - hub, - { plugin: { name: 'foo' } } as any, - vm - ).eventsPerRun - expect(eventsPerRun).toEqual(500) - }) - }) - - describe('getTimestampBoundaries()', () => { - const getTimestampBoundaries = getTestMethod('getTimestampBoundaries') - - it('returns timestamp boundaries passed into interface job, increasing the end date by a day', () => { - expect( - getTimestampBoundaries({ - dateRange: ['2021-10-29', '2021-11-30'], - }) - ).toEqual(['2021-10-29T00:00:00.000Z', '2021-12-01T00:00:00.000Z']) - }) - - it('raises an error for invalid timestamp formats', () => { - expect(() => - getTimestampBoundaries({ - dateRange: ['foo', 'bar'], - }) - ).toThrow("'dateRange' should be two dates in ISO string format.") - }) - }) - - describe('getExportDateRange()', () => { - const getExportDateRange = getTestMethod('getExportDateRange') - - it('returns values in range from start of the date', () => { - expect( - getExportDateRange({ - id: 1, - parallelism: 1, - dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - dateTo: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - }) - ).toEqual([]) - - expect( - getExportDateRange({ - id: 1, - parallelism: 1, - dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - dateTo: '2021-11-02T00:00:00.000Z' as ISOTimestamp, - }) - ).toEqual([ - ['2021-10-29T00:00:00.000Z', '2021-10-30T00:00:00.000Z'], - ['2021-10-30T00:00:00.000Z', '2021-10-31T00:00:00.000Z'], - ['2021-10-31T00:00:00.000Z', '2021-11-01T00:00:00.000Z'], - ['2021-11-01T00:00:00.000Z', '2021-11-02T00:00:00.000Z'], - ]) - }) - - it('handles partial-day ranges gracefully', () => { - expect( - getExportDateRange({ - id: 1, - parallelism: 1, - dateFrom: '2021-10-29T01:00:00.000Z' as ISOTimestamp, - dateTo: '2021-10-30T05:55:00.000Z' as ISOTimestamp, - }) - ).toEqual([ - ['2021-10-29T01:00:00.000Z', '2021-10-30T00:00:00.000Z'], - ['2021-10-30T00:00:00.000Z', '2021-10-30T05:55:00.000Z'], - ]) - }) - }) - - describe('progressBar()', () => { - const progressBar = getTestMethod('progressBar') - - it('calculates progress correctly', () => { - expect(progressBar(0)).toEqual('□□□□□□□□□□□□□□□□□□□□') - expect(progressBar(1)).toEqual('■■■■■■■■■■■■■■■■■■■■') - expect(progressBar(0.5)).toEqual('■■■■■■■■■■□□□□□□□□□□') - expect(progressBar(0.7)).toEqual('■■■■■■■■■■■■■■□□□□□□') - expect(progressBar(0.12)).toEqual('■■□□□□□□□□□□□□□□□□□□') - expect(progressBar(0.12, 10)).toEqual('■□□□□□□□□□') - }) - }) - - describe('stopExport()', () => { - const stopExport = getTestMethod('stopExport') - - const params = { - id: 1, - parallelism: 3, - dateFrom: '2021-10-29T00:00:00.000Z' as ISOTimestamp, - dateTo: '2021-11-01T05:00:00.000Z' as ISOTimestamp, - } - - it('unsets EXPORT_PARAMETERS_KEY', async () => { - await storage().set(EXPORT_PARAMETERS_KEY, 
params) - - await stopExport(params, '', 'success') - - expect(await storage().get(EXPORT_PARAMETERS_KEY, null)).toEqual(null) - }) - - it('captures activity for export success', async () => { - await stopExport(params, '', 'success') - - expect(createPluginActivityLog).toHaveBeenCalledWith( - hub, - pluginConfig39.team_id, - pluginConfig39.id, - 'export_success', - { - trigger: { - job_id: '1', - job_type: INTERFACE_JOB_NAME, - payload: params, - }, - } - ) - }) - - it('captures activity for export failure', async () => { - await stopExport(params, 'Some error message', 'fail') - - expect(createPluginActivityLog).toHaveBeenCalledWith( - hub, - pluginConfig39.team_id, - pluginConfig39.id, - 'export_fail', - { - trigger: { - job_id: '1', - job_type: INTERFACE_JOB_NAME, - payload: { - ...params, - failure_reason: 'Some error message', - }, - }, - } - ) - }) - }) - - describe('shouldResume()', () => { - const shouldResume = getTestMethod('shouldResume') - - it('resumes task when a bit over 10 minutes have passed', () => { - const status = { - statusTime: 10_000_000_000, - retriesPerformedSoFar: 0, - } as any - - expect(shouldResume(status, 10_000_000_000)).toEqual(false) - expect(shouldResume(status, 9_000_000_000)).toEqual(false) - expect(shouldResume(status, 10_000_060_000)).toEqual(false) - expect(shouldResume(status, 10_000_590_000)).toEqual(false) - expect(shouldResume(status, 10_000_600_000)).toEqual(false) - expect(shouldResume(status, 10_003_660_000)).toEqual(true) - }) - - it('accounts for retries exponential backoff', () => { - const status = { - statusTime: 10_000_000_000, - retriesPerformedSoFar: 10, - } as any - - expect(shouldResume(status, 10_000_660_000)).toEqual(false) - // Roughly 2**11*3 seconds are waited between retry 10 and 11 - expect(shouldResume(status, 10_006_000_000)).toEqual(false) - expect(shouldResume(status, 10_006_200_000)).toEqual(false) - }) - }) - - describe('updating public jobs', () => { - beforeEach(() => { - jest.spyOn(hub.db, 'addOrUpdatePublicJob') - }) - - it('updates when public job has not been yet registered', () => { - const pluginConfig: PluginConfig = { - ...pluginConfig39, - plugin: { - ...plugin60, - public_jobs: {}, - }, - } - createVM(pluginConfig) - - expect(hub.db.addOrUpdatePublicJob).toHaveBeenCalledWith( - pluginConfig39.plugin_id, - INTERFACE_JOB_NAME, - JOB_SPEC - ) - }) - - it('updates when public job definition has changed', () => { - const pluginConfig: PluginConfig = { - ...pluginConfig39, - plugin: { - ...plugin60, - public_jobs: { [INTERFACE_JOB_NAME]: { payload: {} } }, - }, - } - createVM(pluginConfig) - - expect(hub.db.addOrUpdatePublicJob).toHaveBeenCalledWith( - pluginConfig39.plugin_id, - INTERFACE_JOB_NAME, - JOB_SPEC - ) - }) - - it('does not update if public job has already been registered', () => { - const pluginConfig: PluginConfig = { - ...pluginConfig39, - plugin: { - ...plugin60, - public_jobs: { [INTERFACE_JOB_NAME]: JOB_SPEC }, - }, - } - createVM(pluginConfig) - - expect(hub.db.addOrUpdatePublicJob).not.toHaveBeenCalled() - }) - }) - - describe('tasks.schedule.runEveryMinute()', () => { - it('sets __ignoreForAppMetrics if runEveryMinute was not previously defined', async () => { - createVM() - - expect(vm.tasks.schedule.runEveryMinute).toEqual({ - name: 'runEveryMinute', - type: PluginTaskType.Schedule, - exec: expect.any(Function), - __ignoreForAppMetrics: true, - }) - - await vm.tasks.schedule.runEveryMinute.exec() - }) - - it('calls original method and does not set __ignoreForAppMetrics if runEveryMinute was 
previously defined in plugin', async () => { - const pluginRunEveryMinute = jest.fn() - - createVM(pluginConfig39, { - runEveryMinute: { - name: 'runEveryMinute', - type: PluginTaskType.Schedule, - exec: pluginRunEveryMinute, - }, - }) - - expect(vm.tasks.schedule.runEveryMinute).toEqual({ - name: 'runEveryMinute', - type: PluginTaskType.Schedule, - exec: expect.any(Function), - __ignoreForAppMetrics: false, - }) - - await vm.tasks.schedule.runEveryMinute.exec() - - expect(pluginRunEveryMinute).toHaveBeenCalled() - }) - - it('calls original method and sets __ignoreForAppMetrics if runEveryMinute was previously also wrapped', async () => { - const pluginRunEveryMinute = jest.fn() - - createVM(pluginConfig39, { - runEveryMinute: { - name: 'runEveryMinute', - type: PluginTaskType.Schedule, - exec: pluginRunEveryMinute, - __ignoreForAppMetrics: true, - }, - }) - - expect(vm.tasks.schedule.runEveryMinute).toEqual({ - name: 'runEveryMinute', - type: PluginTaskType.Schedule, - exec: expect.any(Function), - __ignoreForAppMetrics: true, - }) - - await vm.tasks.schedule.runEveryMinute.exec() - - expect(pluginRunEveryMinute).toHaveBeenCalled() - }) - }) -}) diff --git a/plugin-server/tests/worker/vm/upgrades/historical-export/export-historical-events.test.ts b/plugin-server/tests/worker/vm/upgrades/historical-export/export-historical-events.test.ts deleted file mode 100644 index 02fd27eab837f..0000000000000 --- a/plugin-server/tests/worker/vm/upgrades/historical-export/export-historical-events.test.ts +++ /dev/null @@ -1,127 +0,0 @@ -import { PluginMeta } from '@posthog/plugin-scaffold' -import deepmerge from 'deepmerge' - -import { Hub, PluginConfig, PluginConfigVMInternalResponse } from '../../../../../src/types' -import { createHub } from '../../../../../src/utils/db/hub' -import { createStorage } from '../../../../../src/worker/vm/extensions/storage' -import { createUtils } from '../../../../../src/worker/vm/extensions/utilities' -import { addHistoricalEventsExportCapability } from '../../../../../src/worker/vm/upgrades/historical-export/export-historical-events' -import { ExportHistoricalEventsUpgrade } from '../../../../../src/worker/vm/upgrades/utils/utils' -import { pluginConfig39 } from '../../../../helpers/plugins' - -jest.mock('../../../../../src/utils/status') - -describe('addHistoricalEventsExportCapability()', () => { - let hub: Hub - let closeHub: () => Promise - let _pluginConfig39: PluginConfig - - beforeEach(async () => { - ;[hub, closeHub] = await createHub() - - _pluginConfig39 = { ...pluginConfig39 } - }) - - afterEach(async () => { - await closeHub() - }) - - function addCapabilities(overrides?: any) { - const mockVM = deepmerge(overrides, { - methods: { - exportEvents: jest.fn(), - }, - tasks: { - schedule: {}, - job: {}, - }, - meta: { - storage: createStorage(hub, _pluginConfig39), - utils: createUtils(hub, _pluginConfig39.id), - jobs: { - exportHistoricalEvents: jest.fn().mockReturnValue(jest.fn()), - }, - global: {}, - }, - }) as unknown as PluginConfigVMInternalResponse> - - addHistoricalEventsExportCapability(hub, _pluginConfig39, mockVM) - - return mockVM - } - - it('adds new methods, scheduled tasks and jobs', () => { - const vm = addCapabilities() - - expect(Object.keys(vm.methods)).toEqual(['exportEvents', 'setupPlugin']) - expect(Object.keys(vm.tasks.schedule)).toEqual(['runEveryMinute']) - expect(Object.keys(vm.tasks.job)).toEqual(['exportHistoricalEvents', 'Export historical events']) - expect(Object.keys(vm.meta.global)).toEqual([ - 'exportHistoricalEvents', - 
'initTimestampsAndCursor', - 'setTimestampBoundaries', - 'updateProgressBar', - ]) - }) - - it('registers public job spec theres not currently a spec', () => { - const addOrUpdatePublicJobSpy = jest.spyOn(hub.db, 'addOrUpdatePublicJob') - addCapabilities() - - expect(addOrUpdatePublicJobSpy).toHaveBeenCalledWith(60, 'Export historical events', { - payload: { - dateFrom: { required: true, title: 'Export start date', type: 'date' }, - dateTo: { required: true, title: 'Export end date', type: 'date' }, - }, - }) - }) - - it('updates plugin job spec if current spec is outdated', () => { - const addOrUpdatePublicJobSpy = jest.spyOn(hub.db, 'addOrUpdatePublicJob') - - _pluginConfig39.plugin = { - public_jobs: { - 'Export historical events': { payload: { foo: 'bar' } }, - }, - } as any - - addCapabilities() - - expect(addOrUpdatePublicJobSpy).toHaveBeenCalledWith(60, 'Export historical events', { - payload: { - dateFrom: { required: true, title: 'Export start date', type: 'date' }, - dateTo: { required: true, title: 'Export end date', type: 'date' }, - }, - }) - }) - - it('does not update plugin job spec if current spec matches stored spec', () => { - const addOrUpdatePublicJobSpy = jest.spyOn(hub.db, 'addOrUpdatePublicJob') - - _pluginConfig39.plugin = { - public_jobs: { - 'Export historical events': { - payload: { - dateFrom: { required: true, title: 'Export start date', type: 'date' }, - dateTo: { required: true, title: 'Export end date', type: 'date' }, - }, - }, - }, - } as any - - addCapabilities() - - expect(addOrUpdatePublicJobSpy).not.toHaveBeenCalled() - }) - - describe('setupPlugin()', () => { - it('calls original setupPlugin()', async () => { - const setupPlugin = jest.fn() - const vm = addCapabilities({ methods: { setupPlugin } }) - - await vm.methods.setupPlugin!() - - expect(setupPlugin).toHaveBeenCalled() - }) - }) -}) diff --git a/plugin-server/tests/worker/vm/upgrades/utils/fetchEventsForInterval.test.ts b/plugin-server/tests/worker/vm/upgrades/utils/fetchEventsForInterval.test.ts deleted file mode 100644 index 2c9ad2d8b486b..0000000000000 --- a/plugin-server/tests/worker/vm/upgrades/utils/fetchEventsForInterval.test.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { PluginEvent } from '@posthog/plugin-scaffold' - -import { Hub } from '../../../../../src/types' -import { createHub } from '../../../../../src/utils/db/hub' -import { UUIDT } from '../../../../../src/utils/utils' -import { EventPipelineRunner } from '../../../../../src/worker/ingestion/event-pipeline/runner' -import { fetchEventsForInterval } from '../../../../../src/worker/vm/upgrades/utils/fetchEventsForInterval' -import { HistoricalExportEvent } from '../../../../../src/worker/vm/upgrades/utils/utils' -import { delayUntilEventIngested, resetTestDatabaseClickhouse } from '../../../../helpers/clickhouse' -import { resetTestDatabase } from '../../../../helpers/sql' - -jest.mock('../../../../../src/utils/status') - -const THIRTY_MINUTES = 1000 * 60 * 30 - -describe('fetchEventsForInterval()', () => { - let hub: Hub - let closeServer: () => Promise - - beforeEach(async () => { - await resetTestDatabase() - await resetTestDatabaseClickhouse() - ;[hub, closeServer] = await createHub() - }) - - afterEach(async () => { - await closeServer() - }) - - async function ingestEvent(timestamp: string, overrides: Partial = {}) { - const pluginEvent: PluginEvent = { - event: 'some_event', - distinct_id: 'some_user', - site_url: '', - team_id: 2, - timestamp: timestamp, - now: timestamp, - ip: '', - uuid: new UUIDT().toString(), - 
...overrides, - } as any as PluginEvent - - const runner = new EventPipelineRunner(hub, pluginEvent) - await runner.runEventPipeline(pluginEvent) - } - - function extract( - events: Array, - key: T - ): Array { - return events.map((event) => event[key]) - } - - it('fetches events and parses them', async () => { - // To avoid parallel person processing which we don't handle we're doing one first alone - await ingestEvent('2021-06-01T00:00:00.000Z') // too old - await Promise.all([ - ingestEvent('2021-09-01T00:00:00.000Z'), // too new - - ingestEvent('2021-08-01T00:01:00.000Z'), - ingestEvent('2021-08-01T00:02:00.000Z', { properties: { foo: 'bar' } }), - ingestEvent('2021-08-01T00:03:00.000Z'), - ingestEvent('2021-08-01T00:29:59.000Z'), - ingestEvent('2021-08-01T00:33:00.000Z'), - ]) - - await hub.kafkaProducer.flush() - await delayUntilEventIngested(() => hub.db.fetchEvents(), 7) - - const events = await fetchEventsForInterval( - hub.db, - 2, - new Date('2021-08-01T00:00:00.000Z'), - 0, - THIRTY_MINUTES, - 2 - ) - - expect(events.length).toEqual(2) - expect(extract(events, 'timestamp')).toEqual(['2021-08-01T00:01:00.000Z', '2021-08-01T00:02:00.000Z']) - expect(extract(events, 'properties')).toEqual([ - { - $$historical_export_source_db: 'clickhouse', - $$historical_export_timestamp: expect.any(String), - $$is_historical_export_event: true, - }, - { - $$historical_export_source_db: 'clickhouse', - $$historical_export_timestamp: expect.any(String), - $$is_historical_export_event: true, - foo: 'bar', - }, - ]) - - const offsetEvents = await fetchEventsForInterval( - hub.db, - 2, - new Date('2021-08-01T00:00:00.000Z'), - 2, - THIRTY_MINUTES, - 2 - ) - expect(offsetEvents.length).toEqual(2) - expect(extract(offsetEvents, 'timestamp')).toEqual(['2021-08-01T00:03:00.000Z', '2021-08-01T00:29:59.000Z']) - - const offsetEvents2 = await fetchEventsForInterval( - hub.db, - 2, - new Date('2021-08-01T00:00:00.000Z'), - 4, - THIRTY_MINUTES, - 2 - ) - expect(offsetEvents2.length).toEqual(0) - }) -}) diff --git a/posthog/api/services/query.py b/posthog/api/services/query.py index 1ef831bde1b82..48339aa38bad9 100644 --- a/posthog/api/services/query.py +++ b/posthog/api/services/query.py @@ -5,6 +5,7 @@ from rest_framework.exceptions import ValidationError from posthog.clickhouse.query_tagging import tag_queries +from posthog.hogql.constants import LimitContext from posthog.hogql.database.database import create_hogql_database, serialize_database from posthog.hogql.metadata import get_hogql_metadata from posthog.hogql.modifiers import create_default_modifiers_for_team @@ -54,7 +55,7 @@ def _unwrap_pydantic_dict(response: Any) -> Dict: def process_query( team: Team, query_json: Dict, - in_export_context: Optional[bool] = False, + limit_context: Optional[LimitContext] = None, refresh_requested: Optional[bool] = False, ) -> Dict: # query_json has been parsed by QuerySchemaParser @@ -63,10 +64,10 @@ def process_query( tag_queries(query=query_json) if query_kind in QUERY_WITH_RUNNER: - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + query_runner = get_query_runner(query_json, team, limit_context=limit_context) return _unwrap_pydantic_dict(query_runner.run(refresh_requested=refresh_requested)) elif query_kind in QUERY_WITH_RUNNER_NO_CACHE: - query_runner = get_query_runner(query_json, team, in_export_context=in_export_context) + query_runner = get_query_runner(query_json, team, limit_context=limit_context) return _unwrap_pydantic_dict(query_runner.calculate()) elif query_kind 
== "HogQLMetadata": metadata_query = HogQLMetadata.model_validate(query_json) diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index ff03704605014..d538e5a241cdf 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -6,6 +6,7 @@ from rest_framework import status from posthog.api.services.query import process_query +from posthog.hogql.query import LimitContext from posthog.models.property_definition import PropertyDefinition, PropertyType from posthog.models.utils import UUIDT from posthog.schema import ( @@ -611,7 +612,7 @@ def test_full_hogql_query_limit_exported(self, MAX_SELECT_RETURNED_ROWS=15, DEFA "kind": "HogQLQuery", "query": f"select event from events where distinct_id='{random_uuid}'", }, - in_export_context=True, # This is the only difference + limit_context=LimitContext.EXPORT, # This is the only difference ) self.assertEqual(len(response.get("results", [])), 15) @@ -663,7 +664,7 @@ def test_full_events_query_limit_exported(self, MAX_SELECT_RETURNED_ROWS=15, DEF "select": ["event"], "where": [f"distinct_id = '{random_uuid}'"], }, - in_export_context=True, + limit_context=LimitContext.EXPORT, ) self.assertEqual(len(response.get("results", [])), 15) diff --git a/posthog/celery.py b/posthog/celery.py index 90037463e1358..d1804524760ac 100644 --- a/posthog/celery.py +++ b/posthog/celery.py @@ -402,7 +402,7 @@ def redis_heartbeat(): @app.task(ignore_result=True, bind=True) -def process_query_task(self, team_id, query_id, query_json, in_export_context=False, refresh_requested=False): +def process_query_task(self, team_id, query_id, query_json, limit_context=None, refresh_requested=False): """ Kick off query Once complete save results to redis @@ -413,7 +413,7 @@ def process_query_task(self, team_id, query_id, query_json, in_export_context=Fa team_id=team_id, query_id=query_id, query_json=query_json, - in_export_context=in_export_context, + limit_context=limit_context, refresh_requested=refresh_requested, ) diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 211c685a068b1..9be449596fdf0 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -9,6 +9,7 @@ from posthog import celery, redis from posthog.celery import process_query_task from posthog.clickhouse.query_tagging import tag_queries +from posthog.hogql.constants import LimitContext from posthog.schema import QueryStatus logger = structlog.get_logger(__name__) @@ -69,7 +70,7 @@ def execute_process_query( team_id, query_id, query_json, - in_export_context, + limit_context, refresh_requested, ): manager = QueryStatusManager(query_id, team_id) @@ -90,7 +91,7 @@ def execute_process_query( try: tag_queries(client_query_id=query_id, team_id=team_id) results = process_query( - team=team, query_json=query_json, in_export_context=in_export_context, refresh_requested=refresh_requested + team=team, query_json=query_json, limit_context=limit_context, refresh_requested=refresh_requested ) logger.info("Got results for team %s query %s", team_id, query_id) query_status.complete = True @@ -135,10 +136,12 @@ def enqueue_process_query_task( if bypass_celery: # Call directly ( for testing ) - process_query_task(team_id, query_id, query_json, in_export_context=True, refresh_requested=refresh_requested) + process_query_task( + team_id, query_id, query_json, limit_context=LimitContext.EXPORT, refresh_requested=refresh_requested + ) else: task = process_query_task.delay( - team_id, 
query_id, query_json, in_export_context=True, refresh_requested=refresh_requested + team_id, query_id, query_json, limit_context=LimitContext.EXPORT, refresh_requested=refresh_requested ) query_status.task_id = task.id manager.store_query_status(query_status) diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 0a2806ca99878..0e82558ec1628 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -1,4 +1,5 @@ from datetime import date, datetime +from enum import Enum from typing import Optional, Literal, TypeAlias, Tuple, List from uuid import UUID from pydantic import ConfigDict, BaseModel @@ -32,6 +33,11 @@ MAX_SELECT_RETURNED_ROWS = 10000 # sync with CSV_EXPORT_LIMIT +class LimitContext(str, Enum): + QUERY = "query" + EXPORT = "export" + + # Settings applied at the SELECT level class HogQLQuerySettings(BaseModel): model_config = ConfigDict(extra="forbid") diff --git a/posthog/hogql/modifiers.py b/posthog/hogql/modifiers.py index 8884f197afcf6..fd49ba2bc270c 100644 --- a/posthog/hogql/modifiers.py +++ b/posthog/hogql/modifiers.py @@ -1,12 +1,14 @@ -from typing import Optional +from typing import Optional, TYPE_CHECKING -from posthog.models import Team from posthog.schema import HogQLQueryModifiers, MaterializationMode from posthog.utils import PersonOnEventsMode +if TYPE_CHECKING: + from posthog.models import Team + def create_default_modifiers_for_team( - team: Team, modifiers: Optional[HogQLQueryModifiers] = None + team: "Team", modifiers: Optional[HogQLQueryModifiers] = None ) -> HogQLQueryModifiers: if modifiers is None: modifiers = HogQLQueryModifiers() diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py index 751b9fb46b860..8ca5f5b582ab1 100644 --- a/posthog/hogql/query.py +++ b/posthog/hogql/query.py @@ -3,7 +3,7 @@ from posthog.clickhouse.client.connection import Workload from posthog.errors import ExposedCHQueryError from posthog.hogql import ast -from posthog.hogql.constants import HogQLGlobalSettings +from posthog.hogql.constants import HogQLGlobalSettings, LimitContext from posthog.hogql.errors import HogQLException from posthog.hogql.hogql import HogQLContext from posthog.hogql.modifiers import create_default_modifiers_for_team @@ -34,7 +34,7 @@ def execute_hogql_query( workload: Workload = Workload.ONLINE, settings: Optional[HogQLGlobalSettings] = None, modifiers: Optional[HogQLQueryModifiers] = None, - in_export_context: Optional[bool] = False, + limit_context: Optional[LimitContext] = None, timings: Optional[HogQLTimings] = None, explain: Optional[bool] = False, ) -> HogQLQueryResponse: @@ -80,7 +80,7 @@ def execute_hogql_query( if one_query.limit is None: # One more "max" of MAX_SELECT_RETURNED_ROWS (10k) in applied in the query printer. one_query.limit = ast.Constant( - value=MAX_SELECT_RETURNED_ROWS if in_export_context else DEFAULT_RETURNED_ROWS + value=MAX_SELECT_RETURNED_ROWS if limit_context == LimitContext.EXPORT else DEFAULT_RETURNED_ROWS ) # Get printed HogQL query, and returned columns. Using a cloned query. 
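# A minimal, self-contained sketch of the behavior introduced by the hunks above:
# the boolean in_export_context flag is replaced by the LimitContext enum from
# posthog/hogql/constants.py, and execute_hogql_query picks the row limit based on it.
# This is illustrative only — default_limit() is a hypothetical helper, and
# DEFAULT_RETURNED_ROWS = 100 is an assumed value (only MAX_SELECT_RETURNED_ROWS = 10000
# appears in this diff).
from enum import Enum
from typing import Optional


class LimitContext(str, Enum):  # mirrors the enum added in posthog/hogql/constants.py
    QUERY = "query"
    EXPORT = "export"


DEFAULT_RETURNED_ROWS = 100  # assumed default; not shown in this diff
MAX_SELECT_RETURNED_ROWS = 10000  # sync with CSV_EXPORT_LIMIT


def default_limit(limit_context: Optional[LimitContext]) -> int:
    # Exports may return up to the hard cap; interactive queries get the smaller
    # default — matching the `limit_context == LimitContext.EXPORT` branch above.
    return MAX_SELECT_RETURNED_ROWS if limit_context == LimitContext.EXPORT else DEFAULT_RETURNED_ROWS


assert default_limit(None) == DEFAULT_RETURNED_ROWS
assert default_limit(LimitContext.EXPORT) == MAX_SELECT_RETURNED_ROWS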
diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py
index 751b9fb46b860..8ca5f5b582ab1 100644
--- a/posthog/hogql/query.py
+++ b/posthog/hogql/query.py
@@ -3,7 +3,7 @@
 from posthog.clickhouse.client.connection import Workload
 from posthog.errors import ExposedCHQueryError
 from posthog.hogql import ast
-from posthog.hogql.constants import HogQLGlobalSettings
+from posthog.hogql.constants import HogQLGlobalSettings, LimitContext
 from posthog.hogql.errors import HogQLException
 from posthog.hogql.hogql import HogQLContext
 from posthog.hogql.modifiers import create_default_modifiers_for_team
@@ -34,7 +34,7 @@ def execute_hogql_query(
     workload: Workload = Workload.ONLINE,
     settings: Optional[HogQLGlobalSettings] = None,
     modifiers: Optional[HogQLQueryModifiers] = None,
-    in_export_context: Optional[bool] = False,
+    limit_context: Optional[LimitContext] = None,
     timings: Optional[HogQLTimings] = None,
     explain: Optional[bool] = False,
 ) -> HogQLQueryResponse:
@@ -80,7 +80,7 @@ def execute_hogql_query(
         if one_query.limit is None:
             # One more "max" of MAX_SELECT_RETURNED_ROWS (10k) in applied in the query printer.
             one_query.limit = ast.Constant(
-                value=MAX_SELECT_RETURNED_ROWS if in_export_context else DEFAULT_RETURNED_ROWS
+                value=MAX_SELECT_RETURNED_ROWS if limit_context == LimitContext.EXPORT else DEFAULT_RETURNED_ROWS
             )
 
     # Get printed HogQL query, and returned columns. Using a cloned query.
@@ -122,7 +122,7 @@ def execute_hogql_query(
     )
 
     settings = settings or HogQLGlobalSettings()
-    if in_export_context:
+    if limit_context == LimitContext.EXPORT:
         settings.max_execution_time = EXPORT_CONTEXT_MAX_EXECUTION_TIME
 
     # Print the ClickHouse SQL query
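
Net effect of the query.py hunks: a HogQL query with no explicit LIMIT gets DEFAULT_RETURNED_ROWS interactively and MAX_SELECT_RETURNED_ROWS (10000) under LimitContext.EXPORT, and only exports get the extended max_execution_time. A sketch of the limit branch as a standalone helper (effective_limit is an invented name, not a function in this diff):

from typing import Optional

from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, MAX_SELECT_RETURNED_ROWS, LimitContext


def effective_limit(explicit_limit: Optional[int], limit_context: Optional[LimitContext]) -> int:
    # Mirrors the `if one_query.limit is None` branch in execute_hogql_query:
    # an explicit LIMIT is kept here (the query printer still clamps it to the
    # 10k max); otherwise exports get the 10k cap and interactive queries the default.
    if explicit_limit is not None:
        return explicit_limit
    return MAX_SELECT_RETURNED_ROWS if limit_context == LimitContext.EXPORT else DEFAULT_RETURNED_ROWS
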
diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py
index e7ec26a441ded..f9ee10c648f25 100644
--- a/posthog/hogql_queries/events_query_runner.py
+++ b/posthog/hogql_queries/events_query_runner.py
@@ -12,7 +12,7 @@
 from posthog.hogql import ast
 from posthog.hogql.parser import parse_expr, parse_order_expr
 from posthog.hogql.property import action_to_expr, has_aggregation, property_to_expr
-from posthog.hogql.query import execute_hogql_query
+from posthog.hogql.query import execute_hogql_query, LimitContext
 from posthog.hogql.timings import HogQLTimings
 from posthog.hogql_queries.query_runner import QueryRunner
 from posthog.models import Action, Person
@@ -187,7 +187,7 @@ def calculate(self) -> EventsQueryResponse:
             query_type="EventsQuery",
             timings=self.timings,
             modifiers=self.modifiers,
-            in_export_context=self.in_export_context,
+            limit_context=self.limit_context,
         )
 
         # Convert star field from tuple to dict in each result
@@ -265,7 +265,7 @@ def limit(self) -> int:
         return (
             min(
                 MAX_SELECT_RETURNED_ROWS,
-                (MAX_SELECT_RETURNED_ROWS if self.in_export_context else DEFAULT_RETURNED_ROWS)
+                (MAX_SELECT_RETURNED_ROWS if self.limit_context == LimitContext.EXPORT else DEFAULT_RETURNED_ROWS)
                 if self.query.limit is None
                 else self.query.limit,
             )
diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py
index a79e875d14a73..1a6bcc89c730c 100644
--- a/posthog/hogql_queries/hogql_query_runner.py
+++ b/posthog/hogql_queries/hogql_query_runner.py
@@ -49,7 +49,7 @@ def calculate(self) -> HogQLQueryResponse:
             team=self.team,
             workload=Workload.ONLINE,
             timings=self.timings,
-            in_export_context=self.in_export_context,
+            limit_context=self.limit_context,
             explain=bool(self.query.explain),
         )
diff --git a/posthog/hogql_queries/insights/insight_persons_query_runner.py b/posthog/hogql_queries/insights/insight_persons_query_runner.py
index 51cf792346992..e0681bc5af08a 100644
--- a/posthog/hogql_queries/insights/insight_persons_query_runner.py
+++ b/posthog/hogql_queries/insights/insight_persons_query_runner.py
@@ -16,7 +16,7 @@ class InsightPersonsQueryRunner(QueryRunner):
 
     @cached_property
     def source_runner(self) -> QueryRunner:
-        return get_query_runner(self.query.source, self.team, self.timings, self.in_export_context)
+        return get_query_runner(self.query.source, self.team, self.timings, self.limit_context)
 
     def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
         if isinstance(self.source_runner, LifecycleQueryRunner):
diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py
index 76f204c8a310f..eb450338a446a 100644
--- a/posthog/hogql_queries/insights/trends/trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py
@@ -51,9 +51,9 @@ def __init__(
         team: Team,
         timings: Optional[HogQLTimings] = None,
         modifiers: Optional[HogQLQueryModifiers] = None,
-        in_export_context: Optional[bool] = None,
+        limit_context: Optional[LimitContext] = None,
     ):
-        super().__init__(query, team=team, timings=timings, modifiers=modifiers, in_export_context=in_export_context)
+        super().__init__(query, team=team, timings=timings, modifiers=modifiers, limit_context=limit_context)
         self.series = self.setup_series()
 
     def _is_stale(self, cached_result_package):
diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py
index 1f2b0d43ad743..ed08a9fcbb34e 100644
--- a/posthog/hogql_queries/query_runner.py
+++ b/posthog/hogql_queries/query_runner.py
@@ -9,6 +9,7 @@
 from posthog.clickhouse.query_tagging import tag_queries
 from posthog.hogql import ast
+from posthog.hogql.constants import LimitContext
 from posthog.hogql.context import HogQLContext
 from posthog.hogql.printer import print_ast
 from posthog.hogql.query import create_default_modifiers_for_team
@@ -88,7 +89,7 @@ def get_query_runner(
     query: Dict[str, Any] | RunnableQueryNode | BaseModel,
     team: Team,
     timings: Optional[HogQLTimings] = None,
-    in_export_context: Optional[bool] = False,
+    limit_context: Optional[LimitContext] = None,
     modifiers: Optional[HogQLQueryModifiers] = None,
 ) -> "QueryRunner":
     kind = None
@@ -106,7 +107,7 @@ def get_query_runner(
             query=cast(LifecycleQuery | Dict[str, Any], query),
             team=team,
             timings=timings,
-            in_export_context=in_export_context,
+            limit_context=limit_context,
             modifiers=modifiers,
         )
     if kind == "TrendsQuery":
@@ -116,7 +117,7 @@ def get_query_runner(
             query=cast(TrendsQuery | Dict[str, Any], query),
             team=team,
             timings=timings,
-            in_export_context=in_export_context,
+            limit_context=limit_context,
             modifiers=modifiers,
         )
     if kind == "EventsQuery":
@@ -126,7 +127,7 @@ def get_query_runner(
             query=cast(EventsQuery | Dict[str, Any], query),
             team=team,
             timings=timings,
-            in_export_context=in_export_context,
+            limit_context=limit_context,
             modifiers=modifiers,
         )
     if kind == "PersonsQuery":
@@ -136,7 +137,7 @@ def get_query_runner(
             query=cast(PersonsQuery | Dict[str, Any], query),
             team=team,
             timings=timings,
-            in_export_context=in_export_context,
+            limit_context=limit_context,
             modifiers=modifiers,
         )
     if kind == "InsightPersonsQuery":
@@ -146,7 +147,7 @@ def get_query_runner(
             query=cast(InsightPersonsQuery | Dict[str, Any], query),
             team=team,
             timings=timings,
-            in_export_context=in_export_context,
+            limit_context=limit_context,
             modifiers=modifiers,
         )
     if kind == "HogQLQuery":
@@ -156,7 +157,7 @@ def get_query_runner(
             query=cast(HogQLQuery | Dict[str, Any], query),
             team=team,
             timings=timings,
-            in_export_context=in_export_context,
+            limit_context=limit_context,
             modifiers=modifiers,
         )
     if kind == "SessionsTimelineQuery":
@@ -190,7 +191,7 @@ class QueryRunner(ABC):
     team: Team
     timings: HogQLTimings
     modifiers: HogQLQueryModifiers
-    in_export_context: bool
+    limit_context: LimitContext
 
     def __init__(
         self,
@@ -198,11 +199,11 @@ def __init__(
         team: Team,
         timings: Optional[HogQLTimings] = None,
         modifiers: Optional[HogQLQueryModifiers] = None,
-        in_export_context: Optional[bool] = False,
+        limit_context: Optional[LimitContext] = None,
     ):
         self.team = team
         self.timings = timings or HogQLTimings()
-        self.in_export_context = in_export_context or False
+        self.limit_context = limit_context or LimitContext.QUERY
         self.modifiers = create_default_modifiers_for_team(team, modifiers)
         if isinstance(query, self.query_type):
             self.query = query  # type: ignore
@@ -216,7 +217,7 @@ def calculate(self) -> BaseModel:
         raise NotImplementedError()
 
     def run(self, refresh_requested: Optional[bool] = None) -> CachedQueryResponse:
-        cache_key = self._cache_key() + ("_export" if self.in_export_context else "")
+        cache_key = self._cache_key() + ("_export" if self.limit_context == LimitContext.EXPORT else "")
         tag_queries(cache_key=cache_key)
 
         if not refresh_requested:
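
Two details of the query_runner.py change are easy to miss: limit_context falls back to LimitContext.QUERY, so every runner carries a concrete context, and run() folds the context into the cache key so 10k-row export results are never served to interactive queries (or vice versa). A standalone sketch of the keying behavior (cache_key_for and the sample keys are invented for illustration):

from typing import Optional

from posthog.hogql.constants import LimitContext


def cache_key_for(base_key: str, limit_context: Optional[LimitContext]) -> str:
    # Mirrors QueryRunner.run: only the export context alters the key, so
    # LimitContext.QUERY keeps matching pre-existing, un-suffixed cache entries.
    limit_context = limit_context or LimitContext.QUERY
    return base_key + ("_export" if limit_context == LimitContext.EXPORT else "")


assert cache_key_for("cache_7d1", LimitContext.QUERY) == "cache_7d1"
assert cache_key_for("cache_7d1", None) == "cache_7d1"
assert cache_key_for("cache_7d1", LimitContext.EXPORT) == "cache_7d1_export"
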
diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py
index 8f6fffd0c9f90..c0dc99ff436fc 100644
--- a/posthog/tasks/exports/csv_exporter.py
+++ b/posthog/tasks/exports/csv_exporter.py
@@ -19,6 +19,7 @@
     EXPORT_TIMER,
 )
 from ...constants import CSV_EXPORT_LIMIT
+from ...hogql.query import LimitContext
 
 logger = structlog.get_logger(__name__)
 
@@ -184,7 +185,7 @@ def _export_to_csv(exported_asset: ExportedAsset, limit: int = 1000) -> None:
 
     if resource.get("source"):
         query = resource.get("source")
-        query_response = process_query(team=exported_asset.team, query_json=query, in_export_context=True)
+        query_response = process_query(team=exported_asset.team, query_json=query, limit_context=LimitContext.EXPORT)
 
         all_csv_rows = _convert_response_to_csv_data(query_response)
     else:
diff --git a/posthog/urls.py b/posthog/urls.py
index c271406c73469..343e699bb8eaa 100644
--- a/posthog/urls.py
+++ b/posthog/urls.py
@@ -54,11 +54,17 @@
 )
 from .year_in_posthog import year_in_posthog
 
+import structlog
+
+logger = structlog.get_logger(__name__)
+
 ee_urlpatterns: List[Any] = []
 try:
     from ee.urls import extend_api_router
     from ee.urls import urlpatterns as ee_urlpatterns
 except ImportError:
+    if settings.DEBUG:
+        logger.warning("Could not import ee.urls", exc_info=True)
     pass
 else:
     extend_api_router(