diff --git a/.envrc b/.envrc
new file mode 100644
index 0000000000000..379f0549498a3
--- /dev/null
+++ b/.envrc
@@ -0,0 +1,3 @@
+if [ -z "${FLOX_VERSION}" ]; then # Don't activate if already activated
+    flox activate
+fi
diff --git a/.flox/.gitignore b/.flox/.gitignore
new file mode 100644
index 0000000000000..15d71a169ff75
--- /dev/null
+++ b/.flox/.gitignore
@@ -0,0 +1,4 @@
+run/
+cache/
+lib/
+log/
diff --git a/.flox/env.json b/.flox/env.json
new file mode 100644
index 0000000000000..e913909c9bf54
--- /dev/null
+++ b/.flox/env.json
@@ -0,0 +1,4 @@
+{
+  "name": "posthog",
+  "version": 1
+}
diff --git a/.flox/env/direnv-setup.sh b/.flox/env/direnv-setup.sh
new file mode 100755
index 0000000000000..705ab57b076f1
--- /dev/null
+++ b/.flox/env/direnv-setup.sh
@@ -0,0 +1,71 @@
+#!/bin/bash
+
+if ! command -v direnv &> /dev/null; then
+    # Install direnv based on platform
+    if command -v brew &> /dev/null; then
+        echo "šŸ”„ Installing direnv using 'brew install direnv'..."
+        HOMEBREW_NO_ENV_HINTS=1 brew install -q direnv
+    elif command -v apt &> /dev/null; then
+        echo "šŸ”„ Installing direnv using 'apt install direnv'..."
+        sudo apt update && sudo apt install -yq direnv
+    elif command -v dnf &> /dev/null; then
+        echo "šŸ”„ Installing direnv using 'dnf install direnv'..."
+        sudo dnf install -yq direnv
+    else
+        echo "šŸ”„ Installing direnv using 'curl -sfL https://direnv.net/install.sh | bash'"
+        curl -sfL https://direnv.net/install.sh | bash
+    fi
+    echo "āœ… Installed direnv"
+else
+    echo "ā© direnv already installed"
+fi
+
+# Determine shell and config file
+shell_name=$(basename "$SHELL")
+case "$shell_name" in
+    "bash")
+        config_file="$HOME/.bashrc"
+        hook_command='eval "$(direnv hook bash)"'
+        ;;
+    "zsh")
+        config_file="$HOME/.zshrc"
+        hook_command='eval "$(direnv hook zsh)"'
+        ;;
+    "fish")
+        config_file="$HOME/.config/fish/config.fish"
+        hook_command='direnv hook fish | source'
+        mkdir -p "$(dirname "$config_file")"
+        ;;
+    "tcsh")
+        config_file="$HOME/.cshrc"
+        hook_command='eval `direnv hook tcsh`'
+        ;;
+    *)
+        echo "Unsupported shell: $shell_name"
+        exit 1
+        ;;
+esac
+
+echo "šŸš Configuring your default shell, $SHELL, for direnv"
+
+# Add hook to shell config if not already present
+if ! grep -q "direnv hook" "$config_file" 2>/dev/null; then
+    echo -e "\n# Initialize direnv - added by PostHog's Flox activation hook (../posthog/.flox/env/manifest.toml)\n$hook_command" >> "$config_file"
+    echo "āœ… Injected direnv hook into $config_file"
+else
+    echo "ā© direnv hook already present in $config_file"
+fi
+
+# Configure direnv's warn_timeout if not already set
grep -q "warn_timeout" "$HOME/.config/direnv/direnv.toml" 2>/dev/null; then + echo "[global]\nwarn_timeout = 0 # Ignore timeout from this issue: https://github.com/direnv/direnv/issues/1065 - added by PostHog's Flox activation hook (../posthog/.flox/env/manifest.toml)" >> "$HOME/.config/direnv/direnv.toml" + echo "āœ… Configured ~/.config/direnv/direnv.toml" +else + echo "ā© ~/.config/direnv/direnv.toml already configured" +fi + +echo "šŸ’« direnv is now active" + +# Allow this directory's .envrc to be loaded +direnv allow diff --git a/.flox/env/manifest.lock b/.flox/env/manifest.lock new file mode 100644 index 0000000000000..1a6702487f450 --- /dev/null +++ b/.flox/env/manifest.lock @@ -0,0 +1,1833 @@ +{ + "lockfile-version": 1, + "manifest": { + "version": 1, + "install": { + "brotli": { + "pkg-path": "brotli", + "pkg-group": "nodejs" + }, + "cargo": { + "pkg-path": "cargo", + "pkg-group": "rust-toolchain", + "version": "1.80.1" + }, + "clippy": { + "pkg-path": "clippy", + "pkg-group": "rust-toolchain" + }, + "corepack": { + "pkg-path": "corepack", + "pkg-group": "nodejs" + }, + "go": { + "pkg-path": "go", + "pkg-group": "go", + "version": "1.22" + }, + "libiconv": { + "pkg-path": "libiconv", + "pkg-group": "rust-toolchain", + "systems": [ + "aarch64-darwin" + ] + }, + "libtool": { + "pkg-path": "libtool", + "pkg-group": "python" + }, + "mprocs": { + "pkg-path": "mprocs" + }, + "nodejs": { + "pkg-path": "nodejs_18", + "pkg-group": "nodejs" + }, + "python3": { + "pkg-path": "python3", + "pkg-group": "python", + "version": "3.11" + }, + "rust-lib-src": { + "pkg-path": "rustPlatform.rustLibSrc", + "pkg-group": "rust-toolchain" + }, + "rustc": { + "pkg-path": "rustc", + "pkg-group": "rust-toolchain" + }, + "rustfmt": { + "pkg-path": "rustfmt", + "pkg-group": "rust-toolchain" + }, + "uv": { + "pkg-path": "uv", + "pkg-group": "python" + }, + "xmlsec": { + "pkg-path": "xmlsec", + "pkg-group": "python", + "version": "1.2.34" + } + }, + "vars": { + "DEBUG": "1", + "DIRENV_LOG_FORMAT": "", + "POSTHOG_SKIP_MIGRATION_CHECKS": "1" + }, + "hook": { + "on-activate": "# Guide through installing and configuring direnv if it's not present (optionally)\nif [[ -t 0 ]] && [ ! -d \"$DIRENV_DIR\" ] && [ ! -f \"$FLOX_ENV_CACHE/.hush-direnv\" ]; then\n read -p \"\nšŸ‘‰ Use direnv (https://direnv.net) for automatic activation of this environment by your shell.\nā“ Would you like direnv to be set up now? (y/n, default: y)\" -n 1 -r\n if [[ $REPLY =~ ^[Yy]$ || -z $REPLY ]]; then\n $FLOX_ENV_CACHE/../env/direnv-setup.sh\n else\n echo \"ā­ļø Skipping direnv setup. This message will not be shown again, but if you change your mind, just run '.flox/bin/direnv-setup.sh'\"\n touch $FLOX_ENV_CACHE/.hush-direnv\n fi\n echo\nfi\n\n# Set up a Python virtual environment\nexport PYTHON_DIR=\"$FLOX_ENV_CACHE/venv\"\nif [ ! -d \"$PYTHON_DIR\" ]; then\n uv venv \"$PYTHON_DIR\" --allow-existing\nfi\n\necho -e \"Python virtual environment path: \\033[33m.flox/cache/venv\\033[0m\"\necho -e \"Python interpreter path, for your code editor: \\033[33m.flox/cache/venv/bin/python\\033[0m\"\n\nsource \"$PYTHON_DIR/bin/activate\"\n\n# Install Python dependencies (this is practically instant thanks to uv)\nuv pip install -q -r requirements.txt -r requirements-dev.txt\n\n# Install top-level Node dependencies (only if not present all yet, because this takes almost a second even with pnpm)\n# This also sets up pre-commit hooks via Husky\nif [ ! 
-d \"node_modules\" ]; then\n pnpm install -s\nfi\n\nif [[ -t 0 ]]; then # The block below only runs when in an interactive shell\n # Add required entries to /etc/hosts if not present\n if ! grep -q \"127.0.0.1 kafka clickhouse\" /etc/hosts; then\n echo\n echo \"šŸšØ Amending /etc/hosts to map hostnames 'kafka' and 'clickhouse' to 127.0.0.1...\"\n echo \"127.0.0.1 kafka clickhouse\" | sudo tee -a /etc/hosts 1> /dev/null\n echo \"āœ… /etc/hosts amended\"\n fi\n\n # Print intro message\n echo -e \"\nIT'S DANGEROUS TO GO ALONE! RUN THIS:\n1. \\033[31mbin/migrate\\033[0m - to run all migrations\n2. \\033[32mbin/start\\033[0m - to start the entire stack\n3. \\033[34m./manage.py generate_demo_data\\033[0m - to create a user with demo data\n\"\nfi\n" + }, + "profile": { + "bash": " source \"$PYTHON_DIR/bin/activate\"\n", + "zsh": " source \"$PYTHON_DIR/bin/activate\"\n", + "fish": " source \"$PYTHON_DIR/bin/activate.fish\"\n", + "tcsh": " source \"$PYTHON_DIR/bin/activate.csh\"\n" + }, + "options": { + "systems": [ + "aarch64-darwin", + "aarch64-linux", + "x86_64-darwin", + "x86_64-linux" + ], + "allow": { + "licenses": [] + }, + "semver": {} + } + }, + "packages": [ + { + "attr_path": "go", + "broken": false, + "derivation": "/nix/store/x1p2gf1jcqn29l4agdqrb0w09rib7w3m-go-1.22.7.drv", + "description": "Go Programming language", + "install_id": "go", + "license": "BSD-3-Clause", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "go-1.22.7", + "pname": "go", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.22.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/rfcwglhhspqx5v5h0sl4b3py14i6vpxa-go-1.22.7" + }, + "system": "aarch64-darwin", + "group": "go", + "priority": 5 + }, + { + "attr_path": "go", + "broken": false, + "derivation": "/nix/store/qk95aryv3n1mhmk0lxf55sg9yr0l6138-go-1.22.7.drv", + "description": "Go Programming language", + "install_id": "go", + "license": "BSD-3-Clause", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "go-1.22.7", + "pname": "go", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.22.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/64z59pb0ss407rbv1fcvq0ynngrwfa6k-go-1.22.7" + }, + "system": "aarch64-linux", + "group": "go", + "priority": 5 + }, + { + "attr_path": "go", + "broken": false, + "derivation": "/nix/store/4bjrs6k4a0xjyy5zanbc8igv0cffsi0a-go-1.22.7.drv", + "description": "Go Programming language", + "install_id": "go", + "license": "BSD-3-Clause", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "go-1.22.7", + "pname": "go", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.22.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/r8199g59rmp6ac0lnx86fpk57fbxc3bk-go-1.22.7" + }, + "system": "x86_64-darwin", + "group": "go", + "priority": 5 + }, + { 
+ "attr_path": "go", + "broken": false, + "derivation": "/nix/store/gp0ma9f4n4nxmgbgl1g65kvlf05cl22y-go-1.22.7.drv", + "description": "Go Programming language", + "install_id": "go", + "license": "BSD-3-Clause", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "go-1.22.7", + "pname": "go", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.22.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/chzgk756zb2cqlzbjr86m0lfxi63cdfy-go-1.22.7" + }, + "system": "x86_64-linux", + "group": "go", + "priority": 5 + }, + { + "attr_path": "brotli", + "broken": false, + "derivation": "/nix/store/6cwflld1b78lmqr3mj7c8jgysbfq32qk-brotli-1.1.0.drv", + "description": "Generic-purpose lossless compression algorithm and tool", + "install_id": "brotli", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "brotli-1.1.0", + "pname": "brotli", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "1.1.0", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/x9x8kz1dm5mlk9shcs1d802ynj6w61y2-brotli-1.1.0-dev", + "lib": "/nix/store/r7fvblydd9smajp3asaqhm6jvqc38qmb-brotli-1.1.0-lib", + "out": "/nix/store/ns2pn7hja7k1rmviv52zrf2xbb0xx1wn-brotli-1.1.0" + }, + "system": "aarch64-darwin", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "brotli", + "broken": false, + "derivation": "/nix/store/4p76dm63fspxbwhng9mhkr9qh9y449hs-brotli-1.1.0.drv", + "description": "Generic-purpose lossless compression algorithm and tool", + "install_id": "brotli", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "brotli-1.1.0", + "pname": "brotli", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "1.1.0", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/j3am2m4jn6pkiv0bm2r45z51243qf45x-brotli-1.1.0-dev", + "lib": "/nix/store/0w6nm7j7w5b27gxmb6kfang0n69ybx94-brotli-1.1.0-lib", + "out": "/nix/store/xq716j0dkf2pwxvvl426ga1vxsmyjgbj-brotli-1.1.0" + }, + "system": "aarch64-linux", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "brotli", + "broken": false, + "derivation": "/nix/store/p3y1bh5164jlw6gy45037wnrd7rzdncx-brotli-1.1.0.drv", + "description": "Generic-purpose lossless compression algorithm and tool", + "install_id": "brotli", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "brotli-1.1.0", + "pname": "brotli", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "1.1.0", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/hxz478davwb39pcs9p40chhmj6igh5hn-brotli-1.1.0-dev", + "lib": "/nix/store/v8zhc6am8gqbc16w6ik20yj9wbpn015i-brotli-1.1.0-lib", + 
"out": "/nix/store/azlw8ri487sa8rmwg86cdysf3j8xh1c5-brotli-1.1.0" + }, + "system": "x86_64-darwin", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "brotli", + "broken": false, + "derivation": "/nix/store/rbwgzdy86dfwp0nlvqy7k39n61486979-brotli-1.1.0.drv", + "description": "Generic-purpose lossless compression algorithm and tool", + "install_id": "brotli", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "brotli-1.1.0", + "pname": "brotli", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "1.1.0", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/m69rxkn1154drqhcbnqjr6i7xbar4yb4-brotli-1.1.0-dev", + "lib": "/nix/store/2vgwd43vqxm66grkgsy7z1d87z31nzph-brotli-1.1.0-lib", + "out": "/nix/store/2ww03dm6fbyp76fp0kv1dv9cxd39fiis-brotli-1.1.0" + }, + "system": "x86_64-linux", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "corepack", + "broken": false, + "derivation": "/nix/store/fjjlk4g8ybfl2lh9kdi1j3k52rssif0z-corepack-nodejs-20.18.1.drv", + "description": "Wrappers for npm, pnpm and Yarn via Node.js Corepack", + "install_id": "corepack", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "corepack-nodejs-20.18.1", + "pname": "corepack", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-20.18.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/xb7rzh9a99dwidaaks5dhkbq3g5qx2fi-corepack-nodejs-20.18.1" + }, + "system": "aarch64-darwin", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "corepack", + "broken": false, + "derivation": "/nix/store/39wdrzfmghz3j4jr6sh6z4ndhfzq6kmg-corepack-nodejs-20.18.1.drv", + "description": "Wrappers for npm, pnpm and Yarn via Node.js Corepack", + "install_id": "corepack", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "corepack-nodejs-20.18.1", + "pname": "corepack", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-20.18.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/64m1bmya4yvz2pfpa4li800m8caxawwy-corepack-nodejs-20.18.1" + }, + "system": "aarch64-linux", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "corepack", + "broken": false, + "derivation": "/nix/store/0nxs6aij505x6c8lr747gfmfryv5gh03-corepack-nodejs-20.18.1.drv", + "description": "Wrappers for npm, pnpm and Yarn via Node.js Corepack", + "install_id": "corepack", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "corepack-nodejs-20.18.1", + "pname": "corepack", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-20.18.1", + "outputs_to_install": [ + "out" + ], + 
"outputs": { + "out": "/nix/store/2apr9lhsr6gvcbr92dhvall1qvjc2d1h-corepack-nodejs-20.18.1" + }, + "system": "x86_64-darwin", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "corepack", + "broken": false, + "derivation": "/nix/store/hdmcwb8m1l9lkcrqhc4s32vyfrrv9xsf-corepack-nodejs-20.18.1.drv", + "description": "Wrappers for npm, pnpm and Yarn via Node.js Corepack", + "install_id": "corepack", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "corepack-nodejs-20.18.1", + "pname": "corepack", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-20.18.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/y7kznvfcjl2y8cpivy6vbp1j0x5rdqlm-corepack-nodejs-20.18.1" + }, + "system": "x86_64-linux", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "nodejs_18", + "broken": false, + "derivation": "/nix/store/1xpdiwn64w121hqam7a572x76j05k59b-nodejs-18.20.5.drv", + "description": "Event-driven I/O framework for the V8 JavaScript engine", + "install_id": "nodejs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "nodejs-18.20.5", + "pname": "nodejs_18", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-18.20.5", + "outputs_to_install": [ + "out" + ], + "outputs": { + "libv8": "/nix/store/a40q0j0m27m1dxy9i94accm5zaknpx2k-nodejs-18.20.5-libv8", + "out": "/nix/store/6dlw5588gs7whckfx2l0iyd8xi4gklq6-nodejs-18.20.5" + }, + "system": "aarch64-darwin", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "nodejs_18", + "broken": false, + "derivation": "/nix/store/vhpwx7fmj5zcmm8jn8zj4kr7559kfwaf-nodejs-18.20.5.drv", + "description": "Event-driven I/O framework for the V8 JavaScript engine", + "install_id": "nodejs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "nodejs-18.20.5", + "pname": "nodejs_18", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-18.20.5", + "outputs_to_install": [ + "out" + ], + "outputs": { + "libv8": "/nix/store/1plafrvq3c9vhdwilizkfl1wff0n5n6h-nodejs-18.20.5-libv8", + "out": "/nix/store/x07wndwyr9f6xhk6rk7iq5rn2qbw4d3y-nodejs-18.20.5" + }, + "system": "aarch64-linux", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "nodejs_18", + "broken": false, + "derivation": "/nix/store/rm9x0h8ap163r25lqjppxny4y7d5h6vv-nodejs-18.20.5.drv", + "description": "Event-driven I/O framework for the V8 JavaScript engine", + "install_id": "nodejs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "nodejs-18.20.5", + "pname": "nodejs_18", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-18.20.5", + "outputs_to_install": 
[ + "out" + ], + "outputs": { + "libv8": "/nix/store/rdx8vcizxv9q5kqd1qydz5gkagar5xy8-nodejs-18.20.5-libv8", + "out": "/nix/store/24kh8yc1g5d9whhbdykf58zymfbxbki6-nodejs-18.20.5" + }, + "system": "x86_64-darwin", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "nodejs_18", + "broken": false, + "derivation": "/nix/store/9hq82dni4ix0h26rdrvysrkdqvfxgfwl-nodejs-18.20.5.drv", + "description": "Event-driven I/O framework for the V8 JavaScript engine", + "install_id": "nodejs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56", + "name": "nodejs-18.20.5", + "pname": "nodejs_18", + "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", + "rev_count": 719504, + "rev_date": "2024-12-09T15:59:59Z", + "scrape_date": "2024-12-11T03:49:32Z", + "stabilities": [ + "unstable" + ], + "unfree": false, + "version": "nodejs-18.20.5", + "outputs_to_install": [ + "out" + ], + "outputs": { + "libv8": "/nix/store/3kp2j4w0k9cb7r29wq2iv8kyp7hw1dqs-nodejs-18.20.5-libv8", + "out": "/nix/store/nmmgwk1a0cakhmhwgf1v2b5ws3zf899h-nodejs-18.20.5" + }, + "system": "x86_64-linux", + "group": "nodejs", + "priority": 5 + }, + { + "attr_path": "libtool", + "broken": false, + "derivation": "/nix/store/pnicfn49l8gglh4admyrgz7c2mlhv2kh-libtool-2.4.7.drv", + "description": "GNU Libtool, a generic library support script", + "install_id": "libtool", + "license": "GPL-2.0-or-later", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "libtool-2.4.7", + "pname": "libtool", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "2.4.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "lib": "/nix/store/yhi9n11d9lg80h98rhwq7cdfymlrkc2k-libtool-2.4.7-lib", + "out": "/nix/store/abhrfb6g3jcp78l35ajj2aqa11pspjp0-libtool-2.4.7" + }, + "system": "aarch64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "libtool", + "broken": false, + "derivation": "/nix/store/n8nyg3q3nw96prqz1qbrfqr74j5fczf4-libtool-2.4.7.drv", + "description": "GNU Libtool, a generic library support script", + "install_id": "libtool", + "license": "GPL-2.0-or-later", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "libtool-2.4.7", + "pname": "libtool", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "2.4.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "lib": "/nix/store/sbf540sfh2nw3v2c0i08p9v48805dghh-libtool-2.4.7-lib", + "out": "/nix/store/kjcyrwjlzlwavmwxqfr6655ammnb3ifm-libtool-2.4.7" + }, + "system": "aarch64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "libtool", + "broken": false, + "derivation": "/nix/store/vx663mr0pkr4z52nayq8f180qwfa5d36-libtool-2.4.7.drv", + "description": "GNU Libtool, a generic library support script", + "install_id": "libtool", + "license": "GPL-2.0-or-later", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "libtool-2.4.7", + "pname": "libtool", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": 
"2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "2.4.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "lib": "/nix/store/wa20n47ny35i71gd8iwp52i98bizb2ys-libtool-2.4.7-lib", + "out": "/nix/store/n72sxky85d97zk1cxmjhy56bm3pfxqxh-libtool-2.4.7" + }, + "system": "x86_64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "libtool", + "broken": false, + "derivation": "/nix/store/bp5w7dy2xvag67wcrm4ccrp3s4siapa4-libtool-2.4.7.drv", + "description": "GNU Libtool, a generic library support script", + "install_id": "libtool", + "license": "GPL-2.0-or-later", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "libtool-2.4.7", + "pname": "libtool", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "2.4.7", + "outputs_to_install": [ + "out" + ], + "outputs": { + "lib": "/nix/store/msg5sk7rqi638605l412fqkwyn71kvka-libtool-2.4.7-lib", + "out": "/nix/store/ijhlqlas42l7i8fdkhn2rkf0bpcyq3z1-libtool-2.4.7" + }, + "system": "x86_64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "python3", + "broken": false, + "derivation": "/nix/store/wfykwnxr7b3yf52iq1mvrj6xsyki9xh3-python3-3.11.9.drv", + "description": "High-level dynamically-typed programming language", + "install_id": "python3", + "license": "Python-2.0", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "python3-3.11.9", + "pname": "python3", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "3.11.9", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/327bf08j5b7l9cnzink3g4vp32y5352j-python3-3.11.9" + }, + "system": "aarch64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "python3", + "broken": false, + "derivation": "/nix/store/k8rfpimvg8943kffa0dhlfc0fcswp6sf-python3-3.11.9.drv", + "description": "High-level dynamically-typed programming language", + "install_id": "python3", + "license": "Python-2.0", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "python3-3.11.9", + "pname": "python3", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "3.11.9", + "outputs_to_install": [ + "out" + ], + "outputs": { + "debug": "/nix/store/kl8hjhf6x7dz7brs1ylkxy26qb8argaq-python3-3.11.9-debug", + "out": "/nix/store/q3x28mimkawkdjlvd78jxv3s0fk25vz8-python3-3.11.9" + }, + "system": "aarch64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "python3", + "broken": false, + "derivation": "/nix/store/23x4lr8y0y7d2gch5vnhmxw927ciqm60-python3-3.11.9.drv", + "description": "High-level dynamically-typed programming language", + "install_id": "python3", + "license": "Python-2.0", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "python3-3.11.9", + "pname": "python3", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 
647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "3.11.9", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/9lcbaggnygcqpgzakib5lwisks8gnn5i-python3-3.11.9" + }, + "system": "x86_64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "python3", + "broken": false, + "derivation": "/nix/store/9v1jlbifgwgfw0l9v745kifpj9zdpl60-python3-3.11.9.drv", + "description": "High-level dynamically-typed programming language", + "install_id": "python3", + "license": "Python-2.0", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "python3-3.11.9", + "pname": "python3", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "3.11.9", + "outputs_to_install": [ + "out" + ], + "outputs": { + "debug": "/nix/store/61rzpp3v8dsf6h17h3jnnwlm5hwc2brr-python3-3.11.9-debug", + "out": "/nix/store/6b1fqdwb3g56j5pazv8zkx9qd0mv3wiz-python3-3.11.9" + }, + "system": "x86_64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "uv", + "broken": false, + "derivation": "/nix/store/x2ik7hjn0q4c94qyyajhhsrnxw24rm4b-uv-0.2.15.drv", + "description": "Extremely fast Python package installer and resolver, written in Rust", + "install_id": "uv", + "license": "[ Apache-2.0, MIT ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "uv-0.2.15", + "pname": "uv", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.2.15", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dist": "/nix/store/7a7h8y05b60cnccr18mchfagwibrhy0v-uv-0.2.15-dist", + "out": "/nix/store/zi83zbl2l4vw2sgygsgs6ib5587mizag-uv-0.2.15" + }, + "system": "aarch64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "uv", + "broken": false, + "derivation": "/nix/store/1svddx8162ibj8v26fs8z709k79pdm24-uv-0.2.15.drv", + "description": "Extremely fast Python package installer and resolver, written in Rust", + "install_id": "uv", + "license": "[ Apache-2.0, MIT ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "uv-0.2.15", + "pname": "uv", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.2.15", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dist": "/nix/store/x0nx8qn17ljf84jpsc1hn1gg90if3a2a-uv-0.2.15-dist", + "out": "/nix/store/5f7vfq5gaz36xvf170agrad04ybhl8bn-uv-0.2.15" + }, + "system": "aarch64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "uv", + "broken": false, + "derivation": "/nix/store/d0xzqj82pddbb8q5y3vdw7wbbm3bbfx0-uv-0.2.15.drv", + "description": "Extremely fast Python package installer and resolver, written in Rust", + "install_id": "uv", + "license": "[ Apache-2.0, MIT ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "uv-0.2.15", + "pname": "uv", + 
"rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.2.15", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dist": "/nix/store/x172v7f4gr2zkkd51k0g80vz55513yis-uv-0.2.15-dist", + "out": "/nix/store/qw6i3mng0mr6hz28d0m87i18badysgk0-uv-0.2.15" + }, + "system": "x86_64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "uv", + "broken": false, + "derivation": "/nix/store/lxc1d49wc3yp25pzmb3sgixsrwxq5ml0-uv-0.2.15.drv", + "description": "Extremely fast Python package installer and resolver, written in Rust", + "install_id": "uv", + "license": "[ Apache-2.0, MIT ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "uv-0.2.15", + "pname": "uv", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.2.15", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dist": "/nix/store/ap7bl4yas61vz3k0fh98wn08b9x8s79a-uv-0.2.15-dist", + "out": "/nix/store/6pxd9hxps14jr88mmixpvp5z3ygcdpdy-uv-0.2.15" + }, + "system": "x86_64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "xmlsec", + "broken": false, + "derivation": "/nix/store/p85izfrfdj8n22pm9iv1b8a511pm2i5n-xmlsec-1.2.34.drv", + "description": "XML Security Library in C based on libxml2", + "install_id": "xmlsec", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "xmlsec-1.2.34", + "pname": "xmlsec", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.2.34", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/mnrmn6vndrfgxyxmbqvmrwi7qjwr3d0m-xmlsec-1.2.34-dev", + "out": "/nix/store/ff6sr1v2q8gqmnyvpl6xfxw3r2lb56i3-xmlsec-1.2.34" + }, + "system": "aarch64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "xmlsec", + "broken": false, + "derivation": "/nix/store/7zscs792mb7v53i43jf4qq0gypgrql8g-xmlsec-1.2.34.drv", + "description": "XML Security Library in C based on libxml2", + "install_id": "xmlsec", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "xmlsec-1.2.34", + "pname": "xmlsec", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.2.34", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/k8n1j04qrbhlzxklrxhk32aphh58jqcf-xmlsec-1.2.34-dev", + "out": "/nix/store/qdscjbwglk2953azhhhsibf0xlsjig1h-xmlsec-1.2.34" + }, + "system": "aarch64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "xmlsec", + "broken": false, + "derivation": "/nix/store/cbl3q86h6vzzzi50ph82syrkha9g9mah-xmlsec-1.2.34.drv", + "description": "XML Security Library in C based on libxml2", + "install_id": "xmlsec", + "license": "MIT", + "locked_url": 
"https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "xmlsec-1.2.34", + "pname": "xmlsec", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.2.34", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/jc3nxrj9kv447gb5ywxqal45b7ick5c7-xmlsec-1.2.34-dev", + "out": "/nix/store/fzhimicsixlzc8k6l0f5vh5lzbxsl39c-xmlsec-1.2.34" + }, + "system": "x86_64-darwin", + "group": "python", + "priority": 5 + }, + { + "attr_path": "xmlsec", + "broken": false, + "derivation": "/nix/store/5z6p8lb90900s6k8lml62mlwjjx6whm2-xmlsec-1.2.34.drv", + "description": "XML Security Library in C based on libxml2", + "install_id": "xmlsec", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "xmlsec-1.2.34", + "pname": "xmlsec", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.2.34", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/z0k51x5cg15f2r058ypjzhf9bakzq16g-xmlsec-1.2.34-dev", + "out": "/nix/store/rrnig5ybklsrf8kiwn15lvci0rfq6379-xmlsec-1.2.34" + }, + "system": "x86_64-linux", + "group": "python", + "priority": 5 + }, + { + "attr_path": "cargo", + "broken": false, + "derivation": "/nix/store/a6j07nsrmnqhv0mm2bbnvnjcfkv3bxpz-cargo-1.80.1.drv", + "description": "Downloads your Rust project's dependencies and builds your project", + "install_id": "cargo", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "cargo-1.80.1", + "pname": "cargo", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/idfq37bavykpfahg0q8cpawj90sckagq-cargo-1.80.1" + }, + "system": "aarch64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "cargo", + "broken": false, + "derivation": "/nix/store/piqkpmwr5m4bn8p0d0j8q94y0bb7yfkw-cargo-1.80.1.drv", + "description": "Downloads your Rust project's dependencies and builds your project", + "install_id": "cargo", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "cargo-1.80.1", + "pname": "cargo", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/hgycsvahrwxwkq56zj5by28pxwx9sfqh-cargo-1.80.1" + }, + "system": "aarch64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "cargo", + "broken": false, + "derivation": "/nix/store/lw8a7gvkhkjfwfccpyy0p2i2kq1bq5qk-cargo-1.80.1.drv", + "description": "Downloads your Rust project's dependencies and builds your project", + "install_id": "cargo", + "license": "[ MIT, 
Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "cargo-1.80.1", + "pname": "cargo", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/rc3niv9fvii3lc862h81qbqca89yagg9-cargo-1.80.1" + }, + "system": "x86_64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "cargo", + "broken": false, + "derivation": "/nix/store/gjrrcbbsmjq4dp6gfrss50l6zm628qpw-cargo-1.80.1.drv", + "description": "Downloads your Rust project's dependencies and builds your project", + "install_id": "cargo", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "cargo-1.80.1", + "pname": "cargo", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/c0r47wb8xm1dbsvppw6gh75jfjakal1y-cargo-1.80.1" + }, + "system": "x86_64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "clippy", + "broken": false, + "derivation": "/nix/store/4bh8wbqf05jvl888ls2395iq2w5yksq1-clippy-1.80.1.drv", + "description": "Bunch of lints to catch common mistakes and improve your Rust code", + "install_id": "clippy", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "clippy-1.80.1", + "pname": "clippy", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/aswyg8bfc3y5nzv6asaz6xlr5b79vvr6-clippy-1.80.1" + }, + "system": "aarch64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "clippy", + "broken": false, + "derivation": "/nix/store/hf78d32s4l9ynzigdyfsqimp58s00d42-clippy-1.80.1.drv", + "description": "Bunch of lints to catch common mistakes and improve your Rust code", + "install_id": "clippy", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "clippy-1.80.1", + "pname": "clippy", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "debug": "/nix/store/xlmkc4xzv3wjbr00vgwxy3k6d1k5hqa5-clippy-1.80.1-debug", + "out": "/nix/store/1dh3cz88wnf12i8cylllgn8j7m0z2j7i-clippy-1.80.1" + }, + "system": "aarch64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "clippy", + "broken": false, + "derivation": "/nix/store/3a0yk18a8h7cqvca1mzma1fc97vz7f6a-clippy-1.80.1.drv", + "description": "Bunch of lints to catch common mistakes and improve your Rust code", + "install_id": "clippy", + 
"license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "clippy-1.80.1", + "pname": "clippy", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/2qw6a3ay1da4c2l2rcalqi35n27my591-clippy-1.80.1" + }, + "system": "x86_64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "clippy", + "broken": false, + "derivation": "/nix/store/avz5cyyj22cz4dc53bx4p8mfrx6frzj9-clippy-1.80.1.drv", + "description": "Bunch of lints to catch common mistakes and improve your Rust code", + "install_id": "clippy", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "clippy-1.80.1", + "pname": "clippy", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "debug": "/nix/store/zfbf9qngj05s22fbg8dbcarjgzn9hff0-clippy-1.80.1-debug", + "out": "/nix/store/6pblw8l1ipgj2g1jvd7grhs1cnpqp9vm-clippy-1.80.1" + }, + "system": "x86_64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "libiconv", + "broken": false, + "derivation": "/nix/store/5g94mlh38xmfwq5wpik66jsaw1g026ss-libiconv-99.drv", + "description": "Iconv(3) implementation", + "install_id": "libiconv", + "license": "[ BSD-2-Clause, BSD-3-Clause, APSL-1.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "libiconv-99", + "pname": "libiconv", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "99", + "outputs_to_install": [ + "out" + ], + "outputs": { + "dev": "/nix/store/r2jkk7lsmi17m5lp3r7aip9626n07qfz-libiconv-99-dev", + "out": "/nix/store/cjzmz9dskblgkbv4by2wnsyvdjw6jpcm-libiconv-99" + }, + "system": "aarch64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustPlatform.rustLibSrc", + "broken": false, + "derivation": "/nix/store/za8qn1akdaglwdm4sv2ndnsa92hy54j7-rust-lib-src.drv", + "install_id": "rust-lib-src", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rust-lib-src", + "pname": "rustLibSrc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "rust-lib-src", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/9mwdk449lws263ykk01xrbywpdzqmgh3-rust-lib-src" + }, + "system": "aarch64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustPlatform.rustLibSrc", + "broken": false, + "derivation": "/nix/store/nin524xcp0l7xzw4ccbgpzxsncf5y447-rust-lib-src.drv", + "install_id": "rust-lib-src", + "locked_url": 
"https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rust-lib-src", + "pname": "rustLibSrc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "rust-lib-src", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/b2kn9wlf1bhz2kry46d1xv0ir402vfrw-rust-lib-src" + }, + "system": "aarch64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustPlatform.rustLibSrc", + "broken": false, + "derivation": "/nix/store/7g5crphsvw21gbwm5ij3b5pbhx0i0m8j-rust-lib-src.drv", + "install_id": "rust-lib-src", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rust-lib-src", + "pname": "rustLibSrc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "rust-lib-src", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/p84mx4ksfzlvpi12rcxhsaihy0pkynxs-rust-lib-src" + }, + "system": "x86_64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustPlatform.rustLibSrc", + "broken": false, + "derivation": "/nix/store/4ix01cdzxqcpk452lndkw3d3pib10d4w-rust-lib-src.drv", + "install_id": "rust-lib-src", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rust-lib-src", + "pname": "rustLibSrc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "rust-lib-src", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/5dh2vfghjizfwgci0z00hbrpx5vwv6zc-rust-lib-src" + }, + "system": "x86_64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustc", + "broken": false, + "derivation": "/nix/store/sw1xd1imm58b2bq23fasx7jk9yfalw40-rustc-wrapper-1.80.1.drv", + "description": "Safe, concurrent, practical language (wrapper script)", + "install_id": "rustc", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustc-wrapper-1.80.1", + "pname": "rustc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "wrapper-1.80.1", + "outputs_to_install": [ + "out", + "man" + ], + "outputs": { + "doc": "/nix/store/pwm2c3c2msxiy78ms1g1syqcf4zjwidx-rustc-wrapper-1.80.1-doc", + "man": "/nix/store/7irfsbn1g91hn132755l5nr0sajrip7v-rustc-wrapper-1.80.1-man", + "out": "/nix/store/3gggf0knmsc6srxq89gm424rffwma6rv-rustc-wrapper-1.80.1" + }, + "system": "aarch64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustc", + "broken": false, + "derivation": "/nix/store/fs6xy3p7x30l7yx7q8q1cjrdswry8sy9-rustc-wrapper-1.80.1.drv", + "description": "Safe, concurrent, practical language (wrapper script)", + "install_id": "rustc", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": 
"https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustc-wrapper-1.80.1", + "pname": "rustc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "wrapper-1.80.1", + "outputs_to_install": [ + "out", + "man" + ], + "outputs": { + "doc": "/nix/store/ycyqn2r84lmhkd0vf605nz28ngb6qbbm-rustc-wrapper-1.80.1-doc", + "man": "/nix/store/4d1nnd97sdqy7rvndqb9jzcni6q8ibc9-rustc-wrapper-1.80.1-man", + "out": "/nix/store/1b35qfalqp7jdwhd2642cki5b29mlg9w-rustc-wrapper-1.80.1" + }, + "system": "aarch64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustc", + "broken": false, + "derivation": "/nix/store/w4n4a5cvwygrnqfn889s6f1ndsjxbs10-rustc-wrapper-1.80.1.drv", + "description": "Safe, concurrent, practical language (wrapper script)", + "install_id": "rustc", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustc-wrapper-1.80.1", + "pname": "rustc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "wrapper-1.80.1", + "outputs_to_install": [ + "out", + "man" + ], + "outputs": { + "doc": "/nix/store/0b1mwhz7l71wdwxf925q4k8cvmcpx3aq-rustc-wrapper-1.80.1-doc", + "man": "/nix/store/nysgy2cz1bsjixn7igrzglj9s1bh0cfn-rustc-wrapper-1.80.1-man", + "out": "/nix/store/hl60dz242vrknfd15i984xwmzjm36z1b-rustc-wrapper-1.80.1" + }, + "system": "x86_64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustc", + "broken": false, + "derivation": "/nix/store/m5g06mxcks61p06zn7fr5yi822v99x3q-rustc-wrapper-1.80.1.drv", + "description": "Safe, concurrent, practical language (wrapper script)", + "install_id": "rustc", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustc-wrapper-1.80.1", + "pname": "rustc", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "wrapper-1.80.1", + "outputs_to_install": [ + "out", + "man" + ], + "outputs": { + "doc": "/nix/store/iyl50h2sik10vb40kiipiavzlgrrks04-rustc-wrapper-1.80.1-doc", + "man": "/nix/store/6ijm3z013cgcwjs60g5sjmd8i8nscq10-rustc-wrapper-1.80.1-man", + "out": "/nix/store/n4nnqpcaxk621i8lwr86116fv5kfy3jl-rustc-wrapper-1.80.1" + }, + "system": "x86_64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustfmt", + "broken": false, + "derivation": "/nix/store/cqa5g8hhr7l15gzymwb5gdkiq4ixwygy-rustfmt-1.80.1.drv", + "description": "Tool for formatting Rust code according to style guidelines", + "install_id": "rustfmt", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustfmt-1.80.1", + "pname": "rustfmt", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": 
"1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/p2xnaif204agqsmdlgmkypbhlk3isnx8-rustfmt-1.80.1" + }, + "system": "aarch64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustfmt", + "broken": false, + "derivation": "/nix/store/z4j918159p6jfssbf4ky03l0f6kx6bnf-rustfmt-1.80.1.drv", + "description": "Tool for formatting Rust code according to style guidelines", + "install_id": "rustfmt", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustfmt-1.80.1", + "pname": "rustfmt", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/6pwc4hkrsk5pxsd38zcdyjzjcml3hlvh-rustfmt-1.80.1" + }, + "system": "aarch64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustfmt", + "broken": false, + "derivation": "/nix/store/nandqymhz263w22xyrlf83k7njjnpg6f-rustfmt-1.80.1.drv", + "description": "Tool for formatting Rust code according to style guidelines", + "install_id": "rustfmt", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustfmt-1.80.1", + "pname": "rustfmt", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/kh259aj4gv6xnnaa2ayi0q1qmsf98ck0-rustfmt-1.80.1" + }, + "system": "x86_64-darwin", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "rustfmt", + "broken": false, + "derivation": "/nix/store/f5c8hrz13dx5rp6v0j0jnhqyvn9q0c1s-rustfmt-1.80.1.drv", + "description": "Tool for formatting Rust code according to style guidelines", + "install_id": "rustfmt", + "license": "[ MIT, Apache-2.0 ]", + "locked_url": "https://github.com/flox/nixpkgs?rev=5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "name": "rustfmt-1.80.1", + "pname": "rustfmt", + "rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", + "rev_count": 690827, + "rev_date": "2024-10-09T16:51:18Z", + "scrape_date": "2024-10-11T03:53:01Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "1.80.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/mawqalpjhg4764l6rl8nygnzmmph3zbj-rustfmt-1.80.1" + }, + "system": "x86_64-linux", + "group": "rust-toolchain", + "priority": 5 + }, + { + "attr_path": "mprocs", + "broken": false, + "derivation": "/nix/store/5hckam5lgqqmlkjd1r6a0bzkkvhfm349-mprocs-0.7.1.drv", + "description": "TUI tool to run multiple commands in parallel and show the output of each command separately", + "install_id": "mprocs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "mprocs-0.7.1", + "pname": "mprocs", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.7.1", + "outputs_to_install": [ + "out" + 
], + "outputs": { + "out": "/nix/store/pk1mb41ryrw2izq15pymxjwqvw8mr29p-mprocs-0.7.1" + }, + "system": "aarch64-darwin", + "group": "toplevel", + "priority": 5 + }, + { + "attr_path": "mprocs", + "broken": false, + "derivation": "/nix/store/lg4yvv4az18g59l7rgfvn2pqxidx5qgb-mprocs-0.7.1.drv", + "description": "TUI tool to run multiple commands in parallel and show the output of each command separately", + "install_id": "mprocs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "mprocs-0.7.1", + "pname": "mprocs", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.7.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/dynm867fg992nb5wlysvrg1p14cwgsca-mprocs-0.7.1" + }, + "system": "aarch64-linux", + "group": "toplevel", + "priority": 5 + }, + { + "attr_path": "mprocs", + "broken": false, + "derivation": "/nix/store/xbicacfnn0gnz08rri7jwc40byxrm404-mprocs-0.7.1.drv", + "description": "TUI tool to run multiple commands in parallel and show the output of each command separately", + "install_id": "mprocs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "mprocs-0.7.1", + "pname": "mprocs", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.7.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/kx138kp8rxjwph6wqp3lk8v6fybij2bz-mprocs-0.7.1" + }, + "system": "x86_64-darwin", + "group": "toplevel", + "priority": 5 + }, + { + "attr_path": "mprocs", + "broken": false, + "derivation": "/nix/store/ydrq0m8lp0m3pjpd9ndg1x30z3bg81qx-mprocs-0.7.1.drv", + "description": "TUI tool to run multiple commands in parallel and show the output of each command separately", + "install_id": "mprocs", + "license": "MIT", + "locked_url": "https://github.com/flox/nixpkgs?rev=9f4128e00b0ae8ec65918efeba59db998750ead6", + "name": "mprocs-0.7.1", + "pname": "mprocs", + "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6", + "rev_count": 647193, + "rev_date": "2024-07-03T18:27:49Z", + "scrape_date": "2024-07-05T00:14:29Z", + "stabilities": [ + "staging", + "unstable" + ], + "unfree": false, + "version": "0.7.1", + "outputs_to_install": [ + "out" + ], + "outputs": { + "out": "/nix/store/ky3hbhjvsr4l5nh1xjipkbswll37j0dq-mprocs-0.7.1" + }, + "system": "x86_64-linux", + "group": "toplevel", + "priority": 5 + } + ] +} \ No newline at end of file diff --git a/.flox/env/manifest.toml b/.flox/env/manifest.toml new file mode 100644 index 0000000000000..eae42ba88603c --- /dev/null +++ b/.flox/env/manifest.toml @@ -0,0 +1,138 @@ +# +# This is a Flox environment manifest. +# Visit flox.dev/docs/concepts/manifest/ +# or see flox-edit(1), manifest.toml(5) for more information. +# +# Flox manifest version managed by Flox CLI +version = 1 + +# List packages you wish to install in your environment inside +# the `[install]` section. 
+[install] +# Python +python3 = { pkg-path = "python3", version = "3.11", pkg-group = "python" } +uv = { pkg-path = "uv", pkg-group = "python" } +xmlsec = { pkg-path = "xmlsec", pkg-group = "python", version = "1.2.34" } +libtool = { pkg-path = "libtool", pkg-group = "python" } +# Node +nodejs = { pkg-path = "nodejs_18", pkg-group = "nodejs" } +corepack = { pkg-path = "corepack", pkg-group = "nodejs" } +brotli = { pkg-path = "brotli", pkg-group = "nodejs" } +# Rust toolchain (based on https://flox.dev/docs/cookbook/languages/rust/) +cargo.pkg-path = "cargo" +cargo.pkg-group = "rust-toolchain" +cargo.version = "1.80.1" +rustc.pkg-path = "rustc" +rustc.pkg-group = "rust-toolchain" +clippy.pkg-path = "clippy" +clippy.pkg-group = "rust-toolchain" +rustfmt.pkg-path = "rustfmt" +rustfmt.pkg-group = "rust-toolchain" +rust-lib-src.pkg-path = "rustPlatform.rustLibSrc" +rust-lib-src.pkg-group = "rust-toolchain" +libiconv.pkg-path = "libiconv" +libiconv.systems = ["aarch64-darwin"] +libiconv.pkg-group = "rust-toolchain" +# Go +go = { pkg-path = "go", version = "1.22", pkg-group = "go" } +# General CLI tools +mprocs.pkg-path = "mprocs" + +# Set environment variables in the `[vars]` section. These variables may not +# reference one another, and are added to the environment without first +# expanding them. They are available for use in the `[profile]` and `[hook]` +# scripts. +[vars] +DEBUG = "1" +POSTHOG_SKIP_MIGRATION_CHECKS = "1" +DIRENV_LOG_FORMAT = "" # Disable direnv activation logging (in case direnv is present) + +# The `hook.on-activate` script is run by the *bash* shell immediately upon +# activating an environment, and will not be invoked if Flox detects that the +# environment has previously been activated. Variables set by the script will +# be inherited by `[profile]` scripts defined below. Note that any stdout +# generated by the script will be redirected to stderr. +[hook] +on-activate = ''' +# Guide through installing and configuring direnv if it's not present (optionally) +if [[ -t 0 ]] && [ ! -d "$DIRENV_DIR" ] && [ ! -f "$FLOX_ENV_CACHE/.hush-direnv" ]; then + read -p " +šŸ‘‰ Use direnv (https://direnv.net) for automatic activation of this environment by your shell. +ā“ Would you like direnv to be set up now? (y/n, default: y)" -n 1 -r + if [[ $REPLY =~ ^[Yy]$ || -z $REPLY ]]; then + $FLOX_ENV_CACHE/../env/direnv-setup.sh + else + echo "ā­ļø Skipping direnv setup. This message will not be shown again, but if you change your mind, just run '.flox/bin/direnv-setup.sh'" + touch $FLOX_ENV_CACHE/.hush-direnv + fi + echo +fi + +# Set up a Python virtual environment +export PYTHON_DIR="$FLOX_ENV_CACHE/venv" +if [ ! -d "$PYTHON_DIR" ]; then + uv venv "$PYTHON_DIR" --allow-existing +fi + +echo -e "Python virtual environment path: \033[33m.flox/cache/venv\033[0m" +echo -e "Python interpreter path, for your code editor: \033[33m.flox/cache/venv/bin/python\033[0m" + +source "$PYTHON_DIR/bin/activate" + +# Install Python dependencies (this is practically instant thanks to uv) +uv pip install -q -r requirements.txt -r requirements-dev.txt + +# Install top-level Node dependencies (only if not present all yet, because this takes almost a second even with pnpm) +# This also sets up pre-commit hooks via Husky +if [ ! -d "node_modules" ]; then + pnpm install -s +fi + +if [[ -t 0 ]]; then # The block below only runs when in an interactive shell + # Add required entries to /etc/hosts if not present + if ! 
grep -q "127.0.0.1 kafka clickhouse" /etc/hosts; then + echo + echo "šŸšØ Amending /etc/hosts to map hostnames 'kafka' and 'clickhouse' to 127.0.0.1..." + echo "127.0.0.1 kafka clickhouse" | sudo tee -a /etc/hosts 1> /dev/null + echo "āœ… /etc/hosts amended" + fi + + # Print intro message + echo -e " +IT'S DANGEROUS TO GO ALONE! RUN THIS: +1. \033[31mbin/migrate\033[0m - to run all migrations +2. \033[32mbin/start\033[0m - to start the entire stack +3. \033[34m./manage.py generate_demo_data\033[0m - to create a user with demo data +" +fi +''' + +# Scripts defined in the `[profile]` section are *sourced* by *your shell* and +# inherit environment variables set in the `[vars]` section and by `[hook]` scripts. +# The `profile.common` script is sourced by all shells and special care should be +# taken to ensure compatibility with all shells, after which exactly one of +# `profile.{bash,fish,tcsh,zsh}` is sourced by the corresponding shell. +[profile] +bash = ''' + source "$PYTHON_DIR/bin/activate" +''' +zsh = ''' + source "$PYTHON_DIR/bin/activate" +''' +fish = ''' + source "$PYTHON_DIR/bin/activate.fish" +''' +tcsh = ''' + source "$PYTHON_DIR/bin/activate.csh" +''' + +# The `[services]` section of the manifest allows you to define services. +# Services defined here use the packages provided by the `[install]` section +# and any variables you've defined in the `[vars]` section or `hook.on-activate` script. +[services] +# db.command = "postgres -D $FLOX_ENV_CACHE/postgres" + +# Additional options can be set in the `[options]` section. Refer to +# manifest.toml(5) for a list of available options. +[options] +systems = ["aarch64-darwin", "aarch64-linux", "x86_64-darwin", "x86_64-linux"] diff --git a/.github/workflows/container-images-cd.yml b/.github/workflows/container-images-cd.yml index b393214ec72da..c5cacd1dec579 100644 --- a/.github/workflows/container-images-cd.yml +++ b/.github/workflows/container-images-cd.yml @@ -227,3 +227,24 @@ jobs: "labels": ${{ steps.labels.outputs.labels }}, "timestamp": "${{ github.event.head_commit.timestamp }}" } + + - name: Trigger Data Warehouse V2 Temporal Worker Cloud deployment + if: steps.check_changes_data_warehouse_temporal_worker.outputs.changed == 'true' + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ steps.deployer.outputs.token }} + repository: PostHog/charts + event-type: commit_state_update + client-payload: | + { + "values": { + "image": { + "sha": "${{ steps.build.outputs.digest }}" + } + }, + "release": "temporal-worker-data-warehouse-v2", + "commit": ${{ toJson(github.event.head_commit) }}, + "repository": ${{ toJson(github.repository) }}, + "labels": ${{ steps.labels.outputs.labels }}, + "timestamp": "${{ github.event.head_commit.timestamp }}" + } diff --git a/.gitignore b/.gitignore index 362ec30bce107..9228d5f666415 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ +/env venv -env .venv *.pyc __pycache__/ @@ -12,7 +12,6 @@ debug.log *.swo node_modules/ *.code-workspace -node_modules .mypy_cache frontend/.cache/ frontend/dist/ @@ -76,4 +75,4 @@ pyrightconfig.json .temporal-worker-settings temp_test_run_data.json .temp-deepeval-cache.json -.eslintcache \ No newline at end of file +.eslintcache diff --git a/bin/migrate b/bin/migrate index 2f2aa49ed749b..76645d6ae3714 100755 --- a/bin/migrate +++ b/bin/migrate @@ -1,5 +1,6 @@ #!/bin/bash set -e + SCRIPT_DIR=$(dirname "$(readlink -f "$0")") # NOTE when running in docker, rust might not exist so we need to check for it @@ -7,8 +8,12 @@ if [ -d "$SCRIPT_DIR/../rust" ]; then 
bash $SCRIPT_DIR/../rust/bin/migrate-cyclotron fi +( + python manage.py migrate_clickhouse + python manage.py sync_replicated_schema +) & # ClickHouse migrations can run in parallel to Postgres ones + python manage.py migrate -python manage.py migrate_clickhouse # NOTE: we do not apply any non-noop migrations here. Rather these are run # manually within the UI. See https://posthog.com/docs/runbook/async-migrations @@ -20,4 +25,4 @@ python manage.py run_async_migrations --complete-noop-migrations # k8s pod deployments. python manage.py run_async_migrations --check -python manage.py sync_replicated_schema +wait $(jobs -p) # Make sure CH migrations are done before we exit diff --git a/bin/mprocs.yaml b/bin/mprocs.yaml index c7831d818d589..cb3761250d035 100644 --- a/bin/mprocs.yaml +++ b/bin/mprocs.yaml @@ -1,24 +1,26 @@ procs: celery-worker: - shell: 'source ./bin/celery-queues.env && python manage.py run_autoreload_celery --type=worker' + shell: 'bin/check_kafka_clickhouse_up && source ./bin/celery-queues.env && python manage.py run_autoreload_celery --type=worker' celery-beat: - shell: 'source ./bin/celery-queues.env && python manage.py run_autoreload_celery --type=beat' + shell: 'bin/check_kafka_clickhouse_up && source ./bin/celery-queues.env && python manage.py run_autoreload_celery --type=beat' plugin-server: - shell: './bin/plugin-server' + shell: 'bin/check_kafka_clickhouse_up && ./bin/plugin-server' backend: - shell: './bin/start-backend' + shell: 'bin/check_kafka_clickhouse_up && ./bin/start-backend' frontend: - shell: './bin/start-frontend' + shell: 'bin/check_kafka_clickhouse_up && ./bin/start-frontend' temporal-worker: # added a sleep to give the docker stuff time to start - shell: 'sleep 10 && python3 manage.py start_temporal_worker' + shell: 'bin/check_kafka_clickhouse_up && bin/check_temporal_up && python manage.py start_temporal_worker' docker-compose: shell: 'docker compose -f docker-compose.dev.yml up' stop: send-keys: [''] + +mouse_scroll_speed: 1 diff --git a/bin/start b/bin/start index 171656ed0e3ec..ceaddede2140f 100755 --- a/bin/start +++ b/bin/start @@ -2,25 +2,11 @@ set -e -trap "trap - SIGTERM && kill -- -$$" SIGINT SIGTERM EXIT - export DEBUG=${DEBUG:-1} -export SKIP_SERVICE_VERSION_REQUIREMENTS=1 +export SKIP_SERVICE_VERSION_REQUIREMENTS=${SKIP_SERVICE_VERSION_REQUIREMENTS:-1} export BILLING_SERVICE_URL=${BILLING_SERVICE_URL:-https://billing.dev.posthog.dev} export HOG_HOOK_URL=${HOG_HOOK_URL:-http://localhost:3300/hoghook} -service_warning() { - echo -e "\033[0;31m$1 isn't ready. You can run the stack with:\ndocker compose -f docker-compose.dev.yml up\nIf you have already ran that, just make sure that services are starting properly, and sit back.\nWaiting for $1 to start...\033[0m" -} - -nc -z localhost 9092 || ( service_warning 'Kafka'; bin/check_kafka_clickhouse_up ) -curl -s 'http://localhost:8123/ping' || ( service_warning 'ClickHouse'; bin/check_kafka_clickhouse_up ) - [ ! 
-f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=./share/GeoLite2-City.mmdb ) -./bin/start-worker & -./bin/start-backend & -./bin/start-frontend & -./bin/temporal-django-worker & - -wait +exec mprocs --config bin/mprocs.yaml diff --git a/bin/start-mprocs b/bin/start-mprocs deleted file mode 100755 index ceaddede2140f..0000000000000 --- a/bin/start-mprocs +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -set -e - -export DEBUG=${DEBUG:-1} -export SKIP_SERVICE_VERSION_REQUIREMENTS=${SKIP_SERVICE_VERSION_REQUIREMENTS:-1} -export BILLING_SERVICE_URL=${BILLING_SERVICE_URL:-https://billing.dev.posthog.dev} -export HOG_HOOK_URL=${HOG_HOOK_URL:-http://localhost:3300/hoghook} - -[ ! -f ./share/GeoLite2-City.mmdb ] && ( curl -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=./share/GeoLite2-City.mmdb ) - -exec mprocs --config bin/mprocs.yaml diff --git a/cypress/e2e/dashboard.cy.ts b/cypress/e2e/dashboard.cy.ts index cefbb60b6fe69..e130feeedc897 100644 --- a/cypress/e2e/dashboard.cy.ts +++ b/cypress/e2e/dashboard.cy.ts @@ -341,19 +341,6 @@ describe('Dashboard', () => { }) }) - /** - * This test is currently failing because the query that runs when you open the dashboard includes the code - * select equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, 'app_rating'), ''), 'null'), '^"|"$', ''), 5.) from events where event ilike '%rated%'; - * This throws the error Code: 386. DB::Exception: There is no supertype for types String, Float64 because some of them are String/FixedString and some of them are not. (NO_COMMON_TYPE) - * All the 'app_ratings' are extracted as strings and 5. is a float - */ - // it('Opens dashboard item in insights', () => { - // cy.get('[data-attr=dashboard-name]').contains('App Analytics').click() - // cy.get('.InsightCard [data-attr=insight-card-title]').first().click() - // cy.location('pathname').should('include', '/insights') - // cy.get('[data-attr=funnel-bar-vertical]', { timeout: 30000 }).should('exist') - // }) - it('Add insight from empty dashboard', () => { const dashboardName = randomString('dashboard-') dashboards.createAndGoToEmptyDashboard(dashboardName) @@ -363,76 +350,6 @@ describe('Dashboard', () => { cy.get('[data-attr="top-bar-name"] .EditableField__display').contains(dashboardName).should('exist') }) - it('Changing dashboard filter shows updated insights', () => { - const dashboardName = randomString('to add an insight to') - const firstInsight = randomString('insight to add to dashboard') - - // Create and visit a dashboard to get it into turbo mode cache - dashboards.createAndGoToEmptyDashboard(dashboardName) - dashboard.addInsightToEmptyDashboard(firstInsight) - - dashboard.addPropertyFilter() - - cy.get('.PropertyFilterButton').should('have.length', 1) - - // refresh the dashboard by changing date range - cy.get('[data-attr="date-filter"]').click() - cy.contains('span', 'Last 14 days').click() - - cy.wait(2000) - - // insight meta should be updated to show new date range - cy.get('h5').contains('Last 14 days').should('exist') - - cy.get('button').contains('Save').click() - - // should save filters - cy.get('.PropertyFilterButton').should('have.length', 1) - // should save updated date range - cy.get('span').contains('Last 14 days').should('exist') - }) - - // TODO: this test works locally, just not in CI - it.skip('Clicking cancel discards dashboard filter changes', () => { - const dashboardName = randomString('to add an insight to') - const firstInsight 
= randomString('insight to add to dashboard') - - // Create and visit a dashboard to get it into turbo mode cache - dashboards.createAndGoToEmptyDashboard(dashboardName) - dashboard.addInsightToEmptyDashboard(firstInsight) - - // add property filter - cy.get('.PropertyFilterButton').should('have.length', 0) - cy.get('[data-attr="property-filter-0"]').click() - cy.get('[data-attr="taxonomic-filter-searchfield"]').click().type('Browser').wait(1000) - cy.get('[data-attr="prop-filter-event_properties-0"]').click({ force: true }).wait(1000) - cy.get('.LemonInput').type('Chrome') - cy.contains('.LemonButton__content', 'Chrome').click({ force: true }) - - // added property is present - cy.get('.PropertyFilterButton').should('have.length', 1) - - // refresh the dashboard by changing date range - cy.get('[data-attr="date-filter"]').click() - cy.contains('span', 'Last 14 days').click() - - cy.wait(2000) - - // insight meta should be updated to show new date range - // default date range is last 7 days - cy.get('h5').contains('Last 14 days').should('exist') - - // discard changes - cy.get('button').contains('Cancel').click() - - // should reset filters to be empty - cy.get('.PropertyFilterButton').should('have.length', 0) - // should reset date range to no override - cy.get('span').contains('No date range overrid').should('exist') - // should reset insight meta date range - cy.get('h5').contains('Last 7 days').should('exist') - }) - it('clicking on insight carries through dashboard filters', () => { const dashboardName = randomString('to add an insight to') const firstInsight = randomString('insight to add to dashboard') diff --git a/cypress/e2e/events.cy.ts b/cypress/e2e/events.cy.ts index 4badc5f66333f..249e7152785e5 100644 --- a/cypress/e2e/events.cy.ts +++ b/cypress/e2e/events.cy.ts @@ -1,54 +1,26 @@ -const interceptPropertyDefinitions = (): void => { - cy.intercept('/api/event/values?key=%24browser').as('getBrowserValues') - - cy.intercept('api/projects/@current/property_definitions/?limit=5000', { - fixture: 'api/event/property_definitions', - }) - - cy.intercept('/api/projects/*/property_definitions?is_feature_flag=false&search=&*', { - fixture: 'api/event/property_definitions', - }) - - cy.intercept('/api/projects/*/property_definitions?is_feature_flag=false&search=%24time*', { - fixture: 'api/event/only_time_property_definition', - }) - - cy.intercept('/api/projects/*/property_definitions?is_feature_flag=false&search=%24browser*', { - fixture: 'api/event/only_browser_version_property_definition', - }) - - cy.intercept('/api/projects/*/property_definitions?is_feature_flag=true*', { - fixture: 'api/event/feature_flag_property_definition', - }) -} - -const selectNewTimestampPropertyFilter = (): void => { - cy.get('[data-attr="new-prop-filter-EventPropertyFilters.0"]').click() - cy.get('[data-attr=taxonomic-filter-searchfield]').type('$time') - cy.get('.taxonomic-list-row').should('have.length', 1) - cy.get('[data-attr=prop-filter-event_properties-0]').click({ force: true }) -} +describe('Events', () => { + beforeEach(() => { + cy.intercept('/api/event/values?key=%24browser').as('getBrowserValues') -const selectOperator = (operator: string, openPopover: boolean): void => { - if (openPopover) { - cy.get('[data-attr="property-filter-0"] .property-filter .property-filter-button-label').click() - } + cy.intercept('api/projects/@current/property_definitions/?limit=5000', { + fixture: 'api/event/property_definitions', + }) - cy.get('[data-attr="taxonomic-operator"]').click() - 
cy.get('.operator-value-option').its('length').should('eql', 8) - cy.get('.operator-value-option').contains('< before').should('be.visible') - cy.get('.operator-value-option').contains('> after').should('be.visible') + cy.intercept('/api/projects/*/property_definitions?is_feature_flag=false&search=&*', { + fixture: 'api/event/property_definitions', + }) - cy.get('.operator-value-option').contains(operator).click() -} + cy.intercept('/api/projects/*/property_definitions?is_feature_flag=false&search=%24time*', { + fixture: 'api/event/only_time_property_definition', + }) -const changeSecondPropertyFilterToDateAfter = (): void => { - selectOperator('> after', true) -} + cy.intercept('/api/projects/*/property_definitions?is_feature_flag=false&search=%24browser*', { + fixture: 'api/event/only_browser_version_property_definition', + }) -describe('Events', () => { - beforeEach(() => { - interceptPropertyDefinitions() + cy.intercept('/api/projects/*/property_definitions?is_feature_flag=true*', { + fixture: 'api/event/feature_flag_property_definition', + }) cy.intercept('/api/event/values/?key=$browser_version', (req) => { return req.reply([{ name: '96' }, { name: '97' }]) @@ -85,7 +57,11 @@ describe('Events', () => { }) it('use before and after with a DateTime property', () => { - selectNewTimestampPropertyFilter() + // Select the time property + cy.get('[data-attr="new-prop-filter-EventPropertyFilters.0"]').click() + cy.get('[data-attr=taxonomic-filter-searchfield]').type('$time') + cy.get('.taxonomic-list-row').should('have.length', 1) + cy.get('[data-attr=prop-filter-event_properties-0]').click({ force: true }) cy.get('[data-attr="taxonomic-operator"]').click() cy.get('.operator-value-option').should('contain.text', '> after') @@ -122,44 +98,4 @@ describe('Events', () => { cy.wait(500) cy.get('[data-attr="taxonomic-operator"]').should('be.visible') }) - - /** - * Test fails because property filters act on properties.$time but not all events have that property - * - * Needs https://github.com/PostHog/posthog/issues/8250 before can query on timestamp - */ - it.skip('can filter after a date and can filter before it', () => { - cy.intercept(/api\/projects\/\d+\/activity\/explore\/.*/).as('getEvents') - - selectNewTimestampPropertyFilter() - - selectOperator('< before', undefined) - cy.get('[data-attr=taxonomic-value-select]').click() - - cy.get('[data-attr="lemon-calendar-month-previous"]').first().click() - cy.get('[data-attr="lemon-calendar-day"]').first().click() - cy.get('[data-attr="lemon-calendar-select-apply"]').first().click() - cy.get('[data-attr="property-filter-0"]').should('include.text', 'Time < ') - - cy.wait('@getEvents').then(() => { - cy.get('tr.event-row:first-child').should('contain.text', 'a day ago') - cy.get('tr.event-row').should((rows) => { - // test data setup is slightly random so... - expect(rows.length).to.be.greaterThan(50) - expect(rows.length).to.be.lessThan(110) - }) - - changeSecondPropertyFilterToDateAfter() - - cy.wait('@getEvents').then(() => { - // as the seeded events are random(-ish) we can't assert on how long ago they will be - cy.get('tr.event-row:first-child').should('not.contain.text', 'a day ago') - cy.get('tr.event-row').should((rows) => { - // test data setup is slightly random so... 
- expect(rows.length).to.be.greaterThan(5) - expect(rows.length).to.be.lessThan(10) - }) - }) - }) - }) }) diff --git a/cypress/e2e/funnels.cy.ts b/cypress/e2e/funnels.cy.ts deleted file mode 100644 index b80d71a1f5edc..0000000000000 --- a/cypress/e2e/funnels.cy.ts +++ /dev/null @@ -1,75 +0,0 @@ -const TIMEOUT = 30000 // increase timeout for funnel viz as sometimes github actions can be slow - -describe.skip('Funnels', () => { - beforeEach(() => { - // :TRICKY: Race condition populating the first dropdown in funnel - cy.get('[data-test-filters-loading]').should('not.exist') - cy.get('[data-attr=insight-funnels-tab]').click() - cy.wait(200) - }) - - it('Add only events to funnel', () => { - cy.get('[data-attr=add-action-event-button]').first().click() - - cy.get('[data-attr=save-funnel-button]').click() // `save-funnel-button` is actually calculate, keeping around to avoid losing data - - cy.get('[data-attr=funnel-bar-horizontal]', { timeout: TIMEOUT }).should('exist') - }) - - it('Add 1 action to funnel and navigate to persons', () => { - cy.get('[data-attr=add-action-event-button]').first().click() - cy.get('[data-attr=trend-element-subject-0]').click() - cy.get('[data-attr=taxonomic-tab-actions]').click() - - cy.wait(200) - cy.contains('Hogflix homepage view').click() - - cy.get('[data-attr=save-funnel-button]').click() - - cy.get('[data-attr=funnel-bar-horizontal]', { timeout: TIMEOUT }).should('exist') - - cy.get('[data-attr="funnel-person"] a') - .filter(':contains("@")') - .first() - .then(($match) => { - const email = $match.text() - - cy.wrap($match).click() - - cy.url().should('include', '/person/') - cy.contains(email).should('exist') - }) - }) - - it('Apply date filter to funnel', () => { - cy.get('[data-attr=add-action-event-button]').first().click() - cy.get('[data-attr=trend-element-subject-0]').click() - cy.get('[data-attr=taxonomic-tab-actions]').click() - cy.contains('Hogflix homepage view').click() - cy.get('[data-attr=save-funnel-button]').click() - - cy.get('[data-attr=date-filter]').click() - cy.contains('Last 30 days').click() - - cy.get('[data-attr=date-filter]').click() - cy.contains('Last 30 days').click() - - cy.get('[data-attr=funnel-bar-horizontal]', { timeout: TIMEOUT }).should('exist') - }) - - it('Add 2 actions to funnel', () => { - cy.get('[data-attr=add-action-event-button]').first().click() - cy.get('[data-attr=trend-element-subject-0]').click() - cy.get('[data-attr=taxonomic-tab-actions]').click() - cy.contains('Hogflix homepage view').click() - - cy.get('[data-attr=add-action-event-button]').first().click() - cy.get('[data-attr=trend-element-subject-1]').click() - cy.get('[data-attr=taxonomic-tab-actions]').click() - cy.contains('Hogflix paid').click() - - cy.get('[data-attr=save-funnel-button]').click() - - cy.get('[data-attr=funnel-bar-horizontal]', { timeout: TIMEOUT }).should('exist') - }) -}) diff --git a/cypress/e2e/insights-navigation-open-directly.cy.ts b/cypress/e2e/insights-navigation-open-directly.cy.ts index 5cec241a07529..1a2ab16fe7c95 100644 --- a/cypress/e2e/insights-navigation-open-directly.cy.ts +++ b/cypress/e2e/insights-navigation-open-directly.cy.ts @@ -30,12 +30,6 @@ describe('Insights', () => { cy.get('.funnels-empty-state__title').should('exist') }) - it.skip('can open a new retention insight', () => { - insight.newInsight('RETENTION') - cy.get('.RetentionContainer canvas').should('exist') - cy.get('.RetentionTable__Tab').should('have.length', 66) - }) - it('can open a new paths insight', () => { insight.newInsight('PATHS') 
cy.get('.Paths g').should('have.length.gte', 5) // not a fixed value unfortunately diff --git a/cypress/e2e/insights-reload-query.ts b/cypress/e2e/insights-reload-query.ts new file mode 100644 index 0000000000000..2f944f8993da7 --- /dev/null +++ b/cypress/e2e/insights-reload-query.ts @@ -0,0 +1,48 @@ +import JSONCrush from 'jsoncrush' + +describe('ReloadInsight component', () => { + beforeEach(() => { + // Clear local storage before each test to ensure a clean state + cy.clearLocalStorage() + }) + + it('saves the query to the URL and localStorage, and reloads it when prompted', () => { + // Visit the new insight creation page + cy.visit('/insights/new') + + cy.wait(2000) + + cy.get('[data-attr="math-selector-0"]').click({ force: true }) + cy.get('[data-attr="math-dau-0"]').click({ force: true }) + + // Check that the 'draft-query' item is stored in localStorage + cy.window().then((window) => { + const currentTeamId = window.POSTHOG_APP_CONTEXT.current_team.id + const draftQuery = window.localStorage.getItem(`draft-query-${currentTeamId}`) + expect(draftQuery).to.not.be.null + + const draftQueryObjUncrushed = JSONCrush.uncrush(draftQuery) + const draftQueryObj = JSON.parse(draftQueryObjUncrushed) + + expect(draftQueryObj).to.have.property('query') + + const firstSeries = draftQueryObj.query.source.series[0] + + expect(firstSeries).to.include({ + event: '$pageview', + math: 'dau', + }) + }) + + // Navigate away to the "Saved Insights" page + cy.visit('/saved_insights') + + // Verify that the ReloadInsight component displays a message about the unsaved insight + cy.contains('You have an unsaved insight from').should('exist') + + // Click the link to reload the unsaved insight + cy.contains('Click here').click() + + cy.get('[data-attr="math-selector-0"]').should('contain', 'Unique users') + }) +}) diff --git a/cypress/e2e/insights-saved.cy.ts b/cypress/e2e/insights-saved.cy.ts index 748c0984543f3..adaf2bf3c59c0 100644 --- a/cypress/e2e/insights-saved.cy.ts +++ b/cypress/e2e/insights-saved.cy.ts @@ -2,33 +2,9 @@ import { urls } from 'scenes/urls' import { createInsight } from '../productAnalytics' -chai.Assertion.addMethod('neverHaveChild', function (childSelector) { - this._obj.on('DOMNodeInserted', () => { - const matchCount = cy.$$(childSelector, this._obj).length - if (matchCount > 0) { - throw new Error( - `Expected element to never have child ${childSelector}, but found ${matchCount} match${ - matchCount > 1 ? 
'es' : '' - }` - ) - } - }) -}) - // For tests related to trends please check trendsElements.js // insight tests were split up because Cypress was struggling with this many tests in one filešŸ™ˆ describe('Insights - saved', () => { - // TODO: this test works locally, just not in CI - // also change 'neverHaveChild' check to start right after page loads - it.skip('Data is available immediately', () => { - createInsight('saved insight').then((newInsightId) => { - cy.get('[data-attr=trend-line-graph]').should('exist') // Results cached - cy.visit(urls.insightView(newInsightId)) // Full refresh - cy.get('.InsightViz').should('exist').should('neverHaveChild', '.insight-empty-state') // Only cached data - cy.get('[data-attr=trend-line-graph]').should('exist') - }) - }) - it('If cache empty, initiate async refresh', () => { cy.intercept('GET', /\/api\/environments\/\d+\/insights\/?\?[^/]*?refresh=async/).as('getInsightsRefreshAsync') let newInsightId: string diff --git a/cypress/e2e/notebooks.cy.ts b/cypress/e2e/notebooks.cy.ts index 3022d621ba63d..8ee9adb4781d6 100644 --- a/cypress/e2e/notebooks.cy.ts +++ b/cypress/e2e/notebooks.cy.ts @@ -52,18 +52,6 @@ describe('Notebooks', () => { cy.get('.NotebookRecordingTimestamp.opacity-50').should('not.exist') }) - // FIXME: fix commenting on recordings - it.skip('Can comment on a recording', () => { - cy.visit(urls.replay()) - - cy.get('[data-attr="notebooks-add-button"]').click() - cy.get('[data-attr="notebooks-select-button-create"]').click() - - cy.get('.Notebook.Notebook--editable').should('be.visible') - cy.get('.ph-recording.NotebookNode').should('be.visible') - cy.get('.NotebookRecordingTimestamp').should('contain.text', '0:00') - }) - describe('text types', () => { beforeEach(() => { cy.get('li').contains('Notebooks').should('exist').click() diff --git a/cypress/e2e/onboarding.cy.ts b/cypress/e2e/onboarding.cy.ts index 3ffd5ccc4bc27..cc83169513100 100644 --- a/cypress/e2e/onboarding.cy.ts +++ b/cypress/e2e/onboarding.cy.ts @@ -3,9 +3,7 @@ import { setupFeatureFlags } from '../support/decide' describe('Onboarding', () => { beforeEach(() => { cy.intercept('/api/billing/', { fixture: 'api/billing/billing-unsubscribed.json' }) - setupFeatureFlags({ - 'product-intro-pages': 'test', - }) + setupFeatureFlags({ 'product-intro-pages': 'test' }) }) it('Navigate between /products to /onboarding to a product intro page', () => { @@ -33,145 +31,4 @@ describe('Onboarding', () => { cy.get('[data-attr=start-onboarding]').should('be.visible') cy.get('[data-attr=skip-onboarding]').should('not.exist') }) - - // it('Step through PA onboarding', () => { - // cy.visit('/products') - - // // Get started on product analytics onboarding - // cy.get('[data-attr=product_analytics-onboarding-card]').click() - - // // Installation should be complete - // cy.get('svg.LemonIcon.text-success').should('exist') - // cy.get('svg.LemonIcon.text-success').parent().should('contain', 'Installation complete') - - // // Continue to configuration step - // cy.get('[data-attr=sdk-continue]').click() - - // // Confirm the appropriate breadcrumb is highlighted - // cy.get('[data-attr=onboarding-breadcrumbs] > :nth-child(3) > * span').should('contain', 'Configure') - // cy.get('[data-attr=onboarding-breadcrumbs] > :nth-child(3) > * span').should('not.have.css', 'text-muted') - - // // Continue to plans - // cy.get('[data-attr=onboarding-continue]').click() - - // // Verify pricing table visible - // cy.get('.BillingHero').should('be.visible') - // 
cy.get('table.PlanComparison').should('be.visible') - - // // Confirm buttons on pricing comparison - // cy.get('[data-attr=upgrade-Paid] .LemonButton__content').should('have.text', 'Upgrade') - // cy.get('[data-attr=upgrade-Free] .LemonButton__content').should('have.text', 'Current plan') - - // // Continue - // cy.get('[data-attr=onboarding-skip-button]').click() - - // // Click back to Install step - // cy.get('[data-attr=onboarding-breadcrumbs] > :first-child > * span').click() - - // // Continue through to finish - // cy.get('[data-attr=sdk-continue]').click() - // cy.get('[data-attr=onboarding-continue]').click() - // cy.get('[data-attr=onboarding-skip-button]').click() - // cy.get('[data-attr=onboarding-continue]').click() - - // // Confirm we're on the insights list page - // cy.url().should('contain', 'project/1/insights') - - // cy.visit('/onboarding/product_analytics?step=product_intro') - - // // Should see both an option to skip onboarding and an option to see the sdk instructions - // cy.get('[data-attr=skip-onboarding]').should('be.visible') - // cy.get('[data-attr=start-onboarding-sdk]').should('be.visible') - - // cy.get('[data-attr=skip-onboarding]').first().click() - // cy.url().should('contain', 'project/1/insights') - - // cy.visit('/onboarding/product_analytics?step=product_intro') - // cy.get('[data-attr=start-onboarding-sdk]').first().click() - // cy.url().should('contain', 'project/1/onboarding/product_analytics?step=install') - - // cy.visit('/products') - // cy.get('[data-attr=return-to-product_analytics] > svg').click() - // cy.url().should('contain', 'project/1/insights') - // }) - - // it('Step through SR onboarding', () => { - // cy.visit('/products') - // cy.get('[data-attr=session_replay-onboarding-card]').click() - - // // Installation should be complete - // cy.get('svg.LemonIcon.text-success').should('exist') - // cy.get('svg.LemonIcon.text-success').parent().should('contain', 'Installation complete') - // // Continue to configuration step - // cy.get('[data-attr=sdk-continue]').click() - // // Continue to plans - // cy.get('[data-attr=onboarding-continue]').click() - // // Verify pricing table visible - // cy.get('.BillingHero').should('be.visible') - // cy.get('table.PlanComparison').should('be.visible') - // // Confirm buttons on pricing comparison - // cy.get('[data-attr=upgrade-Paid] .LemonButton__content').should('have.text', 'Upgrade') - // cy.get('[data-attr=upgrade-Free] .LemonButton__content').should('have.text', 'Current plan') - // // Continue through to finish - // cy.get('[data-attr=onboarding-skip-button]').click() - // cy.get('[data-attr=onboarding-continue]').click() - // // Confirm we're on the recordings list page - // cy.url().should('contain', 'project/1/replay/recent') - // cy.visit('/onboarding/session_replay?step=product_intro') - // cy.get('[data-attr=skip-onboarding]').should('be.visible') - // cy.get('[data-attr=start-onboarding-sdk]').should('not.exist') - // }) - - // it('Step through FF onboarding', () => { - // cy.visit('/onboarding/feature_flags?step=product_intro') - // cy.get('[data-attr=start-onboarding-sdk]').first().click() - // cy.get('[data-attr=sdk-continue]').click() - - // // Confirm the appropriate breadcrumb is highlighted - // cy.get('[data-attr=onboarding-breadcrumbs] > :nth-child(5) > * span').should('contain', 'Plans') - // cy.get('[data-attr=onboarding-breadcrumbs] > :nth-child(3) > * span').should('not.have.css', 'text-muted') - - // cy.get('[data-attr=onboarding-skip-button]').click() - // 
cy.get('[data-attr=onboarding-continue]').click() - - // cy.url().should('contain', '/feature_flags') - - // cy.visit('/onboarding/feature_flags?step=product_intro') - - // cy.get('[data-attr=skip-onboarding]').should('be.visible') - // cy.get('[data-attr=start-onboarding-sdk]').should('be.visible') - - // cy.get('[data-attr=skip-onboarding]').first().click() - // }) - - // it('Step through Surveys onboarding', () => { - // cy.visit('/onboarding/surveys?step=product_intro') - // cy.get('[data-attr=skip-onboarding]').should('be.visible') - // cy.get('[data-attr=start-onboarding-sdk]').should('not.exist') - // cy.get('[data-attr=skip-onboarding]').first().click() - // cy.url().should('contain', 'survey_templates') - - // cy.visit('/products') - // cy.get('[data-attr=surveys-onboarding-card]').click() - // // Installation should be complete - // cy.get('svg.LemonIcon.text-success').should('exist') - // cy.get('svg.LemonIcon.text-success').parent().should('contain', 'Installation complete') - - // // Continue to configuration step - // cy.get('[data-attr=sdk-continue]').click() - - // // Verify pricing table visible - // cy.get('.BillingHero').should('be.visible') - // cy.get('table.PlanComparison').should('be.visible') - - // // Confirm buttons on pricing comparison - // cy.get('[data-attr=upgrade-Paid] .LemonButton__content').should('have.text', 'Upgrade') - // cy.get('[data-attr=upgrade-Free] .LemonButton__content').should('have.text', 'Current plan') - - // // Continue - // cy.get('[data-attr=onboarding-skip-button]').click() - // cy.get('[data-attr=onboarding-continue]').click() - - // cy.url().should('contain', '/survey_templates') - // }) }) diff --git a/cypress/fixtures/api/notebooks/notebook.json b/cypress/fixtures/api/notebooks/notebook.json index a2e4cb7430eea..d9a9a0b2694d6 100644 --- a/cypress/fixtures/api/notebooks/notebook.json +++ b/cypress/fixtures/api/notebooks/notebook.json @@ -60,5 +60,6 @@ "first_name": "Employee 427", "email": "test@posthog.com", "is_email_verified": null - } + }, + "user_access_level": "editor" } diff --git a/cypress/fixtures/api/notebooks/notebooks.json b/cypress/fixtures/api/notebooks/notebooks.json index 1cfd9a7850315..7c52f5bfc985d 100644 --- a/cypress/fixtures/api/notebooks/notebooks.json +++ b/cypress/fixtures/api/notebooks/notebooks.json @@ -65,7 +65,8 @@ "first_name": "Employee 427", "email": "test@posthog.com", "is_email_verified": null - } + }, + "user_access_level": "editor" } ] } diff --git a/ee/hogai/taxonomy.py b/ee/hogai/taxonomy.py index 5c7feccbd1af5..5b78eb38d5f12 100644 --- a/ee/hogai/taxonomy.py +++ b/ee/hogai/taxonomy.py @@ -132,6 +132,11 @@ class CoreFilterDefinition(TypedDict): "description": "When a user interacts with a feature.", "ignored_in_assistant": True, # Specific to posthog-js/react, niche }, + "$feature_enrollment_update": { + "label": "Feature Enrollment", + "description": "When a user enrolls with a feature.", + "ignored_in_assistant": True, # Specific to EarlyAccessFeatureEnrollment, niche + }, "$capture_metrics": { "label": "Capture Metrics", "description": "Metrics captured with values pertaining to your systems at a specific point in time", diff --git a/frontend/__snapshots__/components-sharing--dashboard-sharing--dark.png b/frontend/__snapshots__/components-sharing--dashboard-sharing--dark.png index e23321530c8ff..9ab3d98f3577d 100644 Binary files a/frontend/__snapshots__/components-sharing--dashboard-sharing--dark.png and b/frontend/__snapshots__/components-sharing--dashboard-sharing--dark.png differ diff --git 
a/frontend/__snapshots__/components-sharing--dashboard-sharing--light.png b/frontend/__snapshots__/components-sharing--dashboard-sharing--light.png index 87fd2d661366e..e904c3bf6d99a 100644 Binary files a/frontend/__snapshots__/components-sharing--dashboard-sharing--light.png and b/frontend/__snapshots__/components-sharing--dashboard-sharing--light.png differ diff --git a/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--dark.png b/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--dark.png index 697989b30eb5c..8563fab7bb03f 100644 Binary files a/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--dark.png and b/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--dark.png differ diff --git a/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--light.png b/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--light.png index 37aac937be4dc..a50de7a9d7eda 100644 Binary files a/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--light.png and b/frontend/__snapshots__/components-sharing--dashboard-sharing-licensed--light.png differ diff --git a/frontend/__snapshots__/components-sharing--insight-sharing--dark.png b/frontend/__snapshots__/components-sharing--insight-sharing--dark.png index ee46e956f9965..6683dfaec9684 100644 Binary files a/frontend/__snapshots__/components-sharing--insight-sharing--dark.png and b/frontend/__snapshots__/components-sharing--insight-sharing--dark.png differ diff --git a/frontend/__snapshots__/components-sharing--insight-sharing--light.png b/frontend/__snapshots__/components-sharing--insight-sharing--light.png index d352a9d91d9af..4f3142465f5a4 100644 Binary files a/frontend/__snapshots__/components-sharing--insight-sharing--light.png and b/frontend/__snapshots__/components-sharing--insight-sharing--light.png differ diff --git a/frontend/__snapshots__/components-sharing--insight-sharing-licensed--dark.png b/frontend/__snapshots__/components-sharing--insight-sharing-licensed--dark.png index 9fc969c540f49..af3b2f58aceaf 100644 Binary files a/frontend/__snapshots__/components-sharing--insight-sharing-licensed--dark.png and b/frontend/__snapshots__/components-sharing--insight-sharing-licensed--dark.png differ diff --git a/frontend/__snapshots__/components-sharing--insight-sharing-licensed--light.png b/frontend/__snapshots__/components-sharing--insight-sharing-licensed--light.png index 8a8d5000bbba0..92014379a2bda 100644 Binary files a/frontend/__snapshots__/components-sharing--insight-sharing-licensed--light.png and b/frontend/__snapshots__/components-sharing--insight-sharing-licensed--light.png differ diff --git a/frontend/__snapshots__/components-sharing--recording-sharing-licensed--dark.png b/frontend/__snapshots__/components-sharing--recording-sharing-licensed--dark.png index 391bffcae7f8f..8b7df18f2525f 100644 Binary files a/frontend/__snapshots__/components-sharing--recording-sharing-licensed--dark.png and b/frontend/__snapshots__/components-sharing--recording-sharing-licensed--dark.png differ diff --git a/frontend/__snapshots__/components-sharing--recording-sharing-licensed--light.png b/frontend/__snapshots__/components-sharing--recording-sharing-licensed--light.png index 08eb57f620393..74e95371f39a3 100644 Binary files a/frontend/__snapshots__/components-sharing--recording-sharing-licensed--light.png and b/frontend/__snapshots__/components-sharing--recording-sharing-licensed--light.png differ diff --git 
a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png index 2392b90f55117..bfa0bb8c5d56d 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png index 130aa9a287ffd..d3b511f07c042 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png index 2449e1ce566bd..d89a4dc74d0c7 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png index e757987b40dd5..6568e076f39bb 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png index a6d3889b23a43..24cbcf3db543d 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png index 9487f3b16e926..0ba496622b9f9 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png index daf12837bf697..c7c70e450fbeb 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png index aa8ea4959a5cc..5f0732ee1d4f2 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png 
b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png index ee27b11bed568..4b405550412a0 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png index 00916265cd38b..02bc921745ecd 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png index 248d1f34b318a..9f3435ec919df 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png index ebbdd65ee5643..3a86dc325987a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--dark.png b/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--dark.png index a4e1979e55cf6..4748657f64e2d 100644 Binary files a/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--dark.png and b/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png index 9533045495757..8f7f4f982a03f 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png index e6876dcaf9403..554d344888b78 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png index c299d9caf9d5d..c36d7a90c5649 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png index 3a632563f3dd4..7897cc41d9912 100644 
Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png index 9533045495757..8f7f4f982a03f 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png index e6876dcaf9403..554d344888b78 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png index 32d397abf284c..8de0050efb48e 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png index 7ae06bd540053..d1592c6883fe3 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png index d2302cf2491a4..37aa45a49e13b 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png index ccab613598663..0ba27182db8f5 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png index d16b8c42a3cd8..13b181094b3d0 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png index 44413dfa61cd9..706089d18de26 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png and 
b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png index 819445ff0060d..1d2ef8694de75 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png index 04dd24b8ec91d..5847e08ebc0a0 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png index 6b8fe86fd9941..f0f28d4fc91e8 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png index 5fff2e13f4e21..148ac6cae53ff 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png differ diff --git a/frontend/src/layout/ErrorProjectUnavailable.tsx b/frontend/src/layout/ErrorProjectUnavailable.tsx index 1888661bb42de..c38571870c723 100644 --- a/frontend/src/layout/ErrorProjectUnavailable.tsx +++ b/frontend/src/layout/ErrorProjectUnavailable.tsx @@ -3,6 +3,7 @@ import { useValues } from 'kea' import { PageHeader } from 'lib/components/PageHeader' import { useEffect, useState } from 'react' import { CreateOrganizationModal } from 'scenes/organization/CreateOrganizationModal' +import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' @@ -11,6 +12,7 @@ import { organizationLogic } from '../scenes/organizationLogic' export function ErrorProjectUnavailable(): JSX.Element { const { projectCreationForbiddenReason } = useValues(organizationLogic) const { user } = useValues(userLogic) + const { currentTeam } = useValues(teamLogic) const [options, setOptions] = useState([]) useEffect(() => { @@ -45,7 +47,8 @@ export function ErrorProjectUnavailable(): JSX.Element { {!user?.organization ? ( - ) : user?.team && !user.organization?.teams.some((team) => team.id === user?.team?.id) ? ( + ) : (user?.team && !user.organization?.teams.some((team) => team.id === user?.team?.id || user.team)) || + currentTeam?.user_access_level === 'none' ? ( <>

Project access has been removed

diff --git a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx index 34c18f4fc6ff2..a99679f92f88d 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx @@ -1,6 +1,6 @@ import './SidePanel.scss' -import { IconEllipsis, IconFeatures, IconGear, IconInfo, IconNotebook, IconSupport } from '@posthog/icons' +import { IconEllipsis, IconFeatures, IconGear, IconInfo, IconLock, IconNotebook, IconSupport } from '@posthog/icons' import { LemonButton, LemonMenu, LemonMenuItems, LemonModal } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' @@ -16,6 +16,7 @@ import { import { themeLogic } from '~/layout/navigation-3000/themeLogic' import { SidePanelTab } from '~/types' +import { SidePanelAccessControl } from './panels/access_control/SidePanelAccessControl' import { SidePanelActivation, SidePanelActivationIcon } from './panels/activation/SidePanelActivation' import { SidePanelActivity, SidePanelActivityIcon } from './panels/activity/SidePanelActivity' import { SidePanelDiscussion, SidePanelDiscussionIcon } from './panels/discussion/SidePanelDiscussion' @@ -87,6 +88,11 @@ export const SIDE_PANEL_TABS: Record< Content: SidePanelStatus, noModalSupport: true, }, + [SidePanelTab.AccessControl]: { + label: 'Access control', + Icon: IconLock, + Content: SidePanelAccessControl, + }, } const DEFAULT_WIDTH = 512 diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/AccessControlObject.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/AccessControlObject.tsx new file mode 100644 index 0000000000000..93e14755e12d5 --- /dev/null +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/AccessControlObject.tsx @@ -0,0 +1,383 @@ +import { IconX } from '@posthog/icons' +import { + LemonBanner, + LemonButton, + LemonDialog, + LemonInputSelect, + LemonSelect, + LemonSelectProps, + LemonTable, +} from '@posthog/lemon-ui' +import { BindLogic, useActions, useAsyncActions, useValues } from 'kea' +import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' +import { upgradeModalLogic } from 'lib/components/UpgradeModal/upgradeModalLogic' +import { UserSelectItem } from 'lib/components/UserSelectItem' +import { LemonTableColumns } from 'lib/lemon-ui/LemonTable' +import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' +import { ProfileBubbles, ProfilePicture } from 'lib/lemon-ui/ProfilePicture' +import { capitalizeFirstLetter } from 'lib/utils' +import { useEffect, useState } from 'react' +import { urls } from 'scenes/urls' +import { userLogic } from 'scenes/userLogic' + +import { + AccessControlType, + AccessControlTypeMember, + AccessControlTypeRole, + AvailableFeature, + OrganizationMemberType, +} from '~/types' + +import { accessControlLogic, AccessControlLogicProps } from './accessControlLogic' + +export function AccessControlObject(props: AccessControlLogicProps): JSX.Element | null { + const { canEditAccessControls, humanReadableResource } = useValues(accessControlLogic(props)) + + const suffix = `this ${humanReadableResource}` + + return ( + +

+ {canEditAccessControls === false ? ( + + You don't have permission to edit access controls for {suffix}. +
+ You must be its creator, a Project Admin, or an Organization Admin.
+ ) : null} +

Default access to {suffix}

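As context for the default-access selector that follows: changing it ultimately issues a PUT against the object's access_controls endpoint, per accessControlLogic later in this diff. A sketch of the request shapes, assuming a dashboard with id 123; the URL pattern and field names are taken from that logic, while the concrete resource, ids, and levels are made-up examples.

import api from 'lib/api'

// Illustrative request shapes mirrored from accessControlLogic further down in
// this diff. The resource ('dashboard') and the ids are placeholder examples.
export async function exampleAccessControlUpdates(): Promise<void> {
    const endpoint = 'api/projects/@current/dashboards/123/access_controls'

    // Object-wide default (no member or role in the payload):
    await api.put(endpoint, { access_level: 'editor' })

    // Explicit grant for a single organization member:
    await api.put(endpoint, { organization_member: 'member-uuid', access_level: 'viewer' })

    // Explicit grant for a role; sending access_level: null removes the entry again:
    await api.put(endpoint, { role: 'role-id', access_level: null })
}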
+ + +

Members

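The members table below, and the roles table in the next section, are both rendered from the access-controls response that accessControlLogic loads further down in this diff. A sketch of that response's shape; the field names mirror the loader's fallback object, and the row values are illustrative only.

// Field names mirror the fallback object in accessControlLogic's loader below;
// the row values are illustrative.
export const exampleAccessControlsResponse = {
    access_controls: [
        // Default entry: neither organization_member nor role is set
        { access_level: 'editor', organization_member: null, role: null },
        // Member-specific entry, surfaced in the "Members" table
        { access_level: 'viewer', organization_member: 'member-uuid', role: null },
        // Role-specific entry, surfaced in the "Roles" table
        { access_level: 'editor', organization_member: null, role: 'role-id' },
    ],
    available_access_levels: ['none', 'viewer', 'editor'],
    user_access_level: 'editor',
    default_access_level: 'none',
    user_can_edit_access_levels: true,
}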
+ + + + +

Roles

+ + + +
+ + ) +} + +function AccessControlObjectDefaults(): JSX.Element | null { + const { accessControlDefault, accessControlDefaultOptions, accessControlsLoading, canEditAccessControls } = + useValues(accessControlLogic) + const { updateAccessControlDefault } = useActions(accessControlLogic) + const { guardAvailableFeature } = useValues(upgradeModalLogic) + + return ( + { + guardAvailableFeature(AvailableFeature.PROJECT_BASED_PERMISSIONING, () => { + updateAccessControlDefault(newValue) + }) + }} + disabledReason={ + accessControlsLoading ? 'Loadingā€¦' : !canEditAccessControls ? 'You cannot edit this' : undefined + } + dropdownMatchSelectWidth={false} + options={accessControlDefaultOptions} + /> + ) +} + +function AccessControlObjectUsers(): JSX.Element | null { + const { user } = useValues(userLogic) + const { membersById, addableMembers, accessControlMembers, accessControlsLoading, availableLevels } = + useValues(accessControlLogic) + const { updateAccessControlMembers } = useAsyncActions(accessControlLogic) + const { guardAvailableFeature } = useValues(upgradeModalLogic) + + if (!user) { + return null + } + + const member = (ac: AccessControlTypeMember): OrganizationMemberType => { + return membersById[ac.organization_member] + } + + // TODO: WHAT A MESS - Fix this to do the index mapping beforehand... + const columns: LemonTableColumns = [ + { + key: 'user_profile_picture', + render: function ProfilePictureRender(_, ac) { + return + }, + width: 32, + }, + { + title: 'Name', + key: 'user_first_name', + render: (_, ac) => ( + + {member(ac)?.user.uuid == user.uuid + ? `${member(ac)?.user.first_name} (you)` + : member(ac)?.user.first_name} + + ), + sorter: (a, b) => member(a)?.user.first_name.localeCompare(member(b)?.user.first_name), + }, + { + title: 'Email', + key: 'user_email', + render: (_, ac) => member(ac)?.user.email, + sorter: (a, b) => member(a)?.user.email.localeCompare(member(b)?.user.email), + }, + { + title: 'Level', + key: 'level', + width: 0, + render: function LevelRender(_, { access_level, organization_member }) { + return ( +
+ + void updateAccessControlMembers([{ member: organization_member, level }]) + } + /> +
+ ) + }, + }, + { + key: 'remove', + width: 0, + render: (_, { organization_member }) => { + return ( + + void updateAccessControlMembers([{ member: organization_member, level: null }]) + } + /> + ) + }, + }, + ] + + return ( +
+ { + if (guardAvailableFeature(AvailableFeature.PROJECT_BASED_PERMISSIONING)) { + await updateAccessControlMembers(newValues.map((member) => ({ member, level }))) + } + }} + options={addableMembers.map((member) => ({ + key: member.id, + label: `${member.user.first_name} ${member.user.email}`, + labelComponent: , + }))} + /> + + +
+ ) +} + +function AccessControlObjectRoles(): JSX.Element | null { + const { accessControlRoles, accessControlsLoading, addableRoles, rolesById, availableLevels } = + useValues(accessControlLogic) + const { updateAccessControlRoles } = useAsyncActions(accessControlLogic) + const { guardAvailableFeature } = useValues(upgradeModalLogic) + + const columns: LemonTableColumns = [ + { + title: 'Role', + key: 'role', + width: 0, + render: (_, { role }) => ( + + + + ), + }, + { + title: 'Members', + key: 'members', + render: (_, { role }) => { + return ( + ({ + email: member.user.email, + name: member.user.first_name, + title: `${member.user.first_name} <${member.user.email}>`, + })) ?? [] + } + /> + ) + }, + }, + { + title: 'Level', + key: 'level', + width: 0, + render: (_, { access_level, role }) => { + return ( +
+ void updateAccessControlRoles([{ role, level }])} + /> +
+ ) + }, + }, + { + key: 'remove', + width: 0, + render: (_, { role }) => { + return ( + void updateAccessControlRoles([{ role, level: null }])} + /> + ) + }, + }, + ] + + return ( +
+ { + if (guardAvailableFeature(AvailableFeature.PROJECT_BASED_PERMISSIONING)) { + await updateAccessControlRoles(newValues.map((role) => ({ role, level }))) + } + }} + options={addableRoles.map((role) => ({ + key: role.id, + label: role.name, + }))} + /> + + +
+ ) +} + +function SimplLevelComponent(props: { + size?: LemonSelectProps['size'] + level: AccessControlType['access_level'] | null + levels: AccessControlType['access_level'][] + onChange: (newValue: AccessControlType['access_level']) => void +}): JSX.Element | null { + const { canEditAccessControls } = useValues(accessControlLogic) + + return ( + props.onChange(newValue)} + disabledReason={!canEditAccessControls ? 'You cannot edit this' : undefined} + options={props.levels.map((level) => ({ + value: level, + label: capitalizeFirstLetter(level ?? ''), + }))} + /> + ) +} + +function RemoveAccessButton({ + onConfirm, + subject, +}: { + onConfirm: () => void + subject: 'member' | 'role' +}): JSX.Element { + const { canEditAccessControls } = useValues(accessControlLogic) + + return ( + } + status="danger" + size="small" + disabledReason={!canEditAccessControls ? 'You cannot edit this' : undefined} + onClick={() => + LemonDialog.open({ + title: 'Remove access', + content: `Are you sure you want to remove this ${subject}'s explicit access?`, + primaryButton: { + children: 'Remove', + status: 'danger', + onClick: () => onConfirm(), + }, + }) + } + /> + ) +} + +function AddItemsControls(props: { + placeholder: string + onAdd: (newValues: string[], level: AccessControlType['access_level']) => Promise + options: { + key: string + label: string + }[] +}): JSX.Element | null { + const { availableLevels, canEditAccessControls } = useValues(accessControlLogic) + // TODO: Move this into a form logic + const [items, setItems] = useState([]) + const [level, setLevel] = useState(availableLevels[0] ?? null) + + useEffect(() => { + setLevel(availableLevels[0] ?? null) + }, [availableLevels]) + + const onSubmit = + items.length && level + ? (): void => + void props.onAdd(items, level).then(() => { + setItems([]) + setLevel(availableLevels[0] ?? null) + }) + : undefined + + return ( +
+
+ setItems(newValues)} + mode="multiple" + options={props.options} + disabled={!canEditAccessControls} + /> +
+ + + + Add + +
+ ) +} diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/RolesAndResourceAccessControls.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/RolesAndResourceAccessControls.tsx new file mode 100644 index 0000000000000..c235eeacb01ea --- /dev/null +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/RolesAndResourceAccessControls.tsx @@ -0,0 +1,323 @@ +import { IconPlus } from '@posthog/icons' +import { + LemonButton, + LemonDialog, + LemonInput, + LemonInputSelect, + LemonModal, + LemonSelect, + LemonTable, + LemonTableColumns, + ProfileBubbles, + ProfilePicture, +} from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { capitalizeFirstLetter, Form } from 'kea-forms' +import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' +import { usersLemonSelectOptions } from 'lib/components/UserSelectItem' +import { LemonField } from 'lib/lemon-ui/LemonField' +import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' +import { fullName } from 'lib/utils' +import { useMemo, useState } from 'react' +import { userLogic } from 'scenes/userLogic' + +import { AvailableFeature } from '~/types' + +import { roleBasedAccessControlLogic, RoleWithResourceAccessControls } from './roleBasedAccessControlLogic' + +export type RolesAndResourceAccessControlsProps = { + noAccessControls?: boolean +} + +export function RolesAndResourceAccessControls({ noAccessControls }: RolesAndResourceAccessControlsProps): JSX.Element { + const { + rolesWithResourceAccessControls, + rolesLoading, + roleBasedAccessControlsLoading, + resources, + availableLevels, + selectedRoleId, + defaultAccessLevel, + } = useValues(roleBasedAccessControlLogic) + + const { updateRoleBasedAccessControls, selectRoleId, setEditingRoleId } = useActions(roleBasedAccessControlLogic) + + const roleColumns = noAccessControls + ? [] + : resources.map((resource) => ({ + title: resource.replace(/_/g, ' ') + 's', + key: resource, + width: 0, + render: (_: any, { accessControlByResource, role }: RoleWithResourceAccessControls) => { + const ac = accessControlByResource[resource] + + return ( + + updateRoleBasedAccessControls([ + { + resource, + role: role?.id ?? null, + access_level: newValue, + }, + ]) + } + options={availableLevels.map((level) => ({ + value: level, + label: capitalizeFirstLetter(level ?? ''), + }))} + /> + ) + }, + })) + + const columns: LemonTableColumns = [ + { + title: 'Role', + key: 'role', + width: 0, + render: (_, { role }) => ( + + (role.id === selectedRoleId ? selectRoleId(null) : selectRoleId(role.id)) + : undefined + } + title={role?.name ?? 'Default'} + /> + + ), + }, + { + title: 'Members', + key: 'members', + render: (_, { role }) => { + return role ? ( + role.members.length ? ( + ({ + email: member.user.email, + name: member.user.first_name, + title: `${member.user.first_name} <${member.user.email}>`, + }))} + onClick={() => (role.id === selectedRoleId ? selectRoleId(null) : selectRoleId(role.id))} + /> + ) : ( + 'No members' + ) + ) : ( + 'All members' + ) + }, + }, + + ...roleColumns, + ] + + return ( +
+

Use roles to group your organization members and assign them permissions.

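Building on the sentence above: per-resource defaults for a role are stored as project-level "global" access controls, written by roleBasedAccessControlLogic later in this diff. A sketch of that call; the endpoint, field names, and the 'feature_flag' resource come from the logic, while the ids and level are example values.

import api from 'lib/api'

// Illustrative only: mirrors updateRoleBasedAccessControls in
// roleBasedAccessControlLogic further down in this diff.
export async function exampleRoleResourceDefault(): Promise<void> {
    await api.put('api/projects/@current/global_access_controls', {
        resource: 'feature_flag', // one of: feature_flag, dashboard, insight, notebook (per the logic's resources selector)
        role: 'role-id', // a null role targets the organization-wide default row
        access_level: 'editor',
    })
}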
+ + +
+ !!selectedRoleId && role?.id === selectedRoleId, + onRowExpand: ({ role }) => (role ? selectRoleId(role.id) : undefined), + onRowCollapse: () => selectRoleId(null), + expandedRowRender: ({ role }) => (role ? : null), + rowExpandable: ({ role }) => !!role, + }} + /> + + setEditingRoleId('new')} icon={}> + Add a role + + +
+
+
+ ) +} + +function RoleDetails({ roleId }: { roleId: string }): JSX.Element | null { + const { user } = useValues(userLogic) + const { sortedMembers, roles, canEditRoleBasedAccessControls } = useValues(roleBasedAccessControlLogic) + const { addMembersToRole, removeMemberFromRole, setEditingRoleId } = useActions(roleBasedAccessControlLogic) + const [membersToAdd, setMembersToAdd] = useState([]) + + const role = roles?.find((role) => role.id === roleId) + + const onSubmit = membersToAdd.length + ? () => { + role && addMembersToRole(role, membersToAdd) + setMembersToAdd([]) + } + : undefined + + const membersNotInRole = useMemo(() => { + const membersInRole = new Set(role?.members.map((member) => member.user.uuid)) + return sortedMembers?.filter((member) => !membersInRole.has(member.user.uuid)) ?? [] + }, [role?.members, sortedMembers]) + + if (!role) { + // This is mostly for typing + return null + } + + return ( +
+
+
+
+ setMembersToAdd(newValues)} + mode="multiple" + disabled={!canEditRoleBasedAccessControls} + options={usersLemonSelectOptions( + membersNotInRole.map((member) => member.user), + 'uuid' + )} + /> +
+ + + Add members + +
+
+ setEditingRoleId(role.id)} + disabledReason={!canEditRoleBasedAccessControls ? 'You cannot edit this' : undefined} + > + Edit + +
+
+ + + }, + width: 32, + }, + { + title: 'Name', + key: 'user_name', + render: (_, member) => + member.user.uuid == user?.uuid ? `${fullName(member.user)} (you)` : fullName(member.user), + sorter: (a, b) => fullName(a.user).localeCompare(fullName(b.user)), + }, + { + title: 'Email', + key: 'user_email', + render: (_, member) => { + return <>{member.user.email} + }, + sorter: (a, b) => a.user.email.localeCompare(b.user.email), + }, + { + key: 'actions', + width: 0, + render: (_, member) => { + return ( +
+ removeMemberFromRole(role, member.id)} + > + Remove + +
+ ) + }, + }, + ]} + dataSource={role.members} + /> +
+ ) +} + +function RoleModal(): JSX.Element { + const { editingRoleId } = useValues(roleBasedAccessControlLogic) + const { setEditingRoleId, submitEditingRole, deleteRole } = useActions(roleBasedAccessControlLogic) + const isEditing = editingRoleId !== 'new' + + const onDelete = (): void => { + LemonDialog.open({ + title: 'Delete role', + content: 'Are you sure you want to delete this role? This action cannot be undone.', + primaryButton: { + children: 'Delete permanently', + onClick: () => deleteRole(editingRoleId as string), + status: 'danger', + }, + secondaryButton: { + children: 'Cancel', + }, + }) + } + + return ( +
+ setEditingRoleId(null)} + title={!isEditing ? 'Create' : `Edit`} + footer={ + <> +
+ {isEditing ? ( + onDelete()}> + Delete + + ) : null} +
+ + setEditingRoleId(null)}> + Cancel + + + + {!isEditing ? 'Create' : 'Save'} + + + } + > + + + +
+
+ ) +} diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/SidePanelAccessControl.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/SidePanelAccessControl.tsx new file mode 100644 index 0000000000000..266b012ebcd77 --- /dev/null +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/SidePanelAccessControl.tsx @@ -0,0 +1,25 @@ +import { useValues } from 'kea' + +import { SidePanelPaneHeader } from '../../components/SidePanelPaneHeader' +import { sidePanelContextLogic } from '../sidePanelContextLogic' +import { AccessControlObject } from './AccessControlObject' + +export const SidePanelAccessControl = (): JSX.Element => { + const { sceneSidePanelContext } = useValues(sidePanelContextLogic) + + return ( +
+ +
+ {sceneSidePanelContext.access_control_resource && sceneSidePanelContext.access_control_resource_id ? ( + + ) : ( +

Not supported
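The "Not supported" fallback above appears when the active scene does not publish an access-control context. For orientation, here is a sketch of the context object a scene exposes via a selector named after SIDE_PANEL_CONTEXT_KEY ('sidePanelContext'), modeled on the dashboardLogic change later in this diff; the dashboard resource and the id are example values.

import { SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types'
import { ActivityScope } from '~/types'

// Modeled on dashboardLogic's new SIDE_PANEL_CONTEXT_KEY selector below; the id
// is a placeholder. When access_control_resource and access_control_resource_id
// are present, the AccessControl tab is enabled and this panel renders
// AccessControlObject for that object.
export const exampleDashboardSidePanelContext: SidePanelSceneContext = {
    activity_scope: ActivityScope.DASHBOARD,
    activity_item_id: '123',
    access_control_resource: 'dashboard',
    access_control_resource_id: '123',
}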

+ )} +
+
+ ) +} diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/accessControlLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/accessControlLogic.ts new file mode 100644 index 0000000000000..8182b41c2b602 --- /dev/null +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/accessControlLogic.ts @@ -0,0 +1,250 @@ +import { LemonSelectOption } from '@posthog/lemon-ui' +import { actions, afterMount, connect, kea, key, listeners, path, props, selectors } from 'kea' +import { loaders } from 'kea-loaders' +import api from 'lib/api' +import { upgradeModalLogic } from 'lib/components/UpgradeModal/upgradeModalLogic' +import { toSentenceCase } from 'lib/utils' +import { membersLogic } from 'scenes/organization/membersLogic' +import { teamLogic } from 'scenes/teamLogic' + +import { + AccessControlResponseType, + AccessControlType, + AccessControlTypeMember, + AccessControlTypeProject, + AccessControlTypeRole, + AccessControlUpdateType, + APIScopeObject, + OrganizationMemberType, + RoleType, +} from '~/types' + +import type { accessControlLogicType } from './accessControlLogicType' +import { roleBasedAccessControlLogic } from './roleBasedAccessControlLogic' + +export type AccessControlLogicProps = { + resource: APIScopeObject + resource_id: string +} + +export const accessControlLogic = kea([ + props({} as AccessControlLogicProps), + key((props) => `${props.resource}-${props.resource_id}`), + path((key) => ['scenes', 'accessControl', 'accessControlLogic', key]), + connect({ + values: [ + membersLogic, + ['sortedMembers'], + teamLogic, + ['currentTeam'], + roleBasedAccessControlLogic, + ['roles'], + upgradeModalLogic, + ['guardAvailableFeature'], + ], + actions: [membersLogic, ['ensureAllMembersLoaded']], + }), + actions({ + updateAccessControl: ( + accessControl: Pick + ) => ({ accessControl }), + updateAccessControlDefault: (level: AccessControlType['access_level']) => ({ + level, + }), + updateAccessControlRoles: ( + accessControls: { + role: RoleType['id'] + level: AccessControlType['access_level'] + }[] + ) => ({ accessControls }), + updateAccessControlMembers: ( + accessControls: { + member: OrganizationMemberType['id'] + level: AccessControlType['access_level'] + }[] + ) => ({ accessControls }), + }), + loaders(({ values }) => ({ + accessControls: [ + null as AccessControlResponseType | null, + { + loadAccessControls: async () => { + try { + const response = await api.get(values.endpoint) + return response + } catch (error) { + // Return empty access controls + return { + access_controls: [], + available_access_levels: ['none', 'viewer', 'editor'], + user_access_level: 'none', + default_access_level: 'none', + user_can_edit_access_levels: false, + } + } + }, + + updateAccessControlDefault: async ({ level }) => { + await api.put(values.endpoint, { + access_level: level, + }) + + return values.accessControls + }, + + updateAccessControlRoles: async ({ accessControls }) => { + for (const { role, level } of accessControls) { + await api.put(values.endpoint, { + role: role, + access_level: level, + }) + } + + return values.accessControls + }, + + updateAccessControlMembers: async ({ accessControls }) => { + for (const { member, level } of accessControls) { + await api.put(values.endpoint, { + organization_member: member, + access_level: level, + }) + } + + return values.accessControls + }, + }, + ], + })), + listeners(({ actions }) => ({ + updateAccessControlDefaultSuccess: () => actions.loadAccessControls(), + 
updateAccessControlRolesSuccess: () => actions.loadAccessControls(), + updateAccessControlMembersSuccess: () => actions.loadAccessControls(), + })), + selectors({ + endpoint: [ + () => [(_, props) => props], + (props): string => { + // TODO: This is far from perfect... but it's a start + if (props.resource === 'project') { + return `api/projects/@current/access_controls` + } + return `api/projects/@current/${props.resource}s/${props.resource_id}/access_controls` + }, + ], + humanReadableResource: [ + () => [(_, props) => props], + (props): string => { + return props.resource.replace(/_/g, ' ') + }, + ], + + availableLevelsWithNone: [ + (s) => [s.accessControls], + (accessControls): string[] => { + return accessControls?.available_access_levels ?? [] + }, + ], + + availableLevels: [ + (s) => [s.availableLevelsWithNone], + (availableLevelsWithNone): string[] => { + return availableLevelsWithNone.filter((level) => level !== 'none') + }, + ], + + canEditAccessControls: [ + (s) => [s.accessControls], + (accessControls): boolean | null => { + return accessControls?.user_can_edit_access_levels ?? null + }, + ], + + accessControlDefaultLevel: [ + (s) => [s.accessControls], + (accessControls): string | null => { + return accessControls?.default_access_level ?? null + }, + ], + + accessControlDefaultOptions: [ + (s) => [s.availableLevelsWithNone, (_, props) => props.resource], + (availableLevelsWithNone): LemonSelectOption[] => { + const options = availableLevelsWithNone.map((level) => ({ + value: level, + // TODO: Correct "a" and "an" + label: level === 'none' ? 'No access' : toSentenceCase(level), + })) + + return options + }, + ], + accessControlDefault: [ + (s) => [s.accessControls, s.accessControlDefaultLevel], + (accessControls, accessControlDefaultLevel): AccessControlTypeProject => { + const found = accessControls?.access_controls?.find( + (accessControl) => !accessControl.organization_member && !accessControl.role + ) as AccessControlTypeProject + return ( + found ?? { + access_level: accessControlDefaultLevel, + } + ) + }, + ], + + accessControlMembers: [ + (s) => [s.accessControls], + (accessControls): AccessControlTypeMember[] => { + return (accessControls?.access_controls || []).filter( + (accessControl) => !!accessControl.organization_member + ) as AccessControlTypeMember[] + }, + ], + + accessControlRoles: [ + (s) => [s.accessControls], + (accessControls): AccessControlTypeRole[] => { + return (accessControls?.access_controls || []).filter( + (accessControl) => !!accessControl.role + ) as AccessControlTypeRole[] + }, + ], + + rolesById: [ + (s) => [s.roles], + (roles): Record => { + return Object.fromEntries((roles || []).map((role) => [role.id, role])) + }, + ], + + addableRoles: [ + (s) => [s.roles, s.accessControlRoles], + (roles, accessControlRoles): RoleType[] => { + return roles ? roles.filter((role) => !accessControlRoles.find((ac) => ac.role === role.id)) : [] + }, + ], + + membersById: [ + (s) => [s.sortedMembers], + (members): Record => { + return Object.fromEntries((members || []).map((member) => [member.id, member])) + }, + ], + + addableMembers: [ + (s) => [s.sortedMembers, s.accessControlMembers], + (members, accessControlMembers): any[] => { + return members + ? 
members.filter( + (member) => !accessControlMembers.find((ac) => ac.organization_member === member.id) + ) + : [] + }, + ], + }), + afterMount(({ actions }) => { + actions.loadAccessControls() + actions.ensureAllMembersLoaded() + }), +]) diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/roleBasedAccessControlLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/roleBasedAccessControlLogic.ts new file mode 100644 index 0000000000000..87d885844bfb1 --- /dev/null +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/access_control/roleBasedAccessControlLogic.ts @@ -0,0 +1,269 @@ +import { lemonToast } from '@posthog/lemon-ui' +import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea' +import { forms } from 'kea-forms' +import { loaders } from 'kea-loaders' +import { actionToUrl, router } from 'kea-router' +import api from 'lib/api' +import { membersLogic } from 'scenes/organization/membersLogic' +import { teamLogic } from 'scenes/teamLogic' +import { userLogic } from 'scenes/userLogic' + +import { + AccessControlResponseType, + AccessControlType, + AccessControlTypeRole, + AccessControlUpdateType, + APIScopeObject, + AvailableFeature, + RoleType, +} from '~/types' + +import type { roleBasedAccessControlLogicType } from './roleBasedAccessControlLogicType' + +export type RoleWithResourceAccessControls = { + role?: RoleType + accessControlByResource: Record +} + +export const roleBasedAccessControlLogic = kea([ + path(['scenes', 'accessControl', 'roleBasedAccessControlLogic']), + connect({ + values: [membersLogic, ['sortedMembers'], teamLogic, ['currentTeam'], userLogic, ['hasAvailableFeature']], + actions: [membersLogic, ['ensureAllMembersLoaded']], + }), + actions({ + updateRoleBasedAccessControls: ( + accessControls: Pick[] + ) => ({ accessControls }), + selectRoleId: (roleId: RoleType['id'] | null) => ({ roleId }), + deleteRole: (roleId: RoleType['id']) => ({ roleId }), + removeMemberFromRole: (role: RoleType, roleMemberId: string) => ({ role, roleMemberId }), + addMembersToRole: (role: RoleType, members: string[]) => ({ role, members }), + setEditingRoleId: (roleId: string | null) => ({ roleId }), + }), + reducers({ + selectedRoleId: [ + null as string | null, + { + selectRoleId: (_, { roleId }) => roleId, + }, + ], + editingRoleId: [ + null as string | null, + { + setEditingRoleId: (_, { roleId }) => roleId, + }, + ], + }), + loaders(({ values }) => ({ + roleBasedAccessControls: [ + null as AccessControlResponseType | null, + { + loadRoleBasedAccessControls: async () => { + const response = await api.get( + 'api/projects/@current/global_access_controls' + ) + return response + }, + + updateRoleBasedAccessControls: async ({ accessControls }) => { + for (const control of accessControls) { + await api.put('api/projects/@current/global_access_controls', { + ...control, + }) + } + + return values.roleBasedAccessControls + }, + }, + ], + + roles: [ + null as RoleType[] | null, + { + loadRoles: async () => { + const response = await api.roles.list() + return response?.results || [] + }, + addMembersToRole: async ({ role, members }) => { + if (!values.roles) { + return null + } + const newMembers = await Promise.all( + members.map(async (userUuid: string) => await api.roles.members.create(role.id, userUuid)) + ) + + role.members = [...role.members, ...newMembers] + + return [...values.roles] + }, + removeMemberFromRole: async ({ role, roleMemberId }) => { + if (!values.roles) { + return null + } + 
await api.roles.members.delete(role.id, roleMemberId) + role.members = role.members.filter((roleMember) => roleMember.id !== roleMemberId) + return [...values.roles] + }, + deleteRole: async ({ roleId }) => { + const role = values.roles?.find((r) => r.id === roleId) + if (!role) { + return values.roles + } + await api.roles.delete(role.id) + lemonToast.success(`Role "${role.name}" deleted`) + return values.roles?.filter((r) => r.id !== role.id) || [] + }, + }, + ], + })), + + forms(({ values, actions }) => ({ + editingRole: { + defaults: { + name: '', + }, + errors: ({ name }) => { + return { + name: !name ? 'Please choose a name for the role' : null, + } + }, + submit: async ({ name }) => { + if (!values.editingRoleId) { + return + } + let role: RoleType | null = null + if (values.editingRoleId === 'new') { + role = await api.roles.create(name) + } else { + role = await api.roles.update(values.editingRoleId, { name }) + } + + actions.loadRoles() + actions.setEditingRoleId(null) + actions.selectRoleId(role.id) + }, + }, + })), + + listeners(({ actions, values }) => ({ + updateRoleBasedAccessControlsSuccess: () => actions.loadRoleBasedAccessControls(), + loadRolesSuccess: () => { + if (router.values.hashParams.role) { + actions.selectRoleId(router.values.hashParams.role) + } + }, + deleteRoleSuccess: () => { + actions.loadRoles() + actions.setEditingRoleId(null) + actions.selectRoleId(null) + }, + + setEditingRoleId: () => { + const existingRole = values.roles?.find((role) => role.id === values.editingRoleId) + actions.resetEditingRole({ + name: existingRole?.name || '', + }) + }, + })), + + selectors({ + availableLevels: [ + (s) => [s.roleBasedAccessControls], + (roleBasedAccessControls): string[] => { + return roleBasedAccessControls?.available_access_levels ?? [] + }, + ], + + defaultAccessLevel: [ + (s) => [s.roleBasedAccessControls], + (roleBasedAccessControls): string | null => { + return roleBasedAccessControls?.default_access_level ?? null + }, + ], + + defaultResourceAccessControls: [ + (s) => [s.roleBasedAccessControls], + (roleBasedAccessControls): RoleWithResourceAccessControls => { + const accessControls = roleBasedAccessControls?.access_controls ?? [] + + // Find all acs without a roles (they are the default ones) + const accessControlByResource = accessControls + .filter((control) => !control.role) + .reduce( + (acc, control) => ({ + ...acc, + [control.resource]: control, + }), + {} as Record + ) + + return { accessControlByResource } + }, + ], + + rolesWithResourceAccessControls: [ + (s) => [s.roles, s.roleBasedAccessControls, s.defaultResourceAccessControls], + (roles, roleBasedAccessControls, defaultResourceAccessControls): RoleWithResourceAccessControls[] => { + if (!roles) { + return [] + } + + const accessControls = roleBasedAccessControls?.access_controls ?? [] + + return [ + defaultResourceAccessControls, + ...roles.map((role) => { + const accessControlByResource = accessControls + .filter((control) => control.role === role.id) + .reduce( + (acc, control) => ({ + ...acc, + [control.resource]: control, + }), + {} as Record + ) + + return { role, accessControlByResource } + }), + ] + }, + ], + + resources: [ + () => [], + (): AccessControlType['resource'][] => { + // TODO: Sync this as an enum + return ['feature_flag', 'dashboard', 'insight', 'notebook'] + }, + ], + + canEditRoleBasedAccessControls: [ + (s) => [s.roleBasedAccessControls], + (roleBasedAccessControls): boolean | null => { + return roleBasedAccessControls?.user_can_edit_access_levels ?? 
null + }, + ], + }), + afterMount(({ actions, values }) => { + if (values.hasAvailableFeature(AvailableFeature.ROLE_BASED_ACCESS)) { + actions.loadRoles() + actions.loadRoleBasedAccessControls() + actions.ensureAllMembersLoaded() + } + }), + + actionToUrl(({ values }) => ({ + selectRoleId: () => { + const { currentLocation } = router.values + return [ + currentLocation.pathname, + currentLocation.searchParams, + { + ...currentLocation.hashParams, + role: values.selectedRoleId ?? undefined, + }, + ] + }, + })), +]) diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx index 244e42c52d936..079433affb717 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx @@ -10,12 +10,21 @@ import { toParams } from 'lib/utils' import posthog from 'posthog-js' import { projectLogic } from 'scenes/projectLogic' +import { ActivityScope, UserBasicType } from '~/types' + import { sidePanelStateLogic } from '../../sidePanelStateLogic' -import { ActivityFilters, activityForSceneLogic } from './activityForSceneLogic' +import { SidePanelSceneContext } from '../../types' +import { sidePanelContextLogic } from '../sidePanelContextLogic' import type { sidePanelActivityLogicType } from './sidePanelActivityLogicType' const POLL_TIMEOUT = 5 * 60 * 1000 +export type ActivityFilters = { + scope?: ActivityScope + item_id?: ActivityLogItem['item_id'] + user?: UserBasicType['id'] +} + export interface ChangelogFlagPayload { notificationDate: dayjs.Dayjs markdown: string @@ -36,7 +45,7 @@ export enum SidePanelActivityTab { export const sidePanelActivityLogic = kea([ path(['scenes', 'navigation', 'sidepanel', 'sidePanelActivityLogic']), connect({ - values: [activityForSceneLogic, ['sceneActivityFilters'], projectLogic, ['currentProjectId']], + values: [sidePanelContextLogic, ['sceneSidePanelContext'], projectLogic, ['currentProjectId']], actions: [sidePanelStateLogic, ['openSidePanel']], }), actions({ @@ -267,8 +276,16 @@ export const sidePanelActivityLogic = kea([ }), subscriptions(({ actions, values }) => ({ - sceneActivityFilters: (activityFilters) => { - actions.setFiltersForCurrentPage(activityFilters ? { ...values.filters, ...activityFilters } : null) + sceneSidePanelContext: (sceneSidePanelContext: SidePanelSceneContext) => { + actions.setFiltersForCurrentPage( + sceneSidePanelContext + ? { + ...values.filters, + scope: sceneSidePanelContext.activity_scope, + item_id: sceneSidePanelContext.activity_item_id, + } + : null + ) }, filters: () => { if (values.activeTab === SidePanelActivityTab.All) { @@ -280,7 +297,7 @@ export const sidePanelActivityLogic = kea([ afterMount(({ actions, values }) => { actions.loadImportantChanges() - const activityFilters = values.sceneActivityFilters + const activityFilters = values.sceneSidePanelContext actions.setFiltersForCurrentPage(activityFilters ? 
{ ...values.filters, ...activityFilters } : null) }), diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/discussion/sidePanelDiscussionLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/discussion/sidePanelDiscussionLogic.ts index 5793deba3469f..9d1ba1d536d9b 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/discussion/sidePanelDiscussionLogic.ts +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/discussion/sidePanelDiscussionLogic.ts @@ -6,7 +6,7 @@ import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { CommentsLogicProps } from 'scenes/comments/commentsLogic' -import { activityForSceneLogic } from '../activity/activityForSceneLogic' +import { sidePanelContextLogic } from '../sidePanelContextLogic' import type { sidePanelDiscussionLogicType } from './sidePanelDiscussionLogicType' export const sidePanelDiscussionLogic = kea([ @@ -16,7 +16,7 @@ export const sidePanelDiscussionLogic = kea([ resetCommentCount: true, }), connect({ - values: [featureFlagLogic, ['featureFlags'], activityForSceneLogic, ['sceneActivityFilters']], + values: [featureFlagLogic, ['featureFlags'], sidePanelContextLogic, ['sceneSidePanelContext']], }), loaders(({ values }) => ({ commentCount: [ @@ -45,12 +45,12 @@ export const sidePanelDiscussionLogic = kea([ selectors({ commentsLogicProps: [ - (s) => [s.sceneActivityFilters], - (activityFilters): CommentsLogicProps | null => { - return activityFilters?.scope + (s) => [s.sceneSidePanelContext], + (sceneSidePanelContext): CommentsLogicProps | null => { + return sceneSidePanelContext.activity_scope ? { - scope: activityFilters.scope, - item_id: activityFilters.item_id, + scope: sceneSidePanelContext.activity_scope, + item_id: sceneSidePanelContext.activity_item_id, } : null }, diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/exports/sidePanelExportsLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/exports/sidePanelExportsLogic.ts index c9107c4ac695f..8f26e5927842e 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/exports/sidePanelExportsLogic.ts +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/exports/sidePanelExportsLogic.ts @@ -1,23 +1,14 @@ import { afterMount, connect, kea, path } from 'kea' import { exportsLogic } from 'lib/components/ExportButton/exportsLogic' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' -import { activityForSceneLogic } from '../activity/activityForSceneLogic' import type { sidePanelExportsLogicType } from './sidePanelExportsLogicType' export const sidePanelExportsLogic = kea([ path(['scenes', 'navigation', 'sidepanel', 'sidePanelExportsLogic']), connect({ - values: [ - featureFlagLogic, - ['featureFlags'], - activityForSceneLogic, - ['sceneActivityFilters'], - exportsLogic, - ['exports', 'freshUndownloadedExports'], - ], + values: [exportsLogic, ['exports', 'freshUndownloadedExports']], actions: [sidePanelStateLogic, ['openSidePanel'], exportsLogic, ['loadExports', 'removeFresh']], }), afterMount(({ actions }) => { diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelContextLogic.ts similarity index 59% rename from frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts rename to 
frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelContextLogic.ts index 641c0900638ef..1de9b8e00e251 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelContextLogic.ts @@ -1,22 +1,15 @@ import { connect, kea, path, selectors } from 'kea' import { router } from 'kea-router' import { objectsEqual } from 'kea-test-utils' -import { ActivityLogItem } from 'lib/components/ActivityLog/humanizeActivity' import { removeProjectIdIfPresent } from 'lib/utils/router-utils' import { sceneLogic } from 'scenes/sceneLogic' import { SceneConfig } from 'scenes/sceneTypes' -import { ActivityScope, UserBasicType } from '~/types' +import { SidePanelSceneContext } from '../types' +import { SIDE_PANEL_CONTEXT_KEY } from '../types' +import type { sidePanelContextLogicType } from './sidePanelContextLogicType' -import type { activityForSceneLogicType } from './activityForSceneLogicType' - -export type ActivityFilters = { - scope?: ActivityScope - item_id?: ActivityLogItem['item_id'] - user?: UserBasicType['id'] -} - -export const activityFiltersForScene = (sceneConfig: SceneConfig | null): ActivityFilters | null => { +export const activityFiltersForScene = (sceneConfig: SceneConfig | null): SidePanelSceneContext | null => { if (sceneConfig?.activityScope) { // NOTE: - HACKY, we are just parsing the item_id from the url optimistically... const pathParts = removeProjectIdIfPresent(router.values.currentLocation.pathname).split('/') @@ -24,38 +17,43 @@ export const activityFiltersForScene = (sceneConfig: SceneConfig | null): Activi // Loose check for the item_id being a number, a short_id (8 chars) or a uuid if (item_id && (item_id.length === 8 || item_id.length === 36 || !isNaN(parseInt(item_id)))) { - return { scope: sceneConfig.activityScope, item_id } + return { activity_scope: sceneConfig.activityScope, activity_item_id: item_id } } - return { scope: sceneConfig.activityScope } + return { activity_scope: sceneConfig.activityScope } } return null } -export const activityForSceneLogic = kea([ - path(['scenes', 'navigation', 'sidepanel', 'activityForSceneLogic']), +export const sidePanelContextLogic = kea([ + path(['scenes', 'navigation', 'sidepanel', 'sidePanelContextLogic']), connect({ values: [sceneLogic, ['sceneConfig']], }), selectors({ - sceneActivityFilters: [ + sceneSidePanelContext: [ (s) => [ + s.sceneConfig, // Similar to "breadcrumbs" (state, props) => { const activeSceneLogic = sceneLogic.selectors.activeSceneLogic(state, props) - const sceneConfig = s.sceneConfig(state, props) - if (activeSceneLogic && 'activityFilters' in activeSceneLogic.selectors) { + if (activeSceneLogic && SIDE_PANEL_CONTEXT_KEY in activeSceneLogic.selectors) { const activeLoadedScene = sceneLogic.selectors.activeLoadedScene(state, props) - return activeSceneLogic.selectors.activityFilters( + return activeSceneLogic.selectors[SIDE_PANEL_CONTEXT_KEY]( state, activeLoadedScene?.paramsToProps?.(activeLoadedScene?.sceneParams) || props ) } - return activityFiltersForScene(sceneConfig) + return null }, ], - (filters): ActivityFilters | null => filters, + (sceneConfig, context): SidePanelSceneContext => { + return { + ...(context ?? {}), + ...(!context?.activity_scope ? 
activityFiltersForScene(sceneConfig) : {}), + } + }, { equalityCheck: objectsEqual }, ], }), diff --git a/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx index 029b34b6cbf4a..b220fd505c4a8 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/sidePanelLogic.tsx @@ -8,6 +8,7 @@ import { activationLogic } from '~/layout/navigation-3000/sidepanel/panels/activ import { AvailableFeature, SidePanelTab } from '~/types' import { sidePanelActivityLogic } from './panels/activity/sidePanelActivityLogic' +import { sidePanelContextLogic } from './panels/sidePanelContextLogic' import { sidePanelStatusLogic } from './panels/sidePanelStatusLogic' import type { sidePanelLogicType } from './sidePanelLogicType' import { sidePanelStateLogic } from './sidePanelStateLogic' @@ -39,14 +40,16 @@ export const sidePanelLogic = kea([ ['status'], userLogic, ['hasAvailableFeature'], + sidePanelContextLogic, + ['sceneSidePanelContext'], ], actions: [sidePanelStateLogic, ['closeSidePanel', 'openSidePanel']], }), selectors({ enabledTabs: [ - (s) => [s.isCloudOrDev, s.isReady, s.hasCompletedAllTasks, s.featureFlags], - (isCloudOrDev, isReady, hasCompletedAllTasks, featureflags) => { + (s) => [s.isCloudOrDev, s.isReady, s.hasCompletedAllTasks, s.featureFlags, s.sceneSidePanelContext], + (isCloudOrDev, isReady, hasCompletedAllTasks, featureflags, sceneSidePanelContext) => { const tabs: SidePanelTab[] = [] tabs.push(SidePanelTab.Notebooks) @@ -61,6 +64,13 @@ export const sidePanelLogic = kea([ if (isReady && !hasCompletedAllTasks) { tabs.push(SidePanelTab.Activation) } + if ( + featureflags[FEATURE_FLAGS.ROLE_BASED_ACCESS_CONTROL] && + sceneSidePanelContext.access_control_resource && + sceneSidePanelContext.access_control_resource_id + ) { + tabs.push(SidePanelTab.AccessControl) + } tabs.push(SidePanelTab.Exports) tabs.push(SidePanelTab.FeaturePreviews) tabs.push(SidePanelTab.Settings) diff --git a/frontend/src/layout/navigation-3000/sidepanel/types.ts b/frontend/src/layout/navigation-3000/sidepanel/types.ts new file mode 100644 index 0000000000000..28da07acb1c89 --- /dev/null +++ b/frontend/src/layout/navigation-3000/sidepanel/types.ts @@ -0,0 +1,12 @@ +import { ActivityLogItem } from 'lib/components/ActivityLog/humanizeActivity' + +import { ActivityScope, APIScopeObject } from '~/types' + +/** Allows scenes to set a context which enables richer features of the side panel */ +export type SidePanelSceneContext = { + access_control_resource?: APIScopeObject + access_control_resource_id?: string + activity_scope?: ActivityScope + activity_item_id?: ActivityLogItem['item_id'] +} +export const SIDE_PANEL_CONTEXT_KEY = 'sidePanelContext' diff --git a/frontend/src/lib/components/Metalytics/metalyticsLogic.ts b/frontend/src/lib/components/Metalytics/metalyticsLogic.ts index 8ddc838701121..06d0f384d81b5 100644 --- a/frontend/src/lib/components/Metalytics/metalyticsLogic.ts +++ b/frontend/src/lib/components/Metalytics/metalyticsLogic.ts @@ -4,7 +4,8 @@ import { subscriptions } from 'kea-subscriptions' import api from 'lib/api' import { membersLogic } from 'scenes/organization/membersLogic' -import { activityForSceneLogic } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { sidePanelContextLogic } from '~/layout/navigation-3000/sidepanel/panels/sidePanelContextLogic' +import { SidePanelSceneContext } from 
'~/layout/navigation-3000/sidepanel/types' import { HogQLQuery, NodeKind } from '~/queries/schema' import { hogql } from '~/queries/utils' @@ -13,7 +14,7 @@ import type { metalyticsLogicType } from './metalyticsLogicType' export const metalyticsLogic = kea([ path(['lib', 'components', 'metalytics', 'metalyticsLogic']), connect({ - values: [activityForSceneLogic, ['sceneActivityFilters'], membersLogic, ['members']], + values: [sidePanelContextLogic, ['sceneSidePanelContext'], membersLogic, ['members']], }), loaders(({ values }) => ({ @@ -62,11 +63,16 @@ export const metalyticsLogic = kea([ selectors({ instanceId: [ - (s) => [s.sceneActivityFilters], - (sceneActivityFilters) => - sceneActivityFilters?.item_id ? `${sceneActivityFilters.scope}:${sceneActivityFilters.item_id}` : null, + (s) => [s.sceneSidePanelContext], + (sidePanelContext: SidePanelSceneContext) => + sidePanelContext?.activity_item_id + ? `${sidePanelContext.activity_scope}:${sidePanelContext.activity_item_id}` + : null, + ], + scope: [ + (s) => [s.sceneSidePanelContext], + (sidePanelContext: SidePanelSceneContext) => sidePanelContext?.activity_scope, ], - scope: [(s) => [s.sceneActivityFilters], (sceneActivityFilters) => sceneActivityFilters?.scope], recentUserMembers: [ (s) => [s.recentUsers, s.members], diff --git a/frontend/src/lib/components/RestrictedArea.tsx b/frontend/src/lib/components/RestrictedArea.tsx index ade847740c42a..852d1606bb0d0 100644 --- a/frontend/src/lib/components/RestrictedArea.tsx +++ b/frontend/src/lib/components/RestrictedArea.tsx @@ -27,7 +27,10 @@ export interface RestrictedAreaProps extends UseRestrictedAreaProps { Component: (props: RestrictedComponentProps) => JSX.Element } -export function useRestrictedArea({ scope, minimumAccessLevel }: UseRestrictedAreaProps): null | string { +export function useRestrictedArea({ + scope = RestrictionScope.Organization, + minimumAccessLevel, +}: UseRestrictedAreaProps): null | string { const { currentOrganization } = useValues(organizationLogic) const { currentTeam } = useValues(teamLogic) diff --git a/frontend/src/lib/components/Sharing/SharingModal.tsx b/frontend/src/lib/components/Sharing/SharingModal.tsx index 7348b1bc56610..7ef76f6fc54d6 100644 --- a/frontend/src/lib/components/Sharing/SharingModal.tsx +++ b/frontend/src/lib/components/Sharing/SharingModal.tsx @@ -92,6 +92,7 @@ export function SharingModalContent({

Something went wrong...

) : ( <> +

Sharing

([ reportRecordingsListPropertiesFetched: (loadTime: number) => ({ loadTime }), reportRecordingsListFilterAdded: (filterType: SessionRecordingFilterType) => ({ filterType }), reportRecordingPlayerSeekbarEventHovered: true, - reportRecordingPlayerSpeedChanged: (newSpeed: number) => ({ newSpeed }), - reportRecordingPlayerSkipInactivityToggled: (skipInactivity: boolean) => ({ skipInactivity }), reportRecordingInspectorItemExpanded: (tab: InspectorListItemType, index: number) => ({ tab, index }), reportRecordingInspectorMiniFilterViewed: (minifilterKey: MiniFilterKey, enabled: boolean) => ({ minifilterKey, @@ -948,12 +946,6 @@ export const eventUsageLogic = kea([ reportRecordingPlayerSeekbarEventHovered: () => { posthog.capture('recording player seekbar event hovered') }, - reportRecordingPlayerSpeedChanged: ({ newSpeed }) => { - posthog.capture('recording player speed changed', { new_speed: newSpeed }) - }, - reportRecordingPlayerSkipInactivityToggled: ({ skipInactivity }) => { - posthog.capture('recording player skip inactivity toggled', { skip_inactivity: skipInactivity }) - }, reportRecordingInspectorItemExpanded: ({ tab, index }) => { posthog.capture('recording inspector item expanded', { tab: 'replay-4000', type: tab, index }) }, diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts index 4480fe9977755..e2b0f57d11623 100644 --- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts @@ -178,6 +178,9 @@ export const convertTableValue = ( } const toFriendlyClickhouseTypeName = (type: string): ColumnScalar => { + if (type.indexOf('Tuple') !== -1) { + return 'TUPLE' + } if (type.indexOf('Int') !== -1) { return 'INTEGER' } @@ -203,8 +206,8 @@ const toFriendlyClickhouseTypeName = (type: string): ColumnScalar => { return type as ColumnScalar } -const isNumericalType = (type: string): boolean => { - if (type.indexOf('Int') !== -1 || type.indexOf('Float') !== -1 || type.indexOf('Decimal') !== -1) { +const isNumericalType = (type: ColumnScalar): boolean => { + if (type === 'INTEGER' || type === 'FLOAT' || type === 'DECIMAL') { return true } @@ -547,11 +550,13 @@ export const dataVisualizationLogic = kea([ return columns.map((column, index) => { const type = types[index]?.[1] + const friendlyClickhouseTypeName = toFriendlyClickhouseTypeName(type) + return { name: column, type: { - name: toFriendlyClickhouseTypeName(type), - isNumerical: isNumericalType(type), + name: friendlyClickhouseTypeName, + isNumerical: isNumericalType(friendlyClickhouseTypeName), }, label: `${column} - ${type}`, dataIndex: index, diff --git a/frontend/src/queries/nodes/DataVisualization/types.ts b/frontend/src/queries/nodes/DataVisualization/types.ts index ad9f186f67001..b39a78b5658d2 100644 --- a/frontend/src/queries/nodes/DataVisualization/types.ts +++ b/frontend/src/queries/nodes/DataVisualization/types.ts @@ -1,4 +1,4 @@ -export type ColumnScalar = 'INTEGER' | 'FLOAT' | 'DATETIME' | 'DATE' | 'BOOLEAN' | 'DECIMAL' | 'STRING' +export type ColumnScalar = 'INTEGER' | 'FLOAT' | 'DATETIME' | 'DATE' | 'BOOLEAN' | 'DECIMAL' | 'STRING' | 'TUPLE' export interface FormattingTemplate { id: string diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index b81e4669af38a..1d2a4d94012aa 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -5630,6 +5630,12 @@ "$ref": 
"#/definitions/HogQLQueryModifiers", "description": "Modifiers used when performing the query" }, + "properties": { + "items": { + "type": "string" + }, + "type": "array" + }, "response": { "$ref": "#/definitions/EventTaxonomyQueryResponse" } diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 7375910003a3f..5360ae06d99f4 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -2420,6 +2420,7 @@ export type EventTaxonomyResponse = EventTaxonomyItem[] export interface EventTaxonomyQuery extends DataNode { kind: NodeKind.EventTaxonomyQuery event: string + properties?: string[] } export type EventTaxonomyQueryResponse = AnalyticsQueryResponseBase diff --git a/frontend/src/scenes/ResourcePermissionModal.tsx b/frontend/src/scenes/FeatureFlagPermissions.tsx similarity index 67% rename from frontend/src/scenes/ResourcePermissionModal.tsx rename to frontend/src/scenes/FeatureFlagPermissions.tsx index b7361519f398d..24d4ebbe458d8 100644 --- a/frontend/src/scenes/ResourcePermissionModal.tsx +++ b/frontend/src/scenes/FeatureFlagPermissions.tsx @@ -1,12 +1,16 @@ -import { IconGear, IconTrash } from '@posthog/icons' -import { LemonButton, LemonModal, LemonTable } from '@posthog/lemon-ui' -import { useValues } from 'kea' +import { IconGear, IconOpenSidebar, IconTrash } from '@posthog/icons' +import { LemonBanner, LemonButton, LemonTable } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' import { TitleWithIcon } from 'lib/components/TitleWithIcon' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { LemonInputSelect, LemonInputSelectOption } from 'lib/lemon-ui/LemonInputSelect/LemonInputSelect' import { LemonTableColumns } from 'lib/lemon-ui/LemonTable' -import { AccessLevel, Resource, RoleType } from '~/types' +import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' +import { AccessLevel, AvailableFeature, FeatureFlagType, Resource, RoleType, SidePanelTab } from '~/types' +import { featureFlagPermissionsLogic } from './feature-flags/featureFlagPermissionsLogic' import { permissionsLogic } from './settings/organization/Permissions/permissionsLogic' import { rolesLogic } from './settings/organization/Permissions/Roles/rolesLogic' import { urls } from './urls' @@ -23,13 +27,7 @@ interface ResourcePermissionProps { canEdit: boolean } -interface ResourcePermissionModalProps extends ResourcePermissionProps { - title: string - visible: boolean - onClose: () => void -} - -export function roleLemonSelectOptions(roles: RoleType[]): LemonInputSelectOption[] { +function roleLemonSelectOptions(roles: RoleType[]): LemonInputSelectOption[] { return roles.map((role) => ({ key: role.id, label: `${role.name}`, @@ -41,35 +39,52 @@ export function roleLemonSelectOptions(roles: RoleType[]): LemonInputSelectOptio })) } -export function ResourcePermissionModal({ - title, - visible, - onClose, - rolesToAdd, - addableRoles, - onChange, - addableRolesLoading, - onAdd, - roles, - deleteAssociatedRole, - canEdit, -}: ResourcePermissionModalProps): JSX.Element { +export function FeatureFlagPermissions({ featureFlag }: { featureFlag: FeatureFlagType }): JSX.Element { + const { addableRoles, unfilteredAddableRolesLoading, rolesToAdd, derivedRoles } = useValues( + featureFlagPermissionsLogic({ flagId: featureFlag.id }) + ) + const { setRolesToAdd, addAssociatedRoles, deleteAssociatedRole } = useActions( + featureFlagPermissionsLogic({ 
flagId: featureFlag.id }) + ) + const { openSidePanel } = useActions(sidePanelStateLogic) + + const newAccessControls = useFeatureFlag('ROLE_BASED_ACCESS_CONTROL') + if (newAccessControls) { + if (!featureFlag.id) { + return

Please save the feature flag before changing the access controls.

+ } + return ( +
+ + Permissions have moved! We're rolling out our new access control system. Click below to open it. + + } + onClick={() => { + openSidePanel(SidePanelTab.AccessControl) + }} + > + Open access control + +
+ ) + } + return ( - <> - - - - + + setRolesToAdd(roleIds)} + rolesToAdd={rolesToAdd} + addableRoles={addableRoles} + addableRolesLoading={unfilteredAddableRolesLoading} + onAdd={() => addAssociatedRoles()} + roles={derivedRoles} + deleteAssociatedRole={(id) => deleteAssociatedRole({ roleId: id })} + canEdit={featureFlag.can_edit} + /> + ) } @@ -108,7 +123,7 @@ export function ResourcePermission({ icon={ } - to={`${urls.settings('organization-rbac')}`} + to={`${urls.settings('organization-roles')}`} targetBlank size="small" noPadding diff --git a/frontend/src/scenes/actions/actionLogic.ts b/frontend/src/scenes/actions/actionLogic.ts index a3101cb8d9daf..f650b616ba944 100644 --- a/frontend/src/scenes/actions/actionLogic.ts +++ b/frontend/src/scenes/actions/actionLogic.ts @@ -5,7 +5,7 @@ import { DataManagementTab } from 'scenes/data-management/DataManagementScene' import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' -import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { ActionType, ActivityScope, Breadcrumb, HogFunctionType } from '~/types' import { actionEditLogic } from './actionEditLogic' @@ -106,13 +106,15 @@ export const actionLogic = kea([ (action) => action?.steps?.some((step) => step.properties?.find((p) => p.type === 'cohort')) ?? false, ], - activityFilters: [ + [SIDE_PANEL_CONTEXT_KEY]: [ (s) => [s.action], - (action): ActivityFilters | null => { + (action): SidePanelSceneContext | null => { return action?.id ? { - scope: ActivityScope.ACTION, - item_id: String(action.id), + activity_scope: ActivityScope.ACTION, + activity_item_id: `${action.id}`, + // access_control_resource: 'action', + // access_control_resource_id: `${action.id}`, } : null }, diff --git a/frontend/src/scenes/dashboard/DashboardCollaborators.tsx b/frontend/src/scenes/dashboard/DashboardCollaborators.tsx index 048d668bc71fd..75b83719330d4 100644 --- a/frontend/src/scenes/dashboard/DashboardCollaborators.tsx +++ b/frontend/src/scenes/dashboard/DashboardCollaborators.tsx @@ -1,8 +1,10 @@ -import { IconLock, IconTrash, IconUnlock } from '@posthog/icons' +import { IconLock, IconOpenSidebar, IconTrash, IconUnlock } from '@posthog/icons' import { useActions, useValues } from 'kea' +import { router } from 'kea-router' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' import { usersLemonSelectOptions } from 'lib/components/UserSelectItem' import { DashboardPrivilegeLevel, DashboardRestrictionLevel, privilegeLevelToName } from 'lib/constants' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonInputSelect } from 'lib/lemon-ui/LemonInputSelect/LemonInputSelect' @@ -10,8 +12,10 @@ import { LemonSelect, LemonSelectOptions } from 'lib/lemon-ui/LemonSelect' import { ProfilePicture } from 'lib/lemon-ui/ProfilePicture' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { dashboardLogic } from 'scenes/dashboard/dashboardLogic' +import { urls } from 'scenes/urls' -import { AvailableFeature, DashboardType, FusedDashboardCollaboratorType, UserType } from '~/types' +import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' +import { AvailableFeature, DashboardType, FusedDashboardCollaboratorType, SidePanelTab, UserType } from '~/types' import { 
dashboardCollaboratorsLogic } from './dashboardCollaboratorsLogic' @@ -36,73 +40,96 @@ export function DashboardCollaboration({ dashboardId }: { dashboardId: Dashboard const { deleteExplicitCollaborator, setExplicitCollaboratorsToBeAdded, addExplicitCollaborators } = useActions( dashboardCollaboratorsLogic({ dashboardId }) ) + const { push } = useActions(router) + const { openSidePanel } = useActions(sidePanelStateLogic) + + const newAccessControl = useFeatureFlag('ROLE_BASED_ACCESS_CONTROL') + + if (!dashboard) { + return null + } + + if (newAccessControl) { + return ( +
+

Access control

+ + Permissions have moved! We're rolling out our new access control system. Click below to open it. + + } + onClick={() => { + openSidePanel(SidePanelTab.AccessControl) + push(urls.dashboard(dashboard.id)) + }} + > + Open access control + +
+ ) + } return ( - dashboard && ( - <> - - {(!canEditDashboard || !canRestrictDashboard) && ( - - {canEditDashboard - ? "You aren't allowed to change the restriction level ā€“ only the dashboard owner and project admins can." - : "You aren't allowed to change sharing settings ā€“ only dashboard collaborators with edit settings can."} - - )} - - triggerDashboardUpdate({ - restriction_level: newValue, - }) - } - options={DASHBOARD_RESTRICTION_OPTIONS} - loading={dashboardLoading} - fullWidth - disabled={!canRestrictDashboard} - /> - {dashboard.restriction_level > DashboardRestrictionLevel.EveryoneInProjectCanEdit && ( -
-
Collaborators
- {canEditDashboard && ( -
-
- - setExplicitCollaboratorsToBeAdded(newValues) - } - mode="multiple" - data-attr="subscribed-emails" - options={usersLemonSelectOptions(addableMembers, 'uuid')} - /> -
- addExplicitCollaborators()} - > - Add - -
- )} -
Project members with access
-
- {allCollaborators.map((collaborator) => ( - - ))} + + {(!canEditDashboard || !canRestrictDashboard) && ( + + {canEditDashboard + ? "You aren't allowed to change the restriction level ā€“ only the dashboard owner and project admins can." + : "You aren't allowed to change sharing settings ā€“ only dashboard collaborators with edit settings can."} + + )} + + triggerDashboardUpdate({ + restriction_level: newValue, + }) + } + options={DASHBOARD_RESTRICTION_OPTIONS} + loading={dashboardLoading} + fullWidth + disabled={!canRestrictDashboard} + /> + {dashboard.restriction_level > DashboardRestrictionLevel.EveryoneInProjectCanEdit && ( +
+
Collaborators
+ {canEditDashboard && ( +
+
+ setExplicitCollaboratorsToBeAdded(newValues)} + mode="multiple" + data-attr="subscribed-emails" + options={usersLemonSelectOptions(addableMembers, 'uuid')} + />
+ addExplicitCollaborators()} + > + Add +
)} - - - ) +
Project members with access
+
+ {allCollaborators.map((collaborator) => ( + + ))} +
+
+ )} +
) } diff --git a/frontend/src/scenes/dashboard/dashboardLogic.tsx b/frontend/src/scenes/dashboard/dashboardLogic.tsx index 4addf1f04f4c0..0b6236931cf2b 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.tsx +++ b/frontend/src/scenes/dashboard/dashboardLogic.tsx @@ -30,6 +30,7 @@ import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { dashboardsModel } from '~/models/dashboardsModel' import { insightsModel } from '~/models/insightsModel' import { variableDataLogic } from '~/queries/nodes/DataVisualization/Components/Variables/variableDataLogic' @@ -38,6 +39,7 @@ import { getQueryBasedDashboard, getQueryBasedInsightModel } from '~/queries/nod import { pollForResults } from '~/queries/query' import { DashboardFilter, DataVisualizationNode, HogQLVariable, NodeKind, RefreshType } from '~/queries/schema' import { + ActivityScope, AnyPropertyFilter, Breadcrumb, DashboardLayoutSize, @@ -991,6 +993,21 @@ export const dashboardLogic = kea([ }, ], ], + + [SIDE_PANEL_CONTEXT_KEY]: [ + (s) => [s.dashboard], + (dashboard): SidePanelSceneContext | null => { + return dashboard + ? { + activity_scope: ActivityScope.DASHBOARD, + activity_item_id: `${dashboard.id}`, + access_control_resource: 'dashboard', + access_control_resource_id: `${dashboard.id}`, + } + : null + }, + ], + sortTilesByLayout: [ (s) => [s.layoutForItem], (layoutForItem) => (tiles: Array) => { diff --git a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx index ae61570189150..a8e8107380ab5 100644 --- a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx +++ b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx @@ -29,7 +29,7 @@ export const dataWarehouseViewsLogic = kea([ const savedQueries = await api.dataWarehouseSavedQueries.list() if (router.values.location.pathname.includes(urls.dataModel()) && !cache.pollingInterval) { - cache.pollingInterval = setInterval(actions.loadDataWarehouseSavedQueries, 5000) + cache.pollingInterval = setInterval(() => actions.loadDataWarehouseSavedQueries(), 5000) } else { clearInterval(cache.pollingInterval) } diff --git a/frontend/src/scenes/experiments/Experiment.scss b/frontend/src/scenes/experiments/Experiment.scss deleted file mode 100644 index df10e7141aafe..0000000000000 --- a/frontend/src/scenes/experiments/Experiment.scss +++ /dev/null @@ -1,161 +0,0 @@ -.experiment-form { - .metrics-selection { - width: 100%; - padding-top: 1rem; - border-top: 1px solid var(--border); - } - - .person-selection { - align-items: center; - justify-content: space-between; - width: 100%; - padding-top: 1rem; - border-top: 1px solid var(--border); - } - - .experiment-preview { - margin-bottom: 1rem; - border-bottom: 1px solid var(--border); - } - - .variants { - padding-bottom: 1rem; - margin-top: 0.5rem; - - .border-top { - border-top-left-radius: 4px; - border-top-right-radius: 4px; - } - - .border-bottom { - border-bottom-right-radius: 4px; - border-bottom-left-radius: 4px; - } - - .feature-flag-variant { - display: flex; - align-items: center; - padding: 0.5rem; - background: var(--bg-light); - border-color: var(--border); - border-width: 1px; - border-top-style: solid; - border-right-style: solid; - border-left-style: solid; - - .extend-variant-fully { - flex: 1; - } - } - - 
.variant-label { - display: flex; - flex-direction: row; - align-items: center; - justify-content: center; - min-width: 52px; - padding: 2px 6px; - margin-right: 8px; - font-size: 12px; - font-weight: 500; - color: #fff; - letter-spacing: 0.01em; - border-radius: var(--radius); - } - } - - .secondary-metrics { - width: 100%; - padding-top: 1rem; - margin-top: 1rem; - margin-bottom: 1rem; - border-top: 1px solid var(--border); - } -} - -.view-experiment { - .draft-header { - margin-bottom: 1rem; - border-bottom: 1px solid var(--border); - } - - .exp-description { - font-style: italic; - } - - .participants { - background-color: white; - } - - .variants-list { - li { - display: inline; - } - - li::after { - content: ', '; - } - - li:last-child::after { - content: ''; - } - } - - .experiment-result { - padding-top: 1rem; - } - - .secondary-progress { - margin-top: 0.5rem; - - li::before { - display: inline-block; - margin-right: 4px; - font-weight: 900; - content: '\2022'; - } - } - - .no-experiment-results { - display: flex; - align-items: center; - justify-content: center; - width: 100%; - min-height: 320px; - margin-top: 1rem; - font-size: 24px; - background-color: var(--bg-3000); - border: 1px solid var(--border); - } - - .computation-time-and-sampling-notice { - margin-top: 8px; - } -} - -.experiment-preview-row { - padding-bottom: 1rem; - margin-bottom: 1rem; - border-bottom: 1px solid var(--border); - - &:last-child { - padding-bottom: 0; - margin-bottom: 0; - border-bottom: none; - } -} - -.metric-name { - flex: 1; - padding: 8px 8px 8px 16px; - margin-left: 0.5rem; - border: 1px solid var(--border); - border-radius: var(--radius); -} - -.exp-flag-copy-label { - font-size: 11px; - font-weight: 600; - text-transform: uppercase; - letter-spacing: 0.5px; -} diff --git a/frontend/src/scenes/experiments/Experiment.tsx b/frontend/src/scenes/experiments/Experiment.tsx index 6127fa87795fc..cca319e6f486e 100644 --- a/frontend/src/scenes/experiments/Experiment.tsx +++ b/frontend/src/scenes/experiments/Experiment.tsx @@ -1,5 +1,3 @@ -import './Experiment.scss' - import { useValues } from 'kea' import { NotFound } from 'lib/components/NotFound' import { SceneExport } from 'scenes/sceneTypes' diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index 125fb2320ddab..9715e32406c2a 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -1,5 +1,3 @@ -import './Experiment.scss' - import { IconMagicWand, IconPlusSmall, IconTrash } from '@posthog/icons' import { LemonDivider, LemonInput, LemonTextArea, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' diff --git a/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx b/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx index b6a69aeababa3..2463e1dd791a5 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DataCollection.tsx @@ -1,5 +1,3 @@ -import '../Experiment.scss' - import { IconInfo } from '@posthog/icons' import { LemonButton, LemonDivider, LemonModal, Link, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' diff --git a/frontend/src/scenes/experiments/ExperimentView/DeltaViz.tsx b/frontend/src/scenes/experiments/ExperimentView/DeltaViz.tsx deleted file mode 100644 index 77a7b9d0359b3..0000000000000 --- 
a/frontend/src/scenes/experiments/ExperimentView/DeltaViz.tsx +++ /dev/null @@ -1,449 +0,0 @@ -import { useValues } from 'kea' -import { useEffect, useRef, useState } from 'react' - -import { InsightType } from '~/types' - -import { experimentLogic } from '../experimentLogic' -import { VariantTag } from './components' - -const BAR_HEIGHT = 8 -const BAR_PADDING = 10 -const TICK_PANEL_HEIGHT = 20 -const VIEW_BOX_WIDTH = 800 -const HORIZONTAL_PADDING = 20 -const CONVERSION_RATE_RECT_WIDTH = 2 -const TICK_FONT_SIZE = 7 - -const COLORS = { - BOUNDARY_LINES: '#d0d0d0', - ZERO_LINE: '#666666', - BAR_NEGATIVE: '#F44435', - BAR_BEST: '#4DAF4F', - BAR_DEFAULT: '#d9d9d9', - BAR_CONTROL: 'rgba(217, 217, 217, 0.4)', - BAR_MIDDLE_POINT: 'black', - BAR_MIDDLE_POINT_CONTROL: 'rgba(0, 0, 0, 0.4)', -} - -// Helper function to find nice round numbers for ticks -export function getNiceTickValues(maxAbsValue: number): number[] { - // Round up maxAbsValue to ensure we cover all values - maxAbsValue = Math.ceil(maxAbsValue * 10) / 10 - - const magnitude = Math.floor(Math.log10(maxAbsValue)) - const power = Math.pow(10, magnitude) - - let baseUnit - const normalizedMax = maxAbsValue / power - if (normalizedMax <= 1) { - baseUnit = 0.2 * power - } else if (normalizedMax <= 2) { - baseUnit = 0.5 * power - } else if (normalizedMax <= 5) { - baseUnit = 1 * power - } else { - baseUnit = 2 * power - } - - // Calculate how many baseUnits we need to exceed maxAbsValue - const unitsNeeded = Math.ceil(maxAbsValue / baseUnit) - - // Determine appropriate number of decimal places based on magnitude - const decimalPlaces = Math.max(0, -magnitude + 1) - - const ticks: number[] = [] - for (let i = -unitsNeeded; i <= unitsNeeded; i++) { - // Round each tick value to avoid floating point precision issues - const tickValue = Number((baseUnit * i).toFixed(decimalPlaces)) - ticks.push(tickValue) - } - return ticks -} - -function formatTickValue(value: number): string { - if (value === 0) { - return '0%' - } - - // Determine number of decimal places needed - const absValue = Math.abs(value) - let decimals = 0 - - if (absValue < 0.01) { - decimals = 3 - } else if (absValue < 0.1) { - decimals = 2 - } else if (absValue < 1) { - decimals = 1 - } else { - decimals = 0 - } - - return `${(value * 100).toFixed(decimals)}%` -} - -export function DeltaViz(): JSX.Element { - const { experiment, experimentResults, getMetricType, metricResults } = useValues(experimentLogic) - - if (!experimentResults) { - return <> - } - - const variants = experiment.parameters.feature_flag_variants - const allResults = [...(metricResults || [])] - - return ( -
-
- {allResults.map((results, metricIndex) => { - if (!results) { - return null - } - - const isFirstMetric = metricIndex === 0 - - return ( -
- -
- ) - })} -
-
- ) -} - -function Chart({ - results, - variants, - metricType, - isFirstMetric, -}: { - results: any - variants: any[] - metricType: InsightType - isFirstMetric: boolean -}): JSX.Element { - const { credibleIntervalForVariant, conversionRateForVariant, experimentId } = useValues(experimentLogic) - const [tooltipData, setTooltipData] = useState<{ x: number; y: number; variant: string } | null>(null) - - // Update chart height calculation to include only one BAR_PADDING for each space between bars - const chartHeight = BAR_PADDING + (BAR_HEIGHT + BAR_PADDING) * variants.length - - // Find the maximum absolute value from all credible intervals - const maxAbsValue = Math.max( - ...variants.flatMap((variant) => { - const interval = credibleIntervalForVariant(results, variant.key, metricType) - return interval ? [Math.abs(interval[0] / 100), Math.abs(interval[1] / 100)] : [] - }) - ) - - // Add padding to the range - const padding = Math.max(maxAbsValue * 0.05, 0.02) - const chartBound = maxAbsValue + padding - - const tickValues = getNiceTickValues(chartBound) - const maxTick = Math.max(...tickValues) - - const valueToX = (value: number): number => { - // Scale the value to fit within the padded area - const percentage = (value / maxTick + 1) / 2 - return HORIZONTAL_PADDING + percentage * (VIEW_BOX_WIDTH - 2 * HORIZONTAL_PADDING) - } - - const infoPanelWidth = '10%' - - const ticksSvgRef = useRef(null) - const chartSvgRef = useRef(null) - // :TRICKY: We need to track SVG heights dynamically because - // we're fitting regular divs to match SVG viewports. SVGs scale - // based on their viewBox and the viewport size, making it challenging - // to match their effective rendered heights with regular div elements. - const [ticksSvgHeight, setTicksSvgHeight] = useState(0) - const [chartSvgHeight, setChartSvgHeight] = useState(0) - - useEffect(() => { - const ticksSvg = ticksSvgRef.current - const chartSvg = chartSvgRef.current - - // eslint-disable-next-line compat/compat - const resizeObserver = new ResizeObserver((entries) => { - for (const entry of entries) { - if (entry.target === ticksSvg) { - setTicksSvgHeight(entry.contentRect.height) - } else if (entry.target === chartSvg) { - setChartSvgHeight(entry.contentRect.height) - } - } - }) - - if (ticksSvg) { - resizeObserver.observe(ticksSvg) - } - if (chartSvg) { - resizeObserver.observe(chartSvg) - } - - return () => { - resizeObserver.disconnect() - } - }, []) - - return ( -
- {/* eslint-disable-next-line react/forbid-dom-props */} -
- {isFirstMetric && ( - - )} - {isFirstMetric &&
} - {/* eslint-disable-next-line react/forbid-dom-props */} -
- {variants.map((variant) => ( -
- -
- ))} -
-
- - {/* SVGs container */} -
- {/* Ticks */} - {isFirstMetric && ( - - {tickValues.map((value, index) => { - const x = valueToX(value) - return ( - - - {formatTickValue(value)} - - - ) - })} - - )} - {isFirstMetric &&
} - {/* Chart */} - - {/* Vertical grid lines */} - {tickValues.map((value, index) => { - const x = valueToX(value) - return ( - - ) - })} - - {variants.map((variant, index) => { - const interval = credibleIntervalForVariant(results, variant.key, metricType) - const [lower, upper] = interval ? [interval[0] / 100, interval[1] / 100] : [0, 0] - - const variantRate = conversionRateForVariant(results, variant.key) - const controlRate = conversionRateForVariant(results, 'control') - const delta = variantRate && controlRate ? (variantRate - controlRate) / controlRate : 0 - - // Find the highest delta among all variants - const maxDelta = Math.max( - ...variants.map((v) => { - const vRate = conversionRateForVariant(results, v.key) - return vRate && controlRate ? (vRate - controlRate) / controlRate : 0 - }) - ) - - let barColor - if (variant.key === 'control') { - barColor = COLORS.BAR_DEFAULT - } else if (delta < 0) { - barColor = COLORS.BAR_NEGATIVE - } else if (delta === maxDelta) { - barColor = COLORS.BAR_BEST - } else { - barColor = COLORS.BAR_DEFAULT - } - - const y = BAR_PADDING + (BAR_HEIGHT + BAR_PADDING) * index - const x1 = valueToX(lower) - const x2 = valueToX(upper) - const deltaX = valueToX(delta) - - return ( - { - const rect = e.currentTarget.getBoundingClientRect() - setTooltipData({ - x: rect.left + rect.width / 2, - y: rect.top - 10, - variant: variant.key, - }) - }} - onMouseLeave={() => setTooltipData(null)} - > - {/* Invisible full-width rect to ensure consistent hover */} - - {/* Visible elements */} - - - - ) - })} - - - {/* Tooltip */} - {tooltipData && ( -
-
- -
- Conversion rate: - - {conversionRateForVariant(results, tooltipData.variant)?.toFixed(2)}% - -
-
- Delta: - - {tooltipData.variant === 'control' ? ( - Baseline - ) : ( - (() => { - const variantRate = conversionRateForVariant(results, tooltipData.variant) - const controlRate = conversionRateForVariant(results, 'control') - const delta = - variantRate && controlRate - ? (variantRate - controlRate) / controlRate - : 0 - return delta ? ( - 0 ? 'text-success' : 'text-danger'}> - {`${delta > 0 ? '+' : ''}${(delta * 100).toFixed(2)}%`} - - ) : ( - 'ā€”' - ) - })() - )} - -
-
- Credible interval: - - {(() => { - const interval = credibleIntervalForVariant( - results, - tooltipData.variant, - metricType - ) - const [lower, upper] = interval - ? [interval[0] / 100, interval[1] / 100] - : [0, 0] - return `[${lower > 0 ? '+' : ''}${(lower * 100).toFixed(2)}%, ${ - upper > 0 ? '+' : '' - }${(upper * 100).toFixed(2)}%]` - })()} - -
-
-
- )} -
-
- ) -} diff --git a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx index 5ebf192769a2d..b3c2962d95c55 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx @@ -1,5 +1,3 @@ -import '../Experiment.scss' - import { IconBalance, IconFlag } from '@posthog/icons' import { LemonBanner, diff --git a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx index 8225391583fc9..75aa23d2f6284 100644 --- a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx @@ -1,12 +1,12 @@ -import '../Experiment.scss' - import { LemonDivider, LemonTabs } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' import { PostHogFeature } from 'posthog-js/react' import { WebExperimentImplementationDetails } from 'scenes/experiments/WebExperimentImplementationDetails' import { ExperimentImplementationDetails } from '../ExperimentImplementationDetails' import { experimentLogic } from '../experimentLogic' +import { MetricsView } from '../MetricsView/MetricsView' import { ExperimentLoadingAnimation, LoadingState, @@ -25,13 +25,15 @@ import { Results } from './Results' import { SecondaryMetricsTable } from './SecondaryMetricsTable' const ResultsTab = (): JSX.Element => { - const { experiment, experimentResults } = useValues(experimentLogic) + const { experiment, experimentResults, featureFlags } = useValues(experimentLogic) const hasResultsInsight = experimentResults && experimentResults.insight return (
- {hasResultsInsight ? ( + {featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS] ? ( + + ) : hasResultsInsight ? ( ) : ( <> @@ -67,7 +69,7 @@ const VariantsTab = (): JSX.Element => { } export function ExperimentView(): JSX.Element { - const { experimentLoading, experimentResultsLoading, experimentId, experimentResults, tabKey } = + const { experimentLoading, experimentResultsLoading, experimentId, experimentResults, tabKey, featureFlags } = useValues(experimentLogic) const { setTabKey } = useActions(experimentLogic) @@ -87,20 +89,27 @@ export function ExperimentView(): JSX.Element { ) : ( <> - {hasResultsInsight ? ( + {hasResultsInsight && !featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS] ? (
) : null}
-
- -
- -
- -
+ {featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS] ? ( +
+ +
+ ) : ( + <> +
+ +
+
+ +
+ + )}
Add goal @@ -324,7 +323,7 @@ export function Goal(): JSX.Element { ) : ( )} - setIsModalOpen(true)}> + openPrimaryMetricModal(0)}> Change goal
@@ -342,14 +341,7 @@ export function Goal(): JSX.Element { )}
)} - { - setIsModalOpen(false) - loadExperiment() - }} - /> +
) } diff --git a/frontend/src/scenes/experiments/ExperimentView/Info.tsx b/frontend/src/scenes/experiments/ExperimentView/Info.tsx index df08b130fe4ad..ef7940f5fa28e 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Info.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Info.tsx @@ -1,10 +1,9 @@ -import '../Experiment.scss' - import { IconWarning } from '@posthog/icons' import { Link, ProfilePicture, Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { CopyToClipboardInline } from 'lib/components/CopyToClipboard' import { EditableField } from 'lib/components/EditableField/EditableField' +import { FEATURE_FLAGS } from 'lib/constants' import { IconOpenInNew } from 'lib/lemon-ui/icons' import { urls } from 'scenes/urls' @@ -16,7 +15,7 @@ import { ActionBanner, ResultsTag, StatusTag } from './components' import { ExperimentDates } from './ExperimentDates' export function Info(): JSX.Element { - const { experiment } = useValues(experimentLogic) + const { experiment, featureFlags } = useValues(experimentLogic) const { updateExperiment } = useActions(experimentLogic) const { created_by } = experiment @@ -33,10 +32,12 @@ export function Info(): JSX.Element {
Status
-
-
Significance
- -
+ {!featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS] && ( +
+
Significance
+ +
+ )} {experiment.feature_flag && (
@@ -98,7 +99,7 @@ export function Info(): JSX.Element { compactButtons />
- + {!featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS] && }
) } diff --git a/frontend/src/scenes/experiments/ExperimentView/Overview.tsx b/frontend/src/scenes/experiments/ExperimentView/Overview.tsx index 35f18c3c13b67..2095309364143 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Overview.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Overview.tsx @@ -1,5 +1,3 @@ -import '../Experiment.scss' - import { useValues } from 'kea' import { experimentLogic } from '../experimentLogic' diff --git a/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx b/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx index dfe6130db788e..fc90635d9a6af 100644 --- a/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx @@ -1,5 +1,3 @@ -import '../Experiment.scss' - import { IconFlag } from '@posthog/icons' import { LemonBanner, LemonButton, LemonModal, LemonTable, LemonTableColumns, LemonTag } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' diff --git a/frontend/src/scenes/experiments/ExperimentView/Results.tsx b/frontend/src/scenes/experiments/ExperimentView/Results.tsx index 1f34f96fd7518..c4e7a4b05ed62 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Results.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Results.tsx @@ -1,21 +1,16 @@ -import '../Experiment.scss' - import { useValues } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' import { experimentLogic } from '../experimentLogic' import { ResultsHeader, ResultsQuery } from './components' -import { DeltaViz } from './DeltaViz' import { SummaryTable } from './SummaryTable' export function Results(): JSX.Element { - const { experimentResults, featureFlags } = useValues(experimentLogic) + const { experimentResults } = useValues(experimentLogic) return (
- {featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS] && }
) diff --git a/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx index 5474962ec738b..8369038f00cbb 100644 --- a/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/SecondaryMetricsTable.tsx @@ -332,7 +332,7 @@ const AddSecondaryMetricButton = ({ } type="secondary" - size="small" + size="xsmall" onClick={() => { const newMetricsSecondary = [...experiment.metrics_secondary, getDefaultFunnelsMetric()] setExperiment({ diff --git a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx index 4ba16ded0e86c..6150d4e7b7826 100644 --- a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx @@ -1,5 +1,3 @@ -import '../Experiment.scss' - import { IconInfo, IconRewindPlay } from '@posthog/icons' import { LemonButton, LemonTable, LemonTableColumns, Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index df8580fee68dd..9e5bfb7c2c4b0 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -1,5 +1,3 @@ -import '../Experiment.scss' - import { IconArchive, IconCheck, IconFlask, IconX } from '@posthog/icons' import { LemonBanner, diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryGoalFunnels.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryGoalFunnels.tsx index 50468541a0d9b..2c5fe6f2da780 100644 --- a/frontend/src/scenes/experiments/Metrics/PrimaryGoalFunnels.tsx +++ b/frontend/src/scenes/experiments/Metrics/PrimaryGoalFunnels.tsx @@ -25,11 +25,15 @@ import { } from './Selectors' export function PrimaryGoalFunnels(): JSX.Element { const { currentTeam } = useValues(teamLogic) - const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) + const { experiment, isExperimentRunning, featureFlags, editingPrimaryMetricIndex } = useValues(experimentLogic) const { setExperiment, setFunnelsMetric } = useActions(experimentLogic) const hasFilters = (currentTeam?.test_account_filters || []).length > 0 - const metricIdx = 0 + if (!editingPrimaryMetricIndex && editingPrimaryMetricIndex !== 0) { + return <> + } + + const metricIdx = editingPrimaryMetricIndex const currentMetric = experiment.metrics[metricIdx] as ExperimentFunnelsQuery const actionFilterProps = { @@ -261,7 +265,7 @@ export function PrimaryGoalFunnels(): JSX.Element { checked={(() => { // :FLAG: CLEAN UP AFTER MIGRATION if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const val = (experiment.metrics[0] as ExperimentFunnelsQuery).funnels_query + const val = (experiment.metrics[metricIdx] as ExperimentFunnelsQuery).funnels_query ?.filterTestAccounts return hasFilters ? 
!!val : false } diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrends.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrends.tsx index 0ce1cb72e33da..8f7f5fe17df4b 100644 --- a/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrends.tsx +++ b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrends.tsx @@ -1,13 +1,13 @@ import { LemonInput, LemonLabel } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { TestAccountFilterSwitch } from 'lib/components/TestAccountFiltersSwitch' -import { EXPERIMENT_DEFAULT_DURATION, FEATURE_FLAGS } from 'lib/constants' +import { EXPERIMENT_DEFAULT_DURATION } from 'lib/constants' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' import { teamLogic } from 'scenes/teamLogic' -import { actionsAndEventsToSeries, filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { actionsAndEventsToSeries } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { queryNodeToFilter } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' import { Query } from '~/queries/Query/Query' import { ExperimentTrendsQuery, NodeKind } from '~/queries/schema' @@ -17,82 +17,46 @@ import { experimentLogic } from '../experimentLogic' import { commonActionFilterProps } from './Selectors' export function PrimaryGoalTrends(): JSX.Element { - const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) - const { setExperiment, setTrendsMetric } = useActions(experimentLogic) + const { experiment, isExperimentRunning, editingPrimaryMetricIndex } = useValues(experimentLogic) + const { setTrendsMetric } = useActions(experimentLogic) const { currentTeam } = useValues(teamLogic) const hasFilters = (currentTeam?.test_account_filters || []).length > 0 - const metricIdx = 0 + if (!editingPrimaryMetricIndex && editingPrimaryMetricIndex !== 0) { + return <> + } + + const metricIdx = editingPrimaryMetricIndex const currentMetric = experiment.metrics[metricIdx] as ExperimentTrendsQuery return ( <>
Name (optional) - {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] && ( - { - setTrendsMetric({ - metricIdx, - name: newName, - }) - }} - /> - )} + { + setTrendsMetric({ + metricIdx, + name: newName, + }) + }} + />
{ - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - return queryNodeToFilter(currentMetric.count_query) - } - return experiment.filters - })()} + filters={queryNodeToFilter(currentMetric.count_query)} setFilters={({ actions, events, data_warehouse }: Partial): void => { - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const series = actionsAndEventsToSeries( - { actions, events, data_warehouse } as any, - true, - MathAvailability.All - ) + const series = actionsAndEventsToSeries( + { actions, events, data_warehouse } as any, + true, + MathAvailability.All + ) - setTrendsMetric({ - metricIdx, - series, - }) - } else { - if (actions?.length) { - setExperiment({ - filters: { - ...experiment.filters, - actions, - events: undefined, - data_warehouse: undefined, - }, - }) - } else if (events?.length) { - setExperiment({ - filters: { - ...experiment.filters, - events, - actions: undefined, - data_warehouse: undefined, - }, - }) - } else if (data_warehouse?.length) { - setExperiment({ - filters: { - ...experiment.filters, - data_warehouse, - actions: undefined, - events: undefined, - }, - }) - } - } + setTrendsMetric({ + metricIdx, + series, + }) }} typeKey="experiment-metric" buttonCopy="Add graph series" @@ -103,29 +67,12 @@ export function PrimaryGoalTrends(): JSX.Element { />
{ - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const val = currentMetric.count_query?.filterTestAccounts - return hasFilters ? !!val : false - } - return hasFilters ? !!experiment.filters.filter_test_accounts : false - })()} + checked={hasFilters ? !!currentMetric.count_query?.filterTestAccounts : false} onChange={(checked: boolean) => { - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - setTrendsMetric({ - metricIdx, - filterTestAccounts: checked, - }) - } else { - setExperiment({ - filters: { - ...experiment.filters, - filter_test_accounts: checked, - }, - }) - } + setTrendsMetric({ + metricIdx, + filterTestAccounts: checked, + }) }} fullWidth /> @@ -137,17 +84,10 @@ export function PrimaryGoalTrends(): JSX.Element { )}
- {/* :FLAG: CLEAN UP AFTER MIGRATION */} { - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - return currentMetric.count_query - } - return filtersToQueryNode(experiment.filters) - })(), + source: currentMetric.count_query, showTable: false, showLastComputation: true, showLastComputationRefresh: false, diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrendsExposure.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrendsExposure.tsx index 4ebe43c30e928..1dfa4aa8b08a4 100644 --- a/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrendsExposure.tsx +++ b/frontend/src/scenes/experiments/Metrics/PrimaryGoalTrendsExposure.tsx @@ -16,11 +16,17 @@ import { experimentLogic } from '../experimentLogic' import { commonActionFilterProps } from './Selectors' export function PrimaryGoalTrendsExposure(): JSX.Element { - const { experiment, isExperimentRunning, featureFlags } = useValues(experimentLogic) + const { experiment, isExperimentRunning, featureFlags, editingPrimaryMetricIndex } = useValues(experimentLogic) const { setExperiment, setTrendsExposureMetric } = useActions(experimentLogic) const { currentTeam } = useValues(teamLogic) const hasFilters = (currentTeam?.test_account_filters || []).length > 0 - const currentMetric = experiment.metrics[0] as ExperimentTrendsQuery + + if (!editingPrimaryMetricIndex && editingPrimaryMetricIndex !== 0) { + return <> + } + + const metricIdx = editingPrimaryMetricIndex + const currentMetric = experiment.metrics[metricIdx] as ExperimentTrendsQuery return ( <> @@ -43,7 +49,7 @@ export function PrimaryGoalTrendsExposure(): JSX.Element { ) setTrendsExposureMetric({ - metricIdx: 0, + metricIdx, series, }) } else { @@ -109,7 +115,7 @@ export function PrimaryGoalTrendsExposure(): JSX.Element { // :FLAG: CLEAN UP AFTER MIGRATION if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { setTrendsExposureMetric({ - metricIdx: 0, + metricIdx, filterTestAccounts: checked, }) } else { diff --git a/frontend/src/scenes/experiments/Metrics/PrimaryMetricModal.tsx b/frontend/src/scenes/experiments/Metrics/PrimaryMetricModal.tsx index 14fd6c7d4e967..1afce61899d6b 100644 --- a/frontend/src/scenes/experiments/Metrics/PrimaryMetricModal.tsx +++ b/frontend/src/scenes/experiments/Metrics/PrimaryMetricModal.tsx @@ -9,19 +9,24 @@ import { experimentLogic, getDefaultFilters, getDefaultFunnelsMetric, getDefault import { PrimaryGoalFunnels } from '../Metrics/PrimaryGoalFunnels' import { PrimaryGoalTrends } from '../Metrics/PrimaryGoalTrends' -export function PrimaryMetricModal({ - experimentId, - isOpen, - onClose, -}: { - experimentId: Experiment['id'] - isOpen: boolean - onClose: () => void -}): JSX.Element { - const { experiment, experimentLoading, getMetricType, featureFlags } = useValues(experimentLogic({ experimentId })) - const { updateExperimentGoal, setExperiment } = useActions(experimentLogic({ experimentId })) +export function PrimaryMetricModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { + const { + experiment, + experimentLoading, + getMetricType, + featureFlags, + isPrimaryMetricModalOpen, + editingPrimaryMetricIndex, + } = useValues(experimentLogic({ experimentId })) + const { updateExperimentGoal, setExperiment, closePrimaryMetricModal } = useActions( + experimentLogic({ experimentId }) + ) + + if (!editingPrimaryMetricIndex && editingPrimaryMetricIndex !== 0) { + return <> + } - const metricIdx = 0 + const metricIdx = editingPrimaryMetricIndex const metricType = getMetricType(metricIdx) 
let funnelStepsLength = 0 @@ -34,31 +39,50 @@ export function PrimaryMetricModal({ return ( - - Cancel - +
{ + const newMetrics = experiment.metrics.filter((_, idx) => idx !== metricIdx) + setExperiment({ + metrics: newMetrics, + }) updateExperimentGoal(experiment.filters) }} - type="primary" - loading={experimentLoading} - data-attr="create-annotation-submit" > - Save + Delete +
+ + Cancel + + { + updateExperimentGoal(experiment.filters) + }} + type="primary" + loading={experimentLoading} + data-attr="create-annotation-submit" + > + Save + +
} > diff --git a/frontend/src/scenes/experiments/Metrics/SecondaryMetricModal.tsx b/frontend/src/scenes/experiments/Metrics/SecondaryMetricModal.tsx index 14a8304b973e2..423fb1c48d5c1 100644 --- a/frontend/src/scenes/experiments/Metrics/SecondaryMetricModal.tsx +++ b/frontend/src/scenes/experiments/Metrics/SecondaryMetricModal.tsx @@ -1,10 +1,9 @@ import { LemonButton, LemonModal, LemonSelect } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' import { Experiment, InsightType } from '~/types' -import { experimentLogic, getDefaultFilters, getDefaultFunnelsMetric, getDefaultTrendsMetric } from '../experimentLogic' +import { experimentLogic, getDefaultFunnelsMetric, getDefaultTrendsMetric } from '../experimentLogic' import { SecondaryGoalFunnels } from './SecondaryGoalFunnels' import { SecondaryGoalTrends } from './SecondaryGoalTrends' @@ -19,9 +18,7 @@ export function SecondaryMetricModal({ isOpen: boolean onClose: () => void }): JSX.Element { - const { experiment, experimentLoading, getSecondaryMetricType, featureFlags } = useValues( - experimentLogic({ experimentId }) - ) + const { experiment, experimentLoading, getSecondaryMetricType } = useValues(experimentLogic({ experimentId })) const { setExperiment, updateExperiment } = useActions(experimentLogic({ experimentId })) const metricType = getSecondaryMetricType(metricIdx) @@ -37,28 +34,15 @@ export function SecondaryMetricModal({ type="secondary" status="danger" onClick={() => { - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const newMetricsSecondary = experiment.metrics_secondary.filter( - (_, idx) => idx !== metricIdx - ) - setExperiment({ - metrics_secondary: newMetricsSecondary, - }) - updateExperiment({ - metrics_secondary: newMetricsSecondary, - }) - } else { - const newSecondaryMetrics = experiment.secondary_metrics.filter( - (_, idx) => idx !== metricIdx - ) - setExperiment({ - secondary_metrics: newSecondaryMetrics, - }) - updateExperiment({ - secondary_metrics: newSecondaryMetrics, - }) - } + const newMetricsSecondary = experiment.metrics_secondary.filter( + (_, idx) => idx !== metricIdx + ) + setExperiment({ + metrics_secondary: newMetricsSecondary, + }) + updateExperiment({ + metrics_secondary: newMetricsSecondary, + }) }} > Delete @@ -69,16 +53,9 @@ export function SecondaryMetricModal({ { - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - updateExperiment({ - metrics_secondary: experiment.metrics_secondary, - }) - } else { - updateExperiment({ - secondary_metrics: experiment.secondary_metrics, - }) - } + updateExperiment({ + metrics_secondary: experiment.metrics_secondary, + }) }} type="primary" loading={experimentLoading} @@ -96,30 +73,16 @@ export function SecondaryMetricModal({ data-attr="metrics-selector" value={metricType} onChange={(newMetricType) => { - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - setExperiment({ - ...experiment, - metrics_secondary: [ - ...experiment.metrics_secondary.slice(0, metricIdx), - newMetricType === InsightType.TRENDS - ? getDefaultTrendsMetric() - : getDefaultFunnelsMetric(), - ...experiment.metrics_secondary.slice(metricIdx + 1), - ], - }) - } else { - setExperiment({ - ...experiment, - secondary_metrics: [ - ...experiment.secondary_metrics.slice(0, metricIdx), - newMetricType === InsightType.TRENDS - ? 
{ name: '', filters: getDefaultFilters(InsightType.TRENDS, undefined) } - : { name: '', filters: getDefaultFilters(InsightType.FUNNELS, undefined) }, - ...experiment.secondary_metrics.slice(metricIdx + 1), - ], - }) - } + setExperiment({ + ...experiment, + metrics_secondary: [ + ...experiment.metrics_secondary.slice(0, metricIdx), + newMetricType === InsightType.TRENDS + ? getDefaultTrendsMetric() + : getDefaultFunnelsMetric(), + ...experiment.metrics_secondary.slice(metricIdx + 1), + ], + }) }} options={[ { value: InsightType.TRENDS, label: Trends }, diff --git a/frontend/src/scenes/experiments/MetricsView/DeltaChart.tsx b/frontend/src/scenes/experiments/MetricsView/DeltaChart.tsx new file mode 100644 index 0000000000000..da42b4ac5ca94 --- /dev/null +++ b/frontend/src/scenes/experiments/MetricsView/DeltaChart.tsx @@ -0,0 +1,667 @@ +import { IconActivity, IconPencil } from '@posthog/icons' +import { LemonButton, LemonTag } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { humanFriendlyNumber } from 'lib/utils' +import { useEffect, useRef, useState } from 'react' + +import { themeLogic } from '~/layout/navigation-3000/themeLogic' +import { InsightType, TrendExperimentVariant } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { VariantTag } from '../ExperimentView/components' +import { NoResultEmptyState } from './NoResultEmptyState' + +function formatTickValue(value: number): string { + if (value === 0) { + return '0%' + } + + // Determine number of decimal places needed + const absValue = Math.abs(value) + let decimals = 0 + + if (absValue < 0.01) { + decimals = 3 + } else if (absValue < 0.1) { + decimals = 2 + } else if (absValue < 1) { + decimals = 1 + } else { + decimals = 0 + } + + return `${(value * 100).toFixed(decimals)}%` +} + +export function DeltaChart({ + result, + error, + variants, + metricType, + metricIndex, + isFirstMetric, + metric, + tickValues, + chartBound, +}: { + result: any + error: any + variants: any[] + metricType: InsightType + metricIndex: number + isFirstMetric: boolean + metric: any + tickValues: number[] + chartBound: number +}): JSX.Element { + const { + credibleIntervalForVariant, + conversionRateForVariant, + experimentId, + countDataForVariant, + exposureCountDataForVariant, + metricResultsLoading, + } = useValues(experimentLogic) + + const { experiment } = useValues(experimentLogic) + const { openPrimaryMetricModal } = useActions(experimentLogic) + const [tooltipData, setTooltipData] = useState<{ x: number; y: number; variant: string } | null>(null) + const [emptyStateTooltipVisible, setEmptyStateTooltipVisible] = useState(true) + const [tooltipPosition, setTooltipPosition] = useState({ x: 0, y: 0 }) + + const BAR_HEIGHT = 8 + const BAR_PADDING = 10 + const TICK_PANEL_HEIGHT = 20 + const VIEW_BOX_WIDTH = 800 + const HORIZONTAL_PADDING = 20 + const CONVERSION_RATE_RECT_WIDTH = 2 + const TICK_FONT_SIZE = 9 + + const { isDarkModeOn } = useValues(themeLogic) + const COLORS = { + TICK_TEXT_COLOR: 'var(--text-secondary-3000)', + BOUNDARY_LINES: 'var(--border-3000)', + ZERO_LINE: 'var(--border-bold)', + BAR_NEGATIVE: isDarkModeOn ? 'rgb(206 66 54)' : '#F44435', + BAR_BEST: isDarkModeOn ? 'rgb(49 145 51)' : '#4DAF4F', + BAR_DEFAULT: isDarkModeOn ? 'rgb(121 121 121)' : 'rgb(217 217 217)', + BAR_CONTROL: isDarkModeOn ? 
'rgba(217, 217, 217, 0.2)' : 'rgba(217, 217, 217, 0.4)', + BAR_MIDDLE_POINT: 'black', + BAR_MIDDLE_POINT_CONTROL: 'rgba(0, 0, 0, 0.4)', + } + + // Update chart height calculation to include only one BAR_PADDING for each space between bars + const chartHeight = BAR_PADDING + (BAR_HEIGHT + BAR_PADDING) * variants.length + + const valueToX = (value: number): number => { + // Scale the value to fit within the padded area + const percentage = (value / chartBound + 1) / 2 + return HORIZONTAL_PADDING + percentage * (VIEW_BOX_WIDTH - 2 * HORIZONTAL_PADDING) + } + + const metricTitlePanelWidth = '20%' + const variantsPanelWidth = '10%' + + const ticksSvgRef = useRef(null) + const chartSvgRef = useRef(null) + // :TRICKY: We need to track SVG heights dynamically because + // we're fitting regular divs to match SVG viewports. SVGs scale + // based on their viewBox and the viewport size, making it challenging + // to match their effective rendered heights with regular div elements. + const [ticksSvgHeight, setTicksSvgHeight] = useState(0) + const [chartSvgHeight, setChartSvgHeight] = useState(0) + + useEffect(() => { + const ticksSvg = ticksSvgRef.current + const chartSvg = chartSvgRef.current + + // eslint-disable-next-line compat/compat + const resizeObserver = new ResizeObserver((entries) => { + for (const entry of entries) { + if (entry.target === ticksSvg) { + setTicksSvgHeight(entry.contentRect.height) + } else if (entry.target === chartSvg) { + setChartSvgHeight(entry.contentRect.height) + } + } + }) + + if (ticksSvg) { + resizeObserver.observe(ticksSvg) + } + if (chartSvg) { + resizeObserver.observe(chartSvg) + } + + return () => { + resizeObserver.disconnect() + } + }, []) + + return ( +
+ {/* Metric title panel */} + {/* eslint-disable-next-line react/forbid-dom-props */} +
+ {isFirstMetric && ( + + )} + {isFirstMetric &&
} +
+
+
+
+
+ {metricIndex + 1}.{' '} + {metric.name || Untitled metric} +
+ } + onClick={() => openPrimaryMetricModal(metricIndex)} + /> +
+ + {metric.kind === 'ExperimentFunnelsQuery' ? 'Funnel' : 'Trend'} + +
+
+
+
+ + {/* Variants panel */} + {/* eslint-disable-next-line react/forbid-dom-props */} +
+ {isFirstMetric && ( + + )} + {isFirstMetric &&
} + {/* eslint-disable-next-line react/forbid-dom-props */} +
+ {variants.map((variant) => ( +
+ +
+ ))} +
+
+ + {/* SVGs container */} +
+ {/* Ticks */} + {isFirstMetric && ( + + {tickValues.map((value, index) => { + const x = valueToX(value) + return ( + + + {formatTickValue(value)} + + + ) + })} + + )} + {isFirstMetric &&
} + {/* Chart */} + {result ? ( + + {/* Vertical grid lines */} + {tickValues.map((value, index) => { + const x = valueToX(value) + return ( + + ) + })} + + {variants.map((variant, index) => { + const interval = credibleIntervalForVariant(result, variant.key, metricType) + const [lower, upper] = interval ? [interval[0] / 100, interval[1] / 100] : [0, 0] + + let delta: number + if (metricType === InsightType.TRENDS) { + const controlVariant = result.variants.find( + (v: TrendExperimentVariant) => v.key === 'control' + ) as TrendExperimentVariant + + const variantData = result.variants.find( + (v: TrendExperimentVariant) => v.key === variant.key + ) as TrendExperimentVariant + + if ( + !variantData?.count || + !variantData?.absolute_exposure || + !controlVariant?.count || + !controlVariant?.absolute_exposure + ) { + delta = 0 + } else { + const controlMean = controlVariant.count / controlVariant.absolute_exposure + const variantMean = variantData.count / variantData.absolute_exposure + delta = (variantMean - controlMean) / controlMean + } + } else { + const variantRate = conversionRateForVariant(result, variant.key) + const controlRate = conversionRateForVariant(result, 'control') + delta = variantRate && controlRate ? (variantRate - controlRate) / controlRate : 0 + } + + const y = BAR_PADDING + (BAR_HEIGHT + BAR_PADDING) * index + const x1 = valueToX(lower) + const x2 = valueToX(upper) + const deltaX = valueToX(delta) + + return ( + { + const rect = e.currentTarget.getBoundingClientRect() + setTooltipData({ + x: rect.left + rect.width / 2, + y: rect.top - 10, + variant: variant.key, + }) + }} + onMouseLeave={() => setTooltipData(null)} + > + {variant.key === 'control' ? ( + // Control variant - single gray bar + <> + + + + ) : ( + // Test variants - split into positive and negative sections if needed + <> + + {lower < 0 && upper > 0 ? ( + // Bar spans across zero - need to split + <> + + + + ) : ( + // Bar is entirely positive or negative + + )} + + )} + {/* Delta marker */} + + + ) + })} + + ) : metricResultsLoading ? ( + + { + const rect = e.currentTarget.getBoundingClientRect() + setTooltipPosition({ + x: rect.left + rect.width / 2, + y: rect.top, + }) + setEmptyStateTooltipVisible(true) + }} + onMouseLeave={() => setEmptyStateTooltipVisible(false)} + > +
+ Results loading… +
+
+
+ ) : ( + + {!experiment.start_date ? ( + +
+ Waiting for experiment to start… +
+
+ ) : ( + { + const rect = e.currentTarget.getBoundingClientRect() + setTooltipPosition({ + x: rect.left + rect.width / 2, + y: rect.top, + }) + setEmptyStateTooltipVisible(true) + }} + onMouseLeave={() => setEmptyStateTooltipVisible(false)} + > +
+ {error?.hasDiagnostics ? ( + + + + {(() => { + try { + const detail = JSON.parse(error.detail) + return Object.values(detail).filter((v) => v === false).length + } catch { + return '0' + } + })()} + + /4 + + ) : ( + + Error + + )} + Results not yet available +
+
+ )} +
+ )} + + {/* Variant result tooltip */} + {tooltipData && ( +
+
+ + {metricType === InsightType.TRENDS ? ( + <> +
+ Count: + + {(() => { + const count = countDataForVariant(result, tooltipData.variant) + return count !== null ? humanFriendlyNumber(count) : '—' + })()} + +
+
+ Exposure: + + {(() => { + const exposure = exposureCountDataForVariant( + result, + tooltipData.variant + ) + return exposure !== null ? humanFriendlyNumber(exposure) : '—' + })()} + +
+
+ Mean: + + {(() => { + const variant = result.variants.find( + (v: TrendExperimentVariant) => v.key === tooltipData.variant + ) + return variant?.count && variant?.absolute_exposure + ? (variant.count / variant.absolute_exposure).toFixed(2) + : '—' + })()} + +
+ + ) : ( +
+ Conversion rate: + + {conversionRateForVariant(result, tooltipData.variant)?.toFixed(2)}% + +
+ )} +
+ Delta: + + {tooltipData.variant === 'control' ? ( + Baseline + ) : ( + (() => { + if (metricType === InsightType.TRENDS) { + const controlVariant = result.variants.find( + (v: TrendExperimentVariant) => v.key === 'control' + ) + const variant = result.variants.find( + (v: TrendExperimentVariant) => v.key === tooltipData.variant + ) + + if ( + !variant?.count || + !variant?.absolute_exposure || + !controlVariant?.count || + !controlVariant?.absolute_exposure + ) { + return '—' + } + + const controlMean = + controlVariant.count / controlVariant.absolute_exposure + const variantMean = variant.count / variant.absolute_exposure + const delta = (variantMean - controlMean) / controlMean + return delta ? ( + 0 ? 'text-success' : 'text-danger'}> + {`${delta > 0 ? '+' : ''}${(delta * 100).toFixed(2)}%`} + + ) : ( + '—' + ) + } + + const variantRate = conversionRateForVariant(result, tooltipData.variant) + const controlRate = conversionRateForVariant(result, 'control') + const delta = + variantRate && controlRate + ? (variantRate - controlRate) / controlRate + : 0 + return delta ? ( + 0 ? 'text-success' : 'text-danger'}> + {`${delta > 0 ? '+' : ''}${(delta * 100).toFixed(2)}%`} + + ) : ( + '—' + ) + })() + )} + +
+
+ Credible interval: + + {(() => { + const interval = credibleIntervalForVariant( + result, + tooltipData.variant, + metricType + ) + const [lower, upper] = interval + ? [interval[0] / 100, interval[1] / 100] + : [0, 0] + return `[${lower > 0 ? '+' : ''}${(lower * 100).toFixed(2)}%, ${ + upper > 0 ? '+' : '' + }${(upper * 100).toFixed(2)}%]` + })()} + +
+
+
+ )} + + {/* Empty state tooltip */} + {emptyStateTooltipVisible && ( +
+ +
+ )} +
+
+ ) +} diff --git a/frontend/src/scenes/experiments/MetricsView/MetricsView.tsx b/frontend/src/scenes/experiments/MetricsView/MetricsView.tsx new file mode 100644 index 0000000000000..3d56a7681b3ec --- /dev/null +++ b/frontend/src/scenes/experiments/MetricsView/MetricsView.tsx @@ -0,0 +1,180 @@ +import { IconPlus } from '@posthog/icons' +import { LemonButton } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { IconAreaChart } from 'lib/lemon-ui/icons' + +import { experimentLogic, getDefaultFunnelsMetric } from '../experimentLogic' +import { MAX_PRIMARY_METRICS } from './const' +import { DeltaChart } from './DeltaChart' + +// Helper function to find nice round numbers for ticks +export function getNiceTickValues(maxAbsValue: number): number[] { + // Round up maxAbsValue to ensure we cover all values + maxAbsValue = Math.ceil(maxAbsValue * 10) / 10 + + const magnitude = Math.floor(Math.log10(maxAbsValue)) + const power = Math.pow(10, magnitude) + + let baseUnit + const normalizedMax = maxAbsValue / power + if (normalizedMax <= 1) { + baseUnit = 0.2 * power + } else if (normalizedMax <= 2) { + baseUnit = 0.5 * power + } else if (normalizedMax <= 5) { + baseUnit = 1 * power + } else { + baseUnit = 2 * power + } + + // Calculate how many baseUnits we need to exceed maxAbsValue + const unitsNeeded = Math.ceil(maxAbsValue / baseUnit) + + // Determine appropriate number of decimal places based on magnitude + const decimalPlaces = Math.max(0, -magnitude + 1) + + const ticks: number[] = [] + for (let i = -unitsNeeded; i <= unitsNeeded; i++) { + // Round each tick value to avoid floating point precision issues + const tickValue = Number((baseUnit * i).toFixed(decimalPlaces)) + ticks.push(tickValue) + } + return ticks +} + +function AddMetric({ + metrics, + setExperiment, + openPrimaryMetricModal, +}: { + metrics: any[] + setExperiment: (payload: { metrics: any[] }) => void + openPrimaryMetricModal: (index: number) => void +}): JSX.Element { + return ( + } + type="secondary" + size="xsmall" + onClick={() => { + const newMetrics = [...metrics, getDefaultFunnelsMetric()] + setExperiment({ + metrics: newMetrics, + }) + openPrimaryMetricModal(newMetrics.length - 1) + }} + disabledReason={ + metrics.length >= MAX_PRIMARY_METRICS + ? `You can only add up to ${MAX_PRIMARY_METRICS} primary metrics.` + : undefined + } + > + Add metric + + ) +} + +export function MetricsView(): JSX.Element { + const { experiment, getMetricType, metricResults, primaryMetricsResultErrors, credibleIntervalForVariant } = + useValues(experimentLogic) + const { setExperiment, openPrimaryMetricModal } = useActions(experimentLogic) + + const variants = experiment.parameters.feature_flag_variants + const metrics = experiment.metrics || [] + + // Calculate the maximum absolute value across ALL metrics + const maxAbsValue = Math.max( + ...metrics.flatMap((_, metricIndex) => { + const result = metricResults?.[metricIndex] + if (!result) { + return [] + } + return variants.flatMap((variant) => { + const interval = credibleIntervalForVariant(result, variant.key, getMetricType(metricIndex)) + return interval ? [Math.abs(interval[0] / 100), Math.abs(interval[1] / 100)] : [] + }) + }) + ) + + const padding = Math.max(maxAbsValue * 0.05, 0.02) + const chartBound = maxAbsValue + padding + + const commonTickValues = getNiceTickValues(chartBound) + + return ( +
+
+
+
+

Primary metrics

+
+
+ +
+
+ {metrics.length > 0 && ( +
+ +
+ )} +
+
+
+ {metrics.length > 0 ? ( +
+
+ {metrics.map((metric, metricIndex) => { + const result = metricResults?.[metricIndex] + const isFirstMetric = metricIndex === 0 + + return ( +
+ +
+ ) + })} +
+
+ ) : ( +
+
+ +
+ Add up to {MAX_PRIMARY_METRICS} primary metrics. +
+ +
+
+ )} +
+ ) +} diff --git a/frontend/src/scenes/experiments/MetricsView/NoResultEmptyState.tsx b/frontend/src/scenes/experiments/MetricsView/NoResultEmptyState.tsx new file mode 100644 index 0000000000000..e773e7d8d4494 --- /dev/null +++ b/frontend/src/scenes/experiments/MetricsView/NoResultEmptyState.tsx @@ -0,0 +1,68 @@ +import { IconCheck, IconX } from '@posthog/icons' + +export function NoResultEmptyState({ error }: { error: any }): JSX.Element { + if (!error) { + return <> + } + + type ErrorCode = 'no-events' | 'no-flag-info' | 'no-control-variant' | 'no-test-variant' + + const { statusCode, hasDiagnostics } = error + + function ChecklistItem({ errorCode, value }: { errorCode: ErrorCode; value: boolean }): JSX.Element { + const failureText = { + 'no-events': 'Metric events not received', + 'no-flag-info': 'Feature flag information not present on the events', + 'no-control-variant': 'Events with the control variant not received', + 'no-test-variant': 'Events with at least one test variant not received', + } + + const successText = { + 'no-events': 'Experiment events have been received', + 'no-flag-info': 'Feature flag information is present on the events', + 'no-control-variant': 'Events with the control variant received', + 'no-test-variant': 'Events with at least one test variant received', + } + + return ( +
+ {value === false ? ( + + + {successText[errorCode]} + + ) : ( + + + {failureText[errorCode]} + + )} +
+ ) + } + + if (hasDiagnostics) { + const checklistItems = [] + for (const [errorCode, value] of Object.entries(error.detail as Record)) { + checklistItems.push() + } + + return
{checklistItems}
+ } + + if (statusCode === 504) { + return ( + <> +

Experiment results timed out

+
+ This may occur when the experiment has a large amount of data or is particularly complex. We are + actively working on fixing this. In the meantime, please try refreshing the experiment to retrieve + the results. +
+ + ) + } + + // Other unexpected errors + return
{error.detail}
+} diff --git a/frontend/src/scenes/experiments/MetricsView/const.tsx b/frontend/src/scenes/experiments/MetricsView/const.tsx new file mode 100644 index 0000000000000..d1f720a7b256a --- /dev/null +++ b/frontend/src/scenes/experiments/MetricsView/const.tsx @@ -0,0 +1 @@ +export const MAX_PRIMARY_METRICS = 10 diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 88d57b134e0d3..698c4182dc84d 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -263,6 +263,9 @@ export const experimentLogic = kea([ isSecondary, }), setTabKey: (tabKey: string) => ({ tabKey }), + openPrimaryMetricModal: (index: number) => ({ index }), + closePrimaryMetricModal: true, + setPrimaryMetricsResultErrors: (errors: any[]) => ({ errors }), }), reducers({ experiment: [ @@ -471,6 +474,31 @@ export const experimentLogic = kea([ setTabKey: (_, { tabKey }) => tabKey, }, ], + isPrimaryMetricModalOpen: [ + false, + { + openPrimaryMetricModal: () => true, + closePrimaryMetricModal: () => false, + }, + ], + editingPrimaryMetricIndex: [ + null as number | null, + { + openPrimaryMetricModal: (_, { index }) => index, + closePrimaryMetricModal: () => null, + updateExperimentGoal: () => null, + }, + ], + primaryMetricsResultErrors: [ + [] as any[], + { + setPrimaryMetricsResultErrors: (_, { errors }) => errors, + // Reset errors when loading new results + loadMetricResults: () => [], + // Reset errors when loading a new experiment + loadExperiment: () => [], + }, + ], }), listeners(({ values, actions }) => ({ createExperiment: async ({ draft }) => { @@ -620,6 +648,7 @@ export const experimentLogic = kea([ minimum_detectable_effect: minimumDetectableEffect, }, }) + actions.closePrimaryMetricModal() }, updateExperimentCollectionGoal: async () => { const { recommendedRunningTime, recommendedSampleSize, minimumDetectableEffect } = values @@ -648,6 +677,9 @@ export const experimentLogic = kea([ actions.loadExperiment() } }, + closePrimaryMetricModal: () => { + actions.loadExperiment() + }, resetRunningExperiment: async () => { actions.updateExperiment({ start_date: null, end_date: null, archived: false }) values.experiment && actions.reportExperimentReset(values.experiment) @@ -858,7 +890,7 @@ export const experimentLogic = kea([ // :HANDLE FLAG: CLEAN UP AFTER MIGRATION if (values.featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { const errorDetailMatch = error.detail.match(/\{.*\}/) - errorDetail = errorDetailMatch[0] + errorDetail = errorDetailMatch ? 
errorDetailMatch[0] : error.detail } actions.setExperimentResultCalculationError({ detail: errorDetail, statusCode: error.status }) if (error.status === 504) { @@ -870,15 +902,14 @@ export const experimentLogic = kea([ }, ], metricResults: [ - null as (CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse)[] | null, + null as (CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse | null)[] | null, { loadMetricResults: async ( refresh?: boolean - ): Promise<(CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse)[] | null> => { + ): Promise<(CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse | null)[]> => { return (await Promise.all( - values.experiment?.metrics.map(async (metric) => { + values.experiment?.metrics.map(async (metric, index) => { try { - // Queries are shareable, so we need to set the experiment_id for the backend to correctly associate the query with the experiment const queryWithExperimentId = { ...metric, experiment_id: values.experimentId, @@ -889,11 +920,22 @@ export const experimentLogic = kea([ ...response, fakeInsightId: Math.random().toString(36).substring(2, 15), } - } catch (error) { - return {} + } catch (error: any) { + const errorDetailMatch = error.detail.match(/\{.*\}/) + const errorDetail = errorDetailMatch ? JSON.parse(errorDetailMatch[0]) : error.detail + + // Store the error in primaryMetricsResultErrors + const currentErrors = [...(values.primaryMetricsResultErrors || [])] + currentErrors[index] = { + detail: errorDetail, + statusCode: error.status, + hasDiagnostics: !!errorDetailMatch, + } + actions.setPrimaryMetricsResultErrors(currentErrors) + return null } }) - )) as (CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse)[] + )) as (CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse | null)[] }, }, ], diff --git a/frontend/src/scenes/experiments/utils.test.ts b/frontend/src/scenes/experiments/utils.test.ts index 22d03cad8829a..906841aaec363 100644 --- a/frontend/src/scenes/experiments/utils.test.ts +++ b/frontend/src/scenes/experiments/utils.test.ts @@ -1,6 +1,6 @@ import { EntityType, FeatureFlagFilters, InsightType } from '~/types' -import { getNiceTickValues } from './ExperimentView/DeltaViz' +import { getNiceTickValues } from './MetricsView/MetricsView' import { getMinimumDetectableEffect, transformFiltersForWinningVariant } from './utils' describe('utils', () => { diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index d7e2ad01c9133..c9d7b50b6ab78 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -10,7 +10,6 @@ import { CopyToClipboardInline } from 'lib/components/CopyToClipboard' import { NotFound } from 'lib/components/NotFound' import { ObjectTags } from 'lib/components/ObjectTags/ObjectTags' import { PageHeader } from 'lib/components/PageHeader' -import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' import { FEATURE_FLAGS } from 'lib/constants' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonButton } from 'lib/lemon-ui/LemonButton' @@ -34,9 +33,9 @@ import { dashboardLogic } from 'scenes/dashboard/dashboardLogic' import { EmptyDashboardComponent } from 'scenes/dashboard/EmptyDashboardComponent' import { UTM_TAGS } from 'scenes/feature-flags/FeatureFlagSnippets' import { JSONEditorInput } from 'scenes/feature-flags/JSONEditorInput' +import { 
FeatureFlagPermissions } from 'scenes/FeatureFlagPermissions' import { concatWithPunctuation } from 'scenes/insights/utils' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' -import { ResourcePermission } from 'scenes/ResourcePermissionModal' import { SceneExport } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' @@ -58,14 +57,12 @@ import { PropertyOperator, QueryBasedInsightModel, ReplayTabs, - Resource, } from '~/types' import { AnalysisTab } from './FeatureFlagAnalysisTab' import { FeatureFlagAutoRollback } from './FeatureFlagAutoRollout' import { FeatureFlagCodeExample } from './FeatureFlagCodeExample' import { featureFlagLogic, getRecordingFilterForFlagVariant } from './featureFlagLogic' -import { featureFlagPermissionsLogic } from './featureFlagPermissionsLogic' import FeatureFlagProjects from './FeatureFlagProjects' import { FeatureFlagReleaseConditions } from './FeatureFlagReleaseConditions' import FeatureFlagSchedule from './FeatureFlagSchedule' @@ -103,13 +100,6 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { setActiveTab, } = useActions(featureFlagLogic) - const { addableRoles, unfilteredAddableRolesLoading, rolesToAdd, derivedRoles } = useValues( - featureFlagPermissionsLogic({ flagId: featureFlag.id }) - ) - const { setRolesToAdd, addAssociatedRoles, deleteAssociatedRole } = useActions( - featureFlagPermissionsLogic({ flagId: featureFlag.id }) - ) - const { tags } = useValues(tagsModel) const { hasAvailableFeature } = useValues(userLogic) @@ -221,21 +211,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { tabs.push({ label: 'Permissions', key: FeatureFlagsTab.PERMISSIONS, - content: ( - - setRolesToAdd(roleIds)} - rolesToAdd={rolesToAdd} - addableRoles={addableRoles} - addableRolesLoading={unfilteredAddableRolesLoading} - onAdd={() => addAssociatedRoles()} - roles={derivedRoles} - deleteAssociatedRole={(id) => deleteAssociatedRole({ roleId: id })} - canEdit={featureFlag.can_edit} - /> - - ), + content: , }) } @@ -433,21 +409,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element {

Permissions

- - setRolesToAdd(roleIds)} - rolesToAdd={rolesToAdd} - addableRoles={addableRoles} - addableRolesLoading={unfilteredAddableRolesLoading} - onAdd={() => addAssociatedRoles()} - roles={derivedRoles} - deleteAssociatedRole={(id) => - deleteAssociatedRole({ roleId: id }) - } - canEdit={featureFlag.can_edit} - /> - +
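The experimentLogic hunks above change metric loading so that a failing metric resolves to `null` rather than `{}`, with the error normalised and stored per metric index via `setPrimaryMetricsResultErrors` so that `NoResultEmptyState` can render a diagnostics checklist. A minimal standalone sketch of that normalisation step (the function name and example payload are illustrative, not the actual kea listener):

```ts
interface MetricResultError {
    detail: unknown // parsed diagnostics object, or the raw detail string
    statusCode?: number
    hasDiagnostics: boolean // true when a JSON diagnostics object was embedded in the detail
}

// Mirrors the pattern in loadMetricResults: try to pull a JSON object out of the
// backend error detail; fall back to the raw string and mark "no diagnostics".
function normalizeMetricError(error: { detail: string; status?: number }): MetricResultError {
    const errorDetailMatch = error.detail.match(/\{.*\}/)
    return {
        detail: errorDetailMatch ? JSON.parse(errorDetailMatch[0]) : error.detail,
        statusCode: error.status,
        hasDiagnostics: !!errorDetailMatch,
    }
}

// Errors are kept in an array indexed by metric position, so the UI can show the
// right checklist next to the right metric.
const errors: (MetricResultError | null)[] = []
errors[1] = normalizeMetricError({
    detail: 'Query failed: {"no-events": true, "no-control-variant": false}', // illustrative payload
    status: 400,
})
```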
diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 978348e795149..4a9fd05113d3e 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -23,9 +23,11 @@ import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { groupsModel } from '~/models/groupsModel' import { getQueryBasedInsightModel } from '~/queries/nodes/InsightViz/utils' import { + ActivityScope, AvailableFeature, Breadcrumb, CohortType, @@ -973,6 +975,19 @@ export const featureFlagLogic = kea([ { key: [Scene.FeatureFlag, featureFlag.id || 'unknown'], name: featureFlag.key || 'Unnamed' }, ], ], + [SIDE_PANEL_CONTEXT_KEY]: [ + (s) => [s.featureFlag], + (featureFlag): SidePanelSceneContext | null => { + return featureFlag?.id + ? { + activity_scope: ActivityScope.FEATURE_FLAG, + activity_item_id: `${featureFlag.id}`, + access_control_resource: 'feature_flag', + access_control_resource_id: `${featureFlag.id}`, + } + : null + }, + ], filteredDashboards: [ (s) => [s.dashboards, s.featureFlag], (dashboards, featureFlag) => { diff --git a/frontend/src/scenes/insights/Insight.tsx b/frontend/src/scenes/insights/Insight.tsx index a0edff02deab5..573321196a64f 100644 --- a/frontend/src/scenes/insights/Insight.tsx +++ b/frontend/src/scenes/insights/Insight.tsx @@ -4,6 +4,7 @@ import { DebugCHQueries } from 'lib/components/CommandPalette/DebugCHQueries' import { isObject } from 'lib/utils' import { InsightPageHeader } from 'scenes/insights/InsightPageHeader' import { insightSceneLogic } from 'scenes/insights/insightSceneLogic' +import { ReloadInsight } from 'scenes/saved-insights/ReloadInsight' import { urls } from 'scenes/urls' import { Query } from '~/queries/Query/Query' @@ -21,7 +22,7 @@ export interface InsightSceneProps { export function Insight({ insightId }: InsightSceneProps): JSX.Element { // insightSceneLogic - const { insightMode, insight, filtersOverride, variablesOverride } = useValues(insightSceneLogic) + const { insightMode, insight, filtersOverride, variablesOverride, freshQuery } = useValues(insightSceneLogic) // insightLogic const logic = insightLogic({ @@ -79,6 +80,8 @@ export function Insight({ insightId }: InsightSceneProps): JSX.Element {
)} + {freshQuery ? : null} + ([ @@ -29,6 +34,10 @@ export const insightDataLogic = kea([ values: [ insightLogic, ['insight', 'savedInsight'], + insightSceneLogic, + ['insightId', 'insightMode', 'activeScene'], + teamLogic, + ['currentTeamId'], dataNodeLogic({ key: insightVizDataNodeKey(props), loadPriority: props.loadPriority, @@ -49,7 +58,7 @@ export const insightDataLogic = kea([ ], actions: [ insightLogic, - ['setInsight', 'loadInsightSuccess'], + ['setInsight'], dataNodeLogic({ key: insightVizDataNodeKey(props) } as DataNodeLogicProps), ['loadData', 'loadDataSuccess', 'loadDataFailure', 'setResponse as setInsightData'], ], @@ -187,21 +196,59 @@ export const insightDataLogic = kea([ actions.setInsightData({ ...values.insightData, result }) } }, - loadInsightSuccess: ({ insight }) => { - if (insight.query) { - actions.setQuery(insight.query) - } - }, cancelChanges: () => { const savedQuery = values.savedInsight.query const savedResult = values.savedInsight.result actions.setQuery(savedQuery || null) actions.setInsightData({ ...values.insightData, result: savedResult ? savedResult : null }) }, + setQuery: ({ query }) => { + // if the query is not changed, don't save it + if (!query || !values.queryChanged) { + return + } + // only run on insight scene + if (insightSceneLogic.values.activeScene !== Scene.Insight) { + return + } + // don't save for saved insights + if (insightSceneLogic.values.insightId !== 'new') { + return + } + + if (isQueryTooLarge(query)) { + localStorage.removeItem(`draft-query-${values.currentTeamId}`) + } + localStorage.setItem( + `draft-query-${values.currentTeamId}`, + crushDraftQueryForLocalStorage(query, Date.now()) + ) + }, })), propsChanged(({ actions, props, values }) => { if (props.cachedInsight?.query && !objectsEqual(props.cachedInsight.query, values.query)) { actions.setQuery(props.cachedInsight.query) } }), + actionToUrl(({ values }) => ({ + setQuery: ({ query }) => { + if ( + values.queryChanged && + insightSceneLogic.values.activeScene === Scene.Insight && + insightSceneLogic.values.insightId === 'new' + ) { + // query is changed and we are in edit mode + return [ + router.values.currentLocation.pathname, + { + ...router.values.currentLocation.searchParams, + }, + { + ...router.values.currentLocation.hashParams, + q: crushDraftQueryForURL(query), + }, + ] + } + }, + })), ]) diff --git a/frontend/src/scenes/insights/insightLogic.tsx b/frontend/src/scenes/insights/insightLogic.tsx index a3c9905180538..1ca1548f30047 100644 --- a/frontend/src/scenes/insights/insightLogic.tsx +++ b/frontend/src/scenes/insights/insightLogic.tsx @@ -336,6 +336,8 @@ export const insightLogic: LogicWrapper = kea { }) }) - it('redirects when opening /insight/new with insight type in theurl', async () => { + it('redirects maintaining url params when opening /insight/new with insight type in theurl', async () => { router.actions.push(urls.insightNew(InsightType.FUNNELS)) await expectLogic(logic).toFinishAllListeners() - await expectLogic(router) - .delay(1) - .toMatchValues({ - location: partial({ - pathname: addProjectIdIfMissing(urls.insightNew(), MOCK_TEAM_ID), - search: '', - hash: '', - }), - }) expect((logic.values.insightLogicRef?.logic.values.insight.query as InsightVizNode).source?.kind).toEqual( 'FunnelsQuery' ) }) - it('redirects when opening /insight/new with query in the url', async () => { + it('redirects maintaining url params when opening /insight/new with query in the url', async () => { router.actions.push( urls.insightNew(undefined, undefined, { kind: 
NodeKind.InsightVizNode, @@ -70,15 +61,6 @@ describe('insightSceneLogic', () => { } as InsightVizNode) ) await expectLogic(logic).toFinishAllListeners() - await expectLogic(router) - .delay(1) - .toMatchValues({ - location: partial({ - pathname: addProjectIdIfMissing(urls.insightNew(), MOCK_TEAM_ID), - search: '', - hash: '', - }), - }) expect((logic.values.insightLogicRef?.logic.values.insight.query as InsightVizNode).source?.kind).toEqual( 'PathsQuery' diff --git a/frontend/src/scenes/insights/insightSceneLogic.tsx b/frontend/src/scenes/insights/insightSceneLogic.tsx index 3f79ace2432d9..d9b89d13ff25c 100644 --- a/frontend/src/scenes/insights/insightSceneLogic.tsx +++ b/frontend/src/scenes/insights/insightSceneLogic.tsx @@ -15,7 +15,7 @@ import { teamLogic } from 'scenes/teamLogic' import { mathsLogic } from 'scenes/trends/mathsLogic' import { urls } from 'scenes/urls' -import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { cohortsModel } from '~/models/cohortsModel' import { groupsModel } from '~/models/groupsModel' import { getDefaultQuery } from '~/queries/nodes/InsightViz/utils' @@ -26,6 +26,10 @@ import { insightDataLogic } from './insightDataLogic' import { insightDataLogicType } from './insightDataLogicType' import type { insightSceneLogicType } from './insightSceneLogicType' import { summarizeInsight } from './summarizeInsight' +import { parseDraftQueryFromLocalStorage, parseDraftQueryFromURL } from './utils' + +const NEW_INSIGHT = 'new' as const +export type InsightId = InsightShortId | typeof NEW_INSIGHT | null export const insightSceneLogic = kea([ path(['scenes', 'insights', 'insightSceneLogic']), @@ -33,7 +37,7 @@ export const insightSceneLogic = kea([ logic: [eventUsageLogic], values: [ teamLogic, - ['currentTeam'], + ['currentTeam', 'currentTeamId'], sceneLogic, ['activeScene'], preflightLogic, @@ -73,10 +77,11 @@ export const insightSceneLogic = kea([ unmount, }), setOpenedWithQuery: (query: Node | null) => ({ query }), + setFreshQuery: (freshQuery: boolean) => ({ freshQuery }), }), reducers({ insightId: [ - null as null | 'new' | InsightShortId, + null as null | InsightId, { setSceneState: (_, { insightId }) => insightId, }, @@ -150,6 +155,7 @@ export const insightSceneLogic = kea([ }, ], openedWithQuery: [null as Node | null, { setOpenedWithQuery: (_, { query }) => query }], + freshQuery: [false, { setFreshQuery: (_, { freshQuery }) => freshQuery }], }), selectors(() => ({ insightSelector: [(s) => [s.insightLogicRef], (insightLogicRef) => insightLogicRef?.logic.selectors.insight], @@ -210,13 +216,15 @@ export const insightSceneLogic = kea([ ] }, ], - activityFilters: [ + [SIDE_PANEL_CONTEXT_KEY]: [ (s) => [s.insight], - (insight): ActivityFilters | null => { - return insight + (insight): SidePanelSceneContext | null => { + return insight?.id ? 
{ - scope: ActivityScope.INSIGHT, - item_id: `${insight.id}`, + activity_scope: ActivityScope.INSIGHT, + activity_item_id: `${insight.id}`, + access_control_resource: 'insight', + access_control_resource_id: `${insight.id}`, } : null }, @@ -330,24 +338,20 @@ export const insightSceneLogic = kea([ let queryFromUrl: Node | null = null if (q) { - queryFromUrl = JSON.parse(q) + const validQuery = parseDraftQueryFromURL(q) + if (validQuery) { + queryFromUrl = validQuery + } else { + console.error('Invalid query', q) + } } else if (insightType && Object.values(InsightType).includes(insightType)) { queryFromUrl = getDefaultQuery(insightType, values.filterTestAccountsDefault) } - // Redirect to a simple URL if we had a query in the URL - if (q || insightType) { - router.actions.replace( - insightId === 'new' - ? urls.insightNew(undefined, dashboard) - : insightMode === ItemMode.Edit - ? urls.insightEdit(insightId) - : urls.insightView(insightId) - ) - } + actions.setFreshQuery(false) // reset the insight's state if we have to - if (initial || method === 'PUSH' || queryFromUrl) { + if (initial || queryFromUrl || method === 'PUSH') { if (insightId === 'new') { const query = queryFromUrl || getDefaultQuery(InsightType.TRENDS, values.filterTestAccountsDefault) values.insightLogicRef?.logic.actions.setInsight( @@ -362,6 +366,10 @@ export const insightSceneLogic = kea([ } ) + if (!queryFromUrl) { + actions.setFreshQuery(true) + } + actions.setOpenedWithQuery(query) eventUsageLogic.actions.reportInsightCreated(query) @@ -414,6 +422,22 @@ export const insightSceneLogic = kea([ const metadataChanged = !!values.insightLogicRef?.logic.values.insightChanged const queryChanged = !!values.insightDataLogicRef?.logic.values.queryChanged + const draftQueryFromLocalStorage = localStorage.getItem(`draft-query-${values.currentTeamId}`) + let draftQuery: { query: Node; timestamp: number } | null = null + if (draftQueryFromLocalStorage) { + const parsedQuery = parseDraftQueryFromLocalStorage(draftQueryFromLocalStorage) + if (parsedQuery) { + draftQuery = parsedQuery + } else { + // If the draft query is invalid, remove it + localStorage.removeItem(`draft-query-${values.currentTeamId}`) + } + } + const query = values.insightDataLogicRef?.logic.values.query + + if (draftQuery && query && objectsEqual(draftQuery.query, query)) { + return false + } return metadataChanged || queryChanged }, diff --git a/frontend/src/scenes/insights/utils.tsx b/frontend/src/scenes/insights/utils.tsx index 96d3129e47fa6..5a1b4d56ec7d9 100644 --- a/frontend/src/scenes/insights/utils.tsx +++ b/frontend/src/scenes/insights/utils.tsx @@ -1,3 +1,4 @@ +import JSONCrush from 'jsoncrush' import api from 'lib/api' import { dayjs } from 'lib/dayjs' import { CORE_FILTER_DEFINITIONS_BY_GROUP } from 'lib/taxonomy' @@ -15,6 +16,7 @@ import { DataWarehouseNode, EventsNode, InsightVizNode, + Node, NodeKind, PathsFilter, } from '~/queries/schema' @@ -433,3 +435,50 @@ export function insightUrlForEvent(event: Pick>): boolean { + // Chrome has a 2MB limit for the HASH params, limit ours at 1MB + const queryLength = encodeURI(JSON.stringify(query)).split(/%..|./).length - 1 + return queryLength > 1024 * 1024 +} + +export function parseDraftQueryFromLocalStorage( + query: string +): { query: Node>; timestamp: number } | null { + // First try to uncrush the query if it's a JSONCrush query else fall back to parsing it as a JSON + try { + const uncrushedQuery = JSONCrush.uncrush(query) + return JSON.parse(uncrushedQuery) + } catch (e) { + console.error('Error 
parsing uncrushed query', e) + try { + return JSON.parse(query) + } catch (e) { + console.error('Error parsing query', e) + return null + } + } +} + +export function crushDraftQueryForLocalStorage(query: Node>, timestamp: number): string { + return JSONCrush.crush(JSON.stringify({ query, timestamp })) +} + +export function parseDraftQueryFromURL(query: string): Node> | null { + try { + const uncrushedQuery = JSONCrush.uncrush(query) + return JSON.parse(uncrushedQuery) + } catch (e) { + console.error('Error parsing uncrushed query', e) + try { + return JSON.parse(query) + } catch (e) { + console.error('Error parsing query', e) + return null + } + } +} + +export function crushDraftQueryForURL(query: Node>): string { + return JSONCrush.crush(JSON.stringify(query)) +} diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx deleted file mode 100644 index 1a9233289616c..0000000000000 --- a/frontend/src/scenes/notebooks/Notebook/NotebookShare.tsx +++ /dev/null @@ -1,104 +0,0 @@ -import { IconCopy } from '@posthog/icons' -import { LemonBanner, LemonButton, LemonDivider } from '@posthog/lemon-ui' -import { useValues } from 'kea' -import { LemonDialog } from 'lib/lemon-ui/LemonDialog' -import { base64Encode } from 'lib/utils' -import { copyToClipboard } from 'lib/utils/copyToClipboard' -import posthog from 'posthog-js' -import { useState } from 'react' -import { urls } from 'scenes/urls' - -import { notebookLogic } from './notebookLogic' - -export type NotebookShareProps = { - shortId: string -} -export function NotebookShare({ shortId }: NotebookShareProps): JSX.Element { - const { content, isLocalOnly } = useValues(notebookLogic({ shortId })) - - const notebookUrl = urls.absolute(urls.currentProject(urls.notebook(shortId))) - const canvasUrl = urls.absolute(urls.canvas()) + `#šŸ¦”=${base64Encode(JSON.stringify(content))}` - - const [interestTracked, setInterestTracked] = useState(false) - - const trackInterest = (): void => { - posthog.capture('pressed interested in notebook sharing', { url: notebookUrl }) - } - - return ( -
-

Internal Link

- {!isLocalOnly ? ( - <> -

- Click the button below to copy a direct link to this Notebook. Make sure the person you - share it with has access to this PostHog project. -

- } - onClick={() => void copyToClipboard(notebookUrl, 'notebook link')} - title={notebookUrl} - > - {notebookUrl} - - - - - ) : ( - -

This Notebook cannot be shared directly with others as it is only visible to you.

-
- )} - -

Template Link

-

- The link below will open a Canvas with the contents of this Notebook, allowing the receiver to view it, - edit it or create their own Notebook without affecting this one. -

- } - onClick={() => void copyToClipboard(canvasUrl, 'canvas link')} - title={canvasUrl} - > - {canvasUrl} - - - - -

External Sharing

- - { - if (!interestTracked) { - trackInterest() - setInterestTracked(true) - } - }, - }} - > - We don’t currently support sharing notebooks externally, but it’s on our roadmap! -
- ) -} - -export function openNotebookShareDialog({ shortId }: NotebookShareProps): void { - LemonDialog.open({ - title: 'Share notebook', - content: , - width: 600, - primaryButton: { - children: 'Close', - type: 'secondary', - }, - }) -} diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookShareModal.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookShareModal.tsx new file mode 100644 index 0000000000000..534599664149f --- /dev/null +++ b/frontend/src/scenes/notebooks/Notebook/NotebookShareModal.tsx @@ -0,0 +1,133 @@ +import { IconCopy, IconOpenSidebar } from '@posthog/icons' +import { LemonBanner, LemonButton, LemonDivider, LemonModal } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { FlaggedFeature } from 'lib/components/FlaggedFeature' +import { SHARING_MODAL_WIDTH } from 'lib/components/Sharing/SharingModal' +import { base64Encode } from 'lib/utils' +import { copyToClipboard } from 'lib/utils/copyToClipboard' +import posthog from 'posthog-js' +import { useState } from 'react' +import { urls } from 'scenes/urls' + +import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic' +import { SidePanelTab } from '~/types' + +import { notebookLogic } from './notebookLogic' + +export type NotebookShareModalProps = { + shortId: string +} + +export function NotebookShareModal({ shortId }: NotebookShareModalProps): JSX.Element { + const { content, isLocalOnly, isShareModalOpen } = useValues(notebookLogic({ shortId })) + const { closeShareModal } = useActions(notebookLogic({ shortId })) + const { openSidePanel } = useActions(sidePanelStateLogic) + + const notebookUrl = urls.absolute(urls.currentProject(urls.notebook(shortId))) + const canvasUrl = urls.absolute(urls.canvas()) + `#šŸ¦”=${base64Encode(JSON.stringify(content))}` + + const [interestTracked, setInterestTracked] = useState(false) + + const trackInterest = (): void => { + posthog.capture('pressed interested in notebook sharing', { url: notebookUrl }) + } + + return ( + closeShareModal()} + isOpen={isShareModalOpen} + width={SHARING_MODAL_WIDTH} + footer={ + + Done + + } + > +
+ + <> +
+

Access control

+ + Permissions have moved! We're rolling out our new access control system. Click below to + open it. + + } + onClick={() => { + openSidePanel(SidePanelTab.AccessControl) + closeShareModal() + }} + > + Open access control + +
+ + +
+

Internal Link

+ {!isLocalOnly ? ( + <> +

+ Click the button below to copy a direct link to this Notebook. Make sure the person + you share it with has access to this PostHog project. +

+ } + onClick={() => void copyToClipboard(notebookUrl, 'notebook link')} + title={notebookUrl} + > + {notebookUrl} + + + + + ) : ( + +

This Notebook cannot be shared directly with others as it is only visible to you.

+
+ )} + +

Template Link

+

+ The link below will open a Canvas with the contents of this Notebook, allowing the receiver to view + it, edit it or create their own Notebook without affecting this one. +

+ } + onClick={() => void copyToClipboard(canvasUrl, 'canvas link')} + title={canvasUrl} + > + {canvasUrl} + + + + +

External Sharing

+ + { + if (!interestTracked) { + trackInterest() + setInterestTracked(true) + } + }, + }} + > + We don’t currently support sharing notebooks externally, but it’s on our roadmap! + +
+
+ ) +} diff --git a/frontend/src/scenes/notebooks/Notebook/__mocks__/notebook-12345.json b/frontend/src/scenes/notebooks/Notebook/__mocks__/notebook-12345.json index f2ac6bd3c8d16..4e31800d43919 100644 --- a/frontend/src/scenes/notebooks/Notebook/__mocks__/notebook-12345.json +++ b/frontend/src/scenes/notebooks/Notebook/__mocks__/notebook-12345.json @@ -59,5 +59,6 @@ "first_name": "Paul", "email": "paul@posthog.com", "is_email_verified": false - } + }, + "user_access_level": "editor" } diff --git a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts index bc0593c22bff3..68fc4d6e7f0f1 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts @@ -133,8 +133,17 @@ export const notebookLogic = kea([ setContainerSize: (containerSize: 'small' | 'medium') => ({ containerSize }), insertComment: (context: Record) => ({ context }), selectComment: (itemContextId: string) => ({ itemContextId }), + openShareModal: true, + closeShareModal: true, }), reducers(({ props }) => ({ + isShareModalOpen: [ + false, + { + openShareModal: () => true, + closeShareModal: () => false, + }, + ], localContent: [ null as JSONContent | null, { persist: props.mode !== 'canvas', prefix: NOTEBOOKS_VERSION }, @@ -348,9 +357,9 @@ export const notebookLogic = kea([ mode: [() => [(_, props) => props], (props): NotebookLogicMode => props.mode ?? 'notebook'], isTemplate: [(s) => [s.shortId], (shortId): boolean => shortId.startsWith('template-')], isLocalOnly: [ - () => [(_, props) => props], - (props): boolean => { - return props.shortId === 'scratchpad' || props.mode === 'canvas' + (s) => [(_, props) => props, s.isTemplate], + (props, isTemplate): boolean => { + return props.shortId === 'scratchpad' || props.mode === 'canvas' || isTemplate }, ], notebookMissing: [ @@ -443,8 +452,9 @@ export const notebookLogic = kea([ ], isEditable: [ - (s) => [s.shouldBeEditable, s.previewContent], - (shouldBeEditable, previewContent) => shouldBeEditable && !previewContent, + (s) => [s.shouldBeEditable, s.previewContent, s.notebook], + (shouldBeEditable, previewContent, notebook) => + shouldBeEditable && !previewContent && notebook?.user_access_level === 'editor', ], }), listeners(({ values, actions, cache }) => ({ @@ -518,6 +528,11 @@ export const notebookLogic = kea([ ) }, setLocalContent: async ({ updateEditor, jsonContent }, breakpoint) => { + if (values.notebook?.user_access_level !== 'editor') { + actions.clearLocalContent() + return + } + if (values.previewContent) { // We don't want to modify the content if we are viewing a preview return diff --git a/frontend/src/scenes/notebooks/NotebookMenu.tsx b/frontend/src/scenes/notebooks/NotebookMenu.tsx index 9cea5e74fbe37..aeeebfa35cdfa 100644 --- a/frontend/src/scenes/notebooks/NotebookMenu.tsx +++ b/frontend/src/scenes/notebooks/NotebookMenu.tsx @@ -10,10 +10,10 @@ import { urls } from 'scenes/urls' import { notebooksModel } from '~/models/notebooksModel' import { notebookLogic, NotebookLogicProps } from './Notebook/notebookLogic' -import { openNotebookShareDialog } from './Notebook/NotebookShare' export function NotebookMenu({ shortId }: NotebookLogicProps): JSX.Element { const { notebook, showHistory, isLocalOnly } = useValues(notebookLogic({ shortId })) + const { openShareModal } = useActions(notebookLogic({ shortId })) const { exportJSON, setShowHistory } = useActions(notebookLogic({ shortId })) return ( @@ -32,14 +32,17 @@ export function NotebookMenu({ 
shortId }: NotebookLogicProps): JSX.Element { { label: 'Share', icon: , - onClick: () => openNotebookShareDialog({ shortId }), + onClick: () => openShareModal(), }, !isLocalOnly && !notebook?.is_template && { label: 'Delete', icon: , status: 'danger', - + disabledReason: + notebook?.user_access_level !== 'editor' + ? 'You do not have permission to delete this notebook.' + : undefined, onClick: () => { notebooksModel.actions.deleteNotebook(shortId, notebook?.title) router.actions.push(urls.notebooks()) diff --git a/frontend/src/scenes/notebooks/NotebookScene.tsx b/frontend/src/scenes/notebooks/NotebookScene.tsx index e24c3bdd498c5..a0cc87a441c74 100644 --- a/frontend/src/scenes/notebooks/NotebookScene.tsx +++ b/frontend/src/scenes/notebooks/NotebookScene.tsx @@ -14,6 +14,7 @@ import { Notebook } from './Notebook/Notebook' import { NotebookLoadingState } from './Notebook/NotebookLoadingState' import { notebookLogic } from './Notebook/notebookLogic' import { NotebookExpandButton, NotebookSyncInfo } from './Notebook/NotebookMeta' +import { NotebookShareModal } from './Notebook/NotebookShareModal' import { NotebookMenu } from './NotebookMenu' import { notebookPanelLogic } from './NotebookPanel/notebookPanelLogic' import { notebookSceneLogic, NotebookSceneLogicProps } from './notebookSceneLogic' @@ -128,6 +129,7 @@ export function NotebookScene(): JSX.Element {
+
) } diff --git a/frontend/src/scenes/notebooks/notebookSceneLogic.ts b/frontend/src/scenes/notebooks/notebookSceneLogic.ts index 592a1b39e09ed..6d987f3a780a4 100644 --- a/frontend/src/scenes/notebooks/notebookSceneLogic.ts +++ b/frontend/src/scenes/notebooks/notebookSceneLogic.ts @@ -2,8 +2,9 @@ import { afterMount, connect, kea, key, path, props, selectors } from 'kea' import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { notebooksModel } from '~/models/notebooksModel' -import { Breadcrumb } from '~/types' +import { ActivityScope, Breadcrumb } from '~/types' import { notebookLogic } from './Notebook/notebookLogic' import type { notebookSceneLogicType } from './notebookSceneLogicType' @@ -16,7 +17,12 @@ export const notebookSceneLogic = kea([ props({} as NotebookSceneLogicProps), key(({ shortId }) => shortId), connect((props: NotebookSceneLogicProps) => ({ - values: [notebookLogic(props), ['notebook', 'notebookLoading'], notebooksModel, ['notebooksLoading']], + values: [ + notebookLogic(props), + ['notebook', 'notebookLoading', 'isLocalOnly'], + notebooksModel, + ['notebooksLoading'], + ], actions: [notebookLogic(props), ['loadNotebook'], notebooksModel, ['createNotebook']], })), selectors(() => ({ @@ -41,6 +47,20 @@ export const notebookSceneLogic = kea([ }, ], ], + + [SIDE_PANEL_CONTEXT_KEY]: [ + (s) => [s.notebookId, s.isLocalOnly], + (notebookId, isLocalOnly): SidePanelSceneContext | null => { + return notebookId && !isLocalOnly + ? { + activity_scope: ActivityScope.NOTEBOOK, + activity_item_id: notebookId, + access_control_resource: 'notebook', + access_control_resource_id: notebookId, + } + : null + }, + ], })), afterMount(({ actions, props }) => { diff --git a/frontend/src/scenes/persons/personsLogic.tsx b/frontend/src/scenes/persons/personsLogic.tsx index d408ec3a74ed0..fcfb21200a7c4 100644 --- a/frontend/src/scenes/persons/personsLogic.tsx +++ b/frontend/src/scenes/persons/personsLogic.tsx @@ -13,7 +13,7 @@ import { Scene } from 'scenes/sceneTypes' import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' -import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { hogqlQuery } from '~/queries/query' import { ActivityScope, @@ -256,13 +256,13 @@ export const personsLogic = kea([ }, ], - activityFilters: [ + [SIDE_PANEL_CONTEXT_KEY]: [ (s) => [s.person], - (person): ActivityFilters => { + (person): SidePanelSceneContext => { return { - scope: ActivityScope.PERSON, + activity_scope: ActivityScope.PERSON, // TODO: Is this correct? It doesn't seem to work... - item_id: person?.id ? `${person?.id}` : undefined, + activity_item_id: person?.id ? 
`${person?.id}` : undefined, } }, ], diff --git a/frontend/src/scenes/pipeline/pipelineLogic.tsx b/frontend/src/scenes/pipeline/pipelineLogic.tsx index 38ea5f1d54b4f..23438fbe86185 100644 --- a/frontend/src/scenes/pipeline/pipelineLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineLogic.tsx @@ -5,7 +5,7 @@ import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' -import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { ActivityScope, Breadcrumb, PipelineTab } from '~/types' import type { pipelineLogicType } from './pipelineLogicType' @@ -44,11 +44,11 @@ export const pipelineLogic = kea([ }, ], - activityFilters: [ + [SIDE_PANEL_CONTEXT_KEY]: [ () => [], - (): ActivityFilters | null => { + (): SidePanelSceneContext => { return { - scope: ActivityScope.PLUGIN, + activity_scope: ActivityScope.PLUGIN, } }, ], diff --git a/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx b/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx index 4faedce085b8a..2d2e7b977aec5 100644 --- a/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineNodeLogic.tsx @@ -4,7 +4,7 @@ import { capitalizeFirstLetter } from 'lib/utils' import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' -import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { ActivityScope, Breadcrumb, PipelineNodeTab, PipelineStage } from '~/types' import type { pipelineNodeLogicType } from './pipelineNodeLogicType' @@ -78,13 +78,15 @@ export const pipelineNodeLogic = kea([ ], ], - activityFilters: [ + [SIDE_PANEL_CONTEXT_KEY]: [ (s) => [s.node], - (node): ActivityFilters | null => { + (node): SidePanelSceneContext | null => { return node.backend === PipelineBackend.Plugin ? { - scope: ActivityScope.PLUGIN, - item_id: `${node.id}`, + activity_scope: ActivityScope.PLUGIN, + activity_item_id: `${node.id}`, + // access_control_resource: 'plugin', + // access_control_resource_id: `${node.id}`, } : null }, diff --git a/frontend/src/scenes/saved-insights/ReloadInsight.tsx b/frontend/src/scenes/saved-insights/ReloadInsight.tsx new file mode 100644 index 0000000000000..66a258eabdc3c --- /dev/null +++ b/frontend/src/scenes/saved-insights/ReloadInsight.tsx @@ -0,0 +1,31 @@ +import { Link } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { parseDraftQueryFromLocalStorage } from 'scenes/insights/utils' +import { teamLogic } from 'scenes/teamLogic' +import { urls } from 'scenes/urls' + +import { Node } from '~/queries/schema' + +export function ReloadInsight(): JSX.Element { + const { currentTeamId } = useValues(teamLogic) + const draftQueryLocalStorage = localStorage.getItem(`draft-query-${currentTeamId}`) + let draftQuery: { query: Node>; timestamp: number } | null = null + if (draftQueryLocalStorage) { + const parsedQuery = parseDraftQueryFromLocalStorage(draftQueryLocalStorage) + if (parsedQuery) { + draftQuery = parsedQuery + } else { + localStorage.removeItem(`draft-query-${currentTeamId}`) + } + } + + if (!draftQuery?.query) { + return <> + } + return ( +
+ You have an unsaved insight from {new Date(draftQuery.timestamp).toLocaleString()}.{' '} + Click here to view it. +
+ ) +} diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index 05f4c5c131668..bd155048b0490 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -57,6 +57,7 @@ import { NodeKind } from '~/queries/schema' import { isNodeWithSource } from '~/queries/utils' import { ActivityScope, InsightType, LayoutView, QueryBasedInsightModel, SavedInsightsTabs } from '~/types' +import { ReloadInsight } from './ReloadInsight' import { INSIGHTS_PER_PAGE, savedInsightsLogic } from './savedInsightsLogic' interface NewInsightButtonProps { @@ -671,6 +672,7 @@ export function SavedInsights(): JSX.Element { ) : ( <> + {layoutView === LayoutView.List ? ( urls.pipelineNode(PipelineStage.Transformation, id), '/messaging': urls.messagingBroadcasts(), + '/settings/organization-rbac': urls.settings('organization-roles'), } export const routes: Record = { diff --git a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx index 6bac98a9fc381..5c811872d4134 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx @@ -1,6 +1,6 @@ import './PlayerMeta.scss' -import { LemonBanner, LemonSelect, LemonSelectOption, Link } from '@posthog/lemon-ui' +import { LemonSelect, LemonSelectOption, Link } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { CopyToClipboardInline } from 'lib/components/CopyToClipboard' @@ -59,26 +59,6 @@ function URLOrScreen({ lastUrl }: { lastUrl: string | undefined }): JSX.Element ) } -function PlayerWarningsRow(): JSX.Element | null { - const { messageTooLargeWarnings } = useValues(sessionRecordingPlayerLogic) - - return messageTooLargeWarnings.length ? ( -
- - This session recording had recording data that was too large and could not be captured. This will mean - playback is not 100% accurate.{' '} - -
- ) : null -} - export function PlayerMeta({ iconsOnly }: { iconsOnly: boolean }): JSX.Element { const { logicProps, isFullScreen } = useValues(sessionRecordingPlayerLogic) @@ -206,7 +186,6 @@ export function PlayerMeta({ iconsOnly }: { iconsOnly: boolean }): JSX.Element { {resolutionView}
-
) diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx index 69f3541aa3ed6..42b8b7d317768 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx @@ -57,6 +57,7 @@ function PinToPlaylistButton({ /> ) : ( : } {...buttonProps} @@ -135,7 +136,7 @@ export function PlayerMetaLinks({ iconsOnly }: { iconsOnly: boolean }): JSX.Elem {buttonContent('Comment')} - } onClick={onShare} {...commonProps}> + } onClick={onShare} {...commonProps} tooltip="Share this recording"> {buttonContent('Share')} @@ -149,6 +150,7 @@ export function PlayerMetaLinks({ iconsOnly }: { iconsOnly: boolean }): JSX.Elem attrs: { id: sessionRecordingId }, }) }} + tooltip="Comment in a notebook" /> ) : null} diff --git a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx index 6b28feca120ae..d70a3267a2532 100644 --- a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx +++ b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx @@ -4,7 +4,6 @@ import { LemonButton } from '@posthog/lemon-ui' import clsx from 'clsx' import { BindLogic, useActions, useValues } from 'kea' import { BuilderHog2 } from 'lib/components/hedgehogs' -import { dayjs } from 'lib/dayjs' import { FloatingContainerContext } from 'lib/hooks/useFloatingContainerContext' import { HotkeysInterface, useKeyboardHotkeys } from 'lib/hooks/useKeyboardHotkeys' import { usePageVisibility } from 'lib/hooks/usePageVisibility' @@ -87,11 +86,9 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. setSpeed, closeExplorer, } = useActions(sessionRecordingPlayerLogic(logicProps)) - const { isNotFound, snapshotsInvalid, start } = useValues(sessionRecordingDataLogic(logicProps)) + const { isNotFound, isRecentAndInvalid } = useValues(sessionRecordingDataLogic(logicProps)) const { loadSnapshots } = useActions(sessionRecordingDataLogic(logicProps)) - const { isFullScreen, explorerMode, isBuffering, messageTooLargeWarnings } = useValues( - sessionRecordingPlayerLogic(logicProps) - ) + const { isFullScreen, explorerMode, isBuffering } = useValues(sessionRecordingPlayerLogic(logicProps)) const { setPlayNextAnimationInterrupted } = useActions(sessionRecordingPlayerLogic(logicProps)) const speedHotkeys = useMemo(() => createPlaybackSpeedKey(setSpeed), [setSpeed]) const { isVerticallyStacked, sidebarOpen, playbackMode } = useValues(playerSettingsLogic) @@ -158,9 +155,6 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. } ) - const lessThanFiveMinutesOld = dayjs().diff(start, 'minute') <= 5 - const cannotPlayback = snapshotsInvalid && lessThanFiveMinutesOld && !messageTooLargeWarnings - const { draggable, elementProps } = useNotebookDrag({ href: urls.replaySingle(sessionRecordingId) }) if (isNotFound) { @@ -198,7 +192,7 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. className="SessionRecordingPlayer__main flex flex-col h-full w-full" ref={playerMainRef} > - {cannotPlayback ? ( + {isRecentAndInvalid ? (

We're still working on it

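The `cannotPlayback` flag that SessionRecordingPlayer used to compute inline is replaced by the `isRecentAndInvalid` selector added to sessionRecordingDataLogic further down in this diff: snapshots are invalid, but the recording started less than five minutes ago, so ingestion may simply not have caught up yet. A rough standalone sketch of the condition (not the actual kea selector):

```ts
import dayjs, { Dayjs } from 'dayjs'

// "Recent and invalid" = show the "We're still working on it" empty state
// instead of treating the recording as broken.
function isRecentAndInvalid(start: Dayjs, snapshotsInvalid: boolean): boolean {
    const lessThanFiveMinutesOld = dayjs().diff(start, 'minute') <= 5
    return snapshotsInvalid && lessThanFiveMinutesOld
}

// e.g. a recording that started two minutes ago with unplayable snapshots:
isRecentAndInvalid(dayjs().subtract(2, 'minute'), true) // true
```

The data logic also subscribes to this selector and captures a `recording cannot playback yet` event the first time it flips to true.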
diff --git a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx index 0498fb9efe05f..f539c787f7209 100644 --- a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx @@ -82,7 +82,7 @@ function ShowMouseTail(): JSX.Element { return ( setTimestampFormat(TimestampFormat.UTC), + active: timestampFormat === TimestampFormat.UTC, + }, + { + label: 'Device', + onClick: () => setTimestampFormat(TimestampFormat.Device), + active: timestampFormat === TimestampFormat.Device, + }, + { + label: 'Relative', + onClick: () => setTimestampFormat(TimestampFormat.Relative), + active: timestampFormat === TimestampFormat.Relative, + }, + ]} + icon={} + label={TimestampFormatToLabel[timestampFormat]} + /> + ) +} + function InspectDOM(): JSX.Element { const { sessionPlayerMetaData } = useValues(sessionRecordingPlayerLogic) const { openExplorer } = useActions(sessionRecordingPlayerLogic) @@ -125,39 +155,15 @@ function InspectDOM(): JSX.Element { } function PlayerBottomSettings(): JSX.Element { - const { timestampFormat } = useValues(playerSettingsLogic) - const { setTimestampFormat } = useActions(playerSettingsLogic) - return (
- setTimestampFormat(TimestampFormat.UTC), - active: timestampFormat === TimestampFormat.UTC, - }, - { - label: 'Device', - onClick: () => setTimestampFormat(TimestampFormat.Device), - active: timestampFormat === TimestampFormat.Device, - }, - { - label: 'Relative', - onClick: () => setTimestampFormat(TimestampFormat.Relative), - active: timestampFormat === TimestampFormat.Relative, - }, - ]} - icon={} - label={TimestampFormatToLabel[timestampFormat]} - /> - +
+
) } diff --git a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts index 98958a186e51b..89e36899c5aeb 100644 --- a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts +++ b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts @@ -1,4 +1,5 @@ -import { actions, connect, kea, path, reducers, selectors } from 'kea' +import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea' +import posthog from 'posthog-js' import { teamLogic } from 'scenes/teamLogic' import { AutoplayDirection, SessionRecordingSidebarStacking } from '~/types' @@ -122,4 +123,13 @@ export const playerSettingsLogic = kea([ (preferredSidebarStacking) => preferredSidebarStacking === SessionRecordingSidebarStacking.Vertical, ], }), + + listeners({ + setSpeed: ({ speed }) => { + posthog.capture('recording player speed changed', { new_speed: speed }) + }, + setSkipInactivitySetting: ({ skipInactivitySetting }) => { + posthog.capture('recording player skip inactivity toggled', { skip_inactivity: skipInactivitySetting }) + }, + }), ]) diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index 10118ce5defdc..ff63bd4b1f397 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -1087,13 +1087,13 @@ export const sessionRecordingDataLogic = kea([ if (everyWindowMissingFullSnapshot) { // video is definitely unplayable posthog.capture('recording_has_no_full_snapshot', { - sessionId: sessionRecordingId, + watchedSession: sessionRecordingId, teamId: currentTeam?.id, teamName: currentTeam?.name, }) } else if (anyWindowMissingFullSnapshot) { posthog.capture('recording_window_missing_full_snapshot', { - sessionId: sessionRecordingId, + watchedSession: sessionRecordingId, teamID: currentTeam?.id, teamName: currentTeam?.name, }) @@ -1103,6 +1103,14 @@ export const sessionRecordingDataLogic = kea([ }, ], + isRecentAndInvalid: [ + (s) => [s.start, s.snapshotsInvalid], + (start, snapshotsInvalid) => { + const lessThanFiveMinutesOld = dayjs().diff(start, 'minute') <= 5 + return snapshotsInvalid && lessThanFiveMinutesOld + }, + ], + bufferedToTime: [ (s) => [s.segments], (segments): number | null => { @@ -1160,6 +1168,13 @@ export const sessionRecordingDataLogic = kea([ actions.loadFullEventData(value) } }, + isRecentAndInvalid: (prev: boolean, next: boolean) => { + if (!prev && next) { + posthog.capture('recording cannot playback yet', { + watchedSession: values.sessionPlayerData.sessionRecordingId, + }) + } + }, })), afterMount(({ cache }) => { resetTimingsCache(cache) diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts index 3dde171f5c309..7a45c26637046 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts @@ -141,7 +141,7 @@ describe('sessionRecordingPlayerLogic', () => { sessionRecordingDataLogic({ sessionRecordingId: '2' }).actionTypes.loadSnapshotSourcesFailure, ]) .toFinishAllListeners() - .toDispatchActions(['setErrorPlayerState']) + .toDispatchActions(['setPlayerError']) expect(logic.values).toMatchObject({ 
sessionPlayerData: { @@ -149,7 +149,7 @@ describe('sessionRecordingPlayerLogic', () => { snapshotsByWindowId: {}, bufferedToTime: 0, }, - isErrored: true, + playerError: 'loadSnapshotSourcesFailure', }) resumeKeaLoadersErrors() }) diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index cfda001ed9ea5..5e7e7955f4602 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -1,5 +1,5 @@ import { lemonToast } from '@posthog/lemon-ui' -import { customEvent, EventType, eventWithTime, IncrementalSource } from '@rrweb/types' +import { EventType, eventWithTime, IncrementalSource } from '@rrweb/types' import { captureException } from '@sentry/react' import { actions, @@ -138,12 +138,7 @@ export const sessionRecordingPlayerLogic = kea( playerSettingsLogic, ['setSpeed', 'setSkipInactivitySetting'], eventUsageLogic, - [ - 'reportNextRecordingTriggered', - 'reportRecordingPlayerSkipInactivityToggled', - 'reportRecordingPlayerSpeedChanged', - 'reportRecordingExportedToFile', - ], + ['reportNextRecordingTriggered', 'reportRecordingExportedToFile'], ], })), actions({ @@ -156,7 +151,8 @@ export const sessionRecordingPlayerLogic = kea( endBuffer: true, startScrub: true, endScrub: true, - setErrorPlayerState: (show: boolean) => ({ show }), + setPlayerError: (reason: string) => ({ reason }), + clearPlayerError: true, setSkippingInactivity: (isSkippingInactivity: boolean) => ({ isSkippingInactivity }), syncPlayerSpeed: true, setCurrentTimestamp: (timestamp: number) => ({ timestamp }), @@ -189,7 +185,6 @@ export const sessionRecordingPlayerLogic = kea( // the error is emitted from code we don't control in rrweb, so we can't guarantee it's really an Error playerErrorSeen: (error: any) => ({ error }), fingerprintReported: (fingerprint: string) => ({ fingerprint }), - reportMessageTooLargeWarningSeen: (sessionRecordingId: string) => ({ sessionRecordingId }), setDebugSnapshotTypes: (types: EventType[]) => ({ types }), setDebugSnapshotIncrementalSources: (incrementalSources: IncrementalSource[]) => ({ incrementalSources }), setPlayNextAnimationInterrupted: (interrupted: boolean) => ({ interrupted }), @@ -349,10 +344,7 @@ export const sessionRecordingPlayerLogic = kea( bufferTime: state.bufferTime, } }, - setErrorPlayerState: (state, { show }) => { - if (!show) { - return state - } + setPlayerError: (state) => { return { isPlaying: state.isPlaying, isBuffering: state.isBuffering, @@ -374,7 +366,13 @@ export const sessionRecordingPlayerLogic = kea( }, ], isBuffering: [true, { startBuffer: () => true, endBuffer: () => false }], - isErrored: [false, { setErrorPlayerState: (_, { show }) => show }], + playerError: [ + null as string | null, + { + setPlayerError: (_, { reason }) => (reason.trim().length ? 
reason : null), + clearPlayerError: () => null, + }, + ], isScrubbing: [false, { startScrub: () => true, endScrub: () => false }], errorCount: [0, { incrementErrorCount: (prevErrorCount) => prevErrorCount + 1 }], @@ -400,12 +398,6 @@ export const sessionRecordingPlayerLogic = kea( setIsFullScreen: (_, { isFullScreen }) => isFullScreen, }, ], - messageTooLargeWarningSeen: [ - null as string | null, - { - reportMessageTooLargeWarningSeen: (_, { sessionRecordingId }) => sessionRecordingId, - }, - ], debugSettings: [ { types: [EventType.FullSnapshot, EventType.IncrementalSnapshot], @@ -431,7 +423,7 @@ export const sessionRecordingPlayerLogic = kea( (s) => [ s.playingState, s.isBuffering, - s.isErrored, + s.playerError, s.isScrubbing, s.isSkippingInactivity, s.snapshotsLoaded, @@ -440,7 +432,7 @@ export const sessionRecordingPlayerLogic = kea( ( playingState, isBuffering, - isErrored, + playerError, isScrubbing, isSkippingInactivity, snapshotsLoaded, @@ -452,7 +444,7 @@ export const sessionRecordingPlayerLogic = kea( return playingState case !snapshotsLoaded && !snapshotsLoading: return SessionPlayerState.READY - case isErrored: + case !!playerError?.trim().length: return SessionPlayerState.ERROR case isSkippingInactivity && playingState !== SessionPlayerState.PAUSE: return SessionPlayerState.SKIP @@ -544,13 +536,6 @@ export const sessionRecordingPlayerLogic = kea( }, ], - messageTooLargeWarnings: [ - (s) => [s.customRRWebEvents], - (customRRWebEvents: customEvent[]) => { - return customRRWebEvents.filter((event) => event.data.tag === 'Message too large') - }, - ], - debugSnapshots: [ (s) => [s.sessionPlayerData, s.debugSettings], (sessionPlayerData: SessionPlayerData, debugSettings): eventWithTime[] => { @@ -672,7 +657,6 @@ export const sessionRecordingPlayerLogic = kea( } }, setSkipInactivitySetting: ({ skipInactivitySetting }) => { - actions.reportRecordingPlayerSkipInactivityToggled(skipInactivitySetting) if (!values.currentSegment?.isActive && skipInactivitySetting) { actions.setSkippingInactivity(true) } else { @@ -784,13 +768,13 @@ export const sessionRecordingPlayerLogic = kea( loadSnapshotsForSourceFailure: () => { if (Object.keys(values.sessionPlayerData.snapshotsByWindowId).length === 0) { console.error('PostHog Recording Playback Error: No snapshots loaded') - actions.setErrorPlayerState(true) + actions.setPlayerError('loadSnapshotsForSourceFailure') } }, loadSnapshotSourcesFailure: () => { if (Object.keys(values.sessionPlayerData.snapshotsByWindowId).length === 0) { console.error('PostHog Recording Playback Error: No snapshots loaded') - actions.setErrorPlayerState(true) + actions.setPlayerError('loadSnapshotSourcesFailure') } }, setPlay: () => { @@ -839,18 +823,17 @@ export const sessionRecordingPlayerLogic = kea( startBuffer: () => { actions.stopAnimation() }, - setErrorPlayerState: ({ show }) => { - if (show) { - actions.incrementErrorCount() - actions.stopAnimation() - } + setPlayerError: () => { + actions.incrementErrorCount() + actions.stopAnimation() }, startScrub: () => { actions.stopAnimation() }, - setSpeed: ({ speed }) => { - actions.reportRecordingPlayerSpeedChanged(speed) - actions.syncPlayerSpeed() + setSpeed: () => { + if (props.mode !== SessionRecordingPlayerMode.Preview) { + actions.syncPlayerSpeed() + } }, seekToTimestamp: ({ timestamp, forcePlay }, breakpoint) => { actions.stopAnimation() @@ -866,25 +849,13 @@ export const sessionRecordingPlayerLogic = kea( // If next time is greater than last buffered time, set to buffering else if (segment?.kind === 'buffer') { 
- const isStillLoading = values.isRealtimePolling || values.snapshotsLoading - const isPastEnd = values.sessionPlayerData.end && timestamp > values.sessionPlayerData.end.valueOf() - if (isStillLoading) { + const isPastEnd = values.sessionPlayerData.end && timestamp >= values.sessionPlayerData.end.valueOf() + if (isPastEnd) { + actions.setEndReached(true) + } else { values.player?.replayer?.pause() actions.startBuffer() - actions.setErrorPlayerState(false) - } else { - if (isPastEnd) { - actions.setEndReached(true) - } else { - // If not currently loading anything, - // not past the end of the recording, - // and part of the recording hasn't loaded, - // set error state - values.player?.replayer?.pause() - actions.endBuffer() - console.error("Error: Player tried to seek to a position that hasn't loaded yet") - actions.setErrorPlayerState(true) - } + actions.clearPlayerError() } } @@ -895,14 +866,14 @@ export const sessionRecordingPlayerLogic = kea( // can consume 100% CPU and freeze the entire page values.player?.replayer?.pause(values.toRRWebPlayerTime(timestamp)) actions.endBuffer() - actions.setErrorPlayerState(false) + actions.clearPlayerError() } // Otherwise play else { values.player?.replayer?.play(values.toRRWebPlayerTime(timestamp)) actions.updateAnimation() actions.endBuffer() - actions.setErrorPlayerState(false) + actions.clearPlayerError() } breakpoint() @@ -962,7 +933,7 @@ export const sessionRecordingPlayerLogic = kea( // when the buffering progresses values.player?.replayer?.pause() actions.startBuffer() - actions.setErrorPlayerState(false) + actions.clearPlayerError() cache.debug('buffering') return } @@ -1018,7 +989,7 @@ export const sessionRecordingPlayerLogic = kea( cache.pausedMediaElements = values.endReached ? [] : playingElements }, restartIframePlayback: () => { - cache.pausedMediaElements.forEach((el: HTMLMediaElement) => el.play()) + cache.pausedMediaElements?.forEach((el: HTMLMediaElement) => el.play()) cache.pausedMediaElements = [] }, @@ -1107,13 +1078,9 @@ export const sessionRecordingPlayerLogic = kea( await document.exitFullscreen() } }, - - reportMessageTooLargeWarningSeen: async ({ sessionRecordingId }) => { - posthog.capture('message too large warning seen', { sessionRecordingId }) - }, })), - subscriptions(({ actions, values, props }) => ({ + subscriptions(({ actions, values }) => ({ sessionPlayerData: (next, prev) => { const hasSnapshotChanges = next?.snapshotsByWindowId !== prev?.snapshotsByWindowId @@ -1134,13 +1101,15 @@ export const sessionRecordingPlayerLogic = kea( actions.skipPlayerForward(rrwebPlayerTime, values.roughAnimationFPS) } }, - messageTooLargeWarnings: (next) => { - if ( - values.messageTooLargeWarningSeen !== values.sessionRecordingId && - next.length > 0 && - props.mode !== SessionRecordingPlayerMode.Preview - ) { - actions.reportMessageTooLargeWarningSeen(values.sessionRecordingId) + playerError: (next) => { + if (next) { + posthog.capture('recording player error', { + watchedSessionId: values.sessionRecordingId, + currentTimestamp: values.currentTimestamp, + currentSegment: values.currentSegment, + currentPlayerTime: values.currentPlayerTime, + error: next, + }) } }, })), diff --git a/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts b/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts index 5f1bee532fdaa..246fa495ba5a2 100644 --- a/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts +++ b/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts @@ -6,7 +6,7 @@ import { 
capitalizeFirstLetter } from 'lib/utils' import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' -import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic' +import { SIDE_PANEL_CONTEXT_KEY, SidePanelSceneContext } from '~/layout/navigation-3000/sidepanel/types' import { ActivityScope, Breadcrumb, ReplayTabs } from '~/types' import type { sessionReplaySceneLogicType } from './sessionReplaySceneLogicType' @@ -92,13 +92,13 @@ export const sessionReplaySceneLogic = kea([ return breadcrumbs }, ], - activityFilters: [ + [SIDE_PANEL_CONTEXT_KEY]: [ () => [router.selectors.searchParams], - (searchParams): ActivityFilters | null => { + (searchParams): SidePanelSceneContext | null => { return searchParams.sessionRecordingId ? { - scope: ActivityScope.REPLAY, - item_id: searchParams.sessionRecordingId, + activity_scope: ActivityScope.REPLAY, + activity_item_id: searchParams.sessionRecordingId, } : null }, diff --git a/frontend/src/scenes/settings/SettingsMap.tsx b/frontend/src/scenes/settings/SettingsMap.tsx index 8985441f89067..0a2e3e432a2fb 100644 --- a/frontend/src/scenes/settings/SettingsMap.tsx +++ b/frontend/src/scenes/settings/SettingsMap.tsx @@ -50,7 +50,7 @@ import { OrganizationDangerZone } from './organization/OrganizationDangerZone' import { OrganizationDisplayName } from './organization/OrgDisplayName' import { OrganizationEmailPreferences } from './organization/OrgEmailPreferences' import { OrganizationLogo } from './organization/OrgLogo' -import { PermissionsGrid } from './organization/Permissions/PermissionsGrid' +import { RoleBasedAccess } from './organization/Permissions/RoleBasedAccess' import { VerifiedDomains } from './organization/VerifiedDomains/VerifiedDomains' import { ProjectDangerZone } from './project/ProjectDangerZone' import { ProjectDisplayName, ProjectProductDescription } from './project/ProjectSettings' @@ -314,11 +314,11 @@ export const SETTINGS_MAP: SettingSection[] = [ }, { level: 'environment', - id: 'environment-rbac', + id: 'environment-access-control', title: 'Access control', settings: [ { - id: 'environment-rbac', + id: 'environment-access-control', title: 'Access control', component: , }, @@ -413,25 +413,25 @@ export const SETTINGS_MAP: SettingSection[] = [ }, { level: 'organization', - id: 'organization-authentication', - title: 'Authentication domains & SSO', + id: 'organization-roles', + title: 'Roles', settings: [ { - id: 'authentication-domains', - title: 'Authentication Domains', - component: , + id: 'organization-roles', + title: 'Roles', + component: , }, ], }, { level: 'organization', - id: 'organization-rbac', - title: 'Role-based access', + id: 'organization-authentication', + title: 'Authentication domains & SSO', settings: [ { - id: 'organization-rbac', - title: 'Role-based access', - component: , + id: 'authentication-domains', + title: 'Authentication Domains', + component: , }, ], }, diff --git a/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx b/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx index 69258301575c2..4d50352e5def4 100644 --- a/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx +++ b/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx @@ -58,6 +58,7 @@ function LinkedFlagSelector(): JSX.Element | null { Enable recordings using feature flag {featureFlagLoading && } +

Linking a flag means that recordings will only be collected for users who have the flag enabled.

- - {samplingControlFeatureEnabled && ( <>
@@ -243,6 +242,7 @@ export function SessionRecordingIngestionSettings(): JSX.Element | null { } />
+

Use this setting to restrict the percentage of sessions that will be recorded. This is useful if you want to reduce the amount of data you collect. 100% means all sessions will be @@ -264,6 +264,7 @@ export function SessionRecordingIngestionSettings(): JSX.Element | null { value={currentTeam?.session_recording_minimum_duration_milliseconds} />

+

Setting a minimum session duration will ensure that only sessions that last longer than that value are collected. This helps you avoid collecting sessions that are too short to be diff --git a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx index ff6650d2bffd5..d6c5dba244859 100644 --- a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx +++ b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx @@ -56,7 +56,7 @@ export function SupportedPlatforms(props: { flutter?: boolean | { note?: ReactNode } }): JSX.Element { return ( -

+
Supported platforms:

Log capture

- +

This setting controls if browser console logs will be captured as a part of recordings. The console logs will be shown in the recording player to help you debug any issues. @@ -208,7 +208,13 @@ export function NetworkCaptureSettings(): JSX.Element { return ( <> - + RN network capture is only supported on iOS }} + />

This setting controls if performance and network information will be captured alongside recordings. The network requests and timings will be shown in the recording player to help you debug any issues. diff --git a/frontend/src/scenes/settings/environment/TeamAccessControl.tsx b/frontend/src/scenes/settings/environment/TeamAccessControl.tsx index 88cfdf5f2caee..6674c261800e8 100644 --- a/frontend/src/scenes/settings/environment/TeamAccessControl.tsx +++ b/frontend/src/scenes/settings/environment/TeamAccessControl.tsx @@ -4,6 +4,7 @@ import { useActions, useValues } from 'kea' import { RestrictionScope, useRestrictedArea } from 'lib/components/RestrictedArea' import { upgradeModalLogic } from 'lib/components/UpgradeModal/upgradeModalLogic' import { OrganizationMembershipLevel, TeamMembershipLevel } from 'lib/constants' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { IconCancel } from 'lib/lemon-ui/icons' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' import { LemonTableColumns } from 'lib/lemon-ui/LemonTable' @@ -19,6 +20,7 @@ import { organizationLogic } from 'scenes/organizationLogic' import { isAuthenticatedTeam, teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' +import { AccessControlObject } from '~/layout/navigation-3000/sidepanel/panels/access_control/AccessControlObject' import { AvailableFeature, FusedTeamMemberType } from '~/types' import { AddMembersModalWithButton } from './AddMembersModal' @@ -154,7 +156,7 @@ export function TeamMembers(): JSX.Element | null { title: 'Name', key: 'user_first_name', render: (_, member) => - member.user.uuid == user.uuid ? `${member.user.first_name} (me)` : member.user.first_name, + member.user.uuid == user.uuid ? `${member.user.first_name} (you)` : member.user.first_name, sorter: (a, b) => a.user.first_name.localeCompare(b.user.first_name), }, { @@ -214,6 +216,11 @@ export function TeamAccessControl(): JSX.Element { minimumAccessLevel: OrganizationMembershipLevel.Admin, }) + const newAccessControl = useFeatureFlag('ROLE_BASED_ACCESS_CONTROL') + if (newAccessControl) { + return + } + return ( <>

diff --git a/frontend/src/scenes/settings/organization/Members.tsx b/frontend/src/scenes/settings/organization/Members.tsx index 997582fa81982..42face838324a 100644 --- a/frontend/src/scenes/settings/organization/Members.tsx +++ b/frontend/src/scenes/settings/organization/Members.tsx @@ -1,6 +1,7 @@ import { LemonInput, LemonSwitch } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' +import { useRestrictedArea } from 'lib/components/RestrictedArea' import { TZLabel } from 'lib/components/TZLabel' import { OrganizationMembershipLevel } from 'lib/constants' import { LemonButton } from 'lib/lemon-ui/LemonButton' @@ -141,11 +142,12 @@ export function Members(): JSX.Element | null { const { currentOrganization } = useValues(organizationLogic) const { preflight } = useValues(preflightLogic) const { user } = useValues(userLogic) - const { setSearch, ensureAllMembersLoaded } = useActions(membersLogic) const { updateOrganization } = useActions(organizationLogic) const { openTwoFactorSetupModal } = useActions(twoFactorLogic) + const twoFactorRestrictionReason = useRestrictedArea({ minimumAccessLevel: OrganizationMembershipLevel.Admin }) + useEffect(() => { ensureAllMembersLoaded() }, []) @@ -166,7 +168,7 @@ export function Members(): JSX.Element | null { title: 'Name', key: 'user_name', render: (_, member) => - member.user.uuid == user.uuid ? `${fullName(member.user)} (me)` : fullName(member.user), + member.user.uuid == user.uuid ? `${fullName(member.user)} (you)` : fullName(member.user), sorter: (a, b) => fullName(a.user).localeCompare(fullName(b.user)), }, { @@ -290,6 +292,7 @@ export function Members(): JSX.Element | null { bordered checked={!!currentOrganization?.enforce_2fa} onChange={(enforce_2fa) => updateOrganization({ enforce_2fa })} + disabledReason={twoFactorRestrictionReason} /> diff --git a/frontend/src/scenes/settings/organization/Permissions/RoleBasedAccess.tsx b/frontend/src/scenes/settings/organization/Permissions/RoleBasedAccess.tsx new file mode 100644 index 0000000000000..62ef8ff7a1f95 --- /dev/null +++ b/frontend/src/scenes/settings/organization/Permissions/RoleBasedAccess.tsx @@ -0,0 +1,12 @@ +// NOTE: This is only to allow testing the new RBAC system + +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' + +import { RolesAndResourceAccessControls } from '~/layout/navigation-3000/sidepanel/panels/access_control/RolesAndResourceAccessControls' + +import { PermissionsGrid } from './PermissionsGrid' + +export function RoleBasedAccess(): JSX.Element { + const newAccessControl = useFeatureFlag('ROLE_BASED_ACCESS_CONTROL') + return newAccessControl ? 
<RolesAndResourceAccessControls /> : <PermissionsGrid /> +} diff --git a/frontend/src/scenes/settings/types.ts index 0103298077232..56db33d95d3cf 100644 --- a/frontend/src/scenes/settings/types.ts +++ b/frontend/src/scenes/settings/types.ts @@ -24,7 +24,8 @@ export type SettingSectionId = | 'environment-surveys' | 'environment-toolbar' | 'environment-integrations' - | 'environment-rbac' + | 'environment-access-control' + | 'environment-role-based-access-control' | 'environment-danger-zone' | 'project-details' | 'project-autocapture' // TODO: This section is for backward compat – remove when Environments are rolled out @@ -33,12 +34,13 @@ export type SettingSectionId = | 'project-surveys' // TODO: This section is for backward compat – remove when Environments are rolled out | 'project-toolbar' // TODO: This section is for backward compat – remove when Environments are rolled out | 'project-integrations' // TODO: This section is for backward compat – remove when Environments are rolled out - | 'project-rbac' // TODO: This section is for backward compat – remove when Environments are rolled out + | 'project-access-control' // TODO: This section is for backward compat – remove when Environments are rolled out + | 'project-role-based-access-control' // TODO: This section is for backward compat – remove when Environments are rolled out | 'project-danger-zone' | 'organization-details' | 'organization-members' | 'organization-authentication' - | 'organization-rbac' + | 'organization-roles' | 'organization-proxy' | 'organization-danger-zone' | 'user-profile' @@ -72,7 +74,8 @@ export type SettingId = | 'integration-slack' | 'integration-other' | 'integration-ip-allowlist' - | 'environment-rbac' + | 'environment-access-control' + | 'environment-role-based-access-control' | 'environment-delete' | 'project-delete' | 'organization-logo' @@ -81,7 +84,7 @@ export type SettingId = | 'members' | 'email-members' | 'authentication-domains' - | 'organization-rbac' + | 'organization-roles' | 'organization-delete' | 'organization-proxy' | 'product-description' diff --git a/frontend/src/scenes/teamLogic.tsx index b27c8621db68a..19cb9ac10c840 100644 --- a/frontend/src/scenes/teamLogic.tsx +++ b/frontend/src/scenes/teamLogic.tsx @@ -188,7 +188,8 @@ export const teamLogic = kea([ (selectors) => [selectors.currentTeam, selectors.currentTeamLoading], // If project has been loaded and is still null, it means the user just doesn't have access. 
(currentTeam, currentTeamLoading): boolean => - !currentTeam?.effective_membership_level && !currentTeamLoading, + (!currentTeam?.effective_membership_level || currentTeam.user_access_level === 'none') && + !currentTeamLoading, ], demoOnlyProject: [ (selectors) => [selectors.currentTeam, organizationLogic.selectors.currentOrganization], @@ -210,8 +211,9 @@ export const teamLogic = kea([ isTeamTokenResetAvailable: [ (selectors) => [selectors.currentTeam], (currentTeam): boolean => - !!currentTeam?.effective_membership_level && - currentTeam.effective_membership_level >= OrganizationMembershipLevel.Admin, + (!!currentTeam?.effective_membership_level && + currentTeam.effective_membership_level >= OrganizationMembershipLevel.Admin) || + currentTeam?.user_access_level === 'admin', ], testAccountFilterFrequentMistakes: [ (selectors) => [selectors.currentTeam], diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 477064de6497e..293481f598e3b 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -1,3 +1,4 @@ +import JSONCrush from 'jsoncrush' import { combineUrl } from 'kea-router' import { AlertType } from 'lib/components/Alerts/types' import { getCurrentTeamId } from 'lib/utils/getAppContext' @@ -74,7 +75,8 @@ export const urls = { insightNew: (type?: InsightType, dashboardId?: DashboardType['id'] | null, query?: Node): string => combineUrl('/insights/new', dashboardId ? { dashboard: dashboardId } : {}, { ...(type ? { insight: type } : {}), - ...(query ? { q: typeof query === 'string' ? query : JSON.stringify(query) } : {}), + // have to use JSONCrush directly rather than the util to avoid circular dep + ...(query ? { q: typeof query === 'string' ? query : JSONCrush.crush(JSON.stringify(query)) } : {}), }).url, insightNewHogQL: (query: string, filters?: HogQLFilters): string => combineUrl( diff --git a/frontend/src/toolbar/debug/EventDebugMenu.tsx b/frontend/src/toolbar/debug/EventDebugMenu.tsx index c7b47688d8f45..747fe146024f4 100644 --- a/frontend/src/toolbar/debug/EventDebugMenu.tsx +++ b/frontend/src/toolbar/debug/EventDebugMenu.tsx @@ -1,4 +1,4 @@ -import { BaseIcon, IconCheck, IconEye, IconLogomark, IconSearch, IconVideoCamera } from '@posthog/icons' +import { BaseIcon, IconCheck, IconEye, IconHide, IconLogomark, IconSearch, IconVideoCamera } from '@posthog/icons' import { useActions, useValues } from 'kea' import { AnimatedCollapsible } from 'lib/components/AnimatedCollapsible' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' @@ -15,10 +15,10 @@ import { EventType } from '~/types' import { ToolbarMenu } from '../bar/ToolbarMenu' -function showEventMenuItem( +function checkableMenuItem( label: string, - count: number, - icon: JSX.Element, + count: number | null, + icon: JSX.Element | null, isActive: boolean, onClick: () => void ): LemonMenuItem { @@ -30,13 +30,15 @@ function showEventMenuItem( {icon} {label}

- - ({count}) - + {count !== null && ( + + ({count}) + + )}
), active: isActive, @@ -70,25 +72,35 @@ export const EventDebugMenu = (): JSX.Element => { searchFilteredEventsCount, expandedEvent, selectedEventTypes, + hidePostHogProperties, + hidePostHogFlags, + expandedProperties, } = useValues(eventDebugMenuLogic) - const { markExpanded, setSelectedEventType, setSearchText, setSearchVisible } = useActions(eventDebugMenuLogic) + const { + markExpanded, + setSelectedEventType, + setSearchText, + setSearchVisible, + setHidePostHogProperties, + setHidePostHogFlags, + } = useActions(eventDebugMenuLogic) const showEventsMenuItems = [ - showEventMenuItem( + checkableMenuItem( 'PostHog Events', searchFilteredEventsCount['posthog'], , selectedEventTypes.includes('posthog'), () => setSelectedEventType('posthog', !selectedEventTypes.includes('posthog')) ), - showEventMenuItem( + checkableMenuItem( 'Custom Events', searchFilteredEventsCount['custom'], , selectedEventTypes.includes('custom'), () => setSelectedEventType('custom', !selectedEventTypes.includes('custom')) ), - showEventMenuItem( + checkableMenuItem( 'Replay Events', searchFilteredEventsCount['snapshot'], , @@ -96,13 +108,23 @@ export const EventDebugMenu = (): JSX.Element => { () => setSelectedEventType('snapshot', !selectedEventTypes.includes('snapshot')) ), ] + + const hideThingsMenuItems = [ + checkableMenuItem('Hide PostHog properties', null, null, hidePostHogProperties, () => + setHidePostHogProperties(!hidePostHogProperties) + ), + checkableMenuItem('Hide PostHog flags', null, null, hidePostHogFlags, () => + setHidePostHogFlags(!hidePostHogFlags) + ), + ] + return (
-
+
View events from this page as they are sent to PostHog.
{ >
@@ -167,7 +189,13 @@ export const EventDebugMenu = (): JSX.Element => {
- + + } + label="Hide properties" + /> ([ eventType, enabled, }), + setHidePostHogProperties: (hide: boolean) => ({ hide }), + setHidePostHogFlags: (hide: boolean) => ({ hide }), }), reducers({ + hidePostHogProperties: [ + false, + { + setHidePostHogProperties: (_, { hide }) => hide, + }, + ], + hidePostHogFlags: [ + false, + { + setHidePostHogFlags: (_, { hide }) => hide, + }, + ], searchVisible: [ false, { @@ -123,6 +137,42 @@ export const eventDebugMenuLogic = kea([ }) }, ], + + expandedProperties: [ + (s) => [s.expandedEvent, s.events, s.hidePostHogProperties, s.hidePostHogFlags], + (expandedEvent, events, hidePostHogProperties, hidePostHogFlags) => { + if (!expandedEvent) { + return [] + } + const theExpandedEvent = events.find((e) => e.uuid === expandedEvent) + if (!theExpandedEvent) { + return [] + } + + const propsFiltered = hidePostHogProperties + ? Object.fromEntries( + Object.entries(theExpandedEvent.properties).filter(([key]) => { + const isPostHogProperty = key.startsWith('$') && PROPERTY_KEYS.includes(key) + const isNonDollarPostHogProperty = CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) + return !isPostHogProperty && !isNonDollarPostHogProperty + }) + ) + : theExpandedEvent.properties + + return Object.fromEntries( + Object.entries(propsFiltered).filter(([key]) => { + if (hidePostHogFlags) { + if (key === '$active_feature_flags') { + return false + } else if (key.startsWith('$feature/')) { + return false + } + } + return true + }) + ) + }, + ], }), afterMount(({ values, actions }) => { values.posthog?.on('eventCaptured', (e) => { diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 317b046e7acf6..d8e85d4cdc027 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -4488,7 +4488,7 @@ export enum SidePanelTab { Discussion = 'discussion', Status = 'status', Exports = 'exports', - // AccessControl = 'access-control', + AccessControl = 'access-control', } export interface SourceFieldOauthConfig { diff --git a/hogvm/__tests__/__snapshots__/stl.hoge b/hogvm/__tests__/__snapshots__/stl.hoge index 8b05b793faa17..08000276db21c 100644 --- a/hogvm/__tests__/__snapshots__/stl.hoge +++ b/hogvm/__tests__/__snapshots__/stl.hoge @@ -28,4 +28,38 @@ "print", 1, 35, 32, "-- isNull, isNotNull --", 2, "print", 1, 35, 31, 2, "isNull", 1, 31, 2, "isNotNull", 1, 2, "print", 2, 35, 29, 2, "isNull", 1, 29, 2, "isNotNull", 1, 2, "print", 2, 35, 32, "banana", 2, "isNull", 1, 32, "banana", 2, "isNotNull", 1, 2, "print", 2, 35, 30, 2, "isNull", 1, 30, 2, "isNotNull", 1, 2, "print", 2, 35, 33, 0, 2, "isNull", 1, -33, 0, 2, "isNotNull", 1, 2, "print", 2, 35, 33, 1, 2, "isNull", 1, 33, 1, 2, "isNotNull", 1, 2, "print", 2, 35] +33, 0, 2, "isNotNull", 1, 2, "print", 2, 35, 33, 1, 2, "isNull", 1, 33, 1, 2, "isNotNull", 1, 2, "print", 2, 35, 32, "", +2, "print", 1, 35, 32, "-- comparisons --", 2, "print", 1, 35, 33, 1, 33, 1, 2, "equals", 2, 33, 1, 33, 2, 2, "equals", +2, 33, 1, 32, "1", 2, "equals", 2, 2, "print", 3, 35, 33, 2, 33, 3, 2, "notEquals", 2, 29, 5, 2, "print", 2, 35, 33, 2, +33, 1, 2, "greater", 2, 33, 2, 33, 2, 2, "greaterOrEquals", 2, 2, "print", 2, 35, 33, 1, 33, 2, 2, "less", 2, 33, 2, 33, +2, 2, "lessOrEquals", 2, 33, -3, 33, 2, 2, "less", 2, 2, "print", 3, 35, 30, 29, 4, 2, 33, 0, 33, 0, 4, 2, 33, 1, 33, 0, +4, 2, 33, 1, 30, 4, 2, 33, 0, 30, 4, 2, 33, 1, 2, "or", 1, 32, "string", 2, "or", 1, 33, 100, 2, "or", 1, 2, "print", 8, +35, 30, 29, 3, 2, 33, 0, 33, 0, 3, 2, 33, 1, 33, 0, 3, 2, 33, 1, 30, 3, 2, 33, 0, 30, 3, 2, 33, 1, 33, 1, 3, 2, 33, 1, +2, "and", 1, 29, 2, 
"and", 1, 32, "string", 2, "and", 1, 33, 100, 2, "and", 1, 2, "print", 10, 35, 32, "", 2, "print", +1, 35, 32, "-- logic --", 2, "print", 1, 35, 29, 40, 4, 32, "yes", 39, 2, 32, "no", 30, 40, 4, 32, "yes", 39, 2, 32, +"no", 2, "print", 2, 35, 29, 40, 4, 32, "one", 39, 9, 30, 40, 4, 32, "two", 39, 2, 32, "default", 2, "print", 1, 35, 32, +"", 2, "print", 1, 35, 32, "-- math --", 2, "print", 1, 35, 33, 3, 33, 5, 2, "min2", 2, 2, "print", 1, 35, 33, 10, 33, +5, 2, "plus", 2, 33, 10, 33, 5, 2, "minus", 2, 2, "print", 2, 35, 34, 3.99, 2, "floor", 1, 34, 3.5, 2, "round", 1, 2, +"print", 2, 35, 33, 5, 2, "range", 1, 2, "print", 1, 35, 33, 3, 33, 6, 2, "range", 2, 2, "print", 1, 35, 32, "", 2, +"print", 1, 35, 32, "-- string/array --", 2, "print", 1, 35, 32, "a", 32, "a", 32, "b", 32, "c", 2, "tuple", 3, 2, "in", +2, 32, "z", 32, "a", 32, "b", 32, "c", 2, "tuple", 3, 2, "in", 2, 2, "print", 2, 35, 32, "a", 32, "a", 32, "b", 32, "c", +43, 3, 2, "in", 2, 32, "z", 32, "a", 32, "b", 32, "c", 43, 3, 2, "in", 2, 2, "print", 2, 35, 32, "hello", 32, "he", 2, +"startsWith", 2, 32, "abcdef", 33, 2, 33, 3, 2, "substring", 3, 2, "print", 2, 35, 31, 31, 32, "firstNonNull", 2, +"coalesce", 3, 32, "notNull", 2, "assumeNotNull", 1, 2, "print", 2, 35, 32, "", 2, "print", 1, 35, 32, "-- date --", 2, +"print", 1, 35, 32, "2024-12-18T00:00:00Z", 2, "toDateTime", 1, 2, "toYear", 1, 32, "2024-12-18T00:00:00Z", 2, +"toDateTime", 1, 2, "toMonth", 1, 2, "print", 2, 35, 2, "now", 0, 2, "typeof", 1, 2, "print", 1, 35, 32, +"2024-12-18T11:11:11Z", 2, "toDateTime", 1, 2, "toStartOfDay", 1, 32, "2024-12-18T11:11:11Z", 2, "toDateTime", 1, 2, +"toStartOfWeek", 1, 2, "print", 2, 35, 32, "2024-12-18T00:00:00Z", 2, "toDateTime", 1, 2, "toYYYYMM", 1, 2, "print", 1, +35, 32, "day", 33, 1, 32, "2024-12-18", 2, "toDate", 1, 2, "dateAdd", 3, 32, "day", 32, "2024-12-18", 2, "toDate", 1, +32, "day", 33, 5, 32, "2024-12-18", 2, "toDate", 1, 2, "dateAdd", 3, 2, "dateDiff", 3, 2, "print", 2, 35, 32, "day", 32, +"2024-12-18T12:34:56Z", 2, "toDateTime", 1, 2, "dateTrunc", 2, 2, "print", 1, 35, 32, "2024-12-18", 2, "toDate", 1, 33, +3, 2, "addDays", 2, 2, "print", 1, 35, 33, 5, 2, "toIntervalDay", 1, 33, 2, 2, "toIntervalMonth", 1, 2, "print", 2, 35, +2, "today", 0, 2, "typeof", 1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- json --", 2, "print", 1, 35, 32, +"{\"a\":123.1}", 32, "a", 2, "JSONExtractInt", 2, 2, "jsonStringify", 1, 32, "{\"a\":\"hello\"}", 32, "a", 2, +"JSONExtractInt", 2, 2, "jsonStringify", 1, 2, "print", 2, 35, 32, "{\"a\":123.1}", 32, "a", 2, "JSONExtractFloat", 2, +2, "jsonStringify", 1, 32, "{\"a\":\"hello\"}", 32, "a", 2, "JSONExtractFloat", 2, 2, "jsonStringify", 1, 2, "print", 2, +35, 32, "{\"a\":123.1}", 32, "a", 2, "JSONExtractString", 2, 2, "jsonStringify", 1, 32, "{\"a\":\"hello\"}", 32, "a", 2, +"JSONExtractString", 2, 2, "jsonStringify", 1, 2, "print", 2, 35, 32, "{\"a\":123}", 32, "a", 2, "JSONExtractArrayRaw", +2, 2, "jsonStringify", 1, 32, "{\"a\":\"hello\"}", 32, "a", 2, "JSONExtractArrayRaw", 2, 2, "jsonStringify", 1, 2, +"print", 2, 35, 32, "{\"a\":[]}", 32, "a", 2, "JSONExtractArrayRaw", 2, 2, "jsonStringify", 1, 32, +"{\"a\":[\"hello\"]}", 32, "a", 2, "JSONExtractArrayRaw", 2, 2, "jsonStringify", 1, 2, "print", 2, 35] diff --git a/hogvm/__tests__/__snapshots__/stl.js b/hogvm/__tests__/__snapshots__/stl.js index 6ddd9f689075b..247aa1ec4abed 100644 --- a/hogvm/__tests__/__snapshots__/stl.js +++ b/hogvm/__tests__/__snapshots__/stl.js @@ -1,15 +1,162 @@ function upper (value) { return 
value.toUpperCase() } +function __x_typeof (value) { + if (value === null || value === undefined) { return 'null' + } else if (__isHogDateTime(value)) { return 'datetime' + } else if (__isHogDate(value)) { return 'date' + } else if (__isHogError(value)) { return 'error' + } else if (typeof value === 'function') { return 'function' + } else if (Array.isArray(value)) { if (value.__isHogTuple) { return 'tuple' } return 'array' + } else if (typeof value === 'object') { return 'object' + } else if (typeof value === 'number') { return Number.isInteger(value) ? 'integer' : 'float' + } else if (typeof value === 'string') { return 'string' + } else if (typeof value === 'boolean') { return 'boolean' } + return 'unknown' +} function tuple (...args) { const tuple = args.slice(); tuple.__isHogTuple = true; return tuple; } +function today() { + const now = new Date(); + return __toHogDate(now.getUTCFullYear(), now.getUTCMonth()+1, now.getUTCDate()); +} +function toYear(value) { return extract('year', value) } +function toYYYYMM(value) { + const y = extract('year', value); + const m = extract('month', value); + return y*100 + m; +} +function toStartOfWeek(value) { + if (!__isHogDateTime(value) && !__isHogDate(value)) { + throw new Error('Expected HogDate or HogDateTime'); + } + let d; + if (__isHogDate(value)) { + d = new Date(Date.UTC(value.year, value.month - 1, value.day)); + } else { + d = new Date(value.dt * 1000); + } + // Monday=1,... Sunday=7 + // getUTCDay(): Sunday=0,... Saturday=6 + // We want ISO weekday: Monday=1,... Sunday=7 + let dayOfWeek = d.getUTCDay(); // Sunday=0,... + let isoWeekday = dayOfWeek === 0 ? 7 : dayOfWeek; + + // subtract isoWeekday-1 days + const start = new Date(d.getTime() - (isoWeekday - 1) * 24 * 3600 * 1000); + + // Zero out hours, minutes, seconds, ms + start.setUTCHours(0, 0, 0, 0); + + return { __hogDateTime__: true, dt: start.getTime() / 1000, zone: (__isHogDateTime(value) ? value.zone : 'UTC') }; +} +function toStartOfDay(value) { + if (!__isHogDateTime(value) && !__isHogDate(value)) { + throw new Error('Expected HogDate or HogDateTime for toStartOfDay'); + } + if (__isHogDate(value)) { + value = __toHogDateTime(Date.UTC(value.year, value.month-1, value.day)/1000, 'UTC'); + } + return dateTrunc('day', value); +} +function toMonth(value) { return extract('month', value) } +function toIntervalMonth(val) { return __toHogInterval(val, 'month') } +function toIntervalDay(val) { return __toHogInterval(val, 'day') } +function toDateTime (input, zone) { return __toDateTime(input, zone) } +function toDate (input) { return __toDate(input) } +function substring(s, start, length) { + if (typeof s !== 'string') return ''; + const startIdx = start - 1; + if (startIdx < 0 || length < 0) return ''; + const endIdx = startIdx + length; + return startIdx < s.length ? 
s.slice(startIdx, endIdx) : ''; +} +function startsWith(str, prefix) { + return typeof str === 'string' && typeof prefix === 'string' && str.startsWith(prefix); +} +function round(a) { return Math.round(a) } function reverse (value) { return value.split('').reverse().join('') } function replaceOne (str, searchValue, replaceValue) { return str.replace(searchValue, replaceValue) } function replaceAll (str, searchValue, replaceValue) { return str.replaceAll(searchValue, replaceValue) } +function range(...args) { + if (args.length === 1) { + const end = args[0]; + return Array.from({length:end}, (_,i)=>i); + } else { + const start = args[0]; + const end = args[1]; + return Array.from({length:end - start}, (_,i)=>start+i); + } +} function print (...args) { console.log(...args.map(__printHogStringOutput)) } +function plus(a, b) { return a + b } +function or(...args) { return args.some(Boolean) } +function now () { return __now() } +function notEquals(a, b) { return a !== b } function notEmpty (value) { return !empty(value) } +function minus(a, b) { return a - b } +function min2(a, b) { return a < b ? a : b } function lower (value) { return value.toLowerCase() } +function lessOrEquals(a, b) { return a <= b } +function less(a, b) { return a < b } function length (value) { return value.length } +function jsonStringify (value, spacing) { + function convert(x, marked) { + if (!marked) { marked = new Set() } + if (typeof x === 'object' && x !== null) { + if (marked.has(x)) { return null } + marked.add(x) + try { + if (x instanceof Map) { + const obj = {} + x.forEach((value, key) => { obj[convert(key, marked)] = convert(value, marked) }) + return obj + } + if (Array.isArray(x)) { return x.map((v) => convert(v, marked)) } + if (__isHogDateTime(x) || __isHogDate(x) || __isHogError(x)) { return x } + if (typeof x === 'function') { return `fn<${x.name || 'lambda'}(${x.length})>` } + const obj = {}; for (const key in x) { obj[key] = convert(x[key], marked) } + return obj + } finally { + marked.delete(x) + } + } + return x + } + if (spacing && typeof spacing === 'number' && spacing > 0) { + return JSON.stringify(convert(value), null, spacing) + } + return JSON.stringify(convert(value), (key, val) => typeof val === 'function' ? `fn<${val.name || 'lambda'}(${val.length})>` : val) +} function isNull (value) { return value === null || value === undefined } function isNotNull (value) { return value !== null && value !== undefined } +function __x_in(val, arr) { + if (Array.isArray(arr) || (arr && arr.__isHogTuple)) { + return arr.includes(val); + } + return false; +} +function greaterOrEquals(a, b) { return a >= b } +function greater(a, b) { return a > b } function generateUUIDv4 () { return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) { const r = (Math.random() * 16) | 0; const v = c === 'x' ? 
r : (r & 0x3) | 0x8; return v.toString(16) })} +function floor(a) { return Math.floor(a) } +function extract(part, val) { + function toDate(obj) { + if (__isHogDateTime(obj)) { + return new Date(obj.dt * 1000); + } else if (__isHogDate(obj)) { + return new Date(Date.UTC(obj.year, obj.month - 1, obj.day)); + } else { + return new Date(obj); + } + } + const date = toDate(val); + if (part === 'year') return date.getUTCFullYear(); + else if (part === 'month') return date.getUTCMonth() + 1; + else if (part === 'day') return date.getUTCDate(); + else if (part === 'hour') return date.getUTCHours(); + else if (part === 'minute') return date.getUTCMinutes(); + else if (part === 'second') return date.getUTCSeconds(); + else throw new Error("Unknown extract part: " + part); +} +function equals(a, b) { return a === b } function encodeURLComponent (str) { return encodeURIComponent(str) } function empty (value) { if (typeof value === 'object') { @@ -18,8 +165,116 @@ function empty (value) { } else if (typeof value === 'number' || typeof value === 'boolean') { return false } return !value } function decodeURLComponent (str) { return decodeURIComponent(str) } +function dateTrunc(unit, val) { + if (!__isHogDateTime(val)) { + throw new Error('Expected a DateTime for dateTrunc'); + } + const zone = val.zone || 'UTC'; + const date = new Date(val.dt * 1000); + let year = date.getUTCFullYear(); + let month = date.getUTCMonth(); + let day = date.getUTCDate(); + let hour = date.getUTCHours(); + let minute = date.getUTCMinutes(); + let second = 0; + let ms = 0; + + if (unit === 'year') { + month = 0; day = 1; hour = 0; minute = 0; second = 0; + } else if (unit === 'month') { + day = 1; hour = 0; minute = 0; second = 0; + } else if (unit === 'day') { + hour = 0; minute = 0; second = 0; + } else if (unit === 'hour') { + minute = 0; second = 0; + } else if (unit === 'minute') { + second = 0; + } else { + throw new Error("Unsupported unit for dateTrunc: " + unit); + } + + const truncated = new Date(Date.UTC(year, month, day, hour, minute, second, ms)); + return { __hogDateTime__: true, dt: truncated.getTime()/1000, zone: zone }; +} +function dateDiff(unit, startVal, endVal) { + function toDateTime(obj) { + if (__isHogDateTime(obj)) { + return new Date(obj.dt * 1000); + } else if (__isHogDate(obj)) { + return new Date(Date.UTC(obj.year, obj.month - 1, obj.day)); + } else { + return new Date(obj); + } + } + const start = toDateTime(startVal); + const end = toDateTime(endVal); + const diffMs = end - start; + const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24)); + if (unit === 'day') { + return diffDays; + } else if (unit === 'hour') { + return Math.floor(diffMs / (1000 * 60 * 60)); + } else if (unit === 'minute') { + return Math.floor(diffMs / (1000 * 60)); + } else if (unit === 'second') { + return Math.floor(diffMs / 1000); + } else if (unit === 'week') { + return Math.floor(diffDays / 7); + } else if (unit === 'month') { + // Approx months difference + const sy = start.getUTCFullYear(); + const sm = start.getUTCMonth() + 1; + const ey = end.getUTCFullYear(); + const em = end.getUTCMonth() + 1; + return (ey - sy)*12 + (em - sm); + } else if (unit === 'year') { + return end.getUTCFullYear() - start.getUTCFullYear(); + } else { + throw new Error("Unsupported unit for dateDiff: " + unit); + } +} +function dateAdd(unit, amount, datetime) { + // transform unit if needed (week -> day, year -> month) + if (unit === 'week') { + unit = 'day'; + amount = amount * 7; + } else if (unit === 'year') { + unit = 'month'; + 
amount = amount * 12; + } + const interval = __toHogInterval(amount, unit); + return __applyIntervalToDateTime(datetime, interval); +} +function coalesce(...args) { + for (let a of args) { + if (a !== null && a !== undefined) return a; + } + return null; +} function base64Encode (str) { return Buffer.from(str).toString('base64') } function base64Decode (str) { return Buffer.from(str, 'base64').toString() } +function assumeNotNull(value) { + if (value === null || value === undefined) { + throw new Error("Value is null in assumeNotNull"); + } + return value; +} +function and(...args) { return args.every(Boolean) } +function addDays(dateOrDt, days) { + const interval = __toHogInterval(days, 'day'); + return __applyIntervalToDateTime(dateOrDt, interval); +} +function __toHogInterval(value, unit) { + return { __hogInterval__: true, value: value, unit: unit }; +} +function __toDateTime(input, zone) { let dt; + if (typeof input === 'number') { dt = input; } + else { const date = new Date(input); if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } dt = date.getTime() / 1000; } + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; } +function __toDate(input) { let date; + if (typeof input === 'number') { date = new Date(input * 1000); } else { date = new Date(input); } + if (isNaN(date.getTime())) { throw new Error('Invalid date input'); } + return { __hogDate__: true, year: date.getUTCFullYear(), month: date.getUTCMonth() + 1, day: date.getUTCDate() }; } function __printHogStringOutput(obj) { if (typeof obj === 'string') { return obj } return __printHogValue(obj) } function __printHogValue(obj, marked = new Set()) { if (typeof obj === 'object' && obj !== null && obj !== undefined) { @@ -44,9 +299,8 @@ function __printHogValue(obj, marked = new Set()) { if (typeof obj === 'function') return `fn<${__escapeIdentifier(obj.name || 'lambda')}(${obj.length})>`; return obj.toString(); } +function __now(zone) { return __toHogDateTime(Date.now() / 1000, zone) } function __isHogError(obj) {return obj && obj.__hogError__ === true} -function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } -function __isHogDate(obj) { return obj && obj.__hogDate__ === true } function __escapeString(value) { const singlequoteEscapeCharsMap = { '\b': '\\b', '\f': '\\f', '\r': '\\r', '\n': '\\n', '\t': '\\t', '\0': '\\0', '\v': '\\v', '\\': '\\\\', "'": "\\'" } return `'${value.split('').map((c) => singlequoteEscapeCharsMap[c] || c).join('')}'`; @@ -57,6 +311,130 @@ function __escapeIdentifier(identifier) { if (/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(identifier)) return identifier; return `\`${identifier.split('').map((c) => backquoteEscapeCharsMap[c] || c).join('')}\``; } +function __applyIntervalToDateTime(base, interval) { + // base can be HogDate or HogDateTime + if (!(__isHogDate(base) || __isHogDateTime(base))) { + throw new Error("Expected a HogDate or HogDateTime"); + } + + let zone = __isHogDateTime(base) ? 
(base.zone || 'UTC') : 'UTC'; + + function toDate(obj) { + if (__isHogDateTime(obj)) { + return new Date(obj.dt * 1000); + } else { + return new Date(Date.UTC(obj.year, obj.month - 1, obj.day)); + } + } + + const dt = toDate(base); + const value = interval.value; + let unit = interval.unit; + + // Expand weeks/years if needed + if (unit === 'week') { + unit = 'day'; + interval.value = value * 7; + } else if (unit === 'year') { + unit = 'month'; + interval.value = value * 12; + } + + let year = dt.getUTCFullYear(); + let month = dt.getUTCMonth() + 1; + let day = dt.getUTCDate(); + let hours = dt.getUTCHours(); + let minutes = dt.getUTCMinutes(); + let seconds = dt.getUTCSeconds(); + let ms = dt.getUTCMilliseconds(); + + if (unit === 'day') { + day += interval.value; + } else if (unit === 'hour') { + hours += interval.value; + } else if (unit === 'minute') { + minutes += interval.value; + } else if (unit === 'second') { + seconds += interval.value; + } else if (unit === 'month') { + month += interval.value; + // Adjust year and month + year += Math.floor((month - 1) / 12); + month = ((month - 1) % 12) + 1; + // If day is invalid for the new month, clamp it + let maxDay = new Date(Date.UTC(year, month, 0)).getUTCDate(); + if (day > maxDay) { day = maxDay; } + } else { + throw new Error("Unsupported interval unit: " + unit); + } + + const newDt = new Date(Date.UTC(year, month - 1, day, hours, minutes, seconds, ms)); + + if (__isHogDate(base)) { + return __toHogDate(newDt.getUTCFullYear(), newDt.getUTCMonth() + 1, newDt.getUTCDate()); + } else { + return __toHogDateTime(newDt.getTime() / 1000, zone); + } +} +function __toHogDateTime(timestamp, zone) { + if (__isHogDate(timestamp)) { + const date = new Date(Date.UTC(timestamp.year, timestamp.month - 1, timestamp.day)); + const dt = date.getTime() / 1000; + return { __hogDateTime__: true, dt: dt, zone: zone || 'UTC' }; + } + return { __hogDateTime__: true, dt: timestamp, zone: zone || 'UTC' }; } +function __toHogDate(year, month, day) { return { __hogDate__: true, year: year, month: month, day: day, } } +function __isHogDateTime(obj) { return obj && obj.__hogDateTime__ === true } +function __isHogDate(obj) { return obj && obj.__hogDate__ === true } +function JSONExtractString(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + return val != null ? String(val) : null; +} +function JSONExtractInt(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + const i = parseInt(val); + return isNaN(i) ? null : i; +} +function JSONExtractFloat(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + const f = parseFloat(val); + return isNaN(f) ? null : f; +} +function JSONExtractArrayRaw(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + return Array.isArray(val) ? 
val : null; +} +function __getNestedValue(obj, path, allowNull = false) { + let current = obj + for (const key of path) { + if (current == null) { + return null + } + if (current instanceof Map) { + current = current.get(key) + } else if (typeof current === 'object' && current !== null) { + current = current[key] + } else { + return null + } + } + if (current === null && !allowNull) { + return null + } + return current +} print("-- empty, notEmpty, length, lower, upper, reverse --"); if (!!(empty("") && notEmpty("234"))) { @@ -129,3 +507,46 @@ print(isNull("banana"), isNotNull("banana")); print(isNull(false), isNotNull(false)); print(isNull(0), isNotNull(0)); print(isNull(1), isNotNull(1)); +print(""); +print("-- comparisons --"); +print(equals(1, 1), equals(1, 2), equals(1, "1")); +print(notEquals(2, 3), (!true)); +print(greater(2, 1), greaterOrEquals(2, 2)); +print(less(1, 2), lessOrEquals(2, 2), less(-3, 2)); +print(!!(false || true), !!(0 || 0), !!(1 || 0), !!(1 || false), !!(0 || false), or(1), or("string"), or(100)); +print(!!(false && true), !!(0 && 0), !!(1 && 0), !!(1 && false), !!(0 && false), !!(1 && 1), and(1), and(true), and("string"), and(100)); +print(""); +print("-- logic --"); +print((true ? "yes" : "no"), (false ? "yes" : "no")); +print((true ? "one" : (false ? "two" : "default"))); +print(""); +print("-- math --"); +print(min2(3, 5)); +print(plus(10, 5), minus(10, 5)); +print(floor(3.99), round(3.5)); +print(range(5)); +print(range(3, 6)); +print(""); +print("-- string/array --"); +print(__x_in("a", tuple("a", "b", "c")), __x_in("z", tuple("a", "b", "c"))); +print(__x_in("a", ["a", "b", "c"]), __x_in("z", ["a", "b", "c"])); +print(startsWith("hello", "he"), substring("abcdef", 2, 3)); +print(coalesce(null, null, "firstNonNull"), assumeNotNull("notNull")); +print(""); +print("-- date --"); +print(toYear(toDateTime("2024-12-18T00:00:00Z")), toMonth(toDateTime("2024-12-18T00:00:00Z"))); +print(__x_typeof(now())); +print(toStartOfDay(toDateTime("2024-12-18T11:11:11Z")), toStartOfWeek(toDateTime("2024-12-18T11:11:11Z"))); +print(toYYYYMM(toDateTime("2024-12-18T00:00:00Z"))); +print(dateAdd("day", 1, toDate("2024-12-18")), dateDiff("day", toDate("2024-12-18"), dateAdd("day", 5, toDate("2024-12-18")))); +print(dateTrunc("day", toDateTime("2024-12-18T12:34:56Z"))); +print(addDays(toDate("2024-12-18"), 3)); +print(toIntervalDay(5), toIntervalMonth(2)); +print(__x_typeof(today())); +print(""); +print("-- json --"); +print(jsonStringify(JSONExtractInt("{\"a\":123.1}", "a")), jsonStringify(JSONExtractInt("{\"a\":\"hello\"}", "a"))); +print(jsonStringify(JSONExtractFloat("{\"a\":123.1}", "a")), jsonStringify(JSONExtractFloat("{\"a\":\"hello\"}", "a"))); +print(jsonStringify(JSONExtractString("{\"a\":123.1}", "a")), jsonStringify(JSONExtractString("{\"a\":\"hello\"}", "a"))); +print(jsonStringify(JSONExtractArrayRaw("{\"a\":123}", "a")), jsonStringify(JSONExtractArrayRaw("{\"a\":\"hello\"}", "a"))); +print(jsonStringify(JSONExtractArrayRaw("{\"a\":[]}", "a")), jsonStringify(JSONExtractArrayRaw("{\"a\":[\"hello\"]}", "a"))); diff --git a/hogvm/__tests__/__snapshots__/stl.stdout b/hogvm/__tests__/__snapshots__/stl.stdout index e25f13b11c287..b9f6eabbc5c82 100644 --- a/hogvm/__tests__/__snapshots__/stl.stdout +++ b/hogvm/__tests__/__snapshots__/stl.stdout @@ -65,3 +65,46 @@ false true false true false true false true + +-- comparisons -- +true false false +true false +true true +true true true +true false true true false true true true +false false false false false true true true true 
true + +-- logic -- +yes no +one + +-- math -- +3 +15 5 +3 4 +[0, 1, 2, 3, 4] +[3, 4, 5] + +-- string/array -- +true false +true false +true bcd +firstNonNull notNull + +-- date -- +2024 12 +datetime +DateTime(1734480000.0, 'UTC') DateTime(1734307200.0, 'UTC') +202412 +Date(2024, 12, 19) 5 +DateTime(1734480000.0, 'UTC') +Date(2024, 12, 21) +{'__hogInterval__': true, 'value': 5, 'unit': 'day'} {'__hogInterval__': true, 'value': 2, 'unit': 'month'} +date + +-- json -- +123 null +123.1 null +"123.1" "hello" +null null +[] ["hello"] diff --git a/hogvm/__tests__/stl.hog b/hogvm/__tests__/stl.hog index 45c3629efa0d6..a00871f041b38 100644 --- a/hogvm/__tests__/stl.hog +++ b/hogvm/__tests__/stl.hog @@ -64,4 +64,47 @@ print(isNull(true), isNotNull(true)) print(isNull('banana'), isNotNull('banana')) print(isNull(false), isNotNull(false)) print(isNull(0), isNotNull(0)) -print(isNull(1), isNotNull(1)) \ No newline at end of file +print(isNull(1), isNotNull(1)) +print('') +print('-- comparisons --') +print(equals(1,1), equals(1,2), equals(1, '1')) +print(notEquals(2,3), not(true)) +print(greater(2,1), greaterOrEquals(2,2)) +print(less(1,2), lessOrEquals(2,2), less(-3, 2)) +print(or(false, true), or(0, 0), or(1, 0), or(1, false), or(0, false), or(1), or('string'), or(100)) +print(and(false, true), and(0, 0), and(1, 0), and(1, false), and(0, false), and(1, 1), and(1), and(true), and('string'), and(100)) +print('') +print('-- logic --') +print(if(true, 'yes', 'no'), if(false, 'yes', 'no')) +print(multiIf(true, 'one', false, 'two', 'default')) +print('') +print('-- math --') +print(min2(3,5)) +print(plus(10,5), minus(10,5)) +print(floor(3.99), round(3.5)) +print(range(5)) +print(range(3,6)) +print('') +print('-- string/array --') +print(in('a', tuple('a','b','c')), in('z', tuple('a','b','c'))) +print(in('a', ['a','b','c']), in('z', ['a','b','c'])) +print(startsWith('hello','he'), substring('abcdef',2,3)) +print(coalesce(null, null, 'firstNonNull'), assumeNotNull('notNull')) +print('') +print('-- date --') +print(toYear(toDateTime('2024-12-18T00:00:00Z')), toMonth(toDateTime('2024-12-18T00:00:00Z'))) +print(typeof(now())) +print(toStartOfDay(toDateTime('2024-12-18T11:11:11Z')), toStartOfWeek(toDateTime('2024-12-18T11:11:11Z'))) +print(toYYYYMM(toDateTime('2024-12-18T00:00:00Z'))) +print(dateAdd('day', 1, toDate('2024-12-18')), dateDiff('day', toDate('2024-12-18'), dateAdd('day', 5, toDate('2024-12-18')))) +print(dateTrunc('day', toDateTime('2024-12-18T12:34:56Z'))) +print(addDays(toDate('2024-12-18'), 3)) +print(toIntervalDay(5), toIntervalMonth(2)) +print(typeof(today())) +print('') +print('-- json --') +print(jsonStringify(JSONExtractInt('{"a":123.1}', 'a')), jsonStringify(JSONExtractInt('{"a":"hello"}','a'))) +print(jsonStringify(JSONExtractFloat('{"a":123.1}', 'a')), jsonStringify(JSONExtractFloat('{"a":"hello"}','a'))) +print(jsonStringify(JSONExtractString('{"a":123.1}', 'a')), jsonStringify(JSONExtractString('{"a":"hello"}','a'))) +print(jsonStringify(JSONExtractArrayRaw('{"a":123}', 'a')), jsonStringify(JSONExtractArrayRaw('{"a":"hello"}','a'))) +print(jsonStringify(JSONExtractArrayRaw('{"a":[]}', 'a')), jsonStringify(JSONExtractArrayRaw('{"a":["hello"]}','a'))) diff --git a/hogvm/python/objects.py b/hogvm/python/objects.py index aa03cb656f8d7..9dc2acdcb09bb 100644 --- a/hogvm/python/objects.py +++ b/hogvm/python/objects.py @@ -82,3 +82,15 @@ def is_hog_upvalue(obj: Any) -> bool: and "value" in obj and "id" in obj ) + + +def is_hog_interval(obj: Any) -> bool: + return isinstance(obj, dict) and 
obj.get("__hogInterval__") is True + + +def to_hog_interval(value: int, unit: str): + return { + "__hogInterval__": True, + "value": value, + "unit": unit, + } diff --git a/hogvm/python/stl/__init__.py b/hogvm/python/stl/__init__.py index 51a75d24e6e72..4945743cdb881 100644 --- a/hogvm/python/stl/__init__.py +++ b/hogvm/python/stl/__init__.py @@ -1,5 +1,6 @@ import dataclasses import datetime +import math import time from typing import Any, Optional, TYPE_CHECKING from collections.abc import Callable @@ -23,7 +24,7 @@ is_hog_date, ) from .crypto import sha256Hex, md5Hex, sha256HmacChainHex -from ..objects import is_hog_error, new_hog_error, is_hog_callable, is_hog_closure +from ..objects import is_hog_error, new_hog_error, is_hog_callable, is_hog_closure, to_hog_interval from ..utils import like, get_nested_value if TYPE_CHECKING: @@ -421,6 +422,465 @@ def _typeof(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]] return "unknown" +def apply_interval_to_datetime(dt: dict, interval: dict) -> dict: + # interval["unit"] in {"day", "hour", "minute", "month"} + if not (is_hog_date(dt) or is_hog_datetime(dt)): + raise ValueError("Expected a HogDate or HogDateTime") + + zone = dt["zone"] if is_hog_datetime(dt) else "UTC" + if is_hog_datetime(dt): + base_dt = datetime.datetime.utcfromtimestamp(dt["dt"]) + base_dt = pytz.timezone(zone).localize(base_dt) + else: + base_dt = datetime.datetime(dt["year"], dt["month"], dt["day"], tzinfo=pytz.timezone(zone)) + + value = interval["value"] + unit = interval["unit"] + + if unit == "day": + base_dt = base_dt + datetime.timedelta(days=value) + elif unit == "hour": + base_dt = base_dt + datetime.timedelta(hours=value) + elif unit == "minute": + base_dt = base_dt + datetime.timedelta(minutes=value) + elif unit == "second": + base_dt = base_dt + datetime.timedelta(seconds=value) + elif unit == "month": + # Add months by incrementing month/year + # Adding months can overflow year and month boundaries + # We'll do a rough calculation + year = base_dt.year + month = base_dt.month + value + day = base_dt.day + # adjust year and month + year += (month - 1) // 12 + month = ((month - 1) % 12) + 1 + # If day is invalid for the new month, clamp + # For simplicity, clamp to last valid day of month + # This matches ClickHouse dateAdd('month',...) behavior + while True: + try: + base_dt = base_dt.replace(year=year, month=month, day=day) + break + except ValueError: + day -= 1 + # no need to add timedelta here + else: + raise ValueError(f"Unknown interval unit {unit}") + + if is_hog_date(dt): + return { + "__hogDate__": True, + "year": base_dt.year, + "month": base_dt.month, + "day": base_dt.day, + } + else: + return { + "__hogDateTime__": True, + "dt": base_dt.timestamp(), + "zone": zone, + } + + +def date_add(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + # dateAdd(unit, amount, datetime) + # unit: 'second','minute','hour','day','week','month','year'... + unit = args[0] + amount = args[1] + dt = args[2] + + if unit in ["day", "hour", "minute", "second", "month"]: + pass + elif unit == "week": + # dateAdd('week', x, ...) = dateAdd('day', x*7, ...) 
+ unit = "day" + amount = amount * 7 + elif unit == "year": + # year intervals: adding year means 12 months + unit = "month" + amount = amount * 12 + else: + raise ValueError(f"Unsupported interval unit: {unit}") + + interval = to_hog_interval(amount, unit) + return apply_interval_to_datetime(dt, interval) + + +def date_diff(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + # dateDiff(unit, start, end) + unit = args[0] + start = args[1] + end = args[2] + + # Convert start/end to aware datetimes + def to_dt(obj): + if is_hog_datetime(obj): + z = obj["zone"] + return pytz.timezone(z).localize(datetime.datetime.utcfromtimestamp(obj["dt"])) + elif is_hog_date(obj): + return pytz.UTC.localize(datetime.datetime(obj["year"], obj["month"], obj["day"])) + else: + # try parse string + d = datetime.datetime.fromisoformat(obj) + return d.replace(tzinfo=pytz.UTC) + + start_dt = to_dt(start) + end_dt = to_dt(end) + + diff = end_dt - start_dt + if unit == "day": + return diff.days + elif unit == "hour": + return int(diff.total_seconds() // 3600) + elif unit == "minute": + return int(diff.total_seconds() // 60) + elif unit == "second": + return int(diff.total_seconds()) + elif unit == "week": + return diff.days // 7 + elif unit == "month": + # approximate: count months difference + return (end_dt.year - start_dt.year) * 12 + (end_dt.month - start_dt.month) + elif unit == "year": + return end_dt.year - start_dt.year + else: + raise ValueError(f"Unsupported unit for dateDiff: {unit}") + + +def date_trunc(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + # dateTrunc(unit, datetime) + unit = args[0] + dt = args[1] + + if not is_hog_datetime(dt): + raise ValueError("Expected a DateTime for dateTrunc") + + zone = dt["zone"] + base_dt = datetime.datetime.utcfromtimestamp(dt["dt"]) + base_dt = pytz.timezone(zone).localize(base_dt) + + if unit == "year": + truncated = datetime.datetime(base_dt.year, 1, 1, tzinfo=base_dt.tzinfo) + elif unit == "month": + truncated = datetime.datetime(base_dt.year, base_dt.month, 1, tzinfo=base_dt.tzinfo) + elif unit == "day": + truncated = datetime.datetime(base_dt.year, base_dt.month, base_dt.day, tzinfo=base_dt.tzinfo) + elif unit == "hour": + truncated = datetime.datetime(base_dt.year, base_dt.month, base_dt.day, base_dt.hour, tzinfo=base_dt.tzinfo) + elif unit == "minute": + truncated = datetime.datetime( + base_dt.year, base_dt.month, base_dt.day, base_dt.hour, base_dt.minute, tzinfo=base_dt.tzinfo + ) + else: + raise ValueError(f"Unsupported unit for dateTrunc: {unit}") + + return { + "__hogDateTime__": True, + "dt": truncated.timestamp(), + "zone": zone, + } + + +def coalesce(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + for a in args: + if a is not None: + return a + return None + + +def assumeNotNull(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + if args[0] is None: + raise ValueError("Value is null in assumeNotNull") + return args[0] + + +def equals(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return args[0] == args[1] + + +def greater(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return args[0] > args[1] + + +def greaterOrEquals(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return args[0] >= args[1] + + +def less(args: list[Any], 
team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return args[0] < args[1] + + +def lessOrEquals(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return args[0] <= args[1] + + +def notEquals(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return args[0] != args[1] + + +def not_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return not bool(args[0]) + + +def and_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return all(args) + + +def or_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return any(args) + + +def if_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return args[1] if args[0] else args[2] + + +def in_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return args[0] in args[1] if isinstance(args[1], list | tuple) else False + + +def min2(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return args[0] if args[0] < args[1] else args[1] + + +def max2(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return args[0] if args[0] > args[1] else args[1] + + +def plus(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return args[0] + args[1] + + +def minus(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return args[0] - args[1] + + +def multiIf(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + # multiIf(cond1, val1, cond2, val2, ..., default) + default = args[-1] + pairs = args[:-1] + for i in range(0, len(pairs), 2): + cond = pairs[i] + val = pairs[i + 1] + if cond: + return val + return default + + +def floor_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return math.floor(args[0]) + + +def extract(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + # extract(part, datetime) + # part in { 'year', 'month', 'day', 'hour', 'minute', 'second' } + part = args[0] + val = args[1] + + def to_dt(obj): + if is_hog_datetime(obj): + z = obj["zone"] + return pytz.timezone(z).localize(datetime.datetime.utcfromtimestamp(obj["dt"])) + elif is_hog_date(obj): + return pytz.UTC.localize(datetime.datetime(obj["year"], obj["month"], obj["day"])) + else: + d = datetime.datetime.fromisoformat(obj) + return d.replace(tzinfo=pytz.UTC) + + dt = to_dt(val) + if part == "year": + return dt.year + elif part == "month": + return dt.month + elif part == "day": + return dt.day + elif part == "hour": + return dt.hour + elif part == "minute": + return dt.minute + elif part == "second": + return dt.second + else: + raise ValueError(f"Unknown extract part: {part}") + + +def round_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return round(args[0]) + + +def startsWith(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> bool: + return isinstance(args[0], str) and isinstance(args[1], str) and args[0].startswith(args[1]) + + +def substring(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + # 
substring(str, start, length) + # start is 1-based. + s = args[0] + start = args[1] + length = args[2] + if not isinstance(s, str): + return "" + start_idx = start - 1 + if start_idx < 0 or length < 0: + return "" + end_idx = start_idx + length + return s[start_idx:end_idx] if 0 <= start_idx < len(s) else "" + + +def addDays(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + interval = to_hog_interval(args[1], "day") + return apply_interval_to_datetime(args[0], interval) + + +def toIntervalDay(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return to_hog_interval(args[0], "day") + + +def toIntervalHour(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return to_hog_interval(args[0], "hour") + + +def toIntervalMinute(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return to_hog_interval(args[0], "minute") + + +def toIntervalMonth(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return to_hog_interval(args[0], "month") + + +def toYear(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return extract(["year", args[0]], team, stdout, timeout) + + +def toMonth_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return extract(["month", args[0]], team, stdout, timeout) + + +def trunc_to_unit(dt: dict, unit: str, team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> dict: + # helper for toStartOfDay, etc. + if not is_hog_datetime(dt): + if is_hog_date(dt): + dt = toDateTime(f"{dt['year']:04d}-{dt['month']:02d}-{dt['day']:02d}") + else: + raise ValueError("Expected a Date or DateTime") + + return date_trunc([unit, dt], team, stdout, timeout) + + +def toStartOfDay(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return trunc_to_unit(args[0], "day", team, stdout, timeout) + + +def toStartOfHour(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return trunc_to_unit(args[0], "hour", team, stdout, timeout) + + +def toStartOfMonth(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + return trunc_to_unit(args[0], "month", team, stdout, timeout) + + +def toStartOfWeek(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + dt = args[0] + if not is_hog_datetime(dt): + if is_hog_date(dt): + dt = toDateTime(f"{dt['year']}-{dt['month']:02d}-{dt['day']:02d}") + else: + raise ValueError("Expected a Date or DateTime") + base_dt = datetime.datetime.utcfromtimestamp(dt["dt"]) + zone = dt["zone"] + base_dt = pytz.timezone(zone).localize(base_dt) + weekday = base_dt.isoweekday() # Monday=1, Sunday=7 + start_of_week = base_dt - datetime.timedelta(days=weekday - 1) + start_of_week = start_of_week.replace(hour=0, minute=0, second=0, microsecond=0) + return { + "__hogDateTime__": True, + "dt": start_of_week.timestamp(), + "zone": zone, + } + + +def toYYYYMM(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + y = toYear([args[0]], team, stdout, timeout) + m = toMonth_fn([args[0]], team, stdout, timeout) + return y * 100 + m + + +def today(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + now_dt = 
datetime.datetime.now(tz=pytz.UTC) + return { + "__hogDate__": True, + "year": now_dt.year, + "month": now_dt.month, + "day": now_dt.day, + } + + +def range_fn(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + # range(a,b) -> [a..b-1], range(x) -> [0..x-1] + if len(args) == 1: + return list(range(args[0])) + elif len(args) == 2: + return list(range(args[0], args[1])) + else: + raise ValueError("range function supports 1 or 2 arguments only") + + +def JSONExtractArrayRaw(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + obj = args[0] + path = args[1:] + try: + if isinstance(obj, str): + obj = json.loads(obj) + except json.JSONDecodeError: + return None + val = get_nested_value(obj, path, True) + if isinstance(val, list): + return val + return None + + +def JSONExtractFloat(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + obj = args[0] + path = args[1:] + try: + if isinstance(obj, str): + obj = json.loads(obj) + except json.JSONDecodeError: + return None + val = get_nested_value(obj, path, True) + try: + return float(val) + except (TypeError, ValueError): + return None + + +def JSONExtractInt(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + obj = args[0] + path = args[1:] + try: + if isinstance(obj, str): + obj = json.loads(obj) + except json.JSONDecodeError: + return None + val = get_nested_value(obj, path, True) + try: + return int(val) + except (TypeError, ValueError): + return None + + +def JSONExtractString(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]], timeout: float) -> Any: + obj = args[0] + path = args[1:] + try: + if isinstance(obj, str): + obj = json.loads(obj) + except json.JSONDecodeError: + return None + val = get_nested_value(obj, path, True) + return str(val) if val is not None else None + + STL: dict[str, STLFunction] = { "concat": STLFunction( fn=lambda args, team, stdout, timeout: "".join( @@ -565,6 +1025,50 @@ def _typeof(args: list[Any], team: Optional["Team"], stdout: Optional[list[str]] maxArgs=2, ), "typeof": STLFunction(fn=_typeof, minArgs=1, maxArgs=1), + "JSONExtractArrayRaw": STLFunction(fn=JSONExtractArrayRaw, minArgs=1), + "JSONExtractFloat": STLFunction(fn=JSONExtractFloat, minArgs=1), + "JSONExtractInt": STLFunction(fn=JSONExtractInt, minArgs=1), + "JSONExtractString": STLFunction(fn=JSONExtractString, minArgs=1), + "and": STLFunction(fn=and_fn, minArgs=2, maxArgs=2), + "addDays": STLFunction(fn=addDays, minArgs=2, maxArgs=2), + "assumeNotNull": STLFunction(fn=assumeNotNull, minArgs=1, maxArgs=1), + "coalesce": STLFunction(fn=coalesce, minArgs=1, maxArgs=None), + "dateAdd": STLFunction(fn=date_add, minArgs=3, maxArgs=3), + "dateDiff": STLFunction(fn=date_diff, minArgs=3, maxArgs=3), + "dateTrunc": STLFunction(fn=date_trunc, minArgs=2, maxArgs=2), + "equals": STLFunction(fn=equals, minArgs=2, maxArgs=2), + "extract": STLFunction(fn=extract, minArgs=2, maxArgs=2), + "floor": STLFunction(fn=floor_fn, minArgs=1, maxArgs=1), + "greater": STLFunction(fn=greater, minArgs=2, maxArgs=2), + "greaterOrEquals": STLFunction(fn=greaterOrEquals, minArgs=2, maxArgs=2), + "if": STLFunction(fn=if_fn, minArgs=3, maxArgs=3), + "in": STLFunction(fn=in_fn, minArgs=2, maxArgs=2), + "less": STLFunction(fn=less, minArgs=2, maxArgs=2), + "lessOrEquals": STLFunction(fn=lessOrEquals, minArgs=2, maxArgs=2), + "min2": STLFunction(fn=min2, minArgs=2, maxArgs=2), + "max2": 
STLFunction(fn=max2, minArgs=2, maxArgs=2), + "minus": STLFunction(fn=minus, minArgs=2, maxArgs=2), + "multiIf": STLFunction(fn=multiIf, minArgs=3), + "not": STLFunction(fn=not_fn, minArgs=1, maxArgs=1), + "notEquals": STLFunction(fn=notEquals, minArgs=2, maxArgs=2), + "or": STLFunction(fn=or_fn, minArgs=2, maxArgs=2), + "plus": STLFunction(fn=plus, minArgs=2, maxArgs=2), + "range": STLFunction(fn=range_fn, minArgs=1, maxArgs=2), + "round": STLFunction(fn=round_fn, minArgs=1, maxArgs=1), + "startsWith": STLFunction(fn=startsWith, minArgs=2, maxArgs=2), + "substring": STLFunction(fn=substring, minArgs=3, maxArgs=3), + "toIntervalDay": STLFunction(fn=toIntervalDay, minArgs=1, maxArgs=1), + "toIntervalHour": STLFunction(fn=toIntervalHour, minArgs=1, maxArgs=1), + "toIntervalMinute": STLFunction(fn=toIntervalMinute, minArgs=1, maxArgs=1), + "toIntervalMonth": STLFunction(fn=toIntervalMonth, minArgs=1, maxArgs=1), + "toMonth": STLFunction(fn=toMonth_fn, minArgs=1, maxArgs=1), + "toStartOfDay": STLFunction(fn=toStartOfDay, minArgs=1, maxArgs=1), + "toStartOfHour": STLFunction(fn=toStartOfHour, minArgs=1, maxArgs=1), + "toStartOfMonth": STLFunction(fn=toStartOfMonth, minArgs=1, maxArgs=1), + "toStartOfWeek": STLFunction(fn=toStartOfWeek, minArgs=1, maxArgs=1), + "toYYYYMM": STLFunction(fn=toYYYYMM, minArgs=1, maxArgs=1), + "toYear": STLFunction(fn=toYear, minArgs=1, maxArgs=1), + "today": STLFunction(fn=today, minArgs=0, maxArgs=0), # only in python, async function in nodejs "sleep": STLFunction(fn=sleep, minArgs=1, maxArgs=1), "run": STLFunction(fn=run, minArgs=1, maxArgs=1), diff --git a/hogvm/typescript/package.json b/hogvm/typescript/package.json index 7d2fa7ad58ad3..c7e1035bd51e8 100644 --- a/hogvm/typescript/package.json +++ b/hogvm/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@posthog/hogvm", - "version": "1.0.65", + "version": "1.0.66", "description": "PostHog Hog Virtual Machine", "types": "dist/index.d.ts", "source": "src/index.ts", diff --git a/hogvm/typescript/src/stl/stl.ts b/hogvm/typescript/src/stl/stl.ts index 032e88a1b774e..760d041522e1f 100644 --- a/hogvm/typescript/src/stl/stl.ts +++ b/hogvm/typescript/src/stl/stl.ts @@ -1,7 +1,7 @@ import { DateTime } from 'luxon' import { isHogCallable, isHogClosure, isHogDate, isHogDateTime, isHogError, newHogError } from '../objects' -import { AsyncSTLFunction, STLFunction } from '../types' +import { AsyncSTLFunction, STLFunction, HogInterval, HogDate, HogDateTime } from '../types' import { getNestedValue, like } from '../utils' import { md5Hex, sha256Hex, sha256HmacChainHex } from './crypto' import { @@ -33,6 +33,432 @@ function STLToString(args: any[]): string { return printHogStringOutput(args[0]) } +// Helper: HogInterval +function isHogInterval(obj: any): obj is HogInterval { + return obj && obj.__hogInterval__ === true +} + +function toHogInterval(value: number, unit: string): HogInterval { + return { + __hogInterval__: true, + value: value, + unit: unit, + } +} + +function applyIntervalToDateTime(base: HogDate | HogDateTime, interval: HogInterval): HogDate | HogDateTime { + let dt: DateTime + let zone = 'UTC' + if (isHogDateTime(base)) { + zone = base.zone + dt = DateTime.fromSeconds(base.dt, { zone }) + } else { + dt = DateTime.fromObject({ year: base.year, month: base.month, day: base.day }, { zone }) + } + + const { value, unit } = interval + // Expand certain units for uniformity + let effectiveUnit = unit + let effectiveValue = value + if (unit === 'week') { + effectiveUnit = 'day' + effectiveValue = value * 7 + } else if 
(unit === 'year') { + effectiveUnit = 'month' + effectiveValue = value * 12 + } + + // Note: Luxon doesn't have direct month addition that can handle overflow automatically to last day of month, + // but plus({ months: x }) will shift the date by x months and clamp automatically if needed. + let newDt: DateTime + switch (effectiveUnit) { + case 'day': + newDt = dt.plus({ days: effectiveValue }) + break + case 'hour': + newDt = dt.plus({ hours: effectiveValue }) + break + case 'minute': + newDt = dt.plus({ minutes: effectiveValue }) + break + case 'second': + newDt = dt.plus({ seconds: effectiveValue }) + break + case 'month': + newDt = dt.plus({ months: effectiveValue }) + break + default: + throw new Error(`Unsupported interval unit: ${unit}`) + } + + if (isHogDateTime(base)) { + return { + __hogDateTime__: true, + dt: newDt.toSeconds(), + zone: newDt.zoneName || 'UTC', + } + } else { + return { + __hogDate__: true, + year: newDt.year, + month: newDt.month, + day: newDt.day, + } + } +} + +// dateAdd(unit, amount, datetime) +function dateAddFn([unit, amount, datetime]: any[]): HogDate | HogDateTime { + return applyIntervalToDateTime(datetime, toHogInterval(amount, unit)) +} + +// dateDiff(unit, start, end) +function dateDiffFn([unit, startVal, endVal]: any[]): number { + function toDT(obj: any): DateTime { + if (isHogDateTime(obj)) { + return DateTime.fromSeconds(obj.dt, { zone: obj.zone }) + } else if (isHogDate(obj)) { + return DateTime.fromObject({ year: obj.year, month: obj.month, day: obj.day }, { zone: 'UTC' }) + } else { + // try parse ISO string + return DateTime.fromISO(obj, { zone: 'UTC' }) + } + } + + const start = toDT(startVal) + const end = toDT(endVal) + const diff = end.diff(start, ['years', 'months', 'weeks', 'days', 'hours', 'minutes', 'seconds']) + + switch (unit) { + case 'day': + return Math.floor((end.toMillis() - start.toMillis()) / (1000 * 60 * 60 * 24)) + case 'hour': + return Math.floor(diff.as('hours')) + case 'minute': + return Math.floor(diff.as('minutes')) + case 'second': + return Math.floor(diff.as('seconds')) + case 'week': + return Math.floor(diff.as('days') / 7) + case 'month': + // Month difference approximated by counting month differences: + return (end.year - start.year) * 12 + (end.month - start.month) + case 'year': + return end.year - start.year + default: + throw new Error(`Unsupported unit for dateDiff: ${unit}`) + } +} + +// dateTrunc(unit, datetime) +function dateTruncFn([unit, val]: any[]): HogDateTime { + if (!isHogDateTime(val)) { + throw new Error('Expected a DateTime for dateTrunc') + } + const dt = DateTime.fromSeconds(val.dt, { zone: val.zone }) + let truncated: DateTime + switch (unit) { + case 'year': + truncated = DateTime.fromObject({ year: dt.year }, { zone: dt.zoneName }) + break + case 'month': + truncated = DateTime.fromObject({ year: dt.year, month: dt.month }, { zone: dt.zoneName }) + break + case 'day': + truncated = DateTime.fromObject({ year: dt.year, month: dt.month, day: dt.day }, { zone: dt.zoneName }) + break + case 'hour': + truncated = DateTime.fromObject({ year: dt.year, month: dt.month, day: dt.day, hour: dt.hour }, { zone: dt.zoneName }) + break + case 'minute': + truncated = DateTime.fromObject({ year: dt.year, month: dt.month, day: dt.day, hour: dt.hour, minute: dt.minute }, { zone: dt.zoneName }) + break + default: + throw new Error(`Unsupported unit for dateTrunc: ${unit}`) + } + return { + __hogDateTime__: true, + dt: truncated.toSeconds(), + zone: truncated.zoneName || 'UTC', + } +} + +function 
coalesceFn(args: any[]): any { + for (const a of args) { + if (a !== null && a !== undefined) return a + } + return null +} + +function assumeNotNullFn([val]: any[]): any { + if (val === null || val === undefined) { + throw new Error("Value is null in assumeNotNull") + } + return val +} + +function equalsFn([a, b]: any[]): boolean { + return a === b +} + +function greaterFn([a, b]: any[]): boolean { + return a > b +} + +function greaterOrEqualsFn([a, b]: any[]): boolean { + return a >= b +} + +function lessFn([a, b]: any[]): boolean { + return a < b +} + +function lessOrEqualsFn([a, b]: any[]): boolean { + return a <= b +} + +function notEqualsFn([a, b]: any[]): boolean { + return a !== b +} + +function notFn([a]: any[]): boolean { + return !a +} + +function andFn(args: any[]): boolean { + return args.every(Boolean) +} + +function orFn(args: any[]): boolean { + return args.some(Boolean) +} + +function ifFn([cond, thenVal, elseVal]: any[]): any { + return cond ? thenVal : elseVal +} + +function inFn([val, arr]: any[]): boolean { + return Array.isArray(arr) || (arr && arr.__isHogTuple) ? arr.includes(val) : false +} + +function min2Fn([a, b]: any[]): any { + return a < b ? a : b +} + +function plusFn([a, b]: any[]): any { + return a + b +} + +function minusFn([a, b]: any[]): any { + return a - b +} + +function multiIfFn(args: any[]): any { + // multiIf(cond1, val1, cond2, val2, ..., default) + const last = args[args.length - 1] + const pairs = args.slice(0, -1) + for (let i = 0; i < pairs.length; i += 2) { + const cond = pairs[i] + const val = pairs[i + 1] + if (cond) { + return val + } + } + return last +} + +function floorFn([a]: any[]): any { + return Math.floor(a) +} + +// extract(part, datetime) +function extractFn([part, val]: any[]): number { + function toDT(obj: any): DateTime { + if (isHogDateTime(obj)) { + return DateTime.fromSeconds(obj.dt, { zone: obj.zone }) + } else if (isHogDate(obj)) { + return DateTime.fromObject({ year: obj.year, month: obj.month, day: obj.day }, { zone: 'UTC' }) + } else { + return DateTime.fromISO(obj, { zone: 'UTC' }) + } + } + + const dt = toDT(val) + switch (part) { + case 'year': + return dt.year + case 'month': + return dt.month + case 'day': + return dt.day + case 'hour': + return dt.hour + case 'minute': + return dt.minute + case 'second': + return dt.second + default: + throw new Error(`Unknown extract part: ${part}`) + } +} + +function roundFn([a]: any[]): any { + return Math.round(a) +} + +function startsWithFn([str, prefix]: any[]): boolean { + return typeof str === 'string' && typeof prefix === 'string' && str.startsWith(prefix) +} + +function substringFn([s, start, length]: any[]): string { + if (typeof s !== 'string') return '' + const startIdx = start - 1 + if (startIdx < 0 || length < 0) return '' + const endIdx = startIdx + length + return startIdx < s.length ? 
s.slice(startIdx, endIdx) : '' +} + +function addDaysFn([dateOrDt, days]: any[]): HogDate | HogDateTime { + return applyIntervalToDateTime(dateOrDt, toHogInterval(days, 'day')) +} + +function toIntervalDayFn([val]: any[]): HogInterval { + return toHogInterval(val, 'day') +} + +function toIntervalHourFn([val]: any[]): HogInterval { + return toHogInterval(val, 'hour') +} + +function toIntervalMinuteFn([val]: any[]): HogInterval { + return toHogInterval(val, 'minute') +} + +function toIntervalMonthFn([val]: any[]): HogInterval { + return toHogInterval(val, 'month') +} + +function toYearFn([val]: any[]): number { + return extractFn(['year', val]) +} + +function toMonthFn([val]: any[]): number { + return extractFn(['month', val]) +} + +function toStartOfDayFn([val]: any[]): HogDateTime { + return dateTruncFn(['day', isHogDateTime(val) ? val : toDateTimeFromDate(val)]) +} + +function toStartOfHourFn([val]: any[]): HogDateTime { + return dateTruncFn(['hour', isHogDateTime(val) ? val : toDateTimeFromDate(val)]) +} + +function toStartOfMonthFn([val]: any[]): HogDateTime { + return dateTruncFn(['month', isHogDateTime(val) ? val : toDateTimeFromDate(val)]) +} + +function toStartOfWeekFn([val]: any[]): HogDateTime { + const dt = isHogDateTime(val) ? DateTime.fromSeconds(val.dt, { zone: val.zone }) : + DateTime.fromObject({ year: val.year, month: val.month, day: val.day }, { zone: 'UTC' }) + const weekday = dt.weekday // Monday=1, Sunday=7 + const startOfWeek = dt.minus({ days: weekday - 1 }).startOf('day') + return { + __hogDateTime__: true, + dt: startOfWeek.toSeconds(), + zone: startOfWeek.zoneName || 'UTC' + } +} + +function toYYYYMMFn([val]: any[]): number { + const y = toYearFn([val]) + const m = toMonthFn([val]) + return y * 100 + m +} + +function todayFn(): HogDate { + const now = DateTime.now().setZone('UTC') + return { + __hogDate__: true, + year: now.year, + month: now.month, + day: now.day, + } +} + +function toDateTimeFromDate(date: HogDate): HogDateTime { + const dt = DateTime.fromObject({ year: date.year, month: date.month, day: date.day }, { zone: 'UTC' }) + return { + __hogDateTime__: true, + dt: dt.toSeconds(), + zone: 'UTC', + } +} + +function rangeFn(args: any[]): any[] { + if (args.length === 1) { + return Array.from({ length: args[0] }, (_, i) => i) + } else { + return Array.from({ length: args[1] - args[0] }, (_, i) => args[0] + i) + } +} + +// JSON extraction +function JSONExtractArrayRawFn(args: any[]): any { + let [obj, ...path] = args + try { + if (typeof obj === 'string') { + obj = JSON.parse(obj) + } + } catch { + return null + } + const val = getNestedValue(obj, path, true) + return Array.isArray(val) ? val : null +} + +function JSONExtractFloatFn(args: any[]): number | null { + let [obj, ...path] = args + try { + if (typeof obj === 'string') { + obj = JSON.parse(obj) + } + } catch { + return null + } + const val = getNestedValue(obj, path, true) + const f = parseFloat(val) + return isNaN(f) ? null : f +} + +function JSONExtractIntFn(args: any[]): number | null { + let [obj, ...path] = args + try { + if (typeof obj === 'string') { + obj = JSON.parse(obj) + } + } catch { + return null + } + const val = getNestedValue(obj, path, true) + const i = parseInt(val) + return isNaN(i) ? null : i +} + +function JSONExtractStringFn(args: any[]): string | null { + let [obj, ...path] = args + try { + if (typeof obj === 'string') { + obj = JSON.parse(obj) + } + } catch { + return null + } + const val = getNestedValue(obj, path, true) + return val != null ? 
String(val) : null +} + + export const STL: Record<string, STLFunction> = { concat: { fn: (args) => { @@ -789,6 +1215,50 @@ export const STL: Record<string, STLFunction> = { minArgs: 1, maxArgs: 1, }, + + JSONExtractArrayRaw: { fn: JSONExtractArrayRawFn, minArgs: 1 }, + JSONExtractFloat: { fn: JSONExtractFloatFn, minArgs: 1 }, + JSONExtractInt: { fn: JSONExtractIntFn, minArgs: 1 }, + JSONExtractString: { fn: JSONExtractStringFn, minArgs: 1 }, + addDays: { fn: addDaysFn, minArgs: 2, maxArgs: 2 }, + assumeNotNull: { fn: assumeNotNullFn, minArgs: 1, maxArgs: 1 }, + coalesce: { fn: coalesceFn, minArgs: 1 }, + dateAdd: { fn: dateAddFn, minArgs: 3, maxArgs: 3 }, + dateDiff: { fn: dateDiffFn, minArgs: 3, maxArgs: 3 }, + dateTrunc: { fn: dateTruncFn, minArgs: 2, maxArgs: 2 }, + equals: { fn: equalsFn, minArgs: 2, maxArgs: 2 }, + extract: { fn: extractFn, minArgs: 2, maxArgs: 2 }, + floor: { fn: floorFn, minArgs: 1, maxArgs: 1 }, + greater: { fn: greaterFn, minArgs: 2, maxArgs: 2 }, + greaterOrEquals: { fn: greaterOrEqualsFn, minArgs: 2, maxArgs: 2 }, + if: { fn: ifFn, minArgs: 3, maxArgs: 3 }, + in: { fn: inFn, minArgs: 2, maxArgs: 2 }, + less: { fn: lessFn, minArgs: 2, maxArgs: 2 }, + lessOrEquals: { fn: lessOrEqualsFn, minArgs: 2, maxArgs: 2 }, + min2: { fn: min2Fn, minArgs: 2, maxArgs: 2 }, + minus: { fn: minusFn, minArgs: 2, maxArgs: 2 }, + multiIf: { fn: multiIfFn, minArgs: 3 }, + not: { fn: notFn, minArgs: 1, maxArgs: 1 }, + notEquals: { fn: notEqualsFn, minArgs: 2, maxArgs: 2 }, + and: { fn: andFn, minArgs: 2, maxArgs: 2 }, + or: { fn: orFn, minArgs: 2, maxArgs: 2 }, + plus: { fn: plusFn, minArgs: 2, maxArgs: 2 }, + range: { fn: rangeFn, minArgs: 1, maxArgs: 2 }, + round: { fn: roundFn, minArgs: 1, maxArgs: 1 }, + startsWith: { fn: startsWithFn, minArgs: 2, maxArgs: 2 }, + substring: { fn: substringFn, minArgs: 3, maxArgs: 3 }, + toIntervalDay: { fn: toIntervalDayFn, minArgs: 1, maxArgs: 1 }, + toIntervalHour: { fn: toIntervalHourFn, minArgs: 1, maxArgs: 1 }, + toIntervalMinute: { fn: toIntervalMinuteFn, minArgs: 1, maxArgs: 1 }, + toIntervalMonth: { fn: toIntervalMonthFn, minArgs: 1, maxArgs: 1 }, + toMonth: { fn: toMonthFn, minArgs: 1, maxArgs: 1 }, + toStartOfDay: { fn: toStartOfDayFn, minArgs: 1, maxArgs: 1 }, + toStartOfHour: { fn: toStartOfHourFn, minArgs: 1, maxArgs: 1 }, + toStartOfMonth: { fn: toStartOfMonthFn, minArgs: 1, maxArgs: 1 }, + toStartOfWeek: { fn: toStartOfWeekFn, minArgs: 1, maxArgs: 1 }, + toYYYYMM: { fn: toYYYYMMFn, minArgs: 1, maxArgs: 1 }, + toYear: { fn: toYearFn, minArgs: 1, maxArgs: 1 }, + today: { fn: todayFn, minArgs: 0, maxArgs: 0 }, } export const ASYNC_STL: Record<string, AsyncSTLFunction> = { diff --git a/hogvm/typescript/src/types.ts b/hogvm/typescript/src/types.ts index 50008b91b0d39..810815f63bd30 100644 --- a/hogvm/typescript/src/types.ts +++ b/hogvm/typescript/src/types.ts @@ -144,6 +144,12 @@ export interface HogClosure { upvalues: number[] } +export interface HogInterval { + __hogInterval__: true + value: number + unit: string +} + export interface STLFunction { fn: (args: any[], name: string, options?: ExecOptions) => any minArgs?: number diff --git a/jest.config.ts b/jest.config.ts index 53e5df5943413..39ab8232a5592 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -8,7 +8,7 @@ process.env.TZ = process.env.TZ || 'UTC' * https://jestjs.io/docs/en/configuration.html */ -const esmModules = ['query-selector-shadow-dom', 'react-syntax-highlighter', '@react-hook', '@medv', 'monaco-editor'] +const esmModules = ['query-selector-shadow-dom', 'react-syntax-highlighter', '@react-hook', '@medv', 'monaco-editor',
'jsoncrush'] const eeFolderExists = fs.existsSync('ee/frontend/exports.ts') function rootDirectories() { const rootDirectories = ['/frontend/src'] diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 4599145228cfa..58ea7ed8a90c2 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -1,4 +1,67 @@ posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment] +posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Statement is unreachable [unreachable] +posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Non-overlapping equality check (left operand type: "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'json', 'decimal', 'wei', 'date', 'time'] | None", right operand type: "Literal['interval']") [comparison-overlap] +posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index] +posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index] +posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, TColumnSchema]"; expected type "str" [index] +posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "Incremental[Any] | None" has no attribute "row_order" [union-attr] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "Literal['asc', 'desc'] | Any | None", variable has type "Literal['asc', 'desc']") [assignment] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Column[Any]") [assignment] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Literal['asc', 'desc']") [assignment] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "dict[str, Any] | None" has no attribute "get" [union-attr] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Argument "primary_key" to "make_hints" has incompatible type "list[str] | None"; expected "str | Sequence[str] | Callable[[Any], str | Sequence[str]]" [arg-type] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 
'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable] 
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] 
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload] posthog/utils.py:0: note: Possible overload variants: posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any] @@ -365,9 +428,40 @@ posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "delete" [union-attr] -posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Statement is unreachable [unreachable] -posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Non-overlapping equality check (left operand type: "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'json', 'decimal', 'wei', 'date', 'time'] | None", right operand type: "Literal['interval']") [comparison-overlap] -posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] 
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "with_only_columns" of "Select" matches argument type "ReadOnlyColumnCollection[str, Column[Any]]" [call-overload] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], /) -> Select[tuple[_T0]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], /) -> Select[tuple[_T0, _T1]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], /) -> Select[tuple[_T0, _T1, _T2]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], /) -> Select[tuple[_T0, _T1, _T2, _T3]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | 
type[_T5], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], TypedColumnsClauseRole[_T7] | SQLCoreOperations[_T7] | type[_T7], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def with_only_columns(self, *entities: TypedColumnsClauseRole[Any] | ColumnsClauseRole | SQLCoreOperations[Any] | Literal['*', 1] | type[Any] | Inspectable[_HasClauseElement[Any]] | _HasClauseElement[Any], maintain_column_froms: bool = ..., **Any) -> Select[Any] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "resource" matches argument types "Callable[[Engine, Table, int, Literal['sqlalchemy', 'pyarrow', 'pandas', 'connectorx'], Incremental[Any] | None, Any | None, bool, Callable[[Table], None] | None, Literal['minimal', 'full', 'full_with_precision'], dict[str, Any] | None, Callable[[TypeEngine[Any]], TypeEngine[Any] | type[TypeEngine[Any]] | None] | None, list[str] | None, Callable[[Select[Any], Table], Select[Any]] | None, list[str] | None], Iterator[Any]]", "str", "list[str] | None", "list[str] | None", "dict[str, TColumnSchema]", "Collection[str]", "str" [call-overload] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TResourceFunParams`-1, TDltResourceImpl: DltResource] resource(Callable[TResourceFunParams, Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 
'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> TDltResourceImpl +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> Callable[[Callable[TResourceFunParams, Any]], TDltResourceImpl] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str | Callable[[Any], str] = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ..., standalone: Literal[True] = ...) -> Callable[[Callable[TResourceFunParams, Any]], Callable[TResourceFunParams, TDltResourceImpl]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(list[Any] | tuple[Any] | Iterator[Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> TDltResourceImpl posthog/tasks/test/test_update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator] posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants: @@ -505,10 +599,12 @@ posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in a posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment] posthog/warehouse/data_load/source_templates.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "Type") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, TColumnSchema]"; expected type "str" [index] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: Possible overload variants: +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, /) -> Sequence[str] | None +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: Argument "source_id" to "sync_old_schemas_with_new_schemas" has incompatible type "str"; expected "UUID" [arg-type] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a return type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] @@ -622,6 +718,7 @@ posthog/api/test/dashboards/test_dashboard.py:0: error: Value of type variable " posthog/api/test/dashboards/test_dashboard.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] posthog/api/test/dashboards/test_dashboard.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] posthog/api/test/dashboards/test_dashboard.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] 
+posthog/api/query.py:0: error: Statement is unreachable [unreachable] posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | AnonymousUser" has no attribute "organization" [union-attr] posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr] posthog/api/property_definition.py:0: error: Item "ForeignObjectRel" of "Field[Any, Any] | ForeignObjectRel | GenericForeignKey" has no attribute "cached_col" [union-attr] @@ -670,6 +767,16 @@ ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseTrendExper ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseFunnelExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseSecondaryExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] ee/clickhouse/views/experiments.py:0: error: Item "None" of "User | None" has no attribute "email" [union-attr] +posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value] +posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type] +posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload] +posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants: +posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None +posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]] +posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] @@ -678,27 +785,13 @@ posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict k posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 20 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item] posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 21 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item] posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 22 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item] 
-posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload] -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: Possible overload variants: -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, /) -> Sequence[str] | None -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: Argument "source_id" to "sync_old_schemas_with_new_schemas" has incompatible type "str"; expected "UUID" [arg-type] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "Incremental[Any] | None" has no attribute "row_order" [union-attr] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "Literal['asc', 'desc'] | Any | None", variable has type "Literal['asc', 'desc']") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Column[Any]") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Literal['asc', 'desc']") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "dict[str, Any] | None" has no attribute "get" [union-attr] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Argument "primary_key" to "make_hints" has incompatible type "list[str] | None"; expected "str | Sequence[str] | Callable[[Any], str | Sequence[str]]" [arg-type] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: "FilesystemDestinationClientConfiguration" has no attribute "delta_jobs_per_write" [attr-defined] posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: "type[FilesystemDestinationClientConfiguration]" has no attribute "delta_jobs_per_write" [attr-defined] posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "DataWarehouseCredential | Combinable | None") [assignment] posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment] -posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment] 
+posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Right operand of "and" is never evaluated [unreachable] +posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Statement is unreachable [unreachable] +posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Name "raw_db_columns" already defined on line 0 [no-redef] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] @@ -726,23 +819,6 @@ posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_async_calls" (hint: "_execute_async_calls: list[] = ...") [var-annotated] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_cursors" (hint: "_cursors: list[] = ...") [var-annotated] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: List item 0 has incompatible type "tuple[str, str, int, int, int, int, str, int]"; expected "tuple[str, str, int, int, str, str, str, str]" [list-item] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "with_only_columns" of "Select" matches argument type "ReadOnlyColumnCollection[str, Column[Any]]" [call-overload] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], /) -> Select[tuple[_T0]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], /) -> Select[tuple[_T0, _T1]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], /) -> Select[tuple[_T0, _T1, _T2]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], /) -> Select[tuple[_T0, _T1, _T2, _T3]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | 
SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], TypedColumnsClauseRole[_T7] | SQLCoreOperations[_T7] | type[_T7], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def with_only_columns(self, *entities: TypedColumnsClauseRole[Any] | ColumnsClauseRole | SQLCoreOperations[Any] | Literal['*', 1] | type[Any] | Inspectable[_HasClauseElement[Any]] | _HasClauseElement[Any], maintain_column_froms: bool = ..., **Any) -> Select[Any] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "resource" matches argument types "Callable[[Engine, Table, int, Literal['sqlalchemy', 'pyarrow', 'pandas', 'connectorx'], Incremental[Any] | None, bool, Callable[[Table], None] | None, Literal['minimal', 'full', 'full_with_precision'], dict[str, Any] | None, Callable[[TypeEngine[Any]], TypeEngine[Any] | type[TypeEngine[Any]] | None] | None, list[str] | None, Callable[[Select[Any], Table], Select[Any]] | None, list[str] | None], Iterator[Any]]", "str", "list[str] | None", "list[str] | None", "dict[str, TColumnSchema]", "Collection[str]", "str" [call-overload] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TResourceFunParams`-1, TDltResourceImpl: DltResource] resource(Callable[TResourceFunParams, Any], /, 
name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> TDltResourceImpl -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> Callable[[Callable[TResourceFunParams, Any]], TDltResourceImpl] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str | Callable[[Any], str] = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ..., standalone: Literal[True] = ...) -> Callable[[Callable[TResourceFunParams, Any]], Callable[TResourceFunParams, TDltResourceImpl]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(list[Any] | tuple[Any] | Iterator[Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> TDltResourceImpl posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] @@ -751,78 +827,33 @@ posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not define posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment] posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr] 
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/temporal/data_imports/external_data_job.py:0: error: Argument "status" to "update_external_job_status" has incompatible type "str"; expected "Status" [arg-type] posthog/api/test/batch_exports/conftest.py:0: error: Signature of "run" incompatible with supertype "Worker" [override] posthog/api/test/batch_exports/conftest.py:0: note: Superclass: posthog/api/test/batch_exports/conftest.py:0: note: def run(self) -> Coroutine[Any, Any, None] posthog/api/test/batch_exports/conftest.py:0: note: Subclass: posthog/api/test/batch_exports/conftest.py:0: note: def run(self, loop: Any) -> Any posthog/api/test/batch_exports/conftest.py:0: error: Argument "activities" to "ThreadedWorker" has incompatible type "list[function]"; expected "Sequence[Callable[..., Any]]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected 
"int" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] +posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable] +posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] +posthog/api/test/test_capture.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] 
+posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] posthog/test/test_middleware.py:0: error: Incompatible types in assignment (expression has type "_MonkeyPatchedWSGIResponse", variable has type "_MonkeyPatchedResponse") [assignment] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: 
error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible return value type (got "dict[str, Collection[str]]", expected "dict[str, str]") [return-value] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "dict[str, str]") [assignment] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Unpacked dict entry 1 has incompatible type "str"; expected "SupportsKeysAndGetItem[str, str]" [dict-item] @@ -864,32 +895,3 @@ posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExpo posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_pause.py:0: error: "batch_export_delete_schedule" does not return a value (it only ever returns None) [func-returns-value] -posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value] -posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type] -posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload] -posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants: -posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None -posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]] -posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/external_data_job.py:0: error: Argument "status" to "update_external_job_status" has incompatible type "str"; expected "Status" [arg-type] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected 
type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/api/query.py:0: error: Statement is unreachable [unreachable] -posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable] -posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] -posthog/api/test/test_capture.py:0: error: Module has no attribute "utc" [attr-defined] -posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] -posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] -posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] diff --git a/package.json b/package.json index 62b8edf179197..6944c5b335e23 100644 --- a/package.json +++ b/package.json @@ -27,11 +27,11 @@ "test:visual:ci:update": "test-storybook -u --no-index-json --maxWorkers=2", "test:visual:ci:verify": "test-storybook --ci --no-index-json --maxWorkers=2", "start": "concurrently -n ESBUILD,TYPEGEN -c yellow,green \"pnpm start-http\" \"pnpm run typegen:watch\"", - "start-http": "pnpm clean && pnpm copy-scripts && node frontend/build.mjs --dev", + "start-http": "pnpm clean && pnpm copy-scripts && pnpm build:esbuild --dev", "start-docker": "pnpm start-http --host 0.0.0.0", "clean": "rm -rf frontend/dist && mkdir frontend/dist", "build": "pnpm copy-scripts && pnpm build:esbuild", - "build:esbuild": "node frontend/build.mjs", + "build:esbuild": "DEBUG=0 node frontend/build.mjs", "schema:build": "pnpm run schema:build:json && pnpm run schema:build:python", "schema:build:json": "ts-node bin/build-schema-json.mjs && prettier --write frontend/src/queries/schema.json", "schema:build:python": "bash bin/build-schema-python.sh", @@ -77,7 +77,7 @@ "@microlink/react-json-view": "^1.21.3", "@microsoft/fetch-event-source": "^2.0.1", "@monaco-editor/react": "4.6.0", - "@posthog/hogvm": "^1.0.65", + "@posthog/hogvm": "^1.0.66", "@posthog/icons": "0.9.2", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", @@ -140,6 +140,7 @@ "hls.js": 
"^1.5.15", "husky": "^7.0.4", "image-blob-reduce": "^4.1.0", + "jsoncrush": "^1.1.8", "kea": "^3.1.5", "kea-forms": "^3.2.0", "kea-loaders": "^3.0.0", @@ -161,7 +162,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.201.1", + "posthog-js": "1.202.2", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", @@ -265,7 +266,7 @@ "axe-core": "^4.4.3", "babel-loader": "^8.0.6", "babel-plugin-import": "^1.13.0", - "caniuse-lite": "^1.0.30001687", + "caniuse-lite": "^1.0.30001689", "concurrently": "^5.3.0", "css-loader": "^3.4.2", "cypress": "^13.11.0", diff --git a/plugin-server/package.json b/plugin-server/package.json index ddf5d6c11235e..11df155e0757c 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -54,7 +54,7 @@ "@maxmind/geoip2-node": "^3.4.0", "@posthog/clickhouse": "^1.7.0", "@posthog/cyclotron": "file:../rust/cyclotron-node", - "@posthog/hogvm": "^1.0.65", + "@posthog/hogvm": "^1.0.66", "@posthog/plugin-scaffold": "1.4.4", "@sentry/node": "^7.49.0", "@sentry/profiling-node": "^0.3.0", @@ -111,7 +111,7 @@ "@types/ioredis": "^4.26.4", "@types/jest": "^28.1.1", "@types/long": "4.x.x", - "@types/luxon": "^1.27.0", + "@types/luxon": "^3.4.2", "@types/node": "^16.0.0", "@types/node-fetch": "^2.5.10", "@types/node-schedule": "^2.1.0", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index df711c86c450a..c297462845d8e 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -47,8 +47,8 @@ dependencies: specifier: file:../rust/cyclotron-node version: file:../rust/cyclotron-node '@posthog/hogvm': - specifier: ^1.0.65 - version: 1.0.65(luxon@3.4.4) + specifier: ^1.0.66 + version: 1.0.66(luxon@3.4.4) '@posthog/plugin-scaffold': specifier: 1.4.4 version: 1.4.4 @@ -214,8 +214,8 @@ devDependencies: specifier: 4.x.x version: 4.0.2 '@types/luxon': - specifier: ^1.27.0 - version: 1.27.1 + specifier: ^3.4.2 + version: 3.4.2 '@types/node': specifier: ^16.0.0 version: 16.18.25 @@ -2794,8 +2794,8 @@ packages: engines: {node: '>=12'} dev: false - /@posthog/hogvm@1.0.65(luxon@3.4.4): - resolution: {integrity: sha512-ZQ9Eh3scthXo4Am9F3iFaGXd1dIKXaA9aMP01GZLvyD1rYL+ktQNCYtQMrnyelaP5STvfxkzjTxiGgJ32PPyrw==} + /@posthog/hogvm@1.0.66(luxon@3.4.4): + resolution: {integrity: sha512-bczn4tB2rXRJVXihkRHGiNT+6ruYRLRtGRf9xhGlZmdFBL/QSJa5/gQqflp5de+N6UMofkyjdX8yvBwiTt3VHw==} peerDependencies: luxon: ^3.4.4 dependencies: @@ -3797,8 +3797,8 @@ packages: resolution: {integrity: sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw==} dev: false - /@types/luxon@1.27.1: - resolution: {integrity: sha512-cPiXpOvPFDr2edMnOXlz3UBDApwUfR+cpizvxCy0n3vp9bz/qe8BWzHPIEFcy+ogUOyjKuCISgyq77ELZPmkkg==} + /@types/luxon@3.4.2: + resolution: {integrity: sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA==} dev: true /@types/markdown-it@12.2.3: diff --git a/plugin-server/src/utils/db/utils.ts b/plugin-server/src/utils/db/utils.ts index 933921667db82..bf23716bc5c63 100644 --- a/plugin-server/src/utils/db/utils.ts +++ b/plugin-server/src/utils/db/utils.ts @@ -18,7 +18,10 @@ import { status } from '../../utils/status' import { areMapsEqual, castTimestampOrNow } from '../../utils/utils' export function unparsePersonPartial(person: Partial): Partial { - return { ...(person as BasePerson), ...(person.created_at ? { created_at: person.created_at.toISO() } : {}) } + return { + ...(person as BasePerson), + ...(person.created_at ? 
{ created_at: person.created_at.toISO() ?? undefined } : {}), + } } export function escapeQuotes(input: string): string { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index dec116f705a09..f383926dd1807 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -53,8 +53,8 @@ dependencies: specifier: 4.6.0 version: 4.6.0(monaco-editor@0.49.0)(react-dom@18.2.0)(react@18.2.0) '@posthog/hogvm': - specifier: ^1.0.65 - version: 1.0.65(luxon@3.5.0) + specifier: ^1.0.66 + version: 1.0.66(luxon@3.5.0) '@posthog/icons': specifier: 0.9.2 version: 0.9.2(react-dom@18.2.0)(react@18.2.0) @@ -241,6 +241,9 @@ dependencies: image-blob-reduce: specifier: ^4.1.0 version: 4.1.0 + jsoncrush: + specifier: ^1.1.8 + version: 1.1.8 kea: specifier: ^3.1.5 version: 3.1.5(react@18.2.0) @@ -305,8 +308,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.201.1 - version: 1.201.1 + specifier: 1.202.2 + version: 1.202.2 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -609,8 +612,8 @@ devDependencies: specifier: ^1.13.0 version: 1.13.8 caniuse-lite: - specifier: ^1.0.30001687 - version: 1.0.30001687 + specifier: ^1.0.30001689 + version: 1.0.30001689 concurrently: specifier: ^5.3.0 version: 5.3.0 @@ -5446,8 +5449,8 @@ packages: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} dev: false - /@posthog/hogvm@1.0.65(luxon@3.5.0): - resolution: {integrity: sha512-ZQ9Eh3scthXo4Am9F3iFaGXd1dIKXaA9aMP01GZLvyD1rYL+ktQNCYtQMrnyelaP5STvfxkzjTxiGgJ32PPyrw==} + /@posthog/hogvm@1.0.66(luxon@3.5.0): + resolution: {integrity: sha512-bczn4tB2rXRJVXihkRHGiNT+6ruYRLRtGRf9xhGlZmdFBL/QSJa5/gQqflp5de+N6UMofkyjdX8yvBwiTt3VHw==} peerDependencies: luxon: ^3.4.4 dependencies: @@ -9638,7 +9641,7 @@ packages: postcss: ^8.1.0 dependencies: browserslist: 4.22.2 - caniuse-lite: 1.0.30001687 + caniuse-lite: 1.0.30001689 fraction.js: 4.2.0 normalize-range: 0.1.2 picocolors: 1.0.0 @@ -9654,7 +9657,7 @@ packages: postcss: ^8.1.0 dependencies: browserslist: 4.22.2 - caniuse-lite: 1.0.30001687 + caniuse-lite: 1.0.30001689 fraction.js: 4.3.7 normalize-range: 0.1.2 picocolors: 1.0.0 @@ -10018,7 +10021,7 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true dependencies: - caniuse-lite: 1.0.30001687 + caniuse-lite: 1.0.30001689 electron-to-chromium: 1.4.492 node-releases: 2.0.13 update-browserslist-db: 1.0.11(browserslist@4.21.10) @@ -10028,7 +10031,7 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true dependencies: - caniuse-lite: 1.0.30001687 + caniuse-lite: 1.0.30001689 electron-to-chromium: 1.4.609 node-releases: 2.0.14 update-browserslist-db: 1.0.13(browserslist@4.22.2) @@ -10178,13 +10181,13 @@ packages: resolution: {integrity: sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==} dependencies: browserslist: 4.22.2 - caniuse-lite: 1.0.30001687 + caniuse-lite: 1.0.30001689 lodash.memoize: 4.1.2 lodash.uniq: 4.5.0 dev: false - /caniuse-lite@1.0.30001687: - resolution: {integrity: sha512-0S/FDhf4ZiqrTUiQ39dKeUjYRjkv7lOZU1Dgif2rIqrTzX/1wV2hfKu9TOm1IHkdSijfLswxTFzl/cvir+SLSQ==} + /caniuse-lite@1.0.30001689: + resolution: {integrity: sha512-CmeR2VBycfa+5/jOfnp/NpWPGd06nf1XYiefUvhXFfZE4GkRc9jv+eGPS4nT558WS/8lYCzV8SlANCIPvbWP1g==} /case-anything@2.1.10: resolution: {integrity: sha512-JczJwVrCP0jPKh05McyVsuOg6AYosrB9XWZKbQzXeDAm2ClE/PJE/BcrrQrVyGYH7Jg8V/LDupmyL4kFlVsVFQ==} @@ -12318,7 +12321,7 @@ packages: '@mdn/browser-compat-data': 
5.3.16 ast-metadata-inferer: 0.8.0 browserslist: 4.21.10 - caniuse-lite: 1.0.30001687 + caniuse-lite: 1.0.30001689 eslint: 8.57.0 find-up: 5.0.0 lodash.memoize: 4.1.2 @@ -15412,6 +15415,10 @@ packages: resolution: {integrity: sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==} dev: true + /jsoncrush@1.1.8: + resolution: {integrity: sha512-lvIMGzMUA0fjuqwNcxlTNRq2bibPZ9auqT/LyGdlR5hvydJtA/BasSgkx4qclqTKVeTidrJvsS/oVjlTCPQ4Nw==} + dev: false + /jsonfile@6.1.0: resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} dependencies: @@ -16348,8 +16355,8 @@ packages: object-assign: 4.1.1 thenify-all: 1.6.0 - /nanoid@3.3.6: - resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} + /nanoid@3.3.8: + resolution: {integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true @@ -17894,7 +17901,7 @@ packages: resolution: {integrity: sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==} engines: {node: ^10 || ^12 || >=14} dependencies: - nanoid: 3.3.6 + nanoid: 3.3.8 picocolors: 1.0.0 source-map-js: 1.0.2 @@ -17902,8 +17909,8 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.201.1: - resolution: {integrity: sha512-srzbJLIzGp0DirGFhadXE1BDB9JGsfaTKLNX3PWehtVf3TSd4i1nX75hQHJmqrzRkGyMNMArQAuVCQN3aWMn3A==} + /posthog-js@1.202.2: + resolution: {integrity: sha512-9p7dAWuCfoM0WrasubGwtC8i38HU3iMqK3gd0mhyAoTrEVMVozTQq64Toc2VEv8H69NGNn6ikk5t2LclHT9XFA==} dependencies: core-js: 3.39.0 fflate: 0.4.8 @@ -18520,7 +18527,7 @@ packages: react: '>=15' dependencies: react: 18.2.0 - unlayer-types: 1.182.0 + unlayer-types: 1.188.0 dev: false /react-error-boundary@3.1.4(react@18.2.0): @@ -21107,8 +21114,8 @@ packages: resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} engines: {node: '>= 10.0.0'} - /unlayer-types@1.182.0: - resolution: {integrity: sha512-x+YSeA7/Wb/znKDtRws8M3Mu6TyKP3d+MddPVX/iUyDPVEOapoPWk0QxjIaNYtWt6troADZdhzgr2EwsZ61HrA==} + /unlayer-types@1.188.0: + resolution: {integrity: sha512-tnn+FjUZv1qUOoRUYRFxSDz9kHfhy7dLxzMZgnU5+k6GDSBlpa8mA+r4+r0D83M+mUUd/XwuM+gvfRLGzrqZ+g==} dev: false /unpipe@1.0.0: diff --git a/posthog/api/hog_function.py b/posthog/api/hog_function.py index 4549f4f3a8bb5..3f50d710acc96 100644 --- a/posthog/api/hog_function.py +++ b/posthog/api/hog_function.py @@ -292,6 +292,10 @@ def get_serializer_class(self) -> type[BaseSerializer]: return HogFunctionMinimalSerializer if self.action == "list" else HogFunctionSerializer def safely_get_queryset(self, queryset: QuerySet) -> QuerySet: + if not (self.action == "partial_update" and self.request.data.get("deleted") is False): + # We only want to include deleted functions if we are un-deleting them + queryset = queryset.filter(deleted=False) + if self.action == "list": if "type" in self.request.GET: types = [self.request.GET.get("type", "destination")] @@ -299,7 +303,7 @@ def safely_get_queryset(self, queryset: QuerySet) -> QuerySet: types = self.request.GET.get("types", "destination").split(",") else: types = ["destination"] - queryset = queryset.filter(deleted=False, type__in=types) + queryset = queryset.filter(type__in=types) if 
self.request.GET.get("filters"): try: diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index 049ef77b360b5..277d209486401 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -704,7 +704,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1672,7 +1673,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -2080,7 +2082,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -2708,7 +2711,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -3104,7 +3108,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -3402,7 +3407,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -3766,7 +3772,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -4903,7 +4910,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT 
"posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -5401,7 +5409,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -5759,7 +5768,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -6518,7 +6528,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -6815,7 +6826,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -7211,7 +7223,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -7557,7 +7570,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -8185,7 +8199,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -8482,7 +8497,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND 
"posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -8874,7 +8890,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -9212,7 +9229,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index c7820616d0cfd..874a79147df2c 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -434,7 +434,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1004,7 +1005,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1627,7 +1629,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -2132,7 +2135,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index 5b8721a2cd48f..55bc1f7121eb1 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -422,7 +422,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT 
"posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1726,7 +1727,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index f4e08a30e1622..aa3b526b3c9ee 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -465,7 +465,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -873,7 +874,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1339,7 +1341,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1733,7 +1736,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -2156,7 +2160,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index bbf74ee1ecb72..5de31b701d950 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -123,7 +123,8 @@ def _post_decide( if self.use_remote_config: # We test a lot with settings changes so the idea is to refresh the remote config remote_config = RemoteConfig.objects.get(team=self.team) - remote_config.sync() + # Force as sync as lots of the tests are clearing redis purposefully which messes with things + 
remote_config.sync(force=True) if groups is None: groups = {} diff --git a/posthog/api/test/test_hog_function.py b/posthog/api/test/test_hog_function.py index b988b53fdbbfb..414f5f19aa51f 100644 --- a/posthog/api/test/test_hog_function.py +++ b/posthog/api/test/test_hog_function.py @@ -367,6 +367,29 @@ def test_deletes_via_update(self, *args): ] assert filtered_actual_activities == expected_activities + def test_can_undelete_hog_function(self, *args): + response = self.client.post( + f"/api/projects/{self.team.id}/hog_functions/", + data={**EXAMPLE_FULL}, + ) + id = response.json()["id"] + + response = self.client.patch( + f"/api/projects/{self.team.id}/hog_functions/{id}/", + data={"deleted": True}, + ) + assert response.status_code == status.HTTP_200_OK, response.json() + assert ( + self.client.get(f"/api/projects/{self.team.id}/hog_functions/{id}").status_code == status.HTTP_404_NOT_FOUND + ) + + response = self.client.patch( + f"/api/projects/{self.team.id}/hog_functions/{id}/", + data={"deleted": False}, + ) + assert response.status_code == status.HTTP_200_OK, response.json() + assert self.client.get(f"/api/projects/{self.team.id}/hog_functions/{id}").status_code == status.HTTP_200_OK + def test_inputs_required(self, *args): payload = { "name": "Fetch URL", diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index d17bb3b1b69c3..c7e47003a4e5b 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -810,3 +810,27 @@ async def aupdate_records_total_count( data_interval_end=interval_end, ).aupdate(records_total_count=count) return rows_updated + + +async def afetch_batch_export_runs_in_range( + batch_export_id: UUID, + interval_start: dt.datetime, + interval_end: dt.datetime, +) -> list[BatchExportRun]: + """Async fetch all BatchExportRuns for a given batch export within a time interval. + + Arguments: + batch_export_id: The UUID of the BatchExport to fetch runs for. + interval_start: The start of the time interval to fetch runs from. + interval_end: The end of the time interval to fetch runs until. + + Returns: + A list of BatchExportRun objects within the given interval, ordered by data_interval_start. 
+ """ + queryset = BatchExportRun.objects.filter( + batch_export_id=batch_export_id, + data_interval_start__gte=interval_start, + data_interval_end__lte=interval_end, + ).order_by("data_interval_start") + + return [run async for run in queryset] diff --git a/posthog/cdp/site_functions.py b/posthog/cdp/site_functions.py index f6ece18e28792..fa77c20a8f881 100644 --- a/posthog/cdp/site_functions.py +++ b/posthog/cdp/site_functions.py @@ -122,9 +122,13 @@ def get_transpiled_function(hog_function: HogFunction) -> str: callback(true); } - return { - processEvent: (globals) => processEvent(globals, posthog) + const response = {} + + if (processEvent) { + response.processEvent = (globals) => processEvent(globals, posthog) } + + return response } return { init: init };""" diff --git a/posthog/cdp/templates/zapier/template_zapier.py b/posthog/cdp/templates/zapier/template_zapier.py index 6f47b444ea2ac..bdd41ffa2a904 100644 --- a/posthog/cdp/templates/zapier/template_zapier.py +++ b/posthog/cdp/templates/zapier/template_zapier.py @@ -37,7 +37,7 @@ "hook": { "id": "{source.url}", "event": "{event}", - "target": "https://hooks.zapier.com/{inputs.hook}", + "target": "https://hooks.zapier.com", }, "data": { "eventUuid": "{event.uuid}", diff --git a/posthog/cdp/test/test_site_functions.py b/posthog/cdp/test/test_site_functions.py index 0b6c0bc1bb8a6..658b16ba41be0 100644 --- a/posthog/cdp/test/test_site_functions.py +++ b/posthog/cdp/test/test_site_functions.py @@ -98,9 +98,13 @@ def test_get_transpiled_function_basic(self): callback(true); } - return { - processEvent: (globals) => processEvent(globals, posthog) + const response = {} + + if (processEvent) { + response.processEvent = (globals) => processEvent(globals, posthog) } + + return response } return { init: init }; diff --git a/posthog/constants.py b/posthog/constants.py index af2ede6c13e68..7a04658989d6c 100644 --- a/posthog/constants.py +++ b/posthog/constants.py @@ -302,6 +302,7 @@ class FlagRequestType(StrEnum): ENRICHED_DASHBOARD_INSIGHT_IDENTIFIER = "Feature Viewed" DATA_WAREHOUSE_TASK_QUEUE = "data-warehouse-task-queue" +DATA_WAREHOUSE_TASK_QUEUE_V2 = "v2-data-warehouse-task-queue" BATCH_EXPORTS_TASK_QUEUE = "batch-exports-task-queue" SYNC_BATCH_EXPORTS_TASK_QUEUE = "no-sandbox-python-django" GENERAL_PURPOSE_TASK_QUEUE = "general-purpose-task-queue" diff --git a/posthog/hogql/compiler/javascript.py b/posthog/hogql/compiler/javascript.py index a70b9eeb54a1f..8d54431a2c444 100644 --- a/posthog/hogql/compiler/javascript.py +++ b/posthog/hogql/compiler/javascript.py @@ -279,10 +279,10 @@ def visit_call(self, node: ast.Call): return f"(!{expr_code})" if node.name == "and" and len(node.args) > 1: exprs_code = " && ".join([self.visit(arg) for arg in node.args]) - return f"({exprs_code})" + return f"!!({exprs_code})" if node.name == "or" and len(node.args) > 1: exprs_code = " || ".join([self.visit(arg) for arg in node.args]) - return f"({exprs_code})" + return f"!!({exprs_code})" if node.name == "if" and len(node.args) >= 2: condition_code = self.visit(node.args[0]) then_code = self.visit(node.args[1]) diff --git a/posthog/hogql/compiler/javascript_stl.py b/posthog/hogql/compiler/javascript_stl.py index cf7e25e134f5e..1f681071c3828 100644 --- a/posthog/hogql/compiler/javascript_stl.py +++ b/posthog/hogql/compiler/javascript_stl.py @@ -857,6 +857,474 @@ """function __lambda (fn) { return fn }""", [], ], + "__toHogInterval": [ + """function __toHogInterval(value, unit) { + return { __hogInterval__: true, value: value, unit: unit }; +}""", + [], + ], + 
"__isHogInterval": [ + """function __isHogInterval(obj) { return obj && obj.__hogInterval__ === true }""", + [], + ], + "__applyIntervalToDateTime": [ + """function __applyIntervalToDateTime(base, interval) { + // base can be HogDate or HogDateTime + if (!(__isHogDate(base) || __isHogDateTime(base))) { + throw new Error("Expected a HogDate or HogDateTime"); + } + + let zone = __isHogDateTime(base) ? (base.zone || 'UTC') : 'UTC'; + + function toDate(obj) { + if (__isHogDateTime(obj)) { + return new Date(obj.dt * 1000); + } else { + return new Date(Date.UTC(obj.year, obj.month - 1, obj.day)); + } + } + + const dt = toDate(base); + const value = interval.value; + let unit = interval.unit; + + // Expand weeks/years if needed + if (unit === 'week') { + unit = 'day'; + interval.value = value * 7; + } else if (unit === 'year') { + unit = 'month'; + interval.value = value * 12; + } + + let year = dt.getUTCFullYear(); + let month = dt.getUTCMonth() + 1; + let day = dt.getUTCDate(); + let hours = dt.getUTCHours(); + let minutes = dt.getUTCMinutes(); + let seconds = dt.getUTCSeconds(); + let ms = dt.getUTCMilliseconds(); + + if (unit === 'day') { + day += interval.value; + } else if (unit === 'hour') { + hours += interval.value; + } else if (unit === 'minute') { + minutes += interval.value; + } else if (unit === 'second') { + seconds += interval.value; + } else if (unit === 'month') { + month += interval.value; + // Adjust year and month + year += Math.floor((month - 1) / 12); + month = ((month - 1) % 12) + 1; + // If day is invalid for the new month, clamp it + let maxDay = new Date(Date.UTC(year, month, 0)).getUTCDate(); + if (day > maxDay) { day = maxDay; } + } else { + throw new Error("Unsupported interval unit: " + unit); + } + + const newDt = new Date(Date.UTC(year, month - 1, day, hours, minutes, seconds, ms)); + + if (__isHogDate(base)) { + return __toHogDate(newDt.getUTCFullYear(), newDt.getUTCMonth() + 1, newDt.getUTCDate()); + } else { + return __toHogDateTime(newDt.getTime() / 1000, zone); + } +}""", + ["__isHogDate", "__isHogDateTime", "__toHogDate", "__toHogDateTime"], + ], + "JSONExtractArrayRaw": [ + """function JSONExtractArrayRaw(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + return Array.isArray(val) ? val : null; +}""", + ["__getNestedValue"], + ], + "JSONExtractFloat": [ + """function JSONExtractFloat(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + const f = parseFloat(val); + return isNaN(f) ? null : f; +}""", + ["__getNestedValue"], + ], + "JSONExtractInt": [ + """function JSONExtractInt(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + const i = parseInt(val); + return isNaN(i) ? null : i; +}""", + ["__getNestedValue"], + ], + "JSONExtractString": [ + """function JSONExtractString(obj, ...path) { + try { + if (typeof obj === 'string') { obj = JSON.parse(obj); } + } catch (e) { return null; } + const val = __getNestedValue(obj, path, true); + return val != null ? 
String(val) : null; +}""", + ["__getNestedValue"], + ], + "addDays": [ + """function addDays(dateOrDt, days) { + const interval = __toHogInterval(days, 'day'); + return __applyIntervalToDateTime(dateOrDt, interval); +}""", + ["__toHogInterval", "__applyIntervalToDateTime"], + ], + "assumeNotNull": [ + """function assumeNotNull(value) { + if (value === null || value === undefined) { + throw new Error("Value is null in assumeNotNull"); + } + return value; +}""", + [], + ], + "coalesce": [ + """function coalesce(...args) { + for (let a of args) { + if (a !== null && a !== undefined) return a; + } + return null; +}""", + [], + ], + "dateAdd": [ + """function dateAdd(unit, amount, datetime) { + // transform unit if needed (week -> day, year -> month) + if (unit === 'week') { + unit = 'day'; + amount = amount * 7; + } else if (unit === 'year') { + unit = 'month'; + amount = amount * 12; + } + const interval = __toHogInterval(amount, unit); + return __applyIntervalToDateTime(datetime, interval); +}""", + ["__toHogInterval", "__applyIntervalToDateTime"], + ], + "dateDiff": [ + """function dateDiff(unit, startVal, endVal) { + function toDateTime(obj) { + if (__isHogDateTime(obj)) { + return new Date(obj.dt * 1000); + } else if (__isHogDate(obj)) { + return new Date(Date.UTC(obj.year, obj.month - 1, obj.day)); + } else { + return new Date(obj); + } + } + const start = toDateTime(startVal); + const end = toDateTime(endVal); + const diffMs = end - start; + const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24)); + if (unit === 'day') { + return diffDays; + } else if (unit === 'hour') { + return Math.floor(diffMs / (1000 * 60 * 60)); + } else if (unit === 'minute') { + return Math.floor(diffMs / (1000 * 60)); + } else if (unit === 'second') { + return Math.floor(diffMs / 1000); + } else if (unit === 'week') { + return Math.floor(diffDays / 7); + } else if (unit === 'month') { + // Approx months difference + const sy = start.getUTCFullYear(); + const sm = start.getUTCMonth() + 1; + const ey = end.getUTCFullYear(); + const em = end.getUTCMonth() + 1; + return (ey - sy)*12 + (em - sm); + } else if (unit === 'year') { + return end.getUTCFullYear() - start.getUTCFullYear(); + } else { + throw new Error("Unsupported unit for dateDiff: " + unit); + } +}""", + ["__isHogDateTime", "__isHogDate"], + ], + "dateTrunc": [ + """function dateTrunc(unit, val) { + if (!__isHogDateTime(val)) { + throw new Error('Expected a DateTime for dateTrunc'); + } + const zone = val.zone || 'UTC'; + const date = new Date(val.dt * 1000); + let year = date.getUTCFullYear(); + let month = date.getUTCMonth(); + let day = date.getUTCDate(); + let hour = date.getUTCHours(); + let minute = date.getUTCMinutes(); + let second = 0; + let ms = 0; + + if (unit === 'year') { + month = 0; day = 1; hour = 0; minute = 0; second = 0; + } else if (unit === 'month') { + day = 1; hour = 0; minute = 0; second = 0; + } else if (unit === 'day') { + hour = 0; minute = 0; second = 0; + } else if (unit === 'hour') { + minute = 0; second = 0; + } else if (unit === 'minute') { + second = 0; + } else { + throw new Error("Unsupported unit for dateTrunc: " + unit); + } + + const truncated = new Date(Date.UTC(year, month, day, hour, minute, second, ms)); + return { __hogDateTime__: true, dt: truncated.getTime()/1000, zone: zone }; +}""", + ["__isHogDateTime"], + ], + "equals": [ + """function equals(a, b) { return a === b }""", + [], + ], + "extract": [ + """function extract(part, val) { + function toDate(obj) { + if (__isHogDateTime(obj)) { + return new 
Date(obj.dt * 1000); + } else if (__isHogDate(obj)) { + return new Date(Date.UTC(obj.year, obj.month - 1, obj.day)); + } else { + return new Date(obj); + } + } + const date = toDate(val); + if (part === 'year') return date.getUTCFullYear(); + else if (part === 'month') return date.getUTCMonth() + 1; + else if (part === 'day') return date.getUTCDate(); + else if (part === 'hour') return date.getUTCHours(); + else if (part === 'minute') return date.getUTCMinutes(); + else if (part === 'second') return date.getUTCSeconds(); + else throw new Error("Unknown extract part: " + part); +}""", + ["__isHogDateTime", "__isHogDate"], + ], + "floor": [ + "function floor(a) { return Math.floor(a) }", + [], + ], + "greater": [ + "function greater(a, b) { return a > b }", + [], + ], + "greaterOrEquals": [ + "function greaterOrEquals(a, b) { return a >= b }", + [], + ], + "if": [ + "function __x_if(condition, thenVal, elseVal) { return condition ? thenVal : elseVal }", + [], + ], + "in": [ + """function __x_in(val, arr) { + if (Array.isArray(arr) || (arr && arr.__isHogTuple)) { + return arr.includes(val); + } + return false; +}""", + [], + ], + "less": [ + "function less(a, b) { return a < b }", + [], + ], + "lessOrEquals": [ + "function lessOrEquals(a, b) { return a <= b }", + [], + ], + "min2": [ + "function min2(a, b) { return a < b ? a : b }", + [], + ], + "minus": [ + "function minus(a, b) { return a - b }", + [], + ], + "multiIf": [ + """function multiIf(...args) { + // multiIf(cond1,val1, cond2,val2, ..., default) + const defaultVal = args[args.length-1]; + const pairs = args.slice(0, -1); + for (let i = 0; i < pairs.length; i += 2) { + const cond = pairs[i]; + const val = pairs[i + 1]; + if (cond) { + return val; + } + } + return defaultVal; +}""", + [], + ], + "range": [ + """function range(...args) { + if (args.length === 1) { + const end = args[0]; + return Array.from({length:end}, (_,i)=>i); + } else { + const start = args[0]; + const end = args[1]; + return Array.from({length:end - start}, (_,i)=>start+i); + } +}""", + [], + ], + "round": [ + "function round(a) { return Math.round(a) }", + [], + ], + "startsWith": [ + """function startsWith(str, prefix) { + return typeof str === 'string' && typeof prefix === 'string' && str.startsWith(prefix); +}""", + [], + ], + "substring": [ + """function substring(s, start, length) { + if (typeof s !== 'string') return ''; + const startIdx = start - 1; + if (startIdx < 0 || length < 0) return ''; + const endIdx = startIdx + length; + return startIdx < s.length ? 
s.slice(startIdx, endIdx) : ''; +}""", + [], + ], + "toIntervalDay": [ + """function toIntervalDay(val) { return __toHogInterval(val, 'day') }""", + ["__toHogInterval"], + ], + "toIntervalHour": [ + """function toIntervalHour(val) { return __toHogInterval(val, 'hour') }""", + ["__toHogInterval"], + ], + "toIntervalMinute": [ + """function toIntervalMinute(val) { return __toHogInterval(val, 'minute') }""", + ["__toHogInterval"], + ], + "toIntervalMonth": [ + """function toIntervalMonth(val) { return __toHogInterval(val, 'month') }""", + ["__toHogInterval"], + ], + "toMonth": [ + "function toMonth(value) { return extract('month', value) }", + ["extract"], + ], + "toStartOfDay": [ + """function toStartOfDay(value) { + if (!__isHogDateTime(value) && !__isHogDate(value)) { + throw new Error('Expected HogDate or HogDateTime for toStartOfDay'); + } + if (__isHogDate(value)) { + value = __toHogDateTime(Date.UTC(value.year, value.month-1, value.day)/1000, 'UTC'); + } + return dateTrunc('day', value); +}""", + ["__isHogDateTime", "__isHogDate", "__toHogDateTime", "dateTrunc"], + ], + "toStartOfHour": [ + """function toStartOfHour(value) { + if (!__isHogDateTime(value) && !__isHogDate(value)) { + throw new Error('Expected HogDate or HogDateTime for toStartOfHour'); + } + if (__isHogDate(value)) { + value = __toHogDateTime(Date.UTC(value.year, value.month-1, value.day)/1000, 'UTC'); + } + return dateTrunc('hour', value); +}""", + ["__isHogDateTime", "__isHogDate", "__toHogDateTime", "dateTrunc"], + ], + "toStartOfMonth": [ + """function toStartOfMonth(value) { + if (!__isHogDateTime(value) && !__isHogDate(value)) { + throw new Error('Expected HogDate or HogDateTime'); + } + if (__isHogDate(value)) { + value = __toHogDateTime(Date.UTC(value.year, value.month-1, value.day)/1000, 'UTC'); + } + return dateTrunc('month', value); +}""", + ["__isHogDateTime", "__isHogDate", "__toHogDateTime", "dateTrunc"], + ], + "toStartOfWeek": [ + """function toStartOfWeek(value) { + if (!__isHogDateTime(value) && !__isHogDate(value)) { + throw new Error('Expected HogDate or HogDateTime'); + } + let d; + if (__isHogDate(value)) { + d = new Date(Date.UTC(value.year, value.month - 1, value.day)); + } else { + d = new Date(value.dt * 1000); + } + // Monday=1,... Sunday=7 + // getUTCDay(): Sunday=0,... Saturday=6 + // We want ISO weekday: Monday=1,... Sunday=7 + let dayOfWeek = d.getUTCDay(); // Sunday=0,... + let isoWeekday = dayOfWeek === 0 ? 7 : dayOfWeek; + + // subtract isoWeekday-1 days + const start = new Date(d.getTime() - (isoWeekday - 1) * 24 * 3600 * 1000); + + // Zero out hours, minutes, seconds, ms + start.setUTCHours(0, 0, 0, 0); + + return { __hogDateTime__: true, dt: start.getTime() / 1000, zone: (__isHogDateTime(value) ? 
value.zone : 'UTC') }; +}""", + ["__isHogDateTime", "__isHogDate"], + ], + "toYYYYMM": [ + """function toYYYYMM(value) { + const y = extract('year', value); + const m = extract('month', value); + return y*100 + m; +}""", + ["extract"], + ], + "toYear": [ + "function toYear(value) { return extract('year', value) }", + ["extract"], + ], + "today": [ + """function today() { + const now = new Date(); + return __toHogDate(now.getUTCFullYear(), now.getUTCMonth()+1, now.getUTCDate()); +}""", + ["__toHogDate"], + ], } diff --git a/posthog/hogql/compiler/test/test_javascript.py b/posthog/hogql/compiler/test/test_javascript.py index c23707701c4ff..c8ca00fdbc39a 100644 --- a/posthog/hogql/compiler/test/test_javascript.py +++ b/posthog/hogql/compiler/test/test_javascript.py @@ -148,8 +148,8 @@ def test_javascript_create_2(self): self.assertEqual(to_js_expr("1 not in 2"), "(!2.includes(1))") self.assertEqual(to_js_expr("match('test', 'e.*')"), 'match("test", "e.*")') self.assertEqual(to_js_expr("not('test')"), '(!"test")') - self.assertEqual(to_js_expr("or('test', 'test2')"), '("test" || "test2")') - self.assertEqual(to_js_expr("and('test', 'test2')"), '("test" && "test2")') + self.assertEqual(to_js_expr("or('test', 'test2')"), '!!("test" || "test2")') + self.assertEqual(to_js_expr("and('test', 'test2')"), '!!("test" && "test2")') def test_javascript_code_generation(self): js_code = to_js_program(""" diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 69ebf975fe312..dd8ffc8a377a9 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -53,7 +53,6 @@ from posthog.hogql.database.schema.person_distinct_ids import ( PersonDistinctIdsTable, RawPersonDistinctIdsTable, - join_data_warehouse_experiment_table_with_person_distinct_ids_table, ) from posthog.hogql.database.schema.persons import ( PersonsTable, @@ -458,14 +457,6 @@ def define_mappings(warehouse: dict[str, Table], get_table: Callable): ), ) - if "events" in join.joining_table_name and join.configuration.get("experiments_optimized"): - source_table.fields["pdi"] = LazyJoin( - from_field=from_field, - join_table=PersonDistinctIdsTable(), - join_function=join_data_warehouse_experiment_table_with_person_distinct_ids_table, - ) - source_table.fields["person"] = FieldTraverser(chain=["pdi", "person"]) - if join.source_table_name == "persons": person_field = database.events.fields["person"] if isinstance(person_field, ast.FieldTraverser): diff --git a/posthog/hogql/database/s3_table.py b/posthog/hogql/database/s3_table.py index e5136bc2348cf..479969ae93bd1 100644 --- a/posthog/hogql/database/s3_table.py +++ b/posthog/hogql/database/s3_table.py @@ -1,5 +1,5 @@ import re -from typing import Optional +from typing import TYPE_CHECKING, Optional from posthog.clickhouse.client.escape import substitute_params from posthog.hogql.context import HogQLContext @@ -7,6 +7,9 @@ from posthog.hogql.errors import ExposedHogQLError from posthog.hogql.escape_sql import escape_hogql_identifier +if TYPE_CHECKING: + from posthog.warehouse.models import ExternalDataJob + def build_function_call( url: str, @@ -15,7 +18,10 @@ def build_function_call( access_secret: Optional[str] = None, structure: Optional[str] = None, context: Optional[HogQLContext] = None, + pipeline_version: Optional["ExternalDataJob.PipelineVersion"] = None, ) -> str: + from posthog.warehouse.models import ExternalDataJob + raw_params: dict[str, str] = {} def add_param(value: str, is_sensitive: bool = True) -> str: @@ -36,10 +42,18 @@ 
def return_expr(expr: str) -> str: # DeltaS3Wrapper format if format == "DeltaS3Wrapper": + query_folder = "__query_v2" if pipeline_version == ExternalDataJob.PipelineVersion.V2 else "__query" + if url.endswith("/"): - escaped_url = add_param(f"{url[:len(url) - 1]}__query/*.parquet") + if pipeline_version == ExternalDataJob.PipelineVersion.V2: + escaped_url = add_param(f"{url[:-5]}{query_folder}/*.parquet") + else: + escaped_url = add_param(f"{url[:-1]}{query_folder}/*.parquet") else: - escaped_url = add_param(f"{url}__query/*.parquet") + if pipeline_version == ExternalDataJob.PipelineVersion.V2: + escaped_url = add_param(f"{url[:-4]}{query_folder}/*.parquet") + else: + escaped_url = add_param(f"{url}{query_folder}/*.parquet") if structure: escaped_structure = add_param(structure, False) diff --git a/posthog/hogql/database/schema/person_distinct_ids.py b/posthog/hogql/database/schema/person_distinct_ids.py index 2fecbae4f5960..9ebfd9e17fde1 100644 --- a/posthog/hogql/database/schema/person_distinct_ids.py +++ b/posthog/hogql/database/schema/person_distinct_ids.py @@ -67,29 +67,6 @@ def join_with_person_distinct_ids_table( return join_expr -def join_data_warehouse_experiment_table_with_person_distinct_ids_table( - join_to_add: LazyJoinToAdd, - context: HogQLContext, - node: SelectQuery, -): - from posthog.hogql import ast - - if not join_to_add.fields_accessed: - raise ResolutionError("No fields requested from person_distinct_ids") - join_expr = ast.JoinExpr(table=select_from_person_distinct_ids_table(join_to_add.fields_accessed)) - join_expr.join_type = "LEFT JOIN" - join_expr.alias = join_to_add.to_table - join_expr.constraint = ast.JoinConstraint( - expr=ast.CompareOperation( - op=ast.CompareOperationOp.Eq, - left=ast.Field(chain=[join_to_add.from_table, *join_to_add.lazy_join.from_field]), - right=ast.Field(chain=[join_to_add.to_table, "distinct_id"]), - ), - constraint_type="ON", - ) - return join_expr - - class RawPersonDistinctIdsTable(Table): fields: dict[str, FieldOrTable] = { **PERSON_DISTINCT_IDS_FIELDS, diff --git a/posthog/hogql_queries/ai/event_taxonomy_query_runner.py b/posthog/hogql_queries/ai/event_taxonomy_query_runner.py index c00f2a4f771c4..2b72e985b3a59 100644 --- a/posthog/hogql_queries/ai/event_taxonomy_query_runner.py +++ b/posthog/hogql_queries/ai/event_taxonomy_query_runner.py @@ -50,8 +50,9 @@ def calculate(self): ) def to_query(self) -> ast.SelectQuery | ast.SelectSetQuery: - query = parse_select( - """ + if not self.query.properties: + return parse_select( + """ SELECT key, -- Pick five latest distinct sample values. 
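For orientation, a hedged usage sketch of the new `properties` argument on `EventTaxonomyQuery`: the runner and schema names come from this diff, while the import paths and the `team` object are assumptions.

# Sketch only: exercises both branches of EventTaxonomyQueryRunner.to_query().
from posthog.hogql_queries.ai.event_taxonomy_query_runner import EventTaxonomyQueryRunner
from posthog.schema import EventTaxonomyQuery

def sample_event_taxonomy(team):
    # Without `properties`: original behaviour - top event properties, each with up to five sample values.
    event_level = EventTaxonomyQueryRunner(
        team=team, query=EventTaxonomyQuery(event="event1")
    ).calculate()
    # With `properties`: distinct values for just the requested keys.
    per_property = EventTaxonomyQueryRunner(
        team=team, query=EventTaxonomyQuery(event="event1", properties=["$host", "$browser"])
    ).calculate()
    return event_level, per_property
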
@@ -64,10 +65,23 @@ def to_query(self) -> ast.SelectQuery | ast.SelectSetQuery: ORDER BY total_count DESC LIMIT 500 """, - placeholders={"from_query": self._get_subquery(), "filter": self._get_omit_filter()}, - ) + placeholders={"from_query": self._get_subquery(), "filter": self._get_omit_filter()}, + ) - return query + return parse_select( + """ + SELECT + key, + arraySlice(arrayDistinct(groupArray(value)), 1, 5) AS values, + count(DISTINCT value) AS total_count + FROM {from_query} + GROUP BY key + LIMIT 500 + """, + placeholders={ + "from_query": self._get_subquery(), + }, + ) def _get_omit_filter(self): """ @@ -107,21 +121,72 @@ def _get_omit_filter(self): def _get_subquery_filter(self) -> ast.Expr: date_filter = parse_expr("timestamp >= now() - INTERVAL 30 DAY") - filter_expr = ast.And( - exprs=[ - date_filter, - ast.CompareOperation( - left=ast.Field(chain=["event"]), - right=ast.Constant(value=self.query.event), - op=ast.CompareOperationOp.Eq, - ), - ] - ) - return filter_expr + filter_expr: list[ast.Expr] = [ + date_filter, + ast.CompareOperation( + left=ast.Field(chain=["event"]), + right=ast.Constant(value=self.query.event), + op=ast.CompareOperationOp.Eq, + ), + ] + + if self.query.properties: + filter_expr.append( + ast.Or( + exprs=[ + ast.CompareOperation( + left=ast.Field(chain=["properties", prop]), + op=ast.CompareOperationOp.NotEq, + right=ast.Constant(value=""), + ) + for prop in self.query.properties + ] + ) + ) + + return ast.And(exprs=filter_expr) def _get_subquery(self) -> ast.SelectQuery: - query = parse_select( - """ + if self.query.properties: + query = parse_select( + """ + SELECT + key, + value, + count() as count + FROM ( + SELECT + {props} as kv + FROM + events + WHERE {filter} + ) + ARRAY JOIN kv.1 AS key, kv.2 AS value + WHERE value != '' + GROUP BY key, value + ORDER BY count DESC + """, + placeholders={ + "props": ast.Array( + exprs=[ + ast.Tuple( + exprs=[ + ast.Constant(value=prop), + ast.Call( + name="JSONExtractString", + args=[ast.Field(chain=["properties"]), ast.Constant(value=prop)], + ), + ] + ) + for prop in self.query.properties + ] + ), + "filter": self._get_subquery_filter(), + }, + ) + else: + query = parse_select( + """ SELECT JSONExtractKeysAndValues(properties, 'String') as kv FROM @@ -130,7 +195,7 @@ def _get_subquery(self) -> ast.SelectQuery: ORDER BY timestamp desc LIMIT 100 """, - placeholders={"filter": self._get_subquery_filter()}, - ) + placeholders={"filter": self._get_subquery_filter()}, + ) return cast(ast.SelectQuery, query) diff --git a/posthog/hogql_queries/ai/test/test_event_taxonomy_query_runner.py b/posthog/hogql_queries/ai/test/test_event_taxonomy_query_runner.py index 539033b40b21b..147bf5bcfdc14 100644 --- a/posthog/hogql_queries/ai/test/test_event_taxonomy_query_runner.py +++ b/posthog/hogql_queries/ai/test/test_event_taxonomy_query_runner.py @@ -251,3 +251,163 @@ def test_limit(self): response = EventTaxonomyQueryRunner(team=self.team, query=EventTaxonomyQuery(event="event1")).calculate() self.assertEqual(len(response.results), 500) + + def test_property_taxonomy_returns_unique_values_for_specified_property(self): + _create_person( + distinct_ids=["person1"], + properties={"email": "person1@example.com"}, + team=self.team, + ) + _create_person( + distinct_ids=["person2"], + properties={"email": "person1@example.com"}, + team=self.team, + ) + + _create_event( + event="event1", + distinct_id="person1", + properties={"$host": "us.posthog.com"}, + team=self.team, + ) + + for _ in range(10): + _create_event( + event="event1", 
+ distinct_id="person1", + properties={"$host": "posthog.com"}, + team=self.team, + ) + + for _ in range(3): + _create_event( + event="event1", + distinct_id="person2", + properties={"$host": "eu.posthog.com"}, + team=self.team, + ) + + response = EventTaxonomyQueryRunner( + team=self.team, query=EventTaxonomyQuery(event="event1", properties=["$host"]) + ).calculate() + self.assertEqual(len(response.results), 1) + self.assertEqual(response.results[0].property, "$host") + self.assertEqual(response.results[0].sample_values, ["posthog.com", "eu.posthog.com", "us.posthog.com"]) + self.assertEqual(response.results[0].sample_count, 3) + + def test_property_taxonomy_filters_events_by_event_name(self): + _create_person( + distinct_ids=["person1"], + properties={"email": "person1@example.com"}, + team=self.team, + ) + _create_person( + distinct_ids=["person2"], + properties={"email": "person1@example.com"}, + team=self.team, + ) + + _create_event( + event="event1", + distinct_id="person1", + properties={"$host": "us.posthog.com", "$browser": "Chrome"}, + team=self.team, + ) + + for _ in range(10): + _create_event( + event="event2", + distinct_id="person1", + properties={"$host": "posthog.com", "prop": 10}, + team=self.team, + ) + + for _ in range(3): + _create_event( + event="event1", + distinct_id="person2", + team=self.team, + ) + + response = EventTaxonomyQueryRunner( + team=self.team, query=EventTaxonomyQuery(event="event1", properties=["$host"]) + ).calculate() + self.assertEqual(len(response.results), 1) + self.assertEqual(response.results[0].property, "$host") + self.assertEqual(response.results[0].sample_values, ["us.posthog.com"]) + self.assertEqual(response.results[0].sample_count, 1) + + def test_property_taxonomy_handles_multiple_properties_in_query(self): + _create_person( + distinct_ids=["person1"], + properties={"email": "person1@example.com"}, + team=self.team, + ) + _create_person( + distinct_ids=["person2"], + properties={"email": "person1@example.com"}, + team=self.team, + ) + + _create_event( + event="event1", + distinct_id="person1", + properties={"$host": "us.posthog.com", "$browser": "Chrome"}, + team=self.team, + ) + + for _ in range(5): + _create_event( + event="event1", + distinct_id="person1", + properties={"$host": "posthog.com", "prop": 10}, + team=self.team, + ) + + for _ in range(3): + _create_event( + event="event1", + distinct_id="person2", + team=self.team, + ) + + response = EventTaxonomyQueryRunner( + team=self.team, query=EventTaxonomyQuery(event="event1", properties=["$host", "prop"]) + ).calculate() + self.assertEqual(len(response.results), 2) + self.assertEqual(response.results[0].property, "prop") + self.assertEqual(response.results[0].sample_values, ["10"]) + self.assertEqual(response.results[0].sample_count, 1) + self.assertEqual(response.results[1].property, "$host") + self.assertEqual(response.results[1].sample_values, ["posthog.com", "us.posthog.com"]) + self.assertEqual(response.results[1].sample_count, 2) + + def test_property_taxonomy_includes_events_with_partial_property_matches(self): + _create_person( + distinct_ids=["person1"], + properties={"email": "person1@example.com"}, + team=self.team, + ) + _create_event( + event="event1", + distinct_id="person1", + properties={"$host": "us.posthog.com"}, + team=self.team, + ) + _create_event( + event="event1", + distinct_id="person2", + properties={"prop": 10}, + team=self.team, + ) + + response = EventTaxonomyQueryRunner( + team=self.team, query=EventTaxonomyQuery(event="event1", properties=["$host", 
"prop"]) + ).calculate() + self.assertEqual(len(response.results), 2) + self.assertEqual(response.results[0].property, "prop") + self.assertEqual(response.results[0].sample_values, ["10"]) + self.assertEqual(response.results[0].sample_count, 1) + self.assertEqual(response.results[1].property, "$host") + self.assertEqual(response.results[1].sample_values, ["us.posthog.com"]) + self.assertEqual(response.results[1].sample_count, 1) diff --git a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py index 9198c2abaef99..d1f6c4905dd93 100644 --- a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py +++ b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py @@ -812,7 +812,13 @@ def test_query_runner_with_data_warehouse_series_no_end_date_and_nested_id(self) "value": "@posthog.com", "operator": "not_icontains", "type": "person", - } + }, + { + "key": "$host", + "type": "event", + "value": "^(localhost|127\\.0\\.0\\.1)($|:)", + "operator": "not_regex", + }, ] self.team.save() count_query = TrendsQuery( @@ -938,7 +944,7 @@ def test_query_runner_with_data_warehouse_series_no_end_date_and_nested_id(self) ) # Assert the expected join condition in the clickhouse SQL - expected_join_condition = f"and(equals(events.team_id, {query_runner.count_query_runner.team.id}), equals(event, %(hogql_val_11)s), greaterOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_12)s, 6, %(hogql_val_13)s))), lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_14)s, 6, %(hogql_val_15)s))))) AS e__events ON" + expected_join_condition = f"and(equals(events.team_id, {query_runner.count_query_runner.team.id}), equals(event, %(hogql_val_9)s), greaterOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_10)s, 6, %(hogql_val_11)s))), lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_12)s, 6, %(hogql_val_13)s))))) AS e__events ON" self.assertIn( expected_join_condition, str(response.clickhouse), diff --git a/posthog/hogql_queries/insights/trends/trends_query_builder.py b/posthog/hogql_queries/insights/trends/trends_query_builder.py index a0e1b185ce806..fb4e450908f16 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_builder.py +++ b/posthog/hogql_queries/insights/trends/trends_query_builder.py @@ -698,7 +698,16 @@ def _events_filter( and len(self.team.test_account_filters) > 0 ): for property in self.team.test_account_filters: - filters.append(property_to_expr(property, self.team)) + if is_data_warehouse_series: + if property["type"] in ("event", "person"): + if property["type"] == "event": + property["key"] = f"events.properties.{property['key']}" + elif property["type"] == "person": + property["key"] = f"events.person.properties.{property['key']}" + property["type"] = "data_warehouse" + filters.append(property_to_expr(property, self.team)) + else: + filters.append(property_to_expr(property, self.team)) # Properties if self.query.properties is not None and self.query.properties != []: diff --git a/posthog/management/commands/start_temporal_worker.py b/posthog/management/commands/start_temporal_worker.py index 706516f3e5643..77701478f2ded 100644 --- a/posthog/management/commands/start_temporal_worker.py +++ b/posthog/management/commands/start_temporal_worker.py @@ -11,6 +11,7 @@ from posthog.constants import ( BATCH_EXPORTS_TASK_QUEUE, 
DATA_WAREHOUSE_TASK_QUEUE, + DATA_WAREHOUSE_TASK_QUEUE_V2, GENERAL_PURPOSE_TASK_QUEUE, SYNC_BATCH_EXPORTS_TASK_QUEUE, ) @@ -27,12 +28,14 @@ SYNC_BATCH_EXPORTS_TASK_QUEUE: BATCH_EXPORTS_WORKFLOWS, BATCH_EXPORTS_TASK_QUEUE: BATCH_EXPORTS_WORKFLOWS, DATA_WAREHOUSE_TASK_QUEUE: DATA_SYNC_WORKFLOWS + DATA_MODELING_WORKFLOWS, + DATA_WAREHOUSE_TASK_QUEUE_V2: DATA_SYNC_WORKFLOWS + DATA_MODELING_WORKFLOWS, GENERAL_PURPOSE_TASK_QUEUE: PROXY_SERVICE_WORKFLOWS, } ACTIVITIES_DICT = { SYNC_BATCH_EXPORTS_TASK_QUEUE: BATCH_EXPORTS_ACTIVITIES, BATCH_EXPORTS_TASK_QUEUE: BATCH_EXPORTS_ACTIVITIES, DATA_WAREHOUSE_TASK_QUEUE: DATA_SYNC_ACTIVITIES + DATA_MODELING_ACTIVITIES, + DATA_WAREHOUSE_TASK_QUEUE_V2: DATA_SYNC_ACTIVITIES + DATA_MODELING_ACTIVITIES, GENERAL_PURPOSE_TASK_QUEUE: PROXY_SERVICE_ACTIVITIES, } diff --git a/posthog/middleware.py b/posthog/middleware.py index af7b6768d9c54..3bba0124f8ecd 100644 --- a/posthog/middleware.py +++ b/posthog/middleware.py @@ -278,13 +278,10 @@ def can_switch_to_team(self, new_team: Team, request: HttpRequest): # :KLUDGE: This is more inefficient than needed, doing several expensive lookups # However this should be a rare operation! - if not user_access_control.check_access_level_for_object(new_team, "member"): - # Do something to indicate that they don't have access to the team... - return False - - # :KLUDGE: This is more inefficient than needed, doing several expensive lookups - # However this should be a rare operation! - if user_permissions.team(new_team).effective_membership_level is None: + if ( + not user_access_control.check_access_level_for_object(new_team, "member") + and user_permissions.team(new_team).effective_membership_level is None + ): if user.is_staff: # Staff users get a popup with suggested users to log in as, facilating support request.suggested_users_with_access = UserBasicSerializer( # type: ignore diff --git a/posthog/migrations/0533_externaldatajob_pipeline_version.py b/posthog/migrations/0533_externaldatajob_pipeline_version.py new file mode 100644 index 0000000000000..e5db9a99a6d8d --- /dev/null +++ b/posthog/migrations/0533_externaldatajob_pipeline_version.py @@ -0,0 +1,30 @@ +# Generated by Django 4.2.15 on 2024-11-23 14:49 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0532_taxonomy_unique_on_project"), + ] + + operations = [ + migrations.AddField( + model_name="externaldatajob", + name="pipeline_version", + field=models.CharField( + blank=True, + choices=[("v1-dlt-sync", "v1-dlt-sync"), ("v2-non-dlt", "v2-non-dlt")], + max_length=400, + null=True, + ), + ), + migrations.RunSQL( + """ + UPDATE posthog_externaldatajob + SET pipeline_version = 'v1-dlt-sync' + WHERE pipeline_version is null + """, + reverse_sql=migrations.RunSQL.noop, + ), + ] diff --git a/posthog/migrations/max_migration.txt b/posthog/migrations/max_migration.txt index 01fc03d62a8a0..44547aebb012e 100644 --- a/posthog/migrations/max_migration.txt +++ b/posthog/migrations/max_migration.txt @@ -1 +1 @@ -0532_taxonomy_unique_on_project +0533_externaldatajob_pipeline_version diff --git a/posthog/models/remote_config.py b/posthog/models/remote_config.py index 17e6d45a40a8d..aa4ed7c59d379 100644 --- a/posthog/models/remote_config.py +++ b/posthog/models/remote_config.py @@ -1,7 +1,6 @@ import json import os from typing import Any, Optional -from collections.abc import Callable from django.conf import settings from django.db import models from django.http import HttpRequest @@ -69,8 +68,8 @@ def 
indent_js(js_content: str, indent: int = 4) -> str: return joined -def cache_key_for_team_token(team_token: str, suffix: str) -> str: - return f"remote_config/{team_token}/{suffix}" +def cache_key_for_team_token(team_token: str) -> str: + return f"remote_config/{team_token}/config" def sanitize_config_for_public_cdn(config: dict, request: Optional[HttpRequest] = None) -> dict: @@ -267,7 +266,7 @@ def _build_site_apps_js(self): ) site_functions = ( HogFunction.objects.select_related("team") - .filter(team=self.team, enabled=True, type__in=("site_destination", "site_app")) + .filter(team=self.team, enabled=True, deleted=False, type__in=("site_destination", "site_app")) .all() ) @@ -291,10 +290,8 @@ def _build_site_apps_js(self): return site_apps_js + site_functions_js @classmethod - def _get_via_cache( - cls, token: str, suffix: str, fn: Callable[["RemoteConfig"], dict | str], timeout: int = CACHE_TIMEOUT - ) -> Any: - key = cache_key_for_team_token(token, suffix) + def _get_config_via_cache(cls, token: str) -> dict: + key = cache_key_for_team_token(token) data = cache.get(key) if data == "404": @@ -309,25 +306,25 @@ def _get_via_cache( try: remote_config = cls.objects.select_related("team").get(team__api_token=token) except cls.DoesNotExist: - cache.set(key, "404", timeout=timeout) + cache.set(key, "404", timeout=CACHE_TIMEOUT) REMOTE_CONFIG_CACHE_COUNTER.labels(result="miss_but_missing").inc() raise - data = fn(remote_config) - cache.set(key, data, timeout=timeout) + data = remote_config.build_config() + cache.set(key, data, timeout=CACHE_TIMEOUT) return data @classmethod def get_config_via_token(cls, token: str, request: Optional[HttpRequest] = None) -> dict: - config = cls._get_via_cache(token, "config", lambda remote_config: remote_config.build_config()) + config = cls._get_config_via_cache(token) config = sanitize_config_for_public_cdn(config, request=request) return config @classmethod def get_config_js_via_token(cls, token: str, request: Optional[HttpRequest] = None) -> str: - config = cls._get_via_cache(token, "config", lambda remote_config: remote_config.build_config()) + config = cls._get_config_via_cache(token) # Get the site apps JS so we can render it in the JS site_apps_js = config.pop("siteAppsJS", None) # We don't want to include the minimal site apps content as we have the JS now @@ -352,7 +349,7 @@ def get_array_js_via_token(cls, token: str, request: Optional[HttpRequest] = Non return f"""{get_array_js_content()}\n\n{js_content}""" - def sync(self): + def sync(self, force: bool = False): """ When called we sync to any configured CDNs as well as redis for the /decide endpoint """ @@ -361,16 +358,21 @@ def sync(self): try: config = self.build_config() - self.config = config - - cache.set(cache_key_for_team_token(self.team.api_token, "config"), config, timeout=CACHE_TIMEOUT) - self._purge_cdn() + if not force and config == self.config: + CELERY_TASK_REMOTE_CONFIG_SYNC.labels(result="no_changes").inc() + logger.info(f"RemoteConfig for team {self.team_id} is unchanged") + return - # TODO: Invalidate caches - in particular this will be the Cloudflare CDN cache + self.config = config self.synced_at = timezone.now() self.save() + # Update the redis cache key for the config + cache.set(cache_key_for_team_token(self.team.api_token), config, timeout=CACHE_TIMEOUT) + # Invalidate Cloudflare CDN cache + self._purge_cdn() + CELERY_TASK_REMOTE_CONFIG_SYNC.labels(result="success").inc() except Exception as e: capture_exception(e) diff --git a/posthog/models/test/test_hog_function.py 
b/posthog/models/test/test_hog_function.py index 49d0ea9f27564..45f1bb25c680e 100644 --- a/posthog/models/test/test_hog_function.py +++ b/posthog/models/test/test_hog_function.py @@ -275,8 +275,8 @@ def test_hog_functions_reload_on_team_saved(self): {"key": "$pageview", "operator": "regex", "value": "test"}, ] # 1 update team, 1 load hog functions, 1 update hog functions - # 8 unrelated due to RemoteConfig refresh - with self.assertNumQueries(3 + 8): + # 7 unrelated due to RemoteConfig refresh + with self.assertNumQueries(3 + 7): self.team.save() hog_function_1.refresh_from_db() hog_function_2.refresh_from_db() diff --git a/posthog/models/test/test_remote_config.py b/posthog/models/test/test_remote_config.py index e90e3a9edd580..52bfc71821a79 100644 --- a/posthog/models/test/test_remote_config.py +++ b/posthog/models/test/test_remote_config.py @@ -280,7 +280,7 @@ def setUp(self): super().setUp() self.remote_config.refresh_from_db() # Clear the cache so we are properly testing each flow - assert cache.delete(cache_key_for_team_token(self.team.api_token, "config")) + assert cache.delete(cache_key_for_team_token(self.team.api_token)) def _assert_matches_config(self, data): assert data == snapshot( @@ -348,10 +348,15 @@ def test_syncs_if_changes(self): self.remote_config.sync() assert synced_at < self.remote_config.synced_at # type: ignore + def test_does_not_syncs_if_no_changes(self): + synced_at = self.remote_config.synced_at + self.remote_config.sync() + assert synced_at == self.remote_config.synced_at + def test_persists_data_to_redis_on_sync(self): self.remote_config.config["surveys"] = True self.remote_config.sync() - assert cache.get(cache_key_for_team_token(self.team.api_token, "config")) + assert cache.get(cache_key_for_team_token(self.team.api_token)) def test_gets_via_redis_cache(self): with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): @@ -453,6 +458,8 @@ def test_purges_cdn_cache_on_sync(self, mock_post): REMOTE_CONFIG_CDN_PURGE_TOKEN="MY_TOKEN", REMOTE_CONFIG_CDN_PURGE_DOMAINS=["cdn.posthog.com", "https://cdn2.posthog.com"], ): + # Force a change to the config + self.remote_config.config["token"] = "NOT" self.remote_config.sync() mock_post.assert_called_once_with( "https://api.cloudflare.com/client/v4/zones/MY_ZONE_ID/purge_cache", @@ -740,9 +747,13 @@ def test_renders_js_including_site_functions(self): callback(true); } - return { - processEvent: (globals) => processEvent(globals, posthog) + const response = {} + + if (processEvent) { + response.processEvent = (globals) => processEvent(globals, posthog) } + + return response } return { init: init }; @@ -786,9 +797,13 @@ def test_renders_js_including_site_functions(self): callback(true); } - return { - processEvent: (globals) => processEvent(globals, posthog) + const response = {} + + if (processEvent) { + response.processEvent = (globals) => processEvent(globals, posthog) } + + return response } return { init: init }; @@ -798,3 +813,25 @@ def test_renders_js_including_site_functions(self): })();\ """ # noqa: W291, W293 ) + + def test_removes_deleted_site_functions(self): + site_destination = HogFunction.objects.create( + name="Site destination", + type=HogFunctionType.SITE_DESTINATION, + team=self.team, + enabled=True, + filters={ + "events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], + "filter_test_accounts": True, + }, + ) + + js = self.remote_config.get_config_js_via_token(self.team.api_token) + + assert str(site_destination.id) in js + + site_destination.deleted = True + 
site_destination.save() + + js = self.remote_config.get_config_js_via_token(self.team.api_token) + assert str(site_destination.id) not in js diff --git a/posthog/schema.py b/posthog/schema.py index 003c22eac087b..564dcc321fa60 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -5582,6 +5582,7 @@ class EventTaxonomyQuery(BaseModel): modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) + properties: Optional[list[str]] = None response: Optional[EventTaxonomyQueryResponse] = None diff --git a/posthog/tasks/periodic_digest.py b/posthog/tasks/periodic_digest.py index c0b6995b1e91f..0c8161c1be9b2 100644 --- a/posthog/tasks/periodic_digest.py +++ b/posthog/tasks/periodic_digest.py @@ -212,7 +212,7 @@ def send_periodic_digest_report( full_report_dict = { "team_id": team_id, "team_name": team_name, - "template": "periodic_digest_report", + "template_name": "periodic_digest_report", "digest_items_with_data": digest_items_with_data, **periodic_digest_report, **instance_metadata, diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr index 563d97146fefe..059600098cd50 100644 --- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr +++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr @@ -483,7 +483,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1113,7 +1114,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1590,7 +1592,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) diff --git a/posthog/tasks/test/test_periodic_digest.py b/posthog/tasks/test/test_periodic_digest.py index 4d495eac6aa8d..a4fed64b0b8f6 100644 --- a/posthog/tasks/test/test_periodic_digest.py +++ b/posthog/tasks/test/test_periodic_digest.py @@ -113,7 +113,7 @@ def test_periodic_digest_report(self, mock_capture: MagicMock) -> None: expected_properties = { "team_id": self.team.id, "team_name": self.team.name, - "template": "periodic_digest_report", + "template_name": "periodic_digest_report", "users_who_logged_in": [], "users_who_logged_in_count": 0, "users_who_signed_up": [], @@ -227,7 +227,7 @@ def test_periodic_digest_report_custom_dates(self, mock_capture: MagicMock) -> N expected_properties = { "team_id": self.team.id, "team_name": self.team.name, - "template": "periodic_digest_report", + "template_name": "periodic_digest_report", "users_who_logged_in": 
[], "users_who_logged_in_count": 0, "users_who_signed_up": [], diff --git a/posthog/tasks/test/test_remote_config.py b/posthog/tasks/test/test_remote_config.py index 0f6a8500d4a7b..63a9fa9cf3c14 100644 --- a/posthog/tasks/test/test_remote_config.py +++ b/posthog/tasks/test/test_remote_config.py @@ -12,20 +12,42 @@ def setUp(self) -> None: organization=self.organization, name="Test project", ) - self.other_team = team + self.other_team_1 = team + + project, team = Project.objects.create_with_team( + initiating_user=self.user, + organization=self.organization, + name="Test project 2", + ) + self.other_team_2 = team def test_sync_task_syncs_all_remote_configs(self) -> None: # Delete one teams config - RemoteConfig.objects.get(team=self.other_team).delete() + remote_config_deleted = RemoteConfig.objects.get(team=self.team) + remote_config_deleted_synced_at = remote_config_deleted.synced_at + remote_config_deleted.delete() + configs = RemoteConfig.objects.all() - assert len(configs) == 1 + assert len(configs) == 2 - last_synced_at = RemoteConfig.objects.get(team=self.team).synced_at + # Modify the other team's config (indicate something didn't get synced properly) + remote_config_1 = RemoteConfig.objects.get(team=self.other_team_1) + remote_config_1.config["token"] = "MODIFIED" + remote_config_1.save() + remote_config_1_synced_at = remote_config_1.synced_at + + # No modifications to this one + remote_config_2 = RemoteConfig.objects.get(team=self.other_team_2) + remote_config_2_synced_at = remote_config_2.synced_at sync_all_remote_configs() configs = RemoteConfig.objects.all() - assert len(configs) == 2 + assert len(configs) == 3 - assert RemoteConfig.objects.get(team=self.other_team).synced_at > last_synced_at # type: ignore - assert RemoteConfig.objects.get(team=self.team).synced_at > last_synced_at # type: ignore + # This one is deleted so should be synced + assert RemoteConfig.objects.get(team=self.team).synced_at > remote_config_deleted_synced_at # type: ignore + # This one is modified so should be synced + assert RemoteConfig.objects.get(team=self.other_team_1).synced_at > remote_config_1_synced_at # type: ignore + # This one is unchanged so should not be synced + assert RemoteConfig.objects.get(team=self.other_team_2).synced_at == remote_config_2_synced_at diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index a0c14a08a82d0..6215af18f6821 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -43,6 +43,7 @@ _get_team_report, _get_teams_for_usage_reports, capture_event, + capture_report, get_instance_metadata, send_all_org_usage_reports, ) @@ -1313,11 +1314,23 @@ def test_external_data_rows_synced_response( for i in range(5): start_time = (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ") - ExternalDataJob.objects.create(team_id=3, created_at=start_time, rows_synced=10, pipeline=source) + ExternalDataJob.objects.create( + team_id=3, + created_at=start_time, + rows_synced=10, + pipeline=source, + pipeline_version=ExternalDataJob.PipelineVersion.V1, + ) for i in range(5): start_time = (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ") - ExternalDataJob.objects.create(team_id=4, created_at=start_time, rows_synced=10, pipeline=source) + ExternalDataJob.objects.create( + team_id=4, + created_at=start_time, + rows_synced=10, + pipeline=source, + pipeline_version=ExternalDataJob.PipelineVersion.V1, + ) period = get_previous_day(at=now() + relativedelta(days=1)) period_start, 
period_end = period @@ -1342,6 +1355,64 @@ def test_external_data_rows_synced_response( assert org_2_report["organization_name"] == "Org 2" assert org_2_report["rows_synced_in_period"] == 0 + @patch("posthog.tasks.usage_report.Client") + @patch("posthog.tasks.usage_report.send_report_to_billing_service") + def test_external_data_rows_synced_response_with_v2_jobs( + self, billing_task_mock: MagicMock, posthog_capture_mock: MagicMock + ) -> None: + self._setup_teams() + + source = ExternalDataSource.objects.create( + team=self.analytics_team, + source_id="source_id", + connection_id="connection_id", + status=ExternalDataSource.Status.COMPLETED, + source_type=ExternalDataSource.Type.STRIPE, + ) + + for i in range(5): + start_time = (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ") + ExternalDataJob.objects.create( + team_id=3, + created_at=start_time, + rows_synced=10, + pipeline=source, + pipeline_version=ExternalDataJob.PipelineVersion.V1, + ) + + for i in range(5): + start_time = (now() - relativedelta(hours=i)).strftime("%Y-%m-%dT%H:%M:%SZ") + ExternalDataJob.objects.create( + team_id=4, + created_at=start_time, + rows_synced=10, + pipeline=source, + pipeline_version=ExternalDataJob.PipelineVersion.V2, + ) + + period = get_previous_day(at=now() + relativedelta(days=1)) + period_start, period_end = period + all_reports = _get_all_org_reports(period_start, period_end) + + assert len(all_reports) == 3 + + org_1_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period)) + ) + + org_2_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period)) + ) + + assert org_1_report["organization_name"] == "Org 1" + assert org_1_report["rows_synced_in_period"] == 50 + + assert org_1_report["teams"]["3"]["rows_synced_in_period"] == 50 + assert org_1_report["teams"]["4"]["rows_synced_in_period"] == 0 # V2 pipelines + + assert org_2_report["organization_name"] == "Org 2" + assert org_2_report["rows_synced_in_period"] == 0 + @freeze_time("2022-01-10T00:01:00Z") class TestHogFunctionUsageReports(ClickhouseDestroyTablesMixin, TestCase, ClickhouseTestMixin): @@ -1609,6 +1680,52 @@ def test_capture_event_called_with_string_timestamp(self, mock_client: MagicMock ) assert mock_client.capture.call_args[1]["timestamp"] == datetime(2021, 10, 10, 23, 1, tzinfo=tzutc()) + @patch("posthog.tasks.usage_report.Client") + def test_capture_report_transforms_team_id_to_org_id(self, mock_client: MagicMock) -> None: + mock_posthog = MagicMock() + mock_client.return_value = mock_posthog + + # Create a second team in the same organization to verify the mapping + team2 = Team.objects.create(organization=self.organization) + + # Create a report with team-level data + report = { + "organization_name": "Test Org", + "date": "2024-01-01", + } + + with self.is_cloud(True): + # Call capture_report + capture_report(capture_event_name="test event", team_id=team2.id, full_report_dict=report) + + # Verify the capture call was made with the organization ID + mock_posthog.capture.assert_called_once_with( + self.user.distinct_id, + "test event", + {**report, "scope": "user"}, + groups={"instance": "http://localhost:8000", "organization": str(self.organization.id)}, + timestamp=None, + ) + + # now check with send_for_all_members=True + mock_posthog.reset_mock() + + with self.is_cloud(True): + capture_report( + capture_event_name="test event", + team_id=self.team.id, + 
full_report_dict=report, + send_for_all_members=True, + ) + + mock_posthog.capture.assert_called_once_with( + self.user.distinct_id, + "test event", + {**report, "scope": "user"}, + groups={"instance": "http://localhost:8000", "organization": str(self.organization.id)}, + timestamp=None, + ) + class SendNoUsageTest(LicensedTestMixin, ClickhouseDestroyTablesMixin, APIBaseTest): @freeze_time("2021-10-10T23:01:00Z") diff --git a/posthog/tasks/test/test_warehouse.py b/posthog/tasks/test/test_warehouse.py index c6150ef565336..ec7bce8c7832f 100644 --- a/posthog/tasks/test/test_warehouse.py +++ b/posthog/tasks/test/test_warehouse.py @@ -36,7 +36,12 @@ def test_capture_workspace_rows_synced_by_team_month_cutoff(self, mock_get_ph_cl with freeze_time("2023-11-07T16:50:49Z"): job = ExternalDataJob.objects.create( - pipeline=source, workflow_id="fake_workflow_id", team=self.team, status="Running", rows_synced=100000 + pipeline=source, + workflow_id="fake_workflow_id", + team=self.team, + status="Running", + rows_synced=100000, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) capture_workspace_rows_synced_by_team(self.team.pk) @@ -86,12 +91,22 @@ def test_capture_workspace_rows_synced_by_team_month_cutoff_field_set(self, mock with freeze_time("2023-10-30T18:32:41Z"): ExternalDataJob.objects.create( - pipeline=source, workflow_id="fake_workflow_id", team=self.team, status="Completed", rows_synced=97747 + pipeline=source, + workflow_id="fake_workflow_id", + team=self.team, + status="Completed", + rows_synced=97747, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) with freeze_time("2023-11-07T16:50:49Z"): job2 = ExternalDataJob.objects.create( - pipeline=source, workflow_id="fake_workflow_id", team=self.team, status="Completed", rows_synced=93353 + pipeline=source, + workflow_id="fake_workflow_id", + team=self.team, + status="Completed", + rows_synced=93353, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) capture_workspace_rows_synced_by_team(self.team.pk) diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index 41ca2d8c86f63..6e99b069d754d 100644 --- a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -375,10 +375,11 @@ def capture_event( elif team_id: team = Team.objects.get(id=team_id) distinct_ids = [user.distinct_id for user in team.all_users_with_access()] + organization_id = str(team.organization_id) else: if not organization_id: team = Team.objects.get(id=team_id) - organization_id = team.organization_id + organization_id = str(team.organization_id) org_owner = get_org_owner_or_first_user(organization_id) if organization_id else None distinct_ids.append( org_owner.distinct_id if org_owner and org_owner.distinct_id else f"org-{organization_id}" @@ -686,6 +687,7 @@ def get_teams_with_survey_responses_count_in_period( def get_teams_with_rows_synced_in_period(begin: datetime, end: datetime) -> list: return list( ExternalDataJob.objects.filter(created_at__gte=begin, created_at__lte=end) + .exclude(pipeline_version=ExternalDataJob.PipelineVersion.V2) .values("team_id") .annotate(total=Sum("rows_synced")) ) diff --git a/posthog/temporal/batch_exports/__init__.py b/posthog/temporal/batch_exports/__init__.py index a3616f1107c5b..169a19f4232d1 100644 --- a/posthog/temporal/batch_exports/__init__.py +++ b/posthog/temporal/batch_exports/__init__.py @@ -19,6 +19,7 @@ ) from posthog.temporal.batch_exports.monitoring import ( BatchExportMonitoringWorkflow, + check_for_missing_batch_export_runs, get_batch_export, get_event_counts, 
update_batch_export_runs, @@ -86,4 +87,5 @@ get_batch_export, get_event_counts, update_batch_export_runs, + check_for_missing_batch_export_runs, ] diff --git a/posthog/temporal/batch_exports/monitoring.py b/posthog/temporal/batch_exports/monitoring.py index 97eaf6c2430d9..c41d1076ed1e0 100644 --- a/posthog/temporal/batch_exports/monitoring.py +++ b/posthog/temporal/batch_exports/monitoring.py @@ -7,7 +7,10 @@ from temporalio.common import RetryPolicy from posthog.batch_exports.models import BatchExport -from posthog.batch_exports.service import aupdate_records_total_count +from posthog.batch_exports.service import ( + afetch_batch_export_runs_in_range, + aupdate_records_total_count, +) from posthog.batch_exports.sql import EVENT_COUNT_BY_INTERVAL from posthog.temporal.batch_exports.base import PostHogWorkflow from posthog.temporal.common.clickhouse import get_client @@ -161,14 +164,82 @@ async def update_batch_export_runs(inputs: UpdateBatchExportRunsInputs) -> int: return total_rows_updated +@dataclass +class CheckForMissingBatchExportRunsInputs: + """Inputs for checking missing batch export runs""" + + batch_export_id: UUID + overall_interval_start: str + overall_interval_end: str + interval: str + + +def _log_warning_for_missing_batch_export_runs( + batch_export_id: UUID, missing_runs: list[tuple[dt.datetime, dt.datetime]] +): + message = ( + f"Batch Exports Monitoring: Found {len(missing_runs)} missing run(s) for batch export {batch_export_id}:\n" + ) + for start, end in missing_runs: + message += f"- Run {start.strftime('%Y-%m-%d %H:%M:%S')} to {end.strftime('%Y-%m-%d %H:%M:%S')}\n" + + activity.logger.warning(message) + + +@activity.defn +async def check_for_missing_batch_export_runs(inputs: CheckForMissingBatchExportRunsInputs) -> int: + """Check for missing batch export runs and log a warning if any are found. + (We can then alert based on these log entries) + + Returns: + The number of missing batch export runs found. + """ + async with Heartbeater(): + interval_start = dt.datetime.strptime(inputs.overall_interval_start, "%Y-%m-%d %H:%M:%S").replace(tzinfo=dt.UTC) + interval_end = dt.datetime.strptime(inputs.overall_interval_end, "%Y-%m-%d %H:%M:%S").replace(tzinfo=dt.UTC) + # Get all runs in the interval + runs = await afetch_batch_export_runs_in_range( + batch_export_id=inputs.batch_export_id, + interval_start=interval_start, + interval_end=interval_end, + ) + + # for simplicity, we assume that the interval is 5 minutes, as this is the only interval supported for monitoring at this time + if inputs.interval != "every 5 minutes": + raise NoValidBatchExportsFoundError( + "Only intervals of 'every 5 minutes' are supported for monitoring at this time." + ) + expected_run_intervals: list[tuple[dt.datetime, dt.datetime]] = [] + current_run_start_interval = interval_start + while current_run_start_interval < interval_end: + expected_run_intervals.append( + (current_run_start_interval, current_run_start_interval + dt.timedelta(minutes=5)) + ) + current_run_start_interval += dt.timedelta(minutes=5) + + missing_runs: list[tuple[dt.datetime, dt.datetime]] = [] + for start, end in expected_run_intervals: + if start not in [run.data_interval_start for run in runs]: + missing_runs.append((start, end)) + + if missing_runs: + _log_warning_for_missing_batch_export_runs(inputs.batch_export_id, missing_runs) + + return len(missing_runs) + + @workflow.defn(name="batch-export-monitoring") class BatchExportMonitoringWorkflow(PostHogWorkflow): """Workflow to monitor batch exports. 
We have had some issues with batch exports in the past, where some events have been missing. The purpose of this workflow is to monitor the status of - batch exports for a given customer by reconciling the number of exported - events with the number of events in ClickHouse for a given interval. + a given batch export by: + 1. Checking for missing batch export runs (we've had an incident in the past + where Temporal has not scheduled a workflow for a particular time interval + for some reason). + 2. Reconciling the number of exported events with the number of events in + ClickHouse for a given interval. """ @staticmethod @@ -179,8 +250,7 @@ def parse_inputs(inputs: list[str]) -> BatchExportMonitoringInputs: @workflow.run async def run(self, inputs: BatchExportMonitoringInputs): - """Workflow implementation to monitor batch exports for a given team.""" - # TODO - check if this is the right way to do logging since there seems to be a few different ways + """Workflow implementation to monitor a given batch export.""" workflow.logger.info( "Starting batch exports monitoring workflow for batch export id %s", inputs.batch_export_id ) @@ -218,6 +288,19 @@ async def run(self, inputs: BatchExportMonitoringInputs): heartbeat_timeout=dt.timedelta(minutes=1), ) + await workflow.execute_activity( + check_for_missing_batch_export_runs, + CheckForMissingBatchExportRunsInputs( + batch_export_id=batch_export_details.id, + overall_interval_start=interval_start_str, + overall_interval_end=interval_end_str, + interval=batch_export_details.interval, + ), + start_to_close_timeout=dt.timedelta(minutes=10), + retry_policy=RetryPolicy(maximum_attempts=3, initial_interval=dt.timedelta(seconds=20)), + heartbeat_timeout=dt.timedelta(minutes=1), + ) + return await workflow.execute_activity( update_batch_export_runs, UpdateBatchExportRunsInputs(batch_export_id=batch_export_details.id, results=total_events.results), diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index d6e95ee28fc22..927d6436d634f 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -699,7 +699,7 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted: # Until we figure it out, we set all fields to nullable. There are some fields we know # are not nullable, but I'm opting for the more flexible option until we out why schemas differ # between batches. 
- [field.with_nullable(True) for field in record_batch_schema if field.name != "_inserted_at"] + [field.with_nullable(True) for field in record_batch_schema] ) async with s3_upload as s3_upload: @@ -715,6 +715,7 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted: writer_format=WriterFormat.from_str(inputs.file_format, "S3"), max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES, s3_upload=s3_upload, + include_inserted_at=True, writer_file_kwargs={"compression": inputs.compression}, ) diff --git a/posthog/temporal/batch_exports/spmc.py b/posthog/temporal/batch_exports/spmc.py index 253935656b1e7..53171543db480 100644 --- a/posthog/temporal/batch_exports/spmc.py +++ b/posthog/temporal/batch_exports/spmc.py @@ -12,7 +12,10 @@ from django.conf import settings from posthog.temporal.batch_exports.heartbeat import BatchExportRangeHeartbeatDetails -from posthog.temporal.batch_exports.metrics import get_bytes_exported_metric, get_rows_exported_metric +from posthog.temporal.batch_exports.metrics import ( + get_bytes_exported_metric, + get_rows_exported_metric, +) from posthog.temporal.batch_exports.sql import ( SELECT_FROM_EVENTS_VIEW, SELECT_FROM_EVENTS_VIEW_BACKFILL, @@ -229,6 +232,7 @@ async def start( schema: pa.Schema, json_columns: collections.abc.Sequence[str], multiple_files: bool = False, + include_inserted_at: bool = False, **kwargs, ) -> int: """Start consuming record batches from queue. @@ -261,7 +265,7 @@ async def start( record_batches_count += 1 record_batch = cast_record_batch_json_columns(record_batch, json_columns=json_columns) - await writer.write_record_batch(record_batch, flush=False) + await writer.write_record_batch(record_batch, flush=False, include_inserted_at=include_inserted_at) if writer.should_flush(): records_count += writer.records_since_last_flush @@ -333,6 +337,7 @@ async def run_consumer_loop( json_columns: collections.abc.Sequence[str] = ("properties", "person_properties", "set", "set_once"), writer_file_kwargs: collections.abc.Mapping[str, typing.Any] | None = None, multiple_files: bool = False, + include_inserted_at: bool = False, **kwargs, ) -> int: """Run record batch consumers in a loop. @@ -341,6 +346,10 @@ async def run_consumer_loop( a loop. Once there is nothing left to consumer from the `RecordBatchQueue`, no more consumers will be started, and any pending consumers are awaited. + NOTE: We're starting to include the `_inserted_at` column in the record + batches, one destination at a time, so once we've added it to all + destinations, we can remove the `include_inserted_at` argument. + Returns: Number of records exported. Not the number of record batches, but the number of records in all record batches. 
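As context for the `include_inserted_at` flag threaded through `run_consumer_loop`, `Consumer.start`, and `writer.write_record_batch` above: the flag controls whether the internal `_inserted_at` column is kept when a record batch is handed to the destination writer, and S3 is the first destination to opt in. A minimal sketch of the intended column-selection behaviour; the helper name and sample columns are assumptions for illustration, not code from this patch:

import pyarrow as pa

def _select_writable_columns(record_batch: pa.RecordBatch, include_inserted_at: bool = False) -> pa.RecordBatch:
    # Keep the internal _inserted_at column only when the destination opts in;
    # otherwise drop it before writing, matching the previous behaviour.
    column_names = record_batch.column_names
    if not include_inserted_at and "_inserted_at" in column_names:
        column_names = [name for name in column_names if name != "_inserted_at"]
    return record_batch.select(column_names)

# Example: with include_inserted_at=True (as S3 now passes), _inserted_at is exported;
# destinations that have not been migrated yet keep the old behaviour.
batch = pa.RecordBatch.from_pydict({"uuid": ["event-1"], "_inserted_at": [1700000000]})
assert _select_writable_columns(batch).column_names == ["uuid"]
assert _select_writable_columns(batch, include_inserted_at=True).column_names == ["uuid", "_inserted_at"]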
@@ -380,6 +389,7 @@ def consumer_done_callback(task: asyncio.Task): schema=schema, json_columns=json_columns, multiple_files=multiple_files, + include_inserted_at=include_inserted_at, **writer_file_kwargs or {}, ), name=f"record_batch_consumer_{consumer_number}", diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py index afe91d42412a3..9b23b6f5c9692 100644 --- a/posthog/temporal/batch_exports/temporary_file.py +++ b/posthog/temporal/batch_exports/temporary_file.py @@ -408,7 +408,9 @@ def track_bytes_written(self, batch_export_file: BatchExportTemporaryFile) -> No self.bytes_total = batch_export_file.bytes_total self.bytes_since_last_flush = batch_export_file.bytes_since_last_reset - async def write_record_batch(self, record_batch: pa.RecordBatch, flush: bool = True) -> None: + async def write_record_batch( + self, record_batch: pa.RecordBatch, flush: bool = True, include_inserted_at: bool = False + ) -> None: """Issue a record batch write tracking progress and flushing if required.""" record_batch = record_batch.sort_by("_inserted_at") @@ -429,7 +431,8 @@ async def write_record_batch(self, record_batch: pa.RecordBatch, flush: bool = T self.end_at_since_last_flush = raw_end_at column_names = record_batch.column_names - column_names.pop(column_names.index("_inserted_at")) + if not include_inserted_at: + column_names.pop(column_names.index("_inserted_at")) await asyncio.to_thread(self._write_record_batch, record_batch.select(column_names)) diff --git a/posthog/temporal/data_imports/__init__.py b/posthog/temporal/data_imports/__init__.py index c59f20b05d8cf..aab0a74ac554c 100644 --- a/posthog/temporal/data_imports/__init__.py +++ b/posthog/temporal/data_imports/__init__.py @@ -6,6 +6,7 @@ update_external_data_job_model, check_billing_limits_activity, sync_new_schemas_activity, + trigger_pipeline_v2, ) WORKFLOWS = [ExternalDataJobWorkflow] @@ -17,4 +18,5 @@ create_source_templates, check_billing_limits_activity, sync_new_schemas_activity, + trigger_pipeline_v2, ] diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index 916da24a1dbb2..62a1e1bc834ed 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -1,15 +1,22 @@ +import asyncio import dataclasses import datetime as dt import json import re +from django.conf import settings from django.db import close_old_connections import posthoganalytics from temporalio import activity, exceptions, workflow from temporalio.common import RetryPolicy +from posthog.constants import DATA_WAREHOUSE_TASK_QUEUE_V2 + # TODO: remove dependency +from posthog.settings.base_variables import TEST from posthog.temporal.batch_exports.base import PostHogWorkflow +from posthog.temporal.common.client import sync_connect +from posthog.temporal.data_imports.util import is_posthog_team from posthog.temporal.data_imports.workflow_activities.check_billing_limits import ( CheckBillingLimitsActivityInputs, check_billing_limits_activity, @@ -131,6 +138,30 @@ def update_external_data_job_model(inputs: UpdateExternalDataJobStatusInputs) -> ) +@activity.defn +def trigger_pipeline_v2(inputs: ExternalDataWorkflowInputs): + logger = bind_temporal_worker_logger_sync(team_id=inputs.team_id) + logger.debug("Triggering V2 pipeline") + + temporal = sync_connect() + + asyncio.run( + temporal.start_workflow( + workflow="external-data-job", + arg=dataclasses.asdict(inputs), + 
id=f"{inputs.external_data_schema_id}-V2", + task_queue=str(DATA_WAREHOUSE_TASK_QUEUE_V2), + retry_policy=RetryPolicy( + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=1, + non_retryable_error_types=["NondeterminismError"], + ), + ) + ) + + logger.debug("V2 pipeline triggered") + + @dataclasses.dataclass class CreateSourceTemplateInputs: team_id: int @@ -154,6 +185,18 @@ def parse_inputs(inputs: list[str]) -> ExternalDataWorkflowInputs: async def run(self, inputs: ExternalDataWorkflowInputs): assert inputs.external_data_schema_id is not None + if ( + settings.TEMPORAL_TASK_QUEUE != DATA_WAREHOUSE_TASK_QUEUE_V2 + and not TEST + and is_posthog_team(inputs.team_id) + ): + await workflow.execute_activity( + trigger_pipeline_v2, + inputs, + start_to_close_timeout=dt.timedelta(minutes=1), + retry_policy=RetryPolicy(maximum_attempts=1), + ) + update_inputs = UpdateExternalDataJobStatusInputs( job_id=None, status=ExternalDataJob.Status.COMPLETED, diff --git a/posthog/temporal/data_imports/pipelines/chargebee/__init__.py b/posthog/temporal/data_imports/pipelines/chargebee/__init__.py index 245afb6e5d880..7a093e65f7364 100644 --- a/posthog/temporal/data_imports/pipelines/chargebee/__init__.py +++ b/posthog/temporal/data_imports/pipelines/chargebee/__init__.py @@ -218,7 +218,13 @@ def update_request(self, request: Request) -> None: @dlt.source(max_table_nesting=0) def chargebee_source( - api_key: str, site_name: str, endpoint: str, team_id: int, job_id: str, is_incremental: bool = False + api_key: str, + site_name: str, + endpoint: str, + team_id: int, + job_id: str, + db_incremental_field_last_value: Optional[Any], + is_incremental: bool = False, ): config: RESTAPIConfig = { "client": { @@ -242,7 +248,7 @@ def chargebee_source( "resources": [get_resource(endpoint, is_incremental)], } - yield from rest_api_resources(config, team_id, job_id) + yield from rest_api_resources(config, team_id, job_id, db_incremental_field_last_value) def validate_credentials(api_key: str, site_name: str) -> bool: diff --git a/posthog/temporal/data_imports/pipelines/pipeline/delta_table_helper.py b/posthog/temporal/data_imports/pipelines/pipeline/delta_table_helper.py new file mode 100644 index 0000000000000..64cbbda922863 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/pipeline/delta_table_helper.py @@ -0,0 +1,116 @@ +from collections.abc import Sequence +from conditional_cache import lru_cache +from typing import Any +import pyarrow as pa +from dlt.common.libs.deltalake import ensure_delta_compatible_arrow_schema +from dlt.common.normalizers.naming.snake_case import NamingConvention +import deltalake as deltalake +from django.conf import settings +from posthog.settings.base_variables import TEST +from posthog.warehouse.models import ExternalDataJob + + +class DeltaTableHelper: + _resource_name: str + _job: ExternalDataJob + + def __init__(self, resource_name: str, job: ExternalDataJob) -> None: + self._resource_name = resource_name + self._job = job + + def _get_credentials(self): + if TEST: + return { + "aws_access_key_id": settings.AIRBYTE_BUCKET_KEY, + "aws_secret_access_key": settings.AIRBYTE_BUCKET_SECRET, + "endpoint_url": settings.OBJECT_STORAGE_ENDPOINT, + "region_name": settings.AIRBYTE_BUCKET_REGION, + "AWS_DEFAULT_REGION": settings.AIRBYTE_BUCKET_REGION, + "AWS_ALLOW_HTTP": "true", + "AWS_S3_ALLOW_UNSAFE_RENAME": "true", + } + + return { + "aws_access_key_id": settings.AIRBYTE_BUCKET_KEY, + "aws_secret_access_key": settings.AIRBYTE_BUCKET_SECRET, + "region_name": 
settings.AIRBYTE_BUCKET_REGION, + "AWS_DEFAULT_REGION": settings.AIRBYTE_BUCKET_REGION, + "AWS_S3_ALLOW_UNSAFE_RENAME": "true", + } + + def _get_delta_table_uri(self) -> str: + normalized_resource_name = NamingConvention().normalize_identifier(self._resource_name) + # Appended __v2 on to the end of the url so that data of the V2 pipeline isn't the same as V1 + return f"{settings.BUCKET_URL}/{self._job.folder_path()}/{normalized_resource_name}__v2" + + def _evolve_delta_schema(self, schema: pa.Schema) -> deltalake.DeltaTable: + delta_table = self.get_delta_table() + if delta_table is None: + raise Exception("Deltalake table not found") + + delta_table_schema = delta_table.schema().to_pyarrow() + + new_fields = [ + deltalake.Field.from_pyarrow(field) + for field in ensure_delta_compatible_arrow_schema(schema) + if field.name not in delta_table_schema.names + ] + if new_fields: + delta_table.alter.add_columns(new_fields) + + return delta_table + + @lru_cache(maxsize=1, condition=lambda result: result is not None) + def get_delta_table(self) -> deltalake.DeltaTable | None: + delta_uri = self._get_delta_table_uri() + storage_options = self._get_credentials() + + if deltalake.DeltaTable.is_deltatable(table_uri=delta_uri, storage_options=storage_options): + return deltalake.DeltaTable(table_uri=delta_uri, storage_options=storage_options) + + return None + + def write_to_deltalake( + self, data: pa.Table, is_incremental: bool, chunk_index: int, primary_keys: Sequence[Any] | None + ) -> deltalake.DeltaTable: + delta_table = self.get_delta_table() + + if delta_table: + delta_table = self._evolve_delta_schema(data.schema) + + if is_incremental and delta_table is not None: + if not primary_keys or len(primary_keys) == 0: + raise Exception("Primary key required for incremental syncs") + + delta_table.merge( + source=data, + source_alias="source", + target_alias="target", + predicate=" AND ".join([f"source.{c} = target.{c}" for c in primary_keys]), + ).when_matched_update_all().when_not_matched_insert_all().execute() + else: + mode = "append" + schema_mode = "merge" + if chunk_index == 0 or delta_table is None: + mode = "overwrite" + schema_mode = "overwrite" + + if delta_table is None: + storage_options = self._get_credentials() + delta_table = deltalake.DeltaTable.create( + table_uri=self._get_delta_table_uri(), schema=data.schema, storage_options=storage_options + ) + + deltalake.write_deltalake( + table_or_uri=delta_table, + data=data, + partition_by=None, + mode=mode, + schema_mode=schema_mode, + engine="rust", + ) # type: ignore + + delta_table = self.get_delta_table() + assert delta_table is not None + + return delta_table diff --git a/posthog/temporal/data_imports/pipelines/pipeline/hogql_schema.py b/posthog/temporal/data_imports/pipelines/pipeline/hogql_schema.py new file mode 100644 index 0000000000000..383a3296f0435 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/pipeline/hogql_schema.py @@ -0,0 +1,63 @@ +import pyarrow as pa +import deltalake as deltalake +from posthog.hogql.database.models import ( + BooleanDatabaseField, + DatabaseField, + DateDatabaseField, + DateTimeDatabaseField, + FloatDatabaseField, + IntegerDatabaseField, + StringDatabaseField, + StringJSONDatabaseField, +) + + +class HogQLSchema: + schema: dict[str, str] + + def __init__(self): + self.schema = {} + + def add_pyarrow_table(self, table: pa.Table) -> None: + for field in table.schema: + self.add_field(field, table.column(field.name)) + + def add_field(self, field: pa.Field, column: pa.ChunkedArray) -> None: 
+ existing_type = self.schema.get(field.name) + if existing_type is not None and existing_type != StringDatabaseField.__name__: + return + + hogql_type: type[DatabaseField] = DatabaseField + + if pa.types.is_time(field.type): + hogql_type = DateTimeDatabaseField + elif pa.types.is_timestamp(field.type): + hogql_type = DateTimeDatabaseField + elif pa.types.is_date(field.type): + hogql_type = DateDatabaseField + elif pa.types.is_decimal(field.type): + hogql_type = FloatDatabaseField + elif pa.types.is_floating(field.type): + hogql_type = FloatDatabaseField + elif pa.types.is_boolean(field.type): + hogql_type = BooleanDatabaseField + elif pa.types.is_integer(field.type): + hogql_type = IntegerDatabaseField + elif pa.types.is_binary(field.type): + raise Exception("Type 'binary' is not a supported column type") + elif pa.types.is_string(field.type): + hogql_type = StringDatabaseField + + # Checking for JSON string columns with the first non-null value in the column + for value in column: + value_str = value.as_py() + if value_str is not None: + assert isinstance(value_str, str) + if value_str.startswith("{") or value_str.startswith("["): + hogql_type = StringJSONDatabaseField + break + + self.schema[field.name] = hogql_type.__name__ + + def to_hogql_types(self) -> dict[str, str]: + return self.schema diff --git a/posthog/temporal/data_imports/pipelines/pipeline/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline/pipeline.py new file mode 100644 index 0000000000000..a69d60501601b --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/pipeline/pipeline.py @@ -0,0 +1,139 @@ +import time +from typing import Any +import pyarrow as pa +from dlt.sources import DltSource, DltResource +import deltalake as deltalake +from posthog.temporal.common.logger import FilteringBoundLogger +from posthog.temporal.data_imports.pipelines.pipeline.utils import ( + _update_incremental_state, + _get_primary_keys, + _evolve_pyarrow_schema, + _append_debug_column_to_pyarrows_table, + _update_job_row_count, +) +from posthog.temporal.data_imports.pipelines.pipeline.delta_table_helper import DeltaTableHelper +from posthog.temporal.data_imports.pipelines.pipeline.hogql_schema import HogQLSchema +from posthog.temporal.data_imports.pipelines.pipeline_sync import validate_schema_and_update_table_sync +from posthog.temporal.data_imports.util import prepare_s3_files_for_querying +from posthog.warehouse.models import DataWarehouseTable, ExternalDataJob, ExternalDataSchema + + +class PipelineNonDLT: + _resource: DltResource + _resource_name: str + _job: ExternalDataJob + _schema: ExternalDataSchema + _logger: FilteringBoundLogger + _is_incremental: bool + _delta_table_helper: DeltaTableHelper + _internal_schema = HogQLSchema() + _load_id: int + + def __init__(self, source: DltSource, logger: FilteringBoundLogger, job_id: str, is_incremental: bool) -> None: + resources = list(source.resources.items()) + assert len(resources) == 1 + resource_name, resource = resources[0] + + self._resource = resource + self._resource_name = resource_name + self._job = ExternalDataJob.objects.prefetch_related("schema").get(id=job_id) + self._is_incremental = is_incremental + self._logger = logger + self._load_id = time.time_ns() + + schema: ExternalDataSchema | None = self._job.schema + assert schema is not None + self._schema = schema + + self._delta_table_helper = DeltaTableHelper(resource_name, self._job) + self._internal_schema = HogQLSchema() + + def run(self): + buffer: list[Any] = [] + chunk_size = 5000 + row_count = 0 + 
chunk_index = 0 + + for item in self._resource: + py_table = None + + if isinstance(item, list): + if len(buffer) > 0: + buffer.extend(item) + if len(buffer) >= chunk_size: + py_table = pa.Table.from_pylist(buffer) + buffer = [] + else: + if len(item) >= chunk_size: + py_table = pa.Table.from_pylist(item) + else: + buffer.extend(item) + continue + elif isinstance(item, dict): + buffer.append(item) + if len(buffer) < chunk_size: + continue + + py_table = pa.Table.from_pylist(buffer) + buffer = [] + elif isinstance(item, pa.Table): + py_table = item + else: + raise Exception(f"Unhandled item type: {item.__class__.__name__}") + + assert py_table is not None + + self._process_pa_table(pa_table=py_table, index=chunk_index) + + row_count += py_table.num_rows + chunk_index += 1 + + if len(buffer) > 0: + py_table = pa.Table.from_pylist(buffer) + self._process_pa_table(pa_table=py_table, index=chunk_index) + row_count += py_table.num_rows + + self._post_run_operations(row_count=row_count) + + def _process_pa_table(self, pa_table: pa.Table, index: int): + delta_table = self._delta_table_helper.get_delta_table() + + pa_table = _append_debug_column_to_pyarrows_table(pa_table, self._load_id) + pa_table = _evolve_pyarrow_schema(pa_table, delta_table.schema() if delta_table is not None else None) + + table_primary_keys = _get_primary_keys(self._resource) + delta_table = self._delta_table_helper.write_to_deltalake( + pa_table, self._is_incremental, index, table_primary_keys + ) + + self._internal_schema.add_pyarrow_table(pa_table) + + _update_incremental_state(self._schema, pa_table, self._logger) + _update_job_row_count(self._job.id, pa_table.num_rows, self._logger) + + def _post_run_operations(self, row_count: int): + delta_table = self._delta_table_helper.get_delta_table() + + assert delta_table is not None + + self._logger.info("Compacting delta table") + delta_table.optimize.compact() + delta_table.vacuum(retention_hours=24, enforce_retention_duration=False, dry_run=False) + + file_uris = delta_table.file_uris() + self._logger.info(f"Preparing S3 files - total parquet files: {len(file_uris)}") + prepare_s3_files_for_querying( + self._job.folder_path(), self._resource_name, file_uris, ExternalDataJob.PipelineVersion.V2 + ) + + self._logger.debug("Validating schema and updating table") + + validate_schema_and_update_table_sync( + run_id=str(self._job.id), + team_id=self._job.team_id, + schema_id=self._schema.id, + table_schema={}, + table_schema_dict=self._internal_schema.to_hogql_types(), + row_count=row_count, + table_format=DataWarehouseTable.TableFormat.DeltaS3Wrapper, + ) diff --git a/posthog/temporal/data_imports/pipelines/pipeline/utils.py b/posthog/temporal/data_imports/pipelines/pipeline/utils.py new file mode 100644 index 0000000000000..fadb6ec02a868 --- /dev/null +++ b/posthog/temporal/data_imports/pipelines/pipeline/utils.py @@ -0,0 +1,105 @@ +import json +from collections.abc import Sequence +from typing import Any +import pyarrow as pa +from dlt.common.libs.deltalake import ensure_delta_compatible_arrow_schema +from dlt.sources import DltResource +import deltalake as deltalake +from django.db.models import F +from posthog.temporal.common.logger import FilteringBoundLogger +from posthog.warehouse.models import ExternalDataJob, ExternalDataSchema + + +def _get_primary_keys(resource: DltResource) -> list[Any] | None: + primary_keys = resource._hints.get("primary_key") + + if primary_keys is None: + return None + + if isinstance(primary_keys, list): + return primary_keys + + if 
isinstance(primary_keys, Sequence): + return list(primary_keys) + + raise Exception(f"primary_keys of type {primary_keys.__class__.__name__} are not supported") + + +def _evolve_pyarrow_schema(table: pa.Table, delta_schema: deltalake.Schema | None) -> pa.Table: + py_table_field_names = table.schema.names + + # Change pa.structs to JSON string + for column_name in table.column_names: + column = table.column(column_name) + if pa.types.is_struct(column.type) or pa.types.is_list(column.type): + json_column = pa.array([json.dumps(row.as_py()) if row.as_py() is not None else None for row in column]) + table = table.set_column(table.schema.get_field_index(column_name), column_name, json_column) + + if delta_schema: + for field in delta_schema.to_pyarrow(): + if field.name not in py_table_field_names: + if field.nullable: + new_column_data = pa.array([None] * table.num_rows, type=field.type) + else: + new_column_data = pa.array( + [_get_default_value_from_pyarrow_type(field.type)] * table.num_rows, type=field.type + ) + table = table.append_column(field, new_column_data) + + # Change types based on what deltalake tables support + return table.cast(ensure_delta_compatible_arrow_schema(table.schema)) + + +def _append_debug_column_to_pyarrows_table(table: pa.Table, load_id: int) -> pa.Table: + debug_info = f'{{"load_id": {load_id}}}' + + column = pa.array([debug_info] * table.num_rows, type=pa.string()) + return table.append_column("_ph_debug", column) + + +def _get_default_value_from_pyarrow_type(pyarrow_type: pa.DataType): + """ + Returns a default value for the given PyArrow type. + """ + if pa.types.is_integer(pyarrow_type): + return 0 + elif pa.types.is_floating(pyarrow_type): + return 0.0 + elif pa.types.is_string(pyarrow_type): + return "" + elif pa.types.is_boolean(pyarrow_type): + return False + elif pa.types.is_binary(pyarrow_type): + return b"" + elif pa.types.is_timestamp(pyarrow_type): + return pa.scalar(0, type=pyarrow_type).as_py() + elif pa.types.is_date(pyarrow_type): + return pa.scalar(0, type=pyarrow_type).as_py() + elif pa.types.is_time(pyarrow_type): + return pa.scalar(0, type=pyarrow_type).as_py() + else: + raise ValueError(f"No default value defined for type: {pyarrow_type}") + + +def _update_incremental_state(schema: ExternalDataSchema | None, table: pa.Table, logger: FilteringBoundLogger) -> None: + if schema is None or schema.sync_type != ExternalDataSchema.SyncType.INCREMENTAL: + return + + incremental_field_name: str | None = schema.sync_type_config.get("incremental_field") + if incremental_field_name is None: + return + + column = table[incremental_field_name] + numpy_arr = column.combine_chunks().to_pandas().to_numpy() + + # TODO(@Gilbert09): support different operations here (e.g. 
min) + last_value = numpy_arr.max() + + logger.debug(f"Updating incremental_field_last_value_v2 with {last_value}") + + schema.update_incremental_field_last_value(last_value) + + +def _update_job_row_count(job_id: str, count: int, logger: FilteringBoundLogger) -> None: + logger.debug(f"Updating rows_synced with +{count}") + ExternalDataJob.objects.filter(id=job_id).update(rows_synced=F("rows_synced") + count) diff --git a/posthog/temporal/data_imports/pipelines/pipeline_sync.py b/posthog/temporal/data_imports/pipelines/pipeline_sync.py index 8d2cbd6cac2ee..3fca1a7a49c82 100644 --- a/posthog/temporal/data_imports/pipelines/pipeline_sync.py +++ b/posthog/temporal/data_imports/pipelines/pipeline_sync.py @@ -455,6 +455,7 @@ def validate_schema_and_update_table_sync( table_schema: TSchemaTables, row_count: int, table_format: DataWarehouseTable.TableFormat, + table_schema_dict: Optional[dict[str, str]] = None, ) -> None: """ @@ -479,6 +480,18 @@ def validate_schema_and_update_table_sync( "pipeline", Prefetch("schema", queryset=ExternalDataSchema.objects.prefetch_related("source")) ).get(pk=run_id) + using_v2_pipeline = job.pipeline_version == ExternalDataJob.PipelineVersion.V2 + pipeline_version = ( + ExternalDataJob.PipelineVersion.V1 + if job.pipeline_version is None + else ExternalDataJob.PipelineVersion(job.pipeline_version) + ) + + # Temp so we dont create a bunch of orphaned Table objects + if using_v2_pipeline: + logger.debug("Using V2 pipeline - dont create table object or get columns") + return + credential = get_or_create_datawarehouse_credential( team_id=team_id, access_key=settings.AIRBYTE_BUCKET_KEY, @@ -528,41 +541,63 @@ def validate_schema_and_update_table_sync( assert isinstance(table_created, DataWarehouseTable) and table_created is not None # Temp fix #2 for Delta tables without table_format - try: - table_created.get_columns() - except Exception as e: - if table_format == DataWarehouseTable.TableFormat.DeltaS3Wrapper: - logger.exception("get_columns exception with DeltaS3Wrapper format - trying Delta format", exc_info=e) - - table_created.format = DataWarehouseTable.TableFormat.Delta + if not using_v2_pipeline: + try: table_created.get_columns() - table_created.save() + except Exception as e: + if table_format == DataWarehouseTable.TableFormat.DeltaS3Wrapper: + logger.exception( + "get_columns exception with DeltaS3Wrapper format - trying Delta format", exc_info=e + ) - logger.info("Delta format worked - updating table to use Delta") - else: - raise - - for schema in table_schema.values(): - if schema.get("resource") == _schema_name: - schema_columns = schema.get("columns") or {} - raw_db_columns: dict[str, dict[str, str]] = table_created.get_columns() - db_columns = {key: column.get("clickhouse", "") for key, column in raw_db_columns.items()} - - columns = {} - for column_name, db_column_type in db_columns.items(): - dlt_column = schema_columns.get(column_name) - if dlt_column is not None: - dlt_data_type = dlt_column.get("data_type") - hogql_type = dlt_to_hogql_type(dlt_data_type) - else: - hogql_type = dlt_to_hogql_type(None) + table_created.format = DataWarehouseTable.TableFormat.Delta + table_created.get_columns() + table_created.save() + + logger.info("Delta format worked - updating table to use Delta") + else: + raise + + # If using new non-DLT pipeline + if using_v2_pipeline and table_schema_dict is not None: + raw_db_columns: dict[str, dict[str, str]] = table_created.get_columns(pipeline_version=pipeline_version) + db_columns = {key: column.get("clickhouse", "") for 
key, column in raw_db_columns.items()} + + columns = {} + for column_name, db_column_type in db_columns.items(): + hogql_type = table_schema_dict.get(column_name) + + if hogql_type is None: + raise Exception(f"HogQL type not found for column: {column_name}") + + columns[column_name] = { + "clickhouse": db_column_type, + "hogql": hogql_type, + } + table_created.columns = columns + else: + # If using DLT pipeline + for schema in table_schema.values(): + if schema.get("resource") == _schema_name: + schema_columns = schema.get("columns") or {} + raw_db_columns: dict[str, dict[str, str]] = table_created.get_columns() + db_columns = {key: column.get("clickhouse", "") for key, column in raw_db_columns.items()} + + columns = {} + for column_name, db_column_type in db_columns.items(): + dlt_column = schema_columns.get(column_name) + if dlt_column is not None: + dlt_data_type = dlt_column.get("data_type") + hogql_type = dlt_to_hogql_type(dlt_data_type) + else: + hogql_type = dlt_to_hogql_type(None) - columns[column_name] = { - "clickhouse": db_column_type, - "hogql": hogql_type, - } - table_created.columns = columns - break + columns[column_name] = { + "clickhouse": db_column_type, + "hogql": hogql_type, + } + table_created.columns = columns + break table_created.save() @@ -573,7 +608,7 @@ def validate_schema_and_update_table_sync( .get(id=_schema_id, team_id=team_id) ) - if schema_model: + if not using_v2_pipeline and schema_model: schema_model.table = table_created schema_model.save() diff --git a/posthog/temporal/data_imports/pipelines/rest_source/__init__.py b/posthog/temporal/data_imports/pipelines/rest_source/__init__.py index 4fd019ce76753..9a8599882c652 100644 --- a/posthog/temporal/data_imports/pipelines/rest_source/__init__.py +++ b/posthog/temporal/data_imports/pipelines/rest_source/__init__.py @@ -46,6 +46,7 @@ def rest_api_source( config: RESTAPIConfig, team_id: int, job_id: str, + db_incremental_field_last_value: Optional[Any] = None, name: Optional[str] = None, section: Optional[str] = None, max_table_nesting: Optional[int] = None, @@ -108,10 +109,12 @@ def rest_api_source( spec, ) - return decorated(config, team_id, job_id) + return decorated(config, team_id, job_id, db_incremental_field_last_value) -def rest_api_resources(config: RESTAPIConfig, team_id: int, job_id: str) -> list[DltResource]: +def rest_api_resources( + config: RESTAPIConfig, team_id: int, job_id: str, db_incremental_field_last_value: Optional[Any] +) -> list[DltResource]: """Creates a list of resources from a REST API configuration. 
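The new `db_incremental_field_last_value` argument lets REST API sources resume an incremental sync from a cursor value persisted in the database rather than from dlt's own pipeline state; when it is set it takes precedence, otherwise the dlt incremental value is used (see the `_set_incremental_params` change further down in this file). A minimal sketch of that precedence rule, with the helper name assumed for illustration:

from typing import Any, Callable, Optional

def _resolve_incremental_start(
    dlt_last_value: Any,
    db_incremental_field_last_value: Optional[Any],
    transform: Optional[Callable[[Any], Any]] = None,
) -> Any:
    # Prefer the cursor value stored in the database; fall back to dlt's tracked state.
    transform = transform or (lambda x: x)
    last_value = (
        db_incremental_field_last_value
        if db_incremental_field_last_value is not None
        else dlt_last_value
    )
    return transform(last_value)

# e.g. building the start parameter for a paginated request:
# params[incremental_param.start] = _resolve_incremental_start(incremental.last_value, db_last_value, transform)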
Args: @@ -193,6 +196,7 @@ def rest_api_resources(config: RESTAPIConfig, team_id: int, job_id: str) -> list resolved_param_map, team_id=team_id, job_id=job_id, + db_incremental_field_last_value=db_incremental_field_last_value, ) return list(resources.values()) @@ -205,6 +209,7 @@ def create_resources( resolved_param_map: dict[str, Optional[ResolvedParam]], team_id: int, job_id: str, + db_incremental_field_last_value: Optional[Any] = None, ) -> dict[str, DltResource]: resources = {} @@ -264,6 +269,7 @@ async def paginate_resource( incremental_object, incremental_param, incremental_cursor_transform, + db_incremental_field_last_value, ) yield client.paginate( @@ -317,6 +323,7 @@ async def paginate_dependent_resource( incremental_object, incremental_param, incremental_cursor_transform, + db_incremental_field_last_value, ) for item in items: @@ -358,6 +365,7 @@ def _set_incremental_params( incremental_object: Incremental[Any], incremental_param: Optional[IncrementalParam], transform: Optional[Callable[..., Any]], + db_incremental_field_last_value: Optional[Any] = None, ) -> dict[str, Any]: def identity_func(x: Any) -> Any: return x @@ -368,7 +376,13 @@ def identity_func(x: Any) -> Any: if incremental_param is None: return params - params[incremental_param.start] = transform(incremental_object.last_value) + last_value = ( + db_incremental_field_last_value + if db_incremental_field_last_value is not None + else incremental_object.last_value + ) + + params[incremental_param.start] = transform(last_value) if incremental_param.end: params[incremental_param.end] = transform(incremental_object.end_value) return params diff --git a/posthog/temporal/data_imports/pipelines/salesforce/__init__.py b/posthog/temporal/data_imports/pipelines/salesforce/__init__.py index ec2dc7647b606..129c8d1550be4 100644 --- a/posthog/temporal/data_imports/pipelines/salesforce/__init__.py +++ b/posthog/temporal/data_imports/pipelines/salesforce/__init__.py @@ -6,7 +6,6 @@ from posthog.temporal.data_imports.pipelines.rest_source import RESTAPIConfig, rest_api_resources from posthog.temporal.data_imports.pipelines.rest_source.typing import EndpointResource from posthog.temporal.data_imports.pipelines.salesforce.auth import SalseforceAuth -import pendulum import re @@ -352,6 +351,7 @@ def salesforce_source( endpoint: str, team_id: int, job_id: str, + db_incremental_field_last_value: Optional[Any], is_incremental: bool = False, ): config: RESTAPIConfig = { @@ -366,4 +366,4 @@ def salesforce_source( "resources": [get_resource(endpoint, is_incremental)], } - yield from rest_api_resources(config, team_id, job_id) + yield from rest_api_resources(config, team_id, job_id, db_incremental_field_last_value) diff --git a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py index 04aa7c9678c0b..ae81f9fa61fe6 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py +++ b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py @@ -51,6 +51,7 @@ def sql_source_for_type( sslmode: str, schema: str, table_names: list[str], + db_incremental_field_last_value: Optional[Any], using_ssl: Optional[bool] = True, team_id: Optional[int] = None, incremental_field: Optional[str] = None, @@ -99,12 +100,13 @@ def sql_source_for_type( raise Exception("Unsupported source_type") db_source = sql_database( - credentials, + credentials=credentials, schema=schema, table_names=table_names, incremental=incremental, team_id=team_id, connect_args=connect_args, + 
db_incremental_field_last_value=db_incremental_field_last_value, ) return db_source @@ -121,6 +123,7 @@ def snowflake_source( warehouse: str, schema: str, table_names: list[str], + db_incremental_field_last_value: Optional[Any], role: Optional[str] = None, incremental_field: Optional[str] = None, incremental_field_type: Optional[IncrementalFieldType] = None, @@ -172,7 +175,13 @@ def snowflake_source( }, ) - db_source = sql_database(credentials, schema=schema, table_names=table_names, incremental=incremental) + db_source = sql_database( + credentials=credentials, + schema=schema, + table_names=table_names, + incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, + ) return db_source @@ -186,6 +195,7 @@ def bigquery_source( token_uri: str, table_name: str, bq_destination_table_id: str, + db_incremental_field_last_value: Optional[Any], incremental_field: Optional[str] = None, incremental_field_type: Optional[IncrementalFieldType] = None, ) -> DltSource: @@ -210,7 +220,13 @@ def bigquery_source( credentials_info=credentials_info, ) - return sql_database(engine, schema=None, table_names=[table_name], incremental=incremental) + return sql_database( + credentials=engine, + schema=None, + table_names=[table_name], + incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, + ) # Temp while DLT doesn't support `interval` columns @@ -231,6 +247,7 @@ def internal_remove(doc: dict) -> dict: @dlt.source(max_table_nesting=0) def sql_database( + db_incremental_field_last_value: Optional[Any], credentials: Union[ConnectionStringCredentials, Engine, str] = dlt.secrets.value, schema: Optional[str] = dlt.config.value, metadata: Optional[MetaData] = None, @@ -290,6 +307,7 @@ def sql_database( table=table, incremental=incremental, connect_args=connect_args, + db_incremental_field_last_value=db_incremental_field_last_value, ) ) diff --git a/posthog/temporal/data_imports/pipelines/sql_database/helpers.py b/posthog/temporal/data_imports/pipelines/sql_database/helpers.py index 50577b6b04d17..0400a60b32fd5 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database/helpers.py +++ b/posthog/temporal/data_imports/pipelines/sql_database/helpers.py @@ -27,6 +27,7 @@ def __init__( chunk_size: int = 1000, incremental: Optional[dlt.sources.incremental[Any]] = None, connect_args: Optional[list[str]] = None, + db_incremental_field_last_value: Optional[Any] = None, ) -> None: self.engine = engine self.table = table @@ -43,7 +44,11 @@ def __init__( raise KeyError( f"Cursor column '{incremental.cursor_path}' does not exist in table '{table.name}'" ) from e - self.last_value = incremental.last_value + self.last_value = ( + db_incremental_field_last_value + if db_incremental_field_last_value is not None + else incremental.last_value + ) else: self.cursor_column = None self.last_value = None @@ -90,6 +95,7 @@ def table_rows( chunk_size: int = DEFAULT_CHUNK_SIZE, incremental: Optional[dlt.sources.incremental[Any]] = None, connect_args: Optional[list[str]] = None, + db_incremental_field_last_value: Optional[Any] = None, ) -> Iterator[TDataItem]: """ A DLT source which loads data from an SQL database using SQLAlchemy. 
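On the SQL side, `TableLoader` applies the same precedence: a database-persisted `db_incremental_field_last_value` overrides `incremental.last_value` when deciding where to resume. Roughly, the resumed load issues a query along the lines of the following SQLAlchemy sketch; the table, column names, and comparison operator are assumptions for illustration, not lifted from the loader:

from sqlalchemy import Column, Integer, MetaData, Table, select

# Assumed example table; the real loader reflects tables from the source database.
metadata = MetaData()
example = Table("example_rows", metadata, Column("id", Integer), Column("updated_at", Integer))

def build_incremental_query(table: Table, cursor_column: str, last_value):
    # Resume from the persisted cursor: only fetch rows newer than last_value,
    # ordered by the cursor column so progress can be tracked chunk by chunk.
    query = select(table)
    if last_value is not None:
        query = query.where(table.c[cursor_column] > last_value)
    return query.order_by(table.c[cursor_column])

# e.g. build_incremental_query(example, "updated_at", db_incremental_field_last_value)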
@@ -106,7 +112,14 @@ def table_rows( """ yield dlt.mark.materialize_table_schema() # type: ignore - loader = TableLoader(engine, table, incremental=incremental, chunk_size=chunk_size, connect_args=connect_args) + loader = TableLoader( + engine, + table, + incremental=incremental, + chunk_size=chunk_size, + connect_args=connect_args, + db_incremental_field_last_value=db_incremental_field_last_value, + ) yield from loader.load_rows() engine.dispose() diff --git a/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py b/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py index bcab4c3e19282..a3fc1c6b2838b 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py +++ b/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py @@ -67,6 +67,7 @@ def sql_source_for_type( sslmode: str, schema: str, table_names: list[str], + db_incremental_field_last_value: Optional[Any], using_ssl: Optional[bool] = True, team_id: Optional[int] = None, incremental_field: Optional[str] = None, @@ -115,10 +116,11 @@ def sql_source_for_type( raise Exception("Unsupported source_type") db_source = sql_database( - credentials, + credentials=credentials, schema=schema, table_names=table_names, incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, team_id=team_id, connect_args=connect_args, ) @@ -137,6 +139,7 @@ def snowflake_source( warehouse: str, schema: str, table_names: list[str], + db_incremental_field_last_value: Optional[Any], role: Optional[str] = None, incremental_field: Optional[str] = None, incremental_field_type: Optional[IncrementalFieldType] = None, @@ -188,7 +191,13 @@ def snowflake_source( }, ) - db_source = sql_database(credentials, schema=schema, table_names=table_names, incremental=incremental) + db_source = sql_database( + credentials=credentials, + schema=schema, + table_names=table_names, + incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, + ) return db_source @@ -202,6 +211,7 @@ def bigquery_source( token_uri: str, table_name: str, bq_destination_table_id: str, + db_incremental_field_last_value: Optional[Any], incremental_field: Optional[str] = None, incremental_field_type: Optional[IncrementalFieldType] = None, ) -> DltSource: @@ -226,11 +236,18 @@ def bigquery_source( credentials_info=credentials_info, ) - return sql_database(engine, schema=None, table_names=[table_name], incremental=incremental) + return sql_database( + credentials=engine, + schema=None, + table_names=[table_name], + incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, + ) @dlt.source(max_table_nesting=0) def sql_database( + db_incremental_field_last_value: Optional[Any], credentials: Union[ConnectionStringCredentials, Engine, str] = dlt.secrets.value, schema: Optional[str] = dlt.config.value, metadata: Optional[MetaData] = None, @@ -317,6 +334,7 @@ def sql_database( backend_kwargs=backend_kwargs, type_adapter_callback=type_adapter_callback, incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, team_id=team_id, connect_args=connect_args, ) @@ -341,6 +359,7 @@ def internal_remove(table: pa.Table) -> pa.Table: @dlt.resource(name=lambda args: args["table"], standalone=True, spec=SqlTableResourceConfiguration) def sql_table( + db_incremental_field_last_value: Optional[Any], credentials: Union[ConnectionStringCredentials, Engine, str] = dlt.secrets.value, table: str = dlt.config.value, schema: Optional[str] = 
dlt.config.value, @@ -438,6 +457,7 @@ def query_adapter_callback(query: SelectAny, table: Table): chunk_size=chunk_size, backend=backend, incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, reflection_level=reflection_level, defer_table_reflect=defer_table_reflect, table_adapter_callback=table_adapter_callback, diff --git a/posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py b/posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py index 46f59929beb47..acd64c97aae99 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py +++ b/posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py @@ -46,6 +46,7 @@ def __init__( columns: TTableSchemaColumns, chunk_size: int = 1000, incremental: Optional[dlt.sources.incremental[Any]] = None, + db_incremental_field_last_value: Optional[Any] = None, query_adapter_callback: Optional[TQueryAdapter] = None, connect_args: Optional[list[str]] = None, ) -> None: @@ -64,7 +65,11 @@ def __init__( raise KeyError( f"Cursor column '{incremental.cursor_path}' does not exist in table '{table.name}'" ) from e - self.last_value = incremental.last_value + self.last_value = ( + db_incremental_field_last_value + if db_incremental_field_last_value is not None + else incremental.last_value + ) self.end_value = incremental.end_value self.row_order: TSortOrder = self.incremental.row_order else: @@ -183,6 +188,7 @@ def table_rows( chunk_size: int, backend: TableBackend, incremental: Optional[dlt.sources.incremental[Any]] = None, + db_incremental_field_last_value: Optional[Any] = None, defer_table_reflect: bool = False, table_adapter_callback: Optional[Callable[[Table], None]] = None, reflection_level: ReflectionLevel = "minimal", @@ -226,6 +232,7 @@ def table_rows( table, columns, incremental=incremental, + db_incremental_field_last_value=db_incremental_field_last_value, chunk_size=chunk_size, query_adapter_callback=query_adapter_callback, connect_args=connect_args, diff --git a/posthog/temporal/data_imports/pipelines/stripe/__init__.py b/posthog/temporal/data_imports/pipelines/stripe/__init__.py index 5b386aa10adba..da9af92c191dc 100644 --- a/posthog/temporal/data_imports/pipelines/stripe/__init__.py +++ b/posthog/temporal/data_imports/pipelines/stripe/__init__.py @@ -325,7 +325,13 @@ def update_request(self, request: Request) -> None: @dlt.source(max_table_nesting=0) def stripe_source( - api_key: str, account_id: Optional[str], endpoint: str, team_id: int, job_id: str, is_incremental: bool = False + api_key: str, + account_id: Optional[str], + endpoint: str, + team_id: int, + job_id: str, + db_incremental_field_last_value: Optional[Any], + is_incremental: bool = False, ): config: RESTAPIConfig = { "client": { @@ -355,7 +361,7 @@ def stripe_source( "resources": [get_resource(endpoint, is_incremental)], } - yield from rest_api_resources(config, team_id, job_id) + yield from rest_api_resources(config, team_id, job_id, db_incremental_field_last_value) def validate_credentials(api_key: str) -> bool: diff --git a/posthog/temporal/data_imports/pipelines/test/test_pipeline_sync.py b/posthog/temporal/data_imports/pipelines/test/test_pipeline_sync.py index fcd84903b7249..5b765e35cea14 100644 --- a/posthog/temporal/data_imports/pipelines/test/test_pipeline_sync.py +++ b/posthog/temporal/data_imports/pipelines/test/test_pipeline_sync.py @@ -66,6 +66,7 @@ def _create_pipeline(self, schema_name: str, incremental: bool): status=ExternalDataJob.Status.RUNNING, rows_synced=0, 
workflow_id=str(uuid.uuid4()), + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) pipeline = DataImportPipelineSync( @@ -84,6 +85,7 @@ def _create_pipeline(self, schema_name: str, incremental: bool): is_incremental=incremental, team_id=self.team.pk, job_id=str(job.pk), + db_incremental_field_last_value=0, ), logger=structlog.get_logger(), incremental=incremental, diff --git a/posthog/temporal/data_imports/pipelines/vitally/__init__.py b/posthog/temporal/data_imports/pipelines/vitally/__init__.py index 223513d439d7c..3f070c48653f2 100644 --- a/posthog/temporal/data_imports/pipelines/vitally/__init__.py +++ b/posthog/temporal/data_imports/pipelines/vitally/__init__.py @@ -55,6 +55,7 @@ def get_resource(name: str, is_incremental: bool) -> EndpointResource: "params": { "limit": 100, "sortBy": "updatedAt", + "status": "activeOrChurned", "updatedAt": { "type": "incremental", "cursor_path": "updatedAt", @@ -323,6 +324,7 @@ def vitally_source( endpoint: str, team_id: int, job_id: str, + db_incremental_field_last_value: Optional[Any], is_incremental: bool = False, ): config: RESTAPIConfig = { @@ -347,7 +349,7 @@ def vitally_source( "resources": [get_resource(endpoint, is_incremental)], } - yield from rest_api_resources(config, team_id, job_id) + yield from rest_api_resources(config, team_id, job_id, db_incremental_field_last_value) def validate_credentials(secret_token: str, region: str, subdomain: Optional[str]) -> bool: diff --git a/posthog/temporal/data_imports/pipelines/zendesk/__init__.py b/posthog/temporal/data_imports/pipelines/zendesk/__init__.py index 36d842e4d3889..55b6be994f006 100644 --- a/posthog/temporal/data_imports/pipelines/zendesk/__init__.py +++ b/posthog/temporal/data_imports/pipelines/zendesk/__init__.py @@ -289,6 +289,7 @@ def zendesk_source( endpoint: str, team_id: int, job_id: str, + db_incremental_field_last_value: Optional[Any], is_incremental: bool = False, ): config: RESTAPIConfig = { @@ -312,7 +313,7 @@ def zendesk_source( "resources": [get_resource(endpoint, is_incremental)], } - yield from rest_api_resources(config, team_id, job_id) + yield from rest_api_resources(config, team_id, job_id, db_incremental_field_last_value) def validate_credentials(subdomain: str, api_key: str, email_address: str) -> bool: diff --git a/posthog/temporal/data_imports/util.py b/posthog/temporal/data_imports/util.py index cc8a4892b0aaa..4a133ef336b42 100644 --- a/posthog/temporal/data_imports/util.py +++ b/posthog/temporal/data_imports/util.py @@ -1,18 +1,33 @@ +from typing import Optional from posthog.settings.utils import get_from_env from posthog.utils import str_to_bool +from posthog.warehouse.models import ExternalDataJob from posthog.warehouse.s3 import get_s3_client from django.conf import settings from dlt.common.normalizers.naming.snake_case import NamingConvention -def prepare_s3_files_for_querying(folder_path: str, table_name: str, file_uris: list[str]): +def prepare_s3_files_for_querying( + folder_path: str, + table_name: str, + file_uris: list[str], + pipeline_version: Optional[ExternalDataJob.PipelineVersion] = None, +): s3 = get_s3_client() normalized_table_name = NamingConvention().normalize_identifier(table_name) s3_folder_for_job = f"{settings.BUCKET_URL}/{folder_path}" - s3_folder_for_schema = f"{s3_folder_for_job}/{normalized_table_name}" - s3_folder_for_querying = f"{s3_folder_for_job}/{normalized_table_name}__query" + + if pipeline_version == ExternalDataJob.PipelineVersion.V2: + s3_folder_for_schema = f"{s3_folder_for_job}/{normalized_table_name}__v2" + else: + 
s3_folder_for_schema = f"{s3_folder_for_job}/{normalized_table_name}" + + if pipeline_version == ExternalDataJob.PipelineVersion.V2: + s3_folder_for_querying = f"{s3_folder_for_job}/{normalized_table_name}__query_v2" + else: + s3_folder_for_querying = f"{s3_folder_for_job}/{normalized_table_name}__query" if s3.exists(s3_folder_for_querying): s3.delete(s3_folder_for_querying, recursive=True) diff --git a/posthog/temporal/data_imports/workflow_activities/create_job_model.py b/posthog/temporal/data_imports/workflow_activities/create_job_model.py index b62f0c9cc2063..b404c610c1cad 100644 --- a/posthog/temporal/data_imports/workflow_activities/create_job_model.py +++ b/posthog/temporal/data_imports/workflow_activities/create_job_model.py @@ -1,11 +1,13 @@ import dataclasses import uuid +from django.conf import settings from django.db import close_old_connections from temporalio import activity # TODO: remove dependency +from posthog.constants import DATA_WAREHOUSE_TASK_QUEUE_V2 from posthog.warehouse.models import ExternalDataJob, ExternalDataSource from posthog.warehouse.models.external_data_schema import ( ExternalDataSchema, @@ -20,6 +22,13 @@ class CreateExternalDataJobModelActivityInputs: source_id: uuid.UUID +def get_pipeline_version() -> str: + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE_V2: + return ExternalDataJob.PipelineVersion.V2 + + return ExternalDataJob.PipelineVersion.V1 + + @activity.defn def create_external_data_job_model_activity( inputs: CreateExternalDataJobModelActivityInputs, @@ -37,6 +46,7 @@ def create_external_data_job_model_activity( rows_synced=0, workflow_id=activity.info().workflow_id, workflow_run_id=activity.info().workflow_run_id, + pipeline_version=get_pipeline_version(), ) schema = ExternalDataSchema.objects.get(team_id=inputs.team_id, id=inputs.schema_id) diff --git a/posthog/temporal/data_imports/workflow_activities/import_data_sync.py b/posthog/temporal/data_imports/workflow_activities/import_data_sync.py index 81a4a943d2c4c..135d6b8d5fb89 100644 --- a/posthog/temporal/data_imports/workflow_activities/import_data_sync.py +++ b/posthog/temporal/data_imports/workflow_activities/import_data_sync.py @@ -1,17 +1,21 @@ import dataclasses import uuid from datetime import datetime +from dateutil import parser from typing import Any +from django.conf import settings from django.db import close_old_connections from django.db.models import Prefetch, F from temporalio import activity +from posthog.constants import DATA_WAREHOUSE_TASK_QUEUE_V2 from posthog.models.integration import Integration from posthog.temporal.common.heartbeat_sync import HeartbeaterSync from posthog.temporal.data_imports.pipelines.bigquery import delete_all_temp_destination_tables, delete_table +from posthog.temporal.data_imports.pipelines.pipeline.pipeline import PipelineNonDLT from posthog.temporal.data_imports.pipelines.pipeline_sync import DataImportPipelineSync, PipelineInputs from posthog.temporal.data_imports.util import is_posthog_team from posthog.warehouse.models import ( @@ -22,6 +26,7 @@ from structlog.typing import FilteringBoundLogger from posthog.warehouse.models.external_data_schema import ExternalDataSchema from posthog.warehouse.models.ssh_tunnel import SSHTunnel +from posthog.warehouse.types import IncrementalFieldType @dataclasses.dataclass @@ -32,6 +37,20 @@ class ImportDataActivityInputs: run_id: str +def process_incremental_last_value(value: Any | None, field_type: IncrementalFieldType | None) -> Any | None: + if value is None or field_type is None: + return 
None + + if field_type == IncrementalFieldType.Integer or field_type == IncrementalFieldType.Numeric: + return value + + if field_type == IncrementalFieldType.DateTime or field_type == IncrementalFieldType.Timestamp: + return parser.parse(value) + + if field_type == IncrementalFieldType.Date: + return parser.parse(value).date() + + @activity.defn def import_data_activity_sync(inputs: ImportDataActivityInputs): logger = bind_temporal_worker_logger_sync(team_id=inputs.team_id) @@ -64,6 +83,24 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): endpoints = [schema.name] + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE_V2: + # Get the V2 last value, if it's not set yet (e.g. the first run), then fallback to the V1 value + processed_incremental_last_value = process_incremental_last_value( + schema.sync_type_config.get("incremental_field_last_value_v2"), + schema.sync_type_config.get("incremental_field_type"), + ) + + if processed_incremental_last_value is None: + processed_incremental_last_value = process_incremental_last_value( + schema.sync_type_config.get("incremental_field_last_value"), + schema.sync_type_config.get("incremental_field_type"), + ) + else: + processed_incremental_last_value = process_incremental_last_value( + schema.sync_type_config.get("incremental_field_last_value"), + schema.sync_type_config.get("incremental_field_type"), + ) + source = None if model.pipeline.source_type == ExternalDataSource.Type.STRIPE: from posthog.temporal.data_imports.pipelines.stripe import stripe_source @@ -80,6 +117,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): team_id=inputs.team_id, job_id=inputs.run_id, is_incremental=schema.is_incremental, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, ) return _run( @@ -178,6 +216,9 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): incremental_field_type=schema.sync_type_config.get("incremental_field_type") if schema.is_incremental else None, + db_incremental_field_last_value=processed_incremental_last_value + if schema.is_incremental + else None, team_id=inputs.team_id, using_ssl=using_ssl, ) @@ -205,6 +246,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): incremental_field_type=schema.sync_type_config.get("incremental_field_type") if schema.is_incremental else None, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, team_id=inputs.team_id, using_ssl=using_ssl, ) @@ -255,6 +297,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): incremental_field_type=schema.sync_type_config.get("incremental_field_type") if schema.is_incremental else None, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, ) return _run( @@ -299,6 +342,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): team_id=inputs.team_id, job_id=inputs.run_id, is_incremental=schema.is_incremental, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, ) return _run( @@ -321,6 +365,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): team_id=inputs.team_id, job_id=inputs.run_id, is_incremental=schema.is_incremental, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, ) return _run( @@ -342,6 +387,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): team_id=inputs.team_id, 
job_id=inputs.run_id, is_incremental=schema.is_incremental, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, ) return _run( @@ -398,6 +444,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): incremental_field_type=schema.sync_type_config.get("incremental_field_type") if schema.is_incremental else None, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, ) _run( @@ -433,6 +480,7 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs): team_id=inputs.team_id, job_id=inputs.run_id, is_incremental=schema.is_incremental, + db_incremental_field_last_value=processed_incremental_last_value if schema.is_incremental else None, ) return _run( @@ -455,12 +503,18 @@ def _run( schema: ExternalDataSchema, reset_pipeline: bool, ): - table_row_counts = DataImportPipelineSync(job_inputs, source, logger, reset_pipeline, schema.is_incremental).run() - total_rows_synced = sum(table_row_counts.values()) + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE_V2: + PipelineNonDLT(source, logger, job_inputs.run_id, schema.is_incremental).run() + else: + table_row_counts = DataImportPipelineSync( + job_inputs, source, logger, reset_pipeline, schema.is_incremental + ).run() + total_rows_synced = sum(table_row_counts.values()) + + ExternalDataJob.objects.filter(id=inputs.run_id, team_id=inputs.team_id).update( + rows_synced=F("rows_synced") + total_rows_synced + ) - ExternalDataJob.objects.filter(id=inputs.run_id, team_id=inputs.team_id).update( - rows_synced=F("rows_synced") + total_rows_synced - ) source = ExternalDataSource.objects.get(id=inputs.source_id) source.job_inputs.pop("reset_pipeline", None) source.save() diff --git a/posthog/temporal/tests/batch_exports/test_import_data.py b/posthog/temporal/tests/batch_exports/test_import_data.py index c201be4470a14..abf9bb56b094e 100644 --- a/posthog/temporal/tests/batch_exports/test_import_data.py +++ b/posthog/temporal/tests/batch_exports/test_import_data.py @@ -48,6 +48,7 @@ def _setup(team: Team, job_inputs: dict[Any, Any]) -> ImportDataActivityInputs: status=ExternalDataJob.Status.RUNNING, rows_synced=0, workflow_id="some_workflow_id", + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) return ImportDataActivityInputs(team_id=team.pk, schema_id=schema.pk, source_id=source.pk, run_id=str(job.pk)) @@ -86,6 +87,7 @@ def test_postgres_source_without_ssh_tunnel(activity_environment, team, **kwargs table_names=["table_1"], incremental_field=None, incremental_field_type=None, + db_incremental_field_last_value=None, team_id=team.id, using_ssl=True, ) @@ -127,6 +129,7 @@ def test_postgres_source_with_ssh_tunnel_disabled(activity_environment, team, ** table_names=["table_1"], incremental_field=None, incremental_field_type=None, + db_incremental_field_last_value=None, team_id=team.id, using_ssl=True, ) @@ -186,6 +189,7 @@ def __exit__(self, exc_type, exc_value, exc_traceback): table_names=["table_1"], incremental_field=None, incremental_field_type=None, + db_incremental_field_last_value=None, team_id=team.id, using_ssl=True, ) diff --git a/posthog/temporal/tests/batch_exports/test_monitoring.py b/posthog/temporal/tests/batch_exports/test_monitoring.py index cab50c25d3177..3f84960f4504d 100644 --- a/posthog/temporal/tests/batch_exports/test_monitoring.py +++ b/posthog/temporal/tests/batch_exports/test_monitoring.py @@ -1,5 +1,6 @@ import datetime as dt import uuid +from unittest.mock import patch import pytest import 
pytest_asyncio @@ -9,9 +10,12 @@ from posthog import constants from posthog.batch_exports.models import BatchExportRun +from posthog.batch_exports.service import afetch_batch_export_runs_in_range from posthog.temporal.batch_exports.monitoring import ( BatchExportMonitoringInputs, BatchExportMonitoringWorkflow, + _log_warning_for_missing_batch_export_runs, + check_for_missing_batch_export_runs, get_batch_export, get_event_counts, update_batch_export_runs, @@ -118,6 +122,7 @@ async def test_monitoring_workflow_when_no_event_data(batch_export): activities=[ get_batch_export, get_event_counts, + check_for_missing_batch_export_runs, update_batch_export_runs, ], workflow_runner=UnsandboxedWorkflowRunner(), @@ -148,7 +153,12 @@ async def test_monitoring_workflow_when_no_event_data(batch_export): ["every 5 minutes"], indirect=True, ) +@pytest.mark.parametrize( + "simulate_missing_batch_export_runs", + [True, False], +) async def test_monitoring_workflow( + simulate_missing_batch_export_runs, batch_export, generate_test_data, data_interval_start, @@ -158,44 +168,81 @@ async def test_monitoring_workflow( ): """Test the monitoring workflow with a batch export that has data. - We generate 2 hours of data between 13:00 and 15:00, and then run the - monitoring workflow at 15:30. The monitoring workflow should check the data - between 14:00 and 15:00, and update the batch export runs. - We generate some dummy batch export runs based on the event data we generated and assert that the expected records count matches the records completed. """ + + expected_missing_runs: list[tuple[dt.datetime, dt.datetime]] = [] + if simulate_missing_batch_export_runs: + # simulate a missing batch export run by deleting the batch export run for the first 5 minutes + runs: list[BatchExportRun] = await afetch_batch_export_runs_in_range( + batch_export_id=batch_export.id, + interval_start=data_interval_start, + interval_end=data_interval_start + dt.timedelta(minutes=5), + ) + assert len(runs) == 1 + for run in runs: + assert run.data_interval_start is not None + expected_missing_runs.append((run.data_interval_start, run.data_interval_end)) + await run.adelete() + workflow_id = str(uuid.uuid4()) inputs = BatchExportMonitoringInputs(batch_export_id=batch_export.id) - async with await WorkflowEnvironment.start_time_skipping() as activity_environment: - async with Worker( - activity_environment.client, - # TODO - not sure if this is the right task queue - task_queue=constants.BATCH_EXPORTS_TASK_QUEUE, - workflows=[BatchExportMonitoringWorkflow], - activities=[ - get_batch_export, - get_event_counts, - update_batch_export_runs, - ], - workflow_runner=UnsandboxedWorkflowRunner(), - ): - await activity_environment.client.execute_workflow( - BatchExportMonitoringWorkflow.run, - inputs, - id=workflow_id, + with patch( + "posthog.temporal.batch_exports.monitoring._log_warning_for_missing_batch_export_runs" + ) as mock_log_warning: + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, task_queue=constants.BATCH_EXPORTS_TASK_QUEUE, - retry_policy=RetryPolicy(maximum_attempts=1), - execution_timeout=dt.timedelta(seconds=30), - ) - - batch_export_runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) - - for run in batch_export_runs: - if run.records_completed == 0: - # TODO: in the actual monitoring activity it would be better to - # update the actual count to 0 rather than None - assert run.records_total_count is None + 
workflows=[BatchExportMonitoringWorkflow], + activities=[ + get_batch_export, + get_event_counts, + check_for_missing_batch_export_runs, + update_batch_export_runs, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + await activity_environment.client.execute_workflow( + BatchExportMonitoringWorkflow.run, + inputs, + id=workflow_id, + task_queue=constants.BATCH_EXPORTS_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=30), + ) + + if simulate_missing_batch_export_runs: + # check that the warning was logged + mock_log_warning.assert_called_once_with(batch_export.id, expected_missing_runs) else: - assert run.records_completed == run.records_total_count + # check that the warning was not logged + mock_log_warning.assert_not_called() + + # check that the batch export runs were updated correctly + batch_export_runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + + for run in batch_export_runs: + if run.records_completed == 0: + # TODO: in the actual monitoring activity it would be better to + # update the actual count to 0 rather than None + assert run.records_total_count is None + else: + assert run.records_completed == run.records_total_count + + +def test_log_warning_for_missing_batch_export_runs(): + missing_runs = [ + (dt.datetime(2024, 1, 1, 10, 0), dt.datetime(2024, 1, 1, 10, 5)), + (dt.datetime(2024, 1, 1, 10, 5), dt.datetime(2024, 1, 1, 10, 10)), + ] + with patch("posthog.temporal.batch_exports.monitoring.activity") as mock_activity: + batch_export_id = uuid.uuid4() + _log_warning_for_missing_batch_export_runs(batch_export_id, missing_runs) + mock_activity.logger.warning.assert_called_once_with( + f"Batch Exports Monitoring: Found 2 missing run(s) for batch export {batch_export_id}:\n" + "- Run 2024-01-01 10:00:00 to 2024-01-01 10:05:00\n" + "- Run 2024-01-01 10:05:00 to 2024-01-01 10:10:00\n" + ) diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index 76a3c20599518..3dab18c67b283 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -251,10 +251,6 @@ async def assert_clickhouse_records_in_s3( for record in record_batch.to_pylist(): expected_record = {} for k, v in record.items(): - if k not in schema_column_names or k == "_inserted_at": - # _inserted_at is not exported, only used for tracking progress. 
- continue - if k in json_columns and v is not None: expected_record[k] = json.loads(v) elif isinstance(v, dt.datetime): diff --git a/posthog/temporal/tests/data_imports/test_end_to_end.py b/posthog/temporal/tests/data_imports/test_end_to_end.py index fce2047cd1c28..06c198ec5b2d5 100644 --- a/posthog/temporal/tests/data_imports/test_end_to_end.py +++ b/posthog/temporal/tests/data_imports/test_end_to_end.py @@ -19,7 +19,7 @@ from temporalio.testing import WorkflowEnvironment from temporalio.worker import UnsandboxedWorkflowRunner, Worker -from posthog.constants import DATA_WAREHOUSE_TASK_QUEUE +from posthog.constants import DATA_WAREHOUSE_TASK_QUEUE, DATA_WAREHOUSE_TASK_QUEUE_V2 from posthog.hogql.modifiers import create_default_modifiers_for_team from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.insights.funnels.funnel import Funnel @@ -99,6 +99,19 @@ async def minio_client(): yield minio_client +def pytest_generate_tests(metafunc): + if "task_queue" in metafunc.fixturenames: + metafunc.parametrize("task_queue", [DATA_WAREHOUSE_TASK_QUEUE, DATA_WAREHOUSE_TASK_QUEUE_V2], indirect=True) + + +@pytest.fixture(autouse=True) +def task_queue(request): + queue = getattr(request, "param", None) + + with override_settings(TEMPORAL_TASK_QUEUE=queue): + yield + + async def _run( team: Team, schema_name: str, @@ -142,18 +155,23 @@ async def _run( assert run.status == ExternalDataJob.Status.COMPLETED await sync_to_async(schema.refresh_from_db)() - assert schema.last_synced_at == run.created_at - res = await sync_to_async(execute_hogql_query)(f"SELECT * FROM {table_name}", team) - assert len(res.results) == 1 + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + assert schema.last_synced_at == run.created_at + else: + assert schema.last_synced_at is None - for name, field in external_tables.get(table_name, {}).items(): - if field.hidden: - continue - assert name in (res.columns or []) + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + res = await sync_to_async(execute_hogql_query)(f"SELECT * FROM {table_name}", team) + assert len(res.results) == 1 - await sync_to_async(source.refresh_from_db)() - assert source.job_inputs.get("reset_pipeline", None) is None + for name, field in external_tables.get(table_name, {}).items(): + if field.hidden: + continue + assert name in (res.columns or []) + + await sync_to_async(source.refresh_from_db)() + assert source.job_inputs.get("reset_pipeline", None) is None return workflow_id, inputs @@ -203,11 +221,12 @@ def mock_to_object_store_rs_credentials(class_self): ), mock.patch.object(AwsCredentials, "to_session_credentials", mock_to_session_credentials), mock.patch.object(AwsCredentials, "to_object_store_rs_credentials", mock_to_object_store_rs_credentials), + mock.patch("posthog.temporal.data_imports.external_data_job.trigger_pipeline_v2"), ): async with await WorkflowEnvironment.start_time_skipping() as activity_environment: async with Worker( activity_environment.client, - task_queue=DATA_WAREHOUSE_TASK_QUEUE, + task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[ExternalDataJobWorkflow], activities=ACTIVITIES, # type: ignore workflow_runner=UnsandboxedWorkflowRunner(), @@ -218,7 +237,7 @@ def mock_to_object_store_rs_credentials(class_self): ExternalDataJobWorkflow.run, inputs, id=workflow_id, - task_queue=DATA_WAREHOUSE_TASK_QUEUE, + task_queue=settings.TEMPORAL_TASK_QUEUE, retry_policy=RetryPolicy(maximum_attempts=1), ) @@ -525,12 +544,13 @@ async def test_postgres_binary_columns(team, postgres_config, 
postgres_connectio mock_data_response=[], ) - res = await sync_to_async(execute_hogql_query)(f"SELECT * FROM postgres_binary_col_test", team) - columns = res.columns + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + res = await sync_to_async(execute_hogql_query)(f"SELECT * FROM postgres_binary_col_test", team) + columns = res.columns - assert columns is not None - assert len(columns) == 1 - assert columns[0] == "id" + assert columns is not None + assert len(columns) == 1 + assert columns[0] == "id" @pytest.mark.django_db(transaction=True) @@ -558,9 +578,14 @@ def get_jobs(): latest_job = jobs[0] folder_path = await sync_to_async(latest_job.folder_path)() - s3_objects = await minio_client.list_objects_v2( - Bucket=BUCKET_NAME, Prefix=f"{folder_path}/balance_transaction__query/" - ) + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + s3_objects = await minio_client.list_objects_v2( + Bucket=BUCKET_NAME, Prefix=f"{folder_path}/balance_transaction__query/" + ) + else: + s3_objects = await minio_client.list_objects_v2( + Bucket=BUCKET_NAME, Prefix=f"{folder_path}/balance_transaction__query_v2/" + ) assert len(s3_objects["Contents"]) != 0 @@ -587,23 +612,24 @@ async def test_funnels_lazy_joins_ordering(team, stripe_customer): field_name="stripe_customer", ) - query = FunnelsQuery( - series=[EventsNode(), EventsNode()], - breakdownFilter=BreakdownFilter( - breakdown_type=BreakdownType.DATA_WAREHOUSE_PERSON_PROPERTY, breakdown="stripe_customer.email" - ), - ) - funnel_class = Funnel(context=FunnelQueryContext(query=query, team=team)) - - query_ast = funnel_class.get_query() - await sync_to_async(execute_hogql_query)( - query_type="FunnelsQuery", - query=query_ast, - team=team, - modifiers=create_default_modifiers_for_team( - team, HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED) - ), - ) + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + query = FunnelsQuery( + series=[EventsNode(), EventsNode()], + breakdownFilter=BreakdownFilter( + breakdown_type=BreakdownType.DATA_WAREHOUSE_PERSON_PROPERTY, breakdown="stripe_customer.email" + ), + ) + funnel_class = Funnel(context=FunnelQueryContext(query=query, team=team)) + + query_ast = funnel_class.get_query() + await sync_to_async(execute_hogql_query)( + query_type="FunnelsQuery", + query=query_ast, + team=team, + modifiers=create_default_modifiers_for_team( + team, HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED) + ), + ) @pytest.mark.django_db(transaction=True) @@ -636,12 +662,13 @@ async def test_postgres_schema_evolution(team, postgres_config, postgres_connect sync_type_config={"incremental_field": "id", "incremental_field_type": "integer"}, ) - res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) - columns = res.columns + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) + columns = res.columns - assert columns is not None - assert len(columns) == 1 - assert any(x == "id" for x in columns) + assert columns is not None + assert len(columns) == 1 + assert any(x == "id" for x in columns) # Evole schema await postgres_connection.execute( @@ -655,18 +682,20 @@ async def test_postgres_schema_evolution(team, postgres_config, postgres_connect # Execute the same schema again - load await _execute_run(str(uuid.uuid4()), inputs, []) - res = await 
sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) - columns = res.columns + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) + columns = res.columns - assert columns is not None - assert len(columns) == 2 - assert any(x == "id" for x in columns) - assert any(x == "new_col" for x in columns) + assert columns is not None + assert len(columns) == 2 + assert any(x == "id" for x in columns) + assert any(x == "new_col" for x in columns) @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_sql_database_missing_incremental_values(team, postgres_config, postgres_connection): + await postgres_connection.execute("CREATE SCHEMA IF NOT EXISTS {schema}".format(schema=postgres_config["schema"])) await postgres_connection.execute( "CREATE TABLE IF NOT EXISTS {schema}.test_table (id integer)".format(schema=postgres_config["schema"]) ) @@ -697,15 +726,16 @@ async def test_sql_database_missing_incremental_values(team, postgres_config, po sync_type_config={"incremental_field": "id", "incremental_field_type": "integer"}, ) - res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) - columns = res.columns + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) + columns = res.columns - assert columns is not None - assert len(columns) == 1 - assert any(x == "id" for x in columns) + assert columns is not None + assert len(columns) == 1 + assert any(x == "id" for x in columns) - # Exclude rows that don't have the incremental cursor key set - assert len(res.results) == 1 + # Exclude rows that don't have the incremental cursor key set + assert len(res.results) == 1 @pytest.mark.django_db(transaction=True) @@ -739,15 +769,16 @@ async def test_sql_database_incremental_initial_value(team, postgres_config, pos sync_type_config={"incremental_field": "id", "incremental_field_type": "integer"}, ) - res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) - columns = res.columns + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + res = await sync_to_async(execute_hogql_query)("SELECT * FROM postgres_test_table", team) + columns = res.columns - assert columns is not None - assert len(columns) == 1 - assert any(x == "id" for x in columns) + assert columns is not None + assert len(columns) == 1 + assert any(x == "id" for x in columns) - # Include rows that have the same incremental value as the `initial_value` - assert len(res.results) == 1 + # Include rows that have the same incremental value as the `initial_value` + assert len(res.results) == 1 @pytest.mark.django_db(transaction=True) @@ -1007,7 +1038,8 @@ async def test_delta_table_deleted(team, stripe_balance_transaction): sync_type=ExternalDataSchema.SyncType.FULL_REFRESH, ) - with mock.patch.object(DeltaTable, "delete") as mock_delta_table_delete: - await _execute_run(str(uuid.uuid4()), inputs, stripe_balance_transaction["data"]) + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE: + with mock.patch.object(DeltaTable, "delete") as mock_delta_table_delete: + await _execute_run(str(uuid.uuid4()), inputs, stripe_balance_transaction["data"]) - mock_delta_table_delete.assert_called_once() + mock_delta_table_delete.assert_called_once() diff --git a/posthog/temporal/tests/external_data/test_external_data_job.py 
b/posthog/temporal/tests/external_data/test_external_data_job.py index f931c97f93943..103513662daeb 100644 --- a/posthog/temporal/tests/external_data/test_external_data_job.py +++ b/posthog/temporal/tests/external_data/test_external_data_job.py @@ -149,6 +149,7 @@ def _create_external_data_job( rows_synced=0, workflow_id=workflow_id, workflow_run_id=workflow_run_id, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) return job @@ -391,6 +392,7 @@ def setup_job_1(): status=ExternalDataJob.Status.RUNNING, rows_synced=0, schema=customer_schema, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) new_job = ExternalDataJob.objects.get(id=new_job.id) @@ -423,6 +425,7 @@ def setup_job_2(): status=ExternalDataJob.Status.RUNNING, rows_synced=0, schema=charge_schema, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) new_job = ExternalDataJob.objects.get(id=new_job.id) @@ -565,6 +568,7 @@ def setup_job_1(): status=ExternalDataJob.Status.RUNNING, rows_synced=0, schema=customer_schema, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) new_job = ( @@ -764,6 +768,7 @@ async def setup_job_1(): status=ExternalDataJob.Status.RUNNING, rows_synced=0, schema=posthog_test_schema, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) new_job = await sync_to_async( diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index 26e3b9acfa02a..32352422e1f1a 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -558,7 +558,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -992,7 +993,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1351,7 +1353,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -1839,7 +1842,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND "posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) @@ -2593,7 +2597,8 @@ "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."enabled" + WHERE (NOT "posthog_hogfunction"."deleted" + AND 
"posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) diff --git a/posthog/test/test_middleware.py b/posthog/test/test_middleware.py index 2d987bc2795e1..a66d26b8332bf 100644 --- a/posthog/test/test_middleware.py +++ b/posthog/test/test_middleware.py @@ -164,7 +164,7 @@ def setUp(self): def test_project_switched_when_accessing_dashboard_of_another_accessible_team(self): dashboard = Dashboard.objects.create(team=self.second_team) - with self.assertNumQueries(self.base_app_num_queries + 7): # AutoProjectMiddleware adds 4 queries + with self.assertNumQueries(self.base_app_num_queries + 6): # AutoProjectMiddleware adds 4 queries response_app = self.client.get(f"/dashboard/{dashboard.id}") response_users_api = self.client.get(f"/api/users/@me/") response_users_api_data = response_users_api.json() @@ -282,7 +282,7 @@ def test_project_switched_when_accessing_cohort_of_another_accessible_team(self) def test_project_switched_when_accessing_feature_flag_of_another_accessible_team(self): feature_flag = FeatureFlag.objects.create(team=self.second_team, created_by=self.user) - with self.assertNumQueries(self.base_app_num_queries + 7): + with self.assertNumQueries(self.base_app_num_queries + 6): response_app = self.client.get(f"/feature_flags/{feature_flag.id}") response_users_api = self.client.get(f"/api/users/@me/") response_users_api_data = response_users_api.json() diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 448c06533bf19..28b1ebda1bf2e 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -1229,7 +1229,11 @@ def jobs(self, request: Request, *arg: Any, **kwargs: Any): after = request.query_params.get("after", None) before = request.query_params.get("before", None) - jobs = instance.jobs.prefetch_related("schema").order_by("-created_at") + jobs = ( + instance.jobs.exclude(pipeline_version=ExternalDataJob.PipelineVersion.V2) + .prefetch_related("schema") + .order_by("-created_at") + ) if after: after_date = parser.parse(after) diff --git a/posthog/warehouse/api/test/test_external_data_source.py b/posthog/warehouse/api/test/test_external_data_source.py index f638700822af8..3fede72455ebd 100644 --- a/posthog/warehouse/api/test/test_external_data_source.py +++ b/posthog/warehouse/api/test/test_external_data_source.py @@ -704,6 +704,7 @@ def test_source_jobs(self): status=ExternalDataJob.Status.COMPLETED, rows_synced=100, workflow_run_id="test_run_id", + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) response = self.client.get( @@ -720,6 +721,28 @@ def test_source_jobs(self): assert data[0]["schema"]["id"] == str(schema.pk) assert data[0]["workflow_run_id"] is not None + def test_source_jobs_v2_job(self): + source = self._create_external_data_source() + schema = self._create_external_data_schema(source.pk) + ExternalDataJob.objects.create( + team=self.team, + pipeline=source, + schema=schema, + status=ExternalDataJob.Status.COMPLETED, + rows_synced=100, + workflow_run_id="test_run_id", + pipeline_version=ExternalDataJob.PipelineVersion.V2, + ) + + response = self.client.get( + f"/api/projects/{self.team.pk}/external_data_sources/{source.pk}/jobs", + ) + + data = response.json() + + assert response.status_code, status.HTTP_200_OK + assert len(data) == 0 + def test_source_jobs_pagination(self): source = self._create_external_data_source() schema = 
self._create_external_data_schema(source.pk) @@ -731,6 +754,7 @@ def test_source_jobs_pagination(self): status=ExternalDataJob.Status.COMPLETED, rows_synced=100, workflow_run_id="test_run_id", + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) response = self.client.get( @@ -752,6 +776,7 @@ def test_source_jobs_pagination(self): status=ExternalDataJob.Status.COMPLETED, rows_synced=100, workflow_run_id="test_run_id", + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) response = self.client.get( @@ -773,6 +798,7 @@ def test_source_jobs_pagination(self): status=ExternalDataJob.Status.COMPLETED, rows_synced=100, workflow_run_id="test_run_id", + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) response = self.client.get( diff --git a/posthog/warehouse/api/test/test_log_entry.py b/posthog/warehouse/api/test/test_log_entry.py index c7ed98c572f72..14564015c230d 100644 --- a/posthog/warehouse/api/test/test_log_entry.py +++ b/posthog/warehouse/api/test/test_log_entry.py @@ -91,7 +91,13 @@ def external_data_resources(client, organization, team): # No status but should be completed because a data warehouse table already exists ) job = ExternalDataJob.objects.create( - pipeline=source, schema=schema, workflow_id="fake_workflow_id", team=team, status="Running", rows_synced=100000 + pipeline=source, + schema=schema, + workflow_id="fake_workflow_id", + team=team, + status="Running", + rows_synced=100000, + pipeline_version=ExternalDataJob.PipelineVersion.V1, ) return { diff --git a/posthog/warehouse/models/external_data_job.py b/posthog/warehouse/models/external_data_job.py index ae7b642494966..d9949e00d4423 100644 --- a/posthog/warehouse/models/external_data_job.py +++ b/posthog/warehouse/models/external_data_job.py @@ -15,6 +15,10 @@ class Status(models.TextChoices): COMPLETED = "Completed", "Completed" CANCELLED = "Cancelled", "Cancelled" + class PipelineVersion(models.TextChoices): + V1 = "v1-dlt-sync", "v1-dlt-sync" + V2 = "v2-non-dlt", "v2-non-dlt" + team = models.ForeignKey(Team, on_delete=models.CASCADE) pipeline = models.ForeignKey("posthog.ExternalDataSource", related_name="jobs", on_delete=models.CASCADE) schema = models.ForeignKey("posthog.ExternalDataSchema", on_delete=models.CASCADE, null=True, blank=True) @@ -25,6 +29,8 @@ class Status(models.TextChoices): workflow_id = models.CharField(max_length=400, null=True, blank=True) workflow_run_id = models.CharField(max_length=400, null=True, blank=True) + pipeline_version = models.CharField(max_length=400, choices=PipelineVersion.choices, null=True, blank=True) + __repr__ = sane_repr("id") def folder_path(self) -> str: @@ -35,9 +41,17 @@ def folder_path(self) -> str: def url_pattern_by_schema(self, schema: str) -> str: if TEST: - return f"http://{settings.AIRBYTE_BUCKET_DOMAIN}/{settings.BUCKET}/{self.folder_path()}/{schema.lower()}/" + if self.pipeline_version == ExternalDataJob.PipelineVersion.V1: + return ( + f"http://{settings.AIRBYTE_BUCKET_DOMAIN}/{settings.BUCKET}/{self.folder_path()}/{schema.lower()}/" + ) + else: + return f"http://{settings.AIRBYTE_BUCKET_DOMAIN}/{settings.BUCKET}/{self.folder_path()}/{schema.lower()}__v2/" + + if self.pipeline_version == ExternalDataJob.PipelineVersion.V1: + return f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/dlt/{self.folder_path()}/{schema.lower()}/" - return f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/dlt/{self.folder_path()}/{schema.lower()}/" + return f"https://{settings.AIRBYTE_BUCKET_DOMAIN}/dlt/{self.folder_path()}/{schema.lower()}__v2/" @database_sync_to_async diff --git 
a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index 3cb3fcfbce33c..ba07884346912 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -8,6 +8,7 @@ import numpy import snowflake.connector from django.conf import settings +from posthog.constants import DATA_WAREHOUSE_TASK_QUEUE_V2 from posthog.models.team import Team from posthog.models.utils import CreatedMetaFields, DeletedMetaFields, UUIDModel, UpdatedMetaFields, sane_repr import uuid @@ -51,6 +52,8 @@ class SyncFrequency(models.TextChoices): status = models.CharField(max_length=400, null=True, blank=True) last_synced_at = models.DateTimeField(null=True, blank=True) sync_type = models.CharField(max_length=128, choices=SyncType.choices, null=True, blank=True) + + # { "incremental_field": string, "incremental_field_type": string, "incremental_field_last_value": any, "incremental_field_last_value_v2": any } sync_type_config = models.JSONField( default=dict, blank=True, @@ -70,11 +73,6 @@ def folder_path(self) -> str: def is_incremental(self): return self.sync_type == self.SyncType.INCREMENTAL - def soft_delete(self): - self.deleted = True - self.deleted_at = datetime.now() - self.save() - def update_incremental_field_last_value(self, last_value: Any) -> None: incremental_field_type = self.sync_type_config.get("incremental_field_type") @@ -93,7 +91,17 @@ def update_incremental_field_last_value(self, last_value: Any) -> None: else: last_value_json = str(last_value_py) - self.sync_type_config["incremental_field_last_value"] = last_value_json + if settings.TEMPORAL_TASK_QUEUE == DATA_WAREHOUSE_TASK_QUEUE_V2: + key = "incremental_field_last_value_v2" + else: + key = "incremental_field_last_value" + + self.sync_type_config[key] = last_value_json + self.save() + + def soft_delete(self): + self.deleted = True + self.deleted_at = datetime.now() self.save() diff --git a/posthog/warehouse/models/external_table_definitions.py b/posthog/warehouse/models/external_table_definitions.py index 00704ec6c3994..4294cc6003836 100644 --- a/posthog/warehouse/models/external_table_definitions.py +++ b/posthog/warehouse/models/external_table_definitions.py @@ -16,6 +16,7 @@ "*": { "__dlt_id": StringDatabaseField(name="_dlt_id", hidden=True), "__dlt_load_id": StringDatabaseField(name="_dlt_load_id", hidden=True), + "__ph_debug": StringJSONDatabaseField(name="_ph_debug", hidden=True), }, "stripe_account": { "id": StringDatabaseField(name="id"), diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index f5bdb94b246eb..0f960d2648c8d 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Optional, TypeAlias +from typing import TYPE_CHECKING, Optional, TypeAlias from django.db import models from posthog.client import sync_execute @@ -29,6 +29,9 @@ from .external_table_definitions import external_tables from posthog.hogql.context import HogQLContext +if TYPE_CHECKING: + from posthog.warehouse.models import ExternalDataJob + SERIALIZED_FIELD_TO_CLICKHOUSE_MAPPING: dict[DatabaseSerializedFieldType, str] = { DatabaseSerializedFieldType.INTEGER: "Int64", DatabaseSerializedFieldType.FLOAT: "Float64", @@ -138,7 +141,11 @@ def validate_column_type(self, column_key) -> bool: except: return False - def get_columns(self, safe_expose_ch_error=True) -> DataWarehouseTableColumns: + def get_columns( + self, + pipeline_version: 
Optional["ExternalDataJob.PipelineVersion"] = None, + safe_expose_ch_error: bool = True, + ) -> DataWarehouseTableColumns: try: placeholder_context = HogQLContext(team_id=self.team.pk) s3_table_func = build_function_call( @@ -147,6 +154,7 @@ def get_columns(self, safe_expose_ch_error=True) -> DataWarehouseTableColumns: access_key=self.credential.access_key, access_secret=self.credential.access_secret, context=placeholder_context, + pipeline_version=pipeline_version, ) result = sync_execute( diff --git a/requirements.in b/requirements.in index 16ee79fb66e03..faefd16d9294d 100644 --- a/requirements.in +++ b/requirements.in @@ -14,6 +14,7 @@ celery==5.3.4 celery-redbeat==2.1.1 clickhouse-driver==0.2.7 clickhouse-pool==0.5.3 +conditional-cache==1.2 cryptography==39.0.2 dj-database-url==0.5.0 Django~=4.2.15 diff --git a/requirements.txt b/requirements.txt index cd9ac20220391..639c98066ccd4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -95,6 +95,8 @@ charset-normalizer==2.1.0 # via # requests # snowflake-connector-python +circular-dict==1.9 + # via conditional-cache click==8.1.7 # via # celery @@ -115,6 +117,8 @@ clickhouse-driver==0.2.7 # sentry-sdk clickhouse-pool==0.5.3 # via -r requirements.in +conditional-cache==1.2 + # via -r requirements.in cryptography==39.0.2 # via # -r requirements.in