FSSTCompressor #682

Workflow file for this run

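# Runs the TPC-H benchmarks for a pull request and posts the results back as a PR comment.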
name: PR Benchmarks

on:
  pull_request:
    types: [ labeled, synchronize ]
    branches: [ "develop" ]
  workflow_dispatch: { }

permissions:
  actions: write
  contents: read
  pull-requests: write

jobs:
  bench:
    runs-on: ubuntu-latest-large
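    # Only run when the PR carries the 'benchmark' label, or when the triggering
    # commit message contains '[benchmark]'.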
    if: ${{ contains(github.event.head_commit.message, '[benchmark]') || github.event.label.name == 'benchmark' && github.event_name == 'pull_request' }}
    steps:
      # We remove the benchmark label first so that the workflow can be re-triggered.
      - uses: actions-ecosystem/action-remove-labels@v1
        with:
          labels: benchmark
      - uses: actions/checkout@v4
      - uses: ./.github/actions/cleanup
      - uses: ./.github/actions/setup-rust
      - uses: ./.github/actions/setup-python
      # The compression benchmarks rely on DuckDB being installed to convert CSV to Parquet.
      - name: Install DuckDB
        uses: opt-nc/[email protected]
        with:
          version: v1.0.0
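      # Emit results in the 'bencher' output format, which the github-action-benchmark
      # step below can parse; tee also saves a copy to output.txt for that step.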
      - name: Run benchmark
        run: cargo bench --bench tpch_benchmark -- --output-format bencher | tee output.txt
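      # Publish the parsed results: comment-always posts them on the PR, while
      # auto-push and save-data-file are disabled so nothing is written to gh-pages-bench.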
      - name: Store benchmark result
        uses: benchmark-action/github-action-benchmark@v1
        with:
          name: Vortex benchmarks
          tool: 'cargo'
          gh-pages-branch: gh-pages-bench
          github-token: ${{ secrets.GITHUB_TOKEN }}
          output-file-path: output.txt
          summary-always: true
          comment-always: true
          auto-push: false
          save-data-file: false
          fail-on-alert: false

env:
  # AWS Credentials for R2 storage tests
  AWS_BUCKET: vortex-test
  AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}