-
Notifications
You must be signed in to change notification settings - Fork 3
201 lines (196 loc) · 7.34 KB
/
benchmark.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
name: "Running benchmark"
# Triggers:
#  - manual dispatch with tunable benchmark parameters
#  - weekly schedule (Mondays 02:00 UTC)
#  - pushes to main touching build/benchmark-relevant paths
#  - every pull request
on:
  workflow_dispatch:
    inputs:
      warmups:
        description: 'number of warmups run before the actual benchmark'
        type: number
        default: 5
        required: false
      iterations:
        description: 'number of iterations in the benchmark'
        type: number
        default: 10
        required: false
      iteration-time:
        description: 'duration of individual iteration in benchmark'
        type: number
        default: 1
        required: false
      iteration-time-unit:
        description: 'time unit for iteration-time parameter'
        default: 's'
        type: string
        required: false
  schedule:
    - cron: "0 2 * * 1"
  push:
    branches:
      - main
    paths:
      - 'build.gradle.kts'
      - 'gradle.properties'
      - 'json-schema-validator/**'
      - 'gradle/**'
      - 'generator/**'
      - '.github/workflows/benchmark.yml'
  pull_request:
env:
  # push/PR runs produce JSON (consumed by github-action-benchmark);
  # manual/scheduled runs produce CSV (rendered into the step summary)
  REPORT_FORMAT: ${{ (github.event_name == 'push' || github.event_name == 'pull_request' ) && 'json' || 'csv' }}
concurrency:
  # cancel a still-running benchmark for the same PR / push target
  cancel-in-progress: true
  group: bench-${{ github.event_name }}-${{ github.event.pull_request.number || github.event.after }}
jobs:
benchmark-matrix:
strategy:
matrix:
include:
- os: ubuntu-latest
additional-task: ':benchmark:jvmComparisonBenchmark'
- os: macos-latest
additional-task: '-x :benchmark:jvmBenchmark'
- os: macos-13 # for macosX64
additional-task: '-x :benchmark:jvmBenchmark'
- os: windows-latest
additional-task: '-x :benchmark:jvmBenchmark'
runs-on: ${{ matrix.os }}
name: Run benchmarks on ${{ matrix.os }}
env:
BENCHMARK_RESULTS: 'benchmark/build/reports/benchmarks'
steps:
- name: 'Install native dependencies'
run: sudo apt-get install -y libunistring-dev
if: matrix.os == 'ubuntu-latest'
- name: 'Checkout Repository'
uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: temurin
java-version-file: .java-version
- uses: actions/setup-python@v5
with:
python-version-file: .python-version
- name: Validate Gradle Wrapper
uses: gradle/actions/wrapper-validation@v4
- name: Cache konan
uses: actions/cache@v4
with:
path: ~/.konan
key: ${{ runner.os }}-gradle-${{ hashFiles('*.gradle.kts') }}
restore-keys: |
${{ runner.os }}-gradle-
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v4
with:
gradle-version: wrapper
- name: Run benchmarks
run: >
./gradlew --no-daemon :benchmark:benchmark ${{ matrix.additional-task }}
-Pbenchmark_warmups=${{ inputs.warmups }}
-Pbenchmark_iterations=${{ inputs.iterations }}
-Pbenchmark_iteration_time=${{ inputs.iteration-time }}
-Pbenchmark_iteration_time_unit=${{ inputs.iteration-time-unit }}
-Pbenchmark_report_format=${{ env.REPORT_FORMAT }}
- name: Install CSV to MD converter
if: env.REPORT_FORMAT == 'csv'
run: pip install csv2md
- name: Add benchmark results to summary
shell: bash
if: env.REPORT_FORMAT == 'csv'
run: |
for report in $(find ./${{ env.BENCHMARK_RESULTS }} -type f -name "*.csv")
do
file_name=$(basename "$report")
platform="${file_name%.*}"
echo "File $file_name"
# remove empty lines
sed -i -e '/^[[:space:]]*$/d' $report
echo "::group::Report CSV"
cat "$report"
echo "::endgroup::"
markdown_table=$(csv2md "$report")
echo "::group::Report Markdown"
echo "$markdown_table"
echo "::endgroup::"
echo "# Platform ${platform}" >> $GITHUB_STEP_SUMMARY
echo "$markdown_table" >> $GITHUB_STEP_SUMMARY
done
- name: Store results as artifact
if: env.REPORT_FORMAT == 'json'
uses: actions/upload-artifact@v4
with:
name: bench-result-${{ matrix.os }}
path: ${{ env.BENCHMARK_RESULTS }}/main/**/*.json
- name: Store comparison results as artifact
if: env.REPORT_FORMAT == 'json' && matrix.os == 'ubuntu-latest'
uses: actions/upload-artifact@v4
with:
name: bench-comparison-result-${{ matrix.os }}
path: ${{ env.BENCHMARK_RESULTS }}/comparison/**/*.json
upload-benchmark-results:
if: (github.event_name == 'push' || github.event_name == 'pull_request') && github.repository == 'OptimumCode/json-schema-validator'
needs:
- benchmark-matrix
runs-on: ubuntu-latest
env:
RESULTS_DIR: bench-results
permissions:
# deployments permission to deploy GitHub pages website
deployments: write
# contents permission to update benchmark contents in gh-pages branch
contents: write
# pull-requests permission to create comments on PR in case of alert
pull-requests: write
strategy:
# to make sure results are submitted one by one
max-parallel: 1
matrix:
include:
- artifact-pattern: 'bench-result-*'
results-name: KMP JSON schema validator
alert: true
- artifact-pattern: 'bench-comparison-result-*'
results-name: Compare KMP JSON schema validator
alert: false
name: 'Process benchmark results for ${{ matrix.results-name }}'
steps:
- name: 'Checkout Repository'
uses: actions/checkout@v4
- name: Download benchmark results
uses: actions/download-artifact@v4
with:
pattern: ${{ matrix.artifact-pattern }}
path: ${{ env.RESULTS_DIR }}
merge-multiple: true
- name: Show downloaded artifacts
run: tree ${{ env.RESULTS_DIR }}
- name: Prepare and join benchmark reports
id: prep
run: |
for report in $(find ./${{ env.RESULTS_DIR }} -type f -name "*.json")
do
file_name=$(basename "$report")
platform="${file_name%.*}"
jq "[ .[] | .benchmark |= \"${platform}.\" + ltrimstr(\"io.github.optimumcode.json.schema.benchmark.\") | .params |= map_values(. |= split(\"/\")[-1]) ]" $report > ${{ env.RESULTS_DIR }}/$platform.json
done
AGGREGATED_REPORT=aggregated.json
# Joined reports looks like this: [[{},{}], [{},{}]]
# We need to transform them into this: [{},{}]
ls ${{ env.RESULTS_DIR }}/*.json
jq -s '[ .[] | .[] ]' ${{ env.RESULTS_DIR }}/*.json > $AGGREGATED_REPORT
echo "report=$AGGREGATED_REPORT" >> $GITHUB_OUTPUT
- name: Store benchmark result
uses: benchmark-action/github-action-benchmark@v1
with:
name: ${{ matrix.results-name }}
tool: 'jmh'
output-file-path: ${{ steps.prep.outputs.report }}
alert-comment-cc-users: "@OptimumCode"
comment-on-alert: ${{ matrix.alert }}
summary-always: true
alert-threshold: '150%'
fail-threshold: '200%'
max-items-in-chart: 50
github-token: ${{ secrets.GITHUB_TOKEN }}
# Push and deploy GitHub pages branch automatically only if run in main repo and not in PR
auto-push: ${{ github.event_name != 'pull_request' }}