Skip to content

Commit

Permalink
use grafana dashboard for benchmarks
Browse files Browse the repository at this point in the history
Signed-off-by: Sahil Yeole <[email protected]>
  • Loading branch information
beelchester committed Jul 13, 2024
1 parent d5e8c9b commit 1325b58
Show file tree
Hide file tree
Showing 7 changed files with 49 additions and 81 deletions.
22 changes: 14 additions & 8 deletions .github/workflows/bench.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,10 @@ jobs:
if: github.event.head_commit.message != 'Update performance results in README.md'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
K6_CLOUD_API_TOKEN: ${{ secrets.K6_CLOUD_API_TOKEN }}
GRAFANA_API_KEY: ${{ secrets.GRAFANA_API_KEY }}
INFLUXDB_TOKEN: ${{ secrets.INFLUXDB_TOKEN }}
INFLUXDB_ORG: ${{ secrets.INFLUXDB_ORG }}
INFLUXDB_URL: ${{ secrets.INFLUXDB_URL }}
steps:
- name: Checkout (GitHub)
uses: actions/checkout@v4
Expand All @@ -35,15 +38,18 @@ jobs:
- name: Setup k6
uses: grafana/setup-k6-action@v1

- name: Setup k6 cloud environment
- name: Setup influxdb-cli
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
run: |
if [ -z "$K6_CLOUD_API_TOKEN" ]; then
echo "K6_CLOUD_API_TOKEN is not set, results will not be uploaded to k6 cloud"
else
k6 login cloud --token $K6_CLOUD_API_TOKEN
echo "IS_K6_CLOUD_ENABLED=true" >> $GITHUB_ENV
fi
echo "UPLOAD_TO_CLOUD=true" >> $GITHUB_ENV
mkdir -p influxdb
cd influxdb
wget https://download.influxdata.com/influxdb/releases/influxdb2-client-2.7.5-linux-amd64.tar.gz
tar xvzf influxdb2-client-2.7.5-linux-amd64.tar.gz
cp influx /usr/local/bin/
influx config create --config-name benchmark --host-url $INFLUXDB_URL --org $INFLUXDB_ORG --token $INFLUXDB_TOKEN --active
cd ..
rm -rf influxdb
- name: Run benchmarks
run: |
Expand Down
43 changes: 5 additions & 38 deletions analyze.sh
Original file line number Diff line number Diff line change
Expand Up @@ -41,10 +41,6 @@ for idx in "${!servers[@]}"; do
avgLatencies[${servers[$idx]}]=$(average "${latencyVals[@]}")
done

# Generating data files for gnuplot
reqSecData="/tmp/reqSec.dat"
latencyData="/tmp/latency.dat"

echo "Server Value" >"$reqSecData"
for server in "${servers[@]}"; do
echo "$server ${avgReqSecs[$server]}" >>"$reqSecData"
Expand All @@ -62,36 +58,6 @@ elif [[ $1 == bench3* ]]; then
whichBench=3
fi

reqSecHistogramFile="req_sec_histogram${whichBench}.png"
latencyHistogramFile="latency_histogram${whichBench}.png"

# Plotting using gnuplot
gnuplot <<-EOF
set term pngcairo size 1280,720 enhanced font "Courier,12"
set output "$reqSecHistogramFile"
set style data histograms
set style histogram cluster gap 1
set style fill solid border -1
set xtics rotate by -45
set boxwidth 0.9
set title "Requests/Sec"
stats "$reqSecData" using 2 nooutput
set yrange [0:STATS_max*1.2]
set key outside right top
plot "$reqSecData" using 2:xtic(1) title "Req/Sec"
set output "$latencyHistogramFile"
set title "Latency (in ms)"
stats "$latencyData" using 2 nooutput
set yrange [0:STATS_max*1.2]
plot "$latencyData" using 2:xtic(1) title "Latency"
EOF

# Move PNGs to assets
mkdir -p assets
mv $reqSecHistogramFile assets/
mv $latencyHistogramFile assets/

# Declare an associative array for server RPS
declare -A serverRPS

Expand Down Expand Up @@ -120,6 +86,11 @@ fi
for server in "${sortedServers[@]}"; do
formattedReqSecs=$(printf "%.2f" ${avgReqSecs[$server]} | perl -pe 's/(?<=\d)(?=(\d{3})+(\.\d*)?$)/,/g')
formattedLatencies=$(printf "%.2f" ${avgLatencies[$server]} | perl -pe 's/(?<=\d)(?=(\d{3})+(\.\d*)?$)/,/g')
echo "Writing to influx for $server and benchmark $whichBench with ${avgReqSecs[$server]} and ${avgLatencies[$server]}"
influx write -b bench "
http_reqs,test_name=$server,benchmark=$whichBench value=${avgReqSecs[$server]}
latency,test_name=$server,benchmark=$whichBench value=${avgLatencies[$server]}
"
# Calculate the relative performance
relativePerformance=$(echo "${avgReqSecs[$server]} $lastServerReqSecs" | awk '{printf "%.2f", $1 / $2}')

Expand Down Expand Up @@ -154,10 +125,6 @@ if [[ $whichBench == 3 ]]; then
fi
fi

# Move the generated images to the assets folder
mv $reqSecHistogramFile assets/
mv $latencyHistogramFile assets/

# Delete the result TXT files
for file in "${resultFiles[@]}"; do
rm "$file"
Expand Down
16 changes: 7 additions & 9 deletions k6/bench.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,17 @@ import http from 'k6/http';
import { check } from 'k6';

// Which benchmark to run, selected by the BENCHMARK env var (1, 2, or 3).
const whichBenchmark = Number(__ENV.BENCHMARK);
// Human-readable scenario name for the k6 test.
// BUG FIX: the original `whichBenchmark === 1 ? 'posts_users' : 2 ? 'posts' : 'greet'`
// parsed as `… : (2 ? 'posts' : 'greet')`; since the literal 2 is always truthy,
// benchmark 3 was mislabeled 'posts' and 'greet' was unreachable.
const benchmarkName =
  whichBenchmark === 1 ? 'posts_users'
  : whichBenchmark === 2 ? 'posts'
  : 'greet';
// Benchmark 1 runs for 30s, the others for 10s. Kept as a string because it
// is concatenated with 's' for the scenario duration and also used as a
// divisor (JS coerces it) when computing requests/sec in handleSummary.
const duration = whichBenchmark === 1 ? '30' : '10';

export const options = {
scenarios: {
posts: {
[__ENV.TEST_NAME + '-' + benchmarkName]: {
executor: 'constant-vus',
duration: whichBenchmark === 2 ? '10s' : '30s',
duration: duration+'s',
gracefulStop: '0s',
vus: 100,
}
},
cloud: {
name: __ENV.TEST_NAME + '-' + benchmarkName,
},
},
};

Expand All @@ -31,7 +29,7 @@ export default function() {
const payload = JSON.stringify({
operationName: null,
variables: {},
query: whichBenchmark === 2 ? '{posts{title}}' : '{posts{id,userId,title,user{id,name,email}}}',
query: whichBenchmark === 1 ? '{posts{id,userId,title,user{id,name,email}}}' : 2 ? '{posts{title}}' : '{greet}',
});

const res = http.post(url, payload, params);
Expand All @@ -41,7 +39,7 @@ export default function() {
}

export function handleSummary(data) {
const requestCount = data.metrics.http_reqs.values.count;
const requestCount = (data.metrics.http_reqs.values.count/duration).toFixed(0);
const avgLatency = Math.round(data.metrics.http_req_duration.values.avg * 100) / 100;
const requestCountMessage = `Requests/sec: ${requestCount}\n`;
const latencyMessage = `Latency: ${avgLatency} ms\n`;
Expand Down
11 changes: 3 additions & 8 deletions k6/bench.sh
Original file line number Diff line number Diff line change
@@ -1,10 +1,5 @@
graphql_endpoint=$1
type=$2
test_name=$3
benchmark=$4
test_name=$2
benchmark=$3

if [ "$IS_K6_CLOUD_ENABLED" == "true" ] && [ "$type" == "upload" ]; then
k6 run k6/bench.js --quiet --out cloud --env TEST_NAME=$test_name --env BENCHMARK=$benchmark --env GRAPHQL_ENDPOINT=$graphql_endpoint
else
k6 run k6/bench.js --quiet --env TEST_NAME=$test_name --env BENCHMARK=$benchmark --env GRAPHQL_ENDPOINT=$graphql_endpoint
fi
k6 run k6/bench.js --env TEST_NAME=$test_name --env BENCHMARK=$benchmark --env GRAPHQL_ENDPOINT=$graphql_endpoint --quiet
24 changes: 20 additions & 4 deletions run_benchmarks.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@ sh nginx/run.sh
function runBenchmark() {
killServerOnPort 8000
sleep 5
local serviceScript="$1"
local service="$1"
local serviceScript="graphql/${service}/run.sh"
local benchmarks=(1 2 3)

if [[ "$service" == "hasura" ]]; then
Expand All @@ -45,7 +46,7 @@ function runBenchmark() {

local resultFiles=("result1_${sanitizedServiceScriptName}.txt" "result2_${sanitizedServiceScriptName}.txt" "result3_${sanitizedServiceScriptName}.txt")

bash "test_query${bench}.sh" "$graphqlEndpoint"
bash "test_query${bench}.sh" "$graphqlEndpoint"

# Warmup run
bash "$benchmarkScript" "$graphqlEndpoint" "$bench" >/dev/null
Expand All @@ -58,7 +59,7 @@ function runBenchmark() {
# 3 benchmark runs
for resultFile in "${resultFiles[@]}"; do
echo "Running benchmark $bench for $serviceScript"
bash "$benchmarkScript" "$graphqlEndpoint" "$bench" >"bench${bench}_${resultFile}"
bash "$benchmarkScript" "$graphqlEndpoint" "$service" "$bench" > "bench${bench}_${resultFile}"
if [ "$bench" == "1" ]; then
bench1Results+=("bench1_${resultFile}")
elif [ "$bench" == "2" ]; then
Expand All @@ -73,7 +74,7 @@ function runBenchmark() {
rm "results.md"

for service in "apollo_server" "caliban" "netflix_dgs" "gqlgen" "tailcall" "async_graphql" "hasura" "graphql_jit"; do
runBenchmark "graphql/${service}/run.sh"
runBenchmark "$service"
if [ "$service" == "apollo_server" ]; then
cd graphql/apollo_server/
npm stop
Expand All @@ -86,3 +87,18 @@ done
bash analyze.sh "${bench1Results[@]}"
bash analyze.sh "${bench2Results[@]}"
bash analyze.sh "${bench3Results[@]}"

# Render the Grafana dashboard panels as PNGs and save them under assets/.
# Only runs on main-branch pushes where the workflow exported UPLOAD_TO_CLOUD.
if [[ "$UPLOAD_TO_CLOUD" == "true" ]]; then
  # Wait for 5 seconds to ensure the results are uploaded to influxdb
  sleep 5

  # Query window: the last 30 minutes, in RFC3339 with millisecond precision.
  # NOTE(review): `date -d "-30 minutes"` and %3N are GNU coreutils extensions —
  # fine on the ubuntu CI runner, not portable to BSD/macOS.
  from=$(date -u -d "-30 minutes" +"%Y-%m-%dT%H:%M:%S.%3NZ")
  now=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")

  # One entry per rendered image: "<asset name>:<grafana panel id>".
  # Replaces six copy-pasted curl invocations that differed only in these
  # two values; the rendered URLs are byte-identical to the originals.
  panels=(
    "posts_users_req:panel-1"
    "posts_users_latency:panel-2"
    "posts_req:panel-3"
    "posts_latency:panel-4"
    "greet_req:panel-5"
    "greet_latency:panel-6"
  )

  for entry in "${panels[@]}"; do
    name=${entry%%:*}
    panel=${entry##*:}
    curl -o "assets/${name}.png" \
      -H "Authorization: Bearer $GRAFANA_API_KEY" \
      "https://tailcall.grafana.net/render/d-solo/cdqucydulbfggb?tab=queries&from=$from&to=$now&panelId=${panel}&__feature.dashboardSceneSolo&width=1000&height=500&tz=Asia%2FCalcutta" \
      --connect-timeout 120
  done
fi
8 changes: 0 additions & 8 deletions wrk/bench.sh

This file was deleted.

6 changes: 0 additions & 6 deletions wrk/wrk3.lua

This file was deleted.

2 comments on commit 1325b58

@github-actions
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Query Server Requests/sec Latency (ms) Relative
1 { posts { id userId title user { id name email }}}
[Tailcall] -nan -nan -nanx
[Netflix DGS] -nan -nan -nanx
[Hasura] -nan -nan -nanx
[GraphQL JIT] -nan -nan -nanx
[Gqlgen] -nan -nan -nanx
[Caliban] -nan -nan -nanx
[async-graphql] -nan -nan -nanx
[Apollo GraphQL] -nan -nan -nanx
2 { posts { title }}
[Tailcall] -nan -nan -nanx
[Netflix DGS] -nan -nan -nanx
[Hasura] -nan -nan -nanx
[GraphQL JIT] -nan -nan -nanx
[Gqlgen] -nan -nan -nanx
[Caliban] -nan -nan -nanx
[async-graphql] -nan -nan -nanx
[Apollo GraphQL] -nan -nan -nanx
3 { greet }
[Tailcall] -nan -nan -nanx
[Netflix DGS] -nan -nan -nanx
[Hasura] -nan -nan -nanx
[GraphQL JIT] -nan -nan -nanx
[Gqlgen] -nan -nan -nanx
[Caliban] -nan -nan -nanx
[async-graphql] -nan -nan -nanx
[Apollo GraphQL] -nan -nan -nanx

@github-actions
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Query Server Requests/sec Latency (ms) Relative
1 { posts { id userId title user { id name email }}}
[Tailcall] -nan -nan -nanx
[Netflix DGS] -nan -nan -nanx
[Hasura] -nan -nan -nanx
[GraphQL JIT] -nan -nan -nanx
[Gqlgen] -nan -nan -nanx
[Caliban] -nan -nan -nanx
[async-graphql] -nan -nan -nanx
[Apollo GraphQL] -nan -nan -nanx
2 { posts { title }}
[Tailcall] -nan -nan -nanx
[Netflix DGS] -nan -nan -nanx
[Hasura] -nan -nan -nanx
[GraphQL JIT] -nan -nan -nanx
[Gqlgen] -nan -nan -nanx
[Caliban] -nan -nan -nanx
[async-graphql] -nan -nan -nanx
[Apollo GraphQL] -nan -nan -nanx
3 { greet }
[Tailcall] -nan -nan -nanx
[Netflix DGS] -nan -nan -nanx
[Hasura] -nan -nan -nanx
[GraphQL JIT] -nan -nan -nanx
[Gqlgen] -nan -nan -nanx
[Caliban] -nan -nan -nanx
[async-graphql] -nan -nan -nanx
[Apollo GraphQL] -nan -nan -nanx

Please sign in to comment.