Merge branch 'master' of https://github.com/libp2p/test-plans into marco/update-go-rust-perf
p-shahi committed Oct 1, 2024
2 parents 03fbb68 + 257aee3 commit e710545
Showing 30 changed files with 11,585 additions and 11,670 deletions.
5 changes: 5 additions & 0 deletions .github/actions/run-interop-hole-punch-test/action.yml
@@ -86,6 +86,11 @@ runs:

- name: Load cache and build
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
env:
AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
AWS_REGION: ${{ inputs.aws-region }}
AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
run: npm run cache -- load
shell: bash

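These variables are consumed by the interop cache helper (helpers/cache.ts, updated later in this commit), which now reads the bucket name from the environment instead of falling back to a hard-coded default; the same env block is added to the ping and transport-interop actions below. A minimal TypeScript sketch of that lookup, assuming a guard like the one in the cache.ts diff further down (the helper name requireBucket is illustrative only):

// Sketch: how the cache script is expected to pick up the bucket passed via the new env block.
// The real logic lives in hole-punch-interop/helpers/cache.ts; requireBucket is a made-up name.
const AWS_BUCKET = process.env.AWS_BUCKET;

function requireBucket(): string {
    if (!AWS_BUCKET) {
        // Fail fast when the workflow did not set the new env block.
        throw new Error("AWS_BUCKET not set");
    }
    return AWS_BUCKET;
}
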
5 changes: 5 additions & 0 deletions .github/actions/run-interop-ping-test/action.yml
@@ -78,6 +78,11 @@ runs:

- name: Load cache and build
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
env:
AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
AWS_REGION: ${{ inputs.aws-region }}
AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
run: npm run cache -- load
shell: bash

120 changes: 120 additions & 0 deletions .github/actions/run-perf-benchmark/action.yml
@@ -0,0 +1,120 @@
name: "libp2p ping interop test"
description: "Run the libp2p ping interoperability test suite"
inputs:
test-filter:
description: "Filter which tests to run, only these implementations will be run"
required: false
default: "all"
s3-access-key-id:
description: "S3 Access key id for the terraform infrastructure"
required: true
default: ""
s3-secret-access-key:
description: "S3 secret key id for the terraform infrastructure"
required: true
default: ""
runs:
using: "composite"
steps:
- id: ssh
shell: bash
name: Generate SSH key
working-directory: perf
run: |
make ssh-keygen
echo "key<<EOF" >> $GITHUB_OUTPUT
while read -r line; do
echo "::add-mask::$line"
echo "$line" >> $GITHUB_OUTPUT
done < terraform/modules/short_lived/files/perf
echo "EOF" >> $GITHUB_OUTPUT
- name: Configure SSH
uses: webfactory/ssh-agent@d4b9b8ff72958532804b70bbe600ad43b36d5f2e # v0.8.0
with:
ssh-private-key: ${{ steps.ssh.outputs.key }}

- name: Configure git
shell: bash
run: |
git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com"
git config --global user.name "${GITHUB_ACTOR}"
- name: Configure terraform
uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 # v2.0.3

- name: Init terraform
id: init
shell: bash
env:
AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
TF_IN_AUTOMATION: "1"
TF_INPUT: "0"
run: terraform init
working-directory: perf/terraform/configs/local

- name: Apply terraform
env:
AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
TF_IN_AUTOMATION: "1"
TF_INPUT: "0"
shell: bash
run: terraform apply -auto-approve
working-directory: perf/terraform/configs/local

- id: server
name: Retrieve server's IP
shell: bash
run: terraform output -raw server_ip
working-directory: perf/terraform/configs/local

- id: client
name: Retrieve client's IP
shell: bash
run: terraform output -raw client_ip
working-directory: perf/terraform/configs/local

- name: Download dependencies
shell: bash
run: npm ci
working-directory: perf/runner

- name: Run tests
shell: bash
env:
SERVER_IP: ${{ steps.server.outputs.stdout }}
CLIENT_IP: ${{ steps.client.outputs.stdout }}
run: npm run start -- --client-public-ip $CLIENT_IP --server-public-ip $SERVER_IP --test-filter ${{ inputs.test-filter }}
working-directory: perf/runner

- name: Push
shell: bash
if: github.event.inputs.push == 'true'
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
git add benchmark-results.json
git commit -m "perf: update benchmark results"
git push
gh pr comment --body "See new metrics at https://observablehq.com/@libp2p-workspace/performance-dashboard?branch=$(git rev-parse HEAD)" || true
working-directory: perf/runner

- name: Archive
if: github.event.inputs.push == 'false'
uses: actions/upload-artifact@v3
with:
name: benchmark-results
path: perf/runner/benchmark-results.json

- name: Destroy terraform
shell: bash
if: always() && steps.init.outputs.exitcode == 0
env:
AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
TF_IN_AUTOMATION: "1"
TF_INPUT: "0"
run: terraform destroy -auto-approve
working-directory: perf/terraform/configs/local
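The "Run tests" step forwards the Terraform outputs and the test-filter input to the runner's start script as --client-public-ip, --server-public-ip and --test-filter flags. A hypothetical TypeScript sketch of how such flags could be read on the runner side with Node's built-in util.parseArgs (illustrative only; the actual perf/runner code may parse its arguments differently):

import { parseArgs } from 'node:util';

// Hypothetical parsing of the flags the composite action passes to `npm run start --`.
// This mirrors the interface the action assumes, not the runner's real implementation.
const { values } = parseArgs({
    options: {
        'client-public-ip': { type: 'string' },
        'server-public-ip': { type: 'string' },
        'test-filter': { type: 'string', default: 'all' },
    },
});

console.log(`client=${values['client-public-ip']} server=${values['server-public-ip']} filter=${values['test-filter']}`);
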
5 changes: 5 additions & 0 deletions .github/actions/run-transport-interop-test/action.yml
@@ -78,6 +78,11 @@ runs:

- name: Load cache and build
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
env:
AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
AWS_REGION: ${{ inputs.aws-region }}
AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
run: npm run cache -- load
shell: bash

6 changes: 3 additions & 3 deletions .github/workflows/hole-punch-interop.yml
@@ -28,7 +28,7 @@ jobs:
- uses: actions/checkout@v3
- uses: ./.github/actions/run-interop-hole-punch-test
with:
s3-cache-bucket: libp2p-by-tf-aws-bootstrap
s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
worker-count: 16
75 changes: 5 additions & 70 deletions .github/workflows/perf.yml
@@ -14,9 +14,9 @@ on:
workflow_dispatch:
inputs:
push:
description: 'Push the benchmark results to the repository'
description: "Push the benchmark results to the repository"
required: false
default: 'true'
default: "true"

jobs:
perf:
@@ -27,78 +27,13 @@ jobs:
run:
shell: bash
working-directory: perf
env:
AWS_ACCESS_KEY_ID: ${{ vars.PERF_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.PERF_AWS_SECRET_ACCESS_KEY }}
TF_IN_AUTOMATION: 1
TF_INPUT: 0
steps:
- name: Checkout test-plans
uses: actions/checkout@v3
with:
repository: ${{ github.repository }}
ref: ${{ github.ref }}
- id: ssh
name: Generate SSH key
run: |
make ssh-keygen
echo "key<<EOF" >> $GITHUB_OUTPUT
while read -r line; do
echo "::add-mask::$line"
echo "$line" >> $GITHUB_OUTPUT
done < terraform/modules/short_lived/files/perf
echo "EOF" >> $GITHUB_OUTPUT
- name: Configure SSH
uses: webfactory/ssh-agent@d4b9b8ff72958532804b70bbe600ad43b36d5f2e # v0.8.0
- uses: ./.github/actions/run-perf-benchmark
with:
ssh-private-key: ${{ steps.ssh.outputs.key }}
- name: Configure git
run: |
git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com>"
git config --global user.name "${GITHUB_ACTOR}"
- name: Configure terraform
uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 # v2.0.3
- name: Init terraform
id: init
run: terraform init
working-directory: perf/terraform/configs/local
- name: Apply terraform
run: terraform apply -auto-approve
working-directory: perf/terraform/configs/local
- id: server
name: Retrieve server's IP
run: terraform output -raw server_ip
working-directory: perf/terraform/configs/local
- id: client
name: Retrieve client's IP
run: terraform output -raw client_ip
working-directory: perf/terraform/configs/local
- name: Download dependencies
run: npm ci
working-directory: perf/runner
- name: Run tests
env:
SERVER_IP: ${{ steps.server.outputs.stdout }}
CLIENT_IP: ${{ steps.client.outputs.stdout }}
run: npm run start -- --client-public-ip $CLIENT_IP --server-public-ip $SERVER_IP
working-directory: perf/runner
- name: Push
if: github.event.inputs.push == 'true'
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
git add benchmark-results.json
git commit -m "perf: update benchmark results"
git push
gh pr comment --body "See new metrics at https://observablehq.com/@libp2p-workspace/performance-dashboard?branch=$(git rev-parse HEAD)" || true
working-directory: perf/runner
- name: Archive
if: github.event.intputs.push == 'false'
uses: actions/upload-artifact@v2
with:
name: benchmark-results
path: perf/runner/benchmark-results.json
- name: Destroy terraform
if: always() && steps.init.outputs.exitcode == 0
run: terraform destroy -auto-approve
working-directory: perf/terraform/configs/local
s3-access-key-id: ${{ vars.PERF_AWS_ACCESS_KEY_ID }}
s3-secret-access-key: ${{ secrets.PERF_AWS_SECRET_ACCESS_KEY }}
8 changes: 4 additions & 4 deletions .github/workflows/transport-interop.yml
@@ -18,12 +18,12 @@ jobs:
- uses: actions/checkout@v3
- uses: ./.github/actions/run-transport-interop-test
with:
s3-cache-bucket: libp2p-by-tf-aws-bootstrap
s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
worker-count: 16
build-without-secrets:
runs-on: ubuntu-latest
runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
steps:
- uses: actions/checkout@v3
# Purposely not using secrets to replicate how forks will behave.
2 changes: 1 addition & 1 deletion README.md
@@ -1,6 +1,6 @@
# Interoperability/end to end test-plans & performance benchmarking for libp2p

[![Interop Dashboard](https://github.com/libp2p/test-plans/workflows/libp2p%20transport%20interop%20test/badge.svg?branch=master)](https://github.com/libp2p/test-plans/actions/runs/9474606542/attempts/1#summary-26104530371)
[![Interop Dashboard](https://github.com/libp2p/test-plans/workflows/libp2p%20transport%20interop%20test/badge.svg?branch=master)](https://github.com/libp2p/test-plans/actions/runs/11113730746/attempts/1#summary-30878708031)

[![Made by Protocol Labs](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai)

5 changes: 5 additions & 0 deletions funding.json
@@ -0,0 +1,5 @@
{
"opRetro": {
"projectId": "0x966804cb492e1a4bde5d781a676a44a23d69aa5dd2562fa7a4f95bb606021c8b"
}
}
36 changes: 20 additions & 16 deletions hole-punch-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
const AWS_BUCKET = process.env.AWS_BUCKET;
const scriptDir = __dirname;

import * as crypto from 'crypto';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as child_process from 'child_process';
import ignore, { Ignore } from 'ignore'
@@ -76,10 +77,14 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
if (mode == Mode.PushCache) {
console.log("Pushing cache")
try {
const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
if (res.ok) {
if (!AWS_BUCKET) {
throw new Error("AWS_BUCKET not set")
}
try {
child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
console.log("Cache already exists")
} else {
} catch (e) {
console.log("Cache doesn't exist", e)
// Read image id from image.json
const imageID = JSON.parse(fs.readFileSync(path.join(dir, 'image.json')).toString()).imageID;
console.log(`Pushing cache for ${dir}: ${imageID}`)
@@ -96,18 +101,17 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
console.log("Loading cache")
let cacheHit = false
try {
// Check if the cache exists
const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
if (res.ok) {
const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
if (loadedImageId) {
console.log(`Cache hit for ${loadedImageId}`);
fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
cacheHit = true
}
} else {
console.log("Cache not found")
if (!AWS_BUCKET) {
throw new Error("AWS_BUCKET not set")
}
const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
const archivePath = path.join(cachePath, 'archive.tar.gz')
const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
if (loadedImageId) {
console.log(`Cache hit for ${loadedImageId}`);
fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
cacheHit = true
}
} catch (e) {
console.log("Cache not found:", e)
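Pieced together from the hunks above, the new load path shells out to the AWS CLI and loads the archive from disk instead of streaming it over HTTPS with curl. A condensed TypeScript sketch of the resulting flow, assuming AWS_BUCKET, cacheKey, arch and dir are in scope as in cache.ts (error handling and logging trimmed):

import * as child_process from 'child_process';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// Condensed sketch of the new cache-load path in cache.ts; not a drop-in replacement.
function loadCachedImage(AWS_BUCKET: string | undefined, cacheKey: string, arch: string, dir: string): boolean {
    if (!AWS_BUCKET) {
        throw new Error("AWS_BUCKET not set");
    }
    // Download the archive with the AWS CLI into a temp dir, then load it into Docker.
    const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'));
    const archivePath = path.join(cachePath, 'archive.tar.gz');
    const dockerLoadedMsg = child_process.execSync(
        `aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`
    ).toString();
    // `docker image load` prints "Loaded image: <tag>" or "Loaded image ID: sha256:<id>".
    const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)?.[2];
    if (loadedImageId) {
        fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
        return true;
    }
    return false;
}
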
15 changes: 15 additions & 0 deletions perf/impl/Makefile
@@ -5,17 +5,32 @@ QUIC_GO_SUBDIRS := $(wildcard quic-go/*/.)
JS_SUBDIRS := $(wildcard js-libp2p/*/.)

all: $(RUST_SUBDIRS) $(GO_SUBDIRS) $(HTTPS_SUBDIRS) $(QUIC_GO_SUBDIRS) $(JS_SUBDIRS)

$(RUST_SUBDIRS):
$(MAKE) -C $@

$(GO_SUBDIRS):
$(MAKE) -C $@

$(HTTPS_SUBDIRS):
$(MAKE) -C $@

$(QUIC_GO_SUBDIRS):
$(MAKE) -C $@

$(JS_SUBDIRS):
$(MAKE) -C $@

go-libp2p: $(GO_SUBDIRS)

rust-libp2p: $(RUST_SUBDIRS)

https: $(HTTPS_SUBDIRS)

quic-go: $(QUIC_GO_SUBDIRS)

js-libp2p: $(JS_SUBDIRS)

clean: $(RUST_SUBDIRS:%=%clean) $(GO_SUBDIRS:%=%clean) $(HTTPS_SUBDIRS:%=%clean) $(QUIC_GO_SUBDIRS:%=%clean) $(JS_SUBDIRS:%=%clean)

%clean: