Workflow file for this run
name: selfdrive
on:
  push:
    branches:
      - master
  pull_request:
concurrency:
  group: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' && github.run_id || github.head_ref || github.ref }}-${{ github.workflow }}-${{ github.event_name }}
  cancel-in-progress: true
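# Pushes to master get a unique group (keyed on run_id), so they are never cancelled;
# PR and branch runs share a group keyed on the ref, so a new push cancels the
# in-progress run for that ref.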
env:
  PYTHONWARNINGS: error
  BASE_IMAGE: openpilot-base
  CL_BASE_IMAGE: openpilot-base-cl
  DOCKER_REGISTRY: ghcr.io/commaai
  AZURE_TOKEN: ${{ secrets.AZURE_COMMADATACI_OPENPILOTCI_TOKEN }}
  DOCKER_LOGIN: docker login ghcr.io -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }}
  BUILD: |
    DOCKER_BUILDKIT=1 docker build --pull --cache-to type=inline --cache-from $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $BASE_IMAGE:latest -f Dockerfile.openpilot_base .
  RUN: docker run --shm-size 1G -v $PWD:/tmp/openpilot -w /tmp/openpilot -e PYTHONWARNINGS=error -e FILEREADER_CACHE=1 -e PYTHONPATH=/tmp/openpilot -e NUM_JOBS -e JOB_ID -e GITHUB_ACTION -e GITHUB_REF -e GITHUB_HEAD_REF -e GITHUB_SHA -e GITHUB_REPOSITORY -e GITHUB_RUN_ID -v ~/scons_cache:/tmp/scons_cache -v ~/comma_download_cache:/tmp/comma_download_cache -v ~/openpilot_cache:/tmp/openpilot_cache $BASE_IMAGE /bin/sh -c
  BUILD_CL: |
    DOCKER_BUILDKIT=1 docker build --cache-to type=inline --cache-from $DOCKER_REGISTRY/$CL_BASE_IMAGE:latest -t $DOCKER_REGISTRY/$CL_BASE_IMAGE:latest -t $CL_BASE_IMAGE:latest -f Dockerfile.openpilot_base_cl .
  RUN_CL: docker run --shm-size 1G -v $PWD:/tmp/openpilot -w /tmp/openpilot -e PYTHONWARNINGS=error -e PYTHONPATH=/tmp/openpilot -e NUM_JOBS -e JOB_ID -e GITHUB_ACTION -e GITHUB_REF -e GITHUB_HEAD_REF -e GITHUB_SHA -e GITHUB_REPOSITORY -e GITHUB_RUN_ID -v ~/scons_cache:/tmp/scons_cache -v ~/comma_download_cache:/tmp/comma_download_cache -v ~/openpilot_cache:/tmp/openpilot_cache $CL_BASE_IMAGE /bin/sh -c
  UNIT_TEST: coverage run --append -m unittest discover
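# BUILD/BUILD_CL build the base Docker images; RUN/RUN_CL wrap a command in a container
# with the repo mounted at /tmp/openpilot and the scons/download caches bind-mounted.
# Steps expand them as ${{ env.RUN }} "<command>". UNIT_TEST appends each suite's
# results to a single coverage data file so one report can be exported at the end.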
jobs:
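  # Builds the stripped release tree with release/build_devel.sh, then compiles, tests,
  # and runs pre-commit inside that tree to verify the release packaging.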
  build_release:
    name: build release
    runs-on: ubuntu-20.04
    env:
      STRIPPED_DIR: /tmp/releasepilot
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - name: Build devel
        timeout-minutes: 1
        run: TARGET_DIR=$STRIPPED_DIR release/build_devel.sh
      - uses: ./.github/workflows/setup
      - name: Check submodules
        if: github.ref == 'refs/heads/master' && github.repository == 'commaai/openpilot'
        timeout-minutes: 1
        run: release/check-submodules.sh
      - name: Build openpilot and run checks
        timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 10 || 30) }} # allow more time when we missed the scons cache
        run: |
          cd $STRIPPED_DIR
          ${{ env.RUN }} "CI=1 python selfdrive/manager/build.py"
      - name: Run tests
        timeout-minutes: 2
        run: |
          cd $STRIPPED_DIR
          ${{ env.RUN }} "release/check-dirty.sh && \
          python -m unittest discover selfdrive/car"
      - name: pre-commit
        timeout-minutes: 3
        run: |
          cd $GITHUB_WORKSPACE
          cp .pre-commit-config.yaml $STRIPPED_DIR
          cp pyproject.toml $STRIPPED_DIR
          cp poetry.lock $STRIPPED_DIR
          cd $STRIPPED_DIR
          ${{ env.RUN }} "unset PYTHONWARNINGS && pre-commit run --all"
  build_all:
    name: build all
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - uses: ./.github/workflows/setup
      - name: Build openpilot with all flags
        timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 12 || 30) }} # allow more time when we missed the scons cache
        run: ${{ env.RUN }} "scons -j$(nproc) --extras && release/check-dirty.sh"
      - name: Cleanup scons cache and rebuild
        timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 2 || 30) }} # allow more time when we missed the scons cache
        run: |
          ${{ env.RUN }} "rm -rf /tmp/scons_cache/* && \
          scons -j$(nproc) --cache-populate"
      - name: Save scons cache
        uses: actions/cache/save@v3
        if: github.ref == 'refs/heads/master'
        with:
          path: ~/scons_cache
          key: scons-${{ env.CACHE_COMMIT_DATE }}-${{ github.sha }}
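  # Native macOS build. Homebrew, pyenv, and poetry state are cached together under the
  # macos_deps key; the scons cache is restored per-commit and re-saved only on master.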
  build_mac:
    name: build macos
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - name: Determine pre-existing Homebrew packages
        if: steps.dependency-cache.outputs.cache-hit != 'true'
        run: |
          echo 'EXISTING_CELLAR<<EOF' >> $GITHUB_ENV
          brew list --formula -1 >> $GITHUB_ENV
          echo 'EOF' >> $GITHUB_ENV
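      # The heredoc-style <<EOF syntax above writes a multi-line value (the list of brew
      # formulae already on the runner) into GITHUB_ENV, so later steps can read it as
      # $EXISTING_CELLAR.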
      - name: Restore scons cache
        id: scons-restore-cache
        uses: actions/cache/restore@v3
        with:
          path: /tmp/scons_cache
          key: macos_scons-${{ github.sha }}
          restore-keys: macos_scons-
      - name: Cache dependencies
        id: dependency-cache
        uses: actions/cache@v3
        with:
          path: |
            .env
            .venv
            ~/github_brew_cache_entries.txt
            ~/.pyenv
            ~/Library/Caches/pypoetry
            /usr/local/Cellar
            /usr/local/opt
            /usr/local/Caskroom/gcc-arm-*
            /opt/homebrew/Cellar
            /opt/homebrew/opt
            /opt/homebrew/Caskroom/gcc-arm-*
            /Applications/ArmGNUToolchain/*/*/*
          key: macos_deps-${{ hashFiles('tools/mac_setup.sh', 'tools/install_python_dependencies.sh', 'poetry.lock') }}
          restore-keys: macos_deps-
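      # ~/github_brew_cache_entries.txt is written by the "Pre Cache" step at the end of
      # this job; on a cache hit it lists the formulae that have to be re-linked below.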
      - name: Brew link restored dependencies
        run: |
          if [ -f ~/github_brew_cache_entries.txt ]; then
            brew link --force --overwrite $(cat ~/github_brew_cache_entries.txt) # `--force` for keg-only packages
            if [ -d /Applications/ArmGNUToolchain ]; then # link gcc-arm-embedded manually
              GCC_TOOLCHAIN="$(echo /Applications/ArmGNUToolchain/**/**/bin)"
              echo "$GCC_TOOLCHAIN" >> $GITHUB_PATH
            fi
          else
            echo "Cache entries not found"
          fi
      - name: Install dependencies
        if: steps.dependency-cache.outputs.cache-hit != 'true'
        run: ./tools/mac_setup.sh
        env:
          # package install has DeprecationWarnings
          PYTHONWARNINGS: default
      - name: Build openpilot
        run: |
          eval "$(pyenv init --path)"
          poetry run scons -j$(nproc)
      - name: Run tests
        run: |
          eval "$(pyenv init --path)"
          poetry run tools/plotjuggler/test_plotjuggler.py
      - name: Pre Cache - Cleanup scons cache
        if: github.ref == 'refs/heads/master'
        run: |
          rm -rf /tmp/scons_cache/*
          eval "$(pyenv init --path)"
          poetry run scons -j$(nproc) --cache-populate
      - name: Save scons cache
        id: scons-save-cache
        uses: actions/cache/save@v3
        if: github.ref == 'refs/heads/master'
        with:
          path: /tmp/scons_cache
          key: macos_scons-${{ github.sha }}
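      # comm -12 keeps formulae that were already on the runner before this job ran
      # (removed so they are not cached, except zstd/lz4/xz which the cache step itself
      # needs); comm -13 keeps only the newly installed formulae and records them for
      # the relink step above.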
      - name: Pre Cache - Remove pre-existing Homebrew packages
        if: steps.dependency-cache.outputs.cache-hit != 'true'
        run: |
          new_cellar=$(brew list --formula -1)
          exceptions="zstd lz4 xz" # caching step needs zstd
          comm -12 <(echo "$EXISTING_CELLAR") <(echo "$new_cellar") | while read pkg; do
            if [[ " $exceptions " != *" $pkg "* ]]; then
              rm -rf "$(brew --cellar)/$pkg"
            fi
          done
          comm -13 <(echo "$EXISTING_CELLAR") <(echo "$new_cellar") | tee ~/github_brew_cache_entries.txt
          # .fseventsd directory causes permission errors in the dep caching step
          # it's used by the system to observe changes within the directory - the toolchain works without it, so just remove it
          sudo rm -rf /Applications/ArmGNUToolchain/*/*/.fseventsd
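  # Only on pushes to master in commaai/openpilot: rebuilds the base and OpenCL images
  # and pushes both the :latest tag and a tag pinned to the commit SHA.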
  docker_push:
    name: docker push
    runs-on: ubuntu-20.04
    if: github.ref == 'refs/heads/master' && github.event_name != 'pull_request' && github.repository == 'commaai/openpilot'
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - name: Build Docker image
        run: eval "$BUILD"
      - name: Push to container registry
        run: |
          $DOCKER_LOGIN
          docker push $DOCKER_REGISTRY/$BASE_IMAGE:latest
          docker tag $DOCKER_REGISTRY/$BASE_IMAGE:latest $DOCKER_REGISTRY/$BASE_IMAGE:$GITHUB_SHA
          docker push $DOCKER_REGISTRY/$BASE_IMAGE:$GITHUB_SHA
      - name: Build CL Docker image
        run: eval "$BUILD_CL"
      - name: Push to container registry
        run: |
          $DOCKER_LOGIN
          docker push $DOCKER_REGISTRY/$CL_BASE_IMAGE:latest
          docker tag $DOCKER_REGISTRY/$CL_BASE_IMAGE:latest $DOCKER_REGISTRY/$CL_BASE_IMAGE:$GITHUB_SHA
          docker push $DOCKER_REGISTRY/$CL_BASE_IMAGE:$GITHUB_SHA
  static_analysis:
    name: static analysis
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - name: Build Docker image
        run: eval "$BUILD"
      - name: pre-commit
        timeout-minutes: 4
        run: ${{ env.RUN }} "unset PYTHONWARNINGS && pre-commit run --all"
  valgrind:
    name: valgrind
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - uses: ./.github/workflows/setup
      - name: Build openpilot
        run: ${{ env.RUN }} "scons -j$(nproc)"
      - name: Run valgrind
        timeout-minutes: 1
        run: |
          ${{ env.RUN }} "python selfdrive/test/test_valgrind_replay.py"
      - name: Print logs
        if: always()
        run: cat selfdrive/test/valgrind_logs.txt
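  # Runs the Python and compiled unit test suites inside the base container; coverage
  # from every suite is appended into one data file and exported with "coverage xml"
  # for the Codecov upload.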
  unit_tests:
    name: unit tests
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - uses: ./.github/workflows/setup
      - name: Build openpilot
        timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 10 || 30) }} # allow more time when we missed the scons cache
        run: ${{ env.RUN }} "scons -j$(nproc)"
      - name: Run unit tests
        timeout-minutes: 40
        run: |
          ${{ env.RUN }} "export SKIP_LONG_TESTS=1 && \
          $UNIT_TEST common && \
          $UNIT_TEST opendbc/can && \
          $UNIT_TEST selfdrive/boardd && \
          $UNIT_TEST selfdrive/controls && \
          $UNIT_TEST selfdrive/monitoring && \
          $UNIT_TEST system/loggerd && \
          $UNIT_TEST selfdrive/car && \
          $UNIT_TEST selfdrive/locationd && \
          $UNIT_TEST selfdrive/test/longitudinal_maneuvers && \
          $UNIT_TEST system/tests && \
          $UNIT_TEST system/ubloxd && \
          selfdrive/locationd/test/_test_locationd_lib.py && \
          ./system/ubloxd/tests/test_glonass_runner && \
          $UNIT_TEST selfdrive/athena && \
          $UNIT_TEST selfdrive/thermald && \
          $UNIT_TEST system/hardware/tici && \
          $UNIT_TEST tools/lib/tests && \
          ./selfdrive/ui/tests/create_test_translations.sh && \
          QT_QPA_PLATFORM=offscreen ./selfdrive/ui/tests/test_translations && \
          ./selfdrive/ui/tests/test_translations.py && \
          ./common/tests/test_util && \
          ./common/tests/test_ratekeeper && \
          ./common/tests/test_swaglog && \
          ./selfdrive/boardd/tests/test_boardd_usbprotocol && \
          ./system/loggerd/tests/test_logger && \
          ./system/proclogd/tests/test_proclog && \
          ./tools/replay/tests/test_replay && \
          ./tools/cabana/tests/test_cabana && \
          ./system/camerad/test/ae_gray_test && \
          ./selfdrive/test/process_replay/test_fuzzy.py && \
          coverage xml"
      - name: "Upload coverage to Codecov"
        uses: codecov/codecov-action@v3
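  # Replays recorded routes through the openpilot processes and diffs the output
  # against the reference commit in selfdrive/test/process_replay/ref_commit; the
  # route download cache is keyed on that file and on this workflow file.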
  process_replay:
    name: process replay
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - uses: ./.github/workflows/setup
      - name: Cache test routes
        id: dependency-cache
        uses: actions/cache@v3
        with:
          path: ~/comma_download_cache
          key: proc-replay-${{ hashFiles('.github/workflows/selfdrive_tests.yaml', 'selfdrive/test/process_replay/ref_commit') }}
      - name: Build openpilot
        run: |
          ${{ env.RUN }} "scons -j$(nproc)"
      - name: Run replay
        timeout-minutes: 30
        run: |
          ${{ env.RUN }} "CI=1 coverage run selfdrive/test/process_replay/test_processes.py -j$(nproc) && \
          coverage xml"
      - name: Print diff
        id: print-diff
        if: always()
        run: cat selfdrive/test/process_replay/diff.txt
      - uses: actions/upload-artifact@v2
        if: always()
        continue-on-error: true
        with:
          name: process_replay_diff.txt
          path: selfdrive/test/process_replay/diff.txt
      - name: Upload reference logs
        if: ${{ failure() && steps.print-diff.outcome == 'success' && github.repository == 'commaai/openpilot' && env.AZURE_TOKEN != '' }}
        run: |
          ${{ env.RUN }} "CI=1 AZURE_TOKEN='$AZURE_TOKEN' python selfdrive/test/process_replay/test_processes.py -j$(nproc) --upload-only"
      - name: "Upload coverage to Codecov"
        uses: codecov/codecov-action@v3
  test_modeld:
    name: model tests
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - uses: ./.github/workflows/setup
      - name: Build base Docker image
        run: eval "$BUILD"
      - name: Build Docker image
        # Sim docker is needed to get the OpenCL drivers
        run: eval "$BUILD_CL"
      - name: Build openpilot
        run: |
          ${{ env.RUN }} "scons -j$(nproc)"
      # PYTHONWARNINGS triggers a SyntaxError in onnxruntime
      - name: Run model replay with ONNX
        timeout-minutes: 2
        run: |
          ${{ env.RUN_CL }} "unset PYTHONWARNINGS && \
          ONNXCPU=1 CI=1 NO_NAV=1 coverage run selfdrive/test/process_replay/model_replay.py && \
          coverage xml"
      - name: Run unit tests
        timeout-minutes: 4
        run: |
          ${{ env.RUN_CL }} "unset PYTHONWARNINGS && \
          $UNIT_TEST selfdrive/modeld && \
          coverage xml"
      - name: "Upload coverage to Codecov"
        uses: codecov/codecov-action@v3
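  # Car-model tests are sharded across 5 parallel matrix jobs: NUM_JOBS and JOB_ID are
  # passed into the container (see the RUN env above) so each job runs its shard of
  # selfdrive/car/tests/test_models.py.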
  test_cars:
    name: cars
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        job: [0, 1, 2, 3, 4]
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      - uses: ./.github/workflows/setup
      - name: Cache test routes
        id: dependency-cache
        uses: actions/cache@v3
        with:
          path: ~/comma_download_cache
          key: car_models-${{ hashFiles('selfdrive/car/tests/test_models.py', 'selfdrive/car/tests/routes.py') }}-${{ matrix.job }}
      - name: Build openpilot
        run: ${{ env.RUN }} "scons -j$(nproc)"
      - name: Test car models
        timeout-minutes: 25
        run: |
          ${{ env.RUN }} "pytest --cov --cov-report=xml -n auto --dist=loadscope selfdrive/car/tests/test_models.py && \
          chmod -R 777 /tmp/comma_download_cache"
        env:
          NUM_JOBS: 5
          JOB_ID: ${{ matrix.job }}
      - name: "Upload coverage to Codecov"
        uses: codecov/codecov-action@v3
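  # PR-only: checks out the PR base to dump the current car info, re-checks out the PR
  # head, prints the car docs diff, and then creates, updates, or deletes the PR
  # comment depending on whether the diff is empty.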
  car_docs_diff:
    name: PR comments
    runs-on: ubuntu-20.04
    if: github.event_name == 'pull_request'
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
          ref: ${{ github.event.pull_request.base.ref }}
      - uses: ./.github/workflows/setup
      - name: Get base car info
        run: |
          ${{ env.RUN }} "scons -j$(nproc) && python selfdrive/debug/dump_car_info.py --path /tmp/openpilot_cache/base_car_info"
          sudo chown -R $USER:$USER ${{ github.workspace }}
      - uses: actions/checkout@v3
        with:
          submodules: true
      - name: Save car docs diff
        id: save_diff
        run: |
          ${{ env.RUN }} "scons -j$(nproc)"
          output=$(${{ env.RUN }} "python selfdrive/debug/print_docs_diff.py --path /tmp/openpilot_cache/base_car_info")
          output="${output//$'\n'/'%0A'}"
          echo "::set-output name=diff::$output"
      - name: Find comment
        if: ${{ env.AZURE_TOKEN != '' }}
        uses: peter-evans/find-comment@1769778a0c5bd330272d749d12c036d65e70d39d
        id: fc
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body-includes: This PR makes changes to
      - name: Update comment
        if: ${{ steps.save_diff.outputs.diff != '' && env.AZURE_TOKEN != '' }}
        uses: peter-evans/create-or-update-comment@b95e16d2859ad843a14218d1028da5b2c4cbc4b4
        with:
          comment-id: ${{ steps.fc.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
          body: "${{ steps.save_diff.outputs.diff }}"
          edit-mode: replace
      - name: Delete comment
        if: ${{ steps.fc.outputs.comment-id != '' && steps.save_diff.outputs.diff == '' && env.AZURE_TOKEN != '' }}
        uses: actions/github-script@v6
        with:
          script: |
            github.rest.issues.deleteComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              comment_id: ${{ steps.fc.outputs.comment-id }}
            })