From ceabb56c64ebeda305ee20e39c7f5860998580aa Mon Sep 17 00:00:00 2001 From: Michael Kubacki Date: Wed, 16 Oct 2024 15:07:53 -0400 Subject: [PATCH] Define Python version once Defines the Python version in a single location `Version.njk` and syncs that version to other files. Sets the Python version to 3.12 (although 3.13 was recently released) for continued compatibility with the version already used in Windows pipelines. Signed-off-by: Michael Kubacki --- .sync/Files.yml | 19 + .sync/Version.njk | 3 + .../submodule-release-updater-action.yml | 441 ++++++++++++++++++ .sync/azure_pipelines/SetupPythonPreReqs.yml | 50 ++ .sync/containers/Ubuntu-22/Dockerfile | 225 +++++++++ .sync/workflows/leaf/codeql-platform.yml | 12 +- .sync/workflows/leaf/codeql.yml | 12 +- Containers/Ubuntu-22/Dockerfile | 6 + 8 files changed, 764 insertions(+), 4 deletions(-) create mode 100644 .sync/actions/submodule-release-updater-action.yml create mode 100644 .sync/azure_pipelines/SetupPythonPreReqs.yml create mode 100644 .sync/containers/Ubuntu-22/Dockerfile diff --git a/.sync/Files.yml b/.sync/Files.yml index 7bda3a94..0e48ba7b 100644 --- a/.sync/Files.yml +++ b/.sync/Files.yml @@ -109,6 +109,14 @@ group: repos: | microsoft/mu_feature_mm_supv +# Containers - Dockerfiles + - files: + - source: .sync/containers/Ubuntu-22/Dockerfile + dest: Containers/Ubuntu-22/Dockerfile + template: true + repos: | + microsoft/mu_devops + # dependabot - Track GitHub Actions and PIP Modules - files: - source: .sync/dependabot/actions-pip.yml @@ -185,6 +193,14 @@ group: microsoft/mu_tiano_platforms microsoft/mu_tiano_plus +# GitHub Actions - In the Local Repo + - files: + - source: .sync/actions/submodule-release-updater-action.yml + dest: .github/actions/submodule-release-updater/action.yml + template: true + repos: | + microsoft/mu_devops + # GitHub Templates - Contributing - files: - source: .sync/github_templates/contributing/CONTRIBUTING.md @@ -671,6 +687,9 @@ group: - source: .sync/azure_pipelines/RustSetupSteps.yml dest: Steps/RustSetupSteps.yml template: true + - source: .sync/azure_pipelines/SetupPythonPreReqs.yml + dest: Steps/SetupPythonPreReqs.yml + template: true repos: | microsoft/mu_devops diff --git a/.sync/Version.njk b/.sync/Version.njk index ee82be8e..aaa69c8c 100644 --- a/.sync/Version.njk +++ b/.sync/Version.njk @@ -39,6 +39,9 @@ {# The version of the ubuntu-22-build container to use. #} {% set linux_build_container = "ghcr.io/microsoft/mu_devops/ubuntu-22-build:d1e4ff1" %} +{# The Python version to use. #} +{% set python_version = "3.12" %} + {# The Rust toolchain version to use. #} {% set rust_toolchain = "1.80.0" %} diff --git a/.sync/actions/submodule-release-updater-action.yml b/.sync/actions/submodule-release-updater-action.yml new file mode 100644 index 00000000..ceb135bd --- /dev/null +++ b/.sync/actions/submodule-release-updater-action.yml @@ -0,0 +1,441 @@ +# A GitHub action to create pull requests for new releases of submodules in a repository. +# +# NOTE: This file is automatically synchronized from Mu DevOps. Update the original file there +# instead of the file in this repo. +# +# - Mu DevOps Repo: https://github.com/microsoft/mu_devops +# - File Sync Settings: https://github.com/microsoft/mu_devops/blob/main/.sync/Files.yml +# +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# + +{% import '../Version.njk' as sync_version -%} + +{% raw %} +# NOTE: Because this pipeline YAML file is a Nunjucks template, the pipeline syntax of `{{}}` will conflict with +# Nunjucks style. Surround pipeline YAML code that uses `{{}}` within `raw` and `endraw` tags +# to allow it to pass through Nunjucks processing. +{% endraw %} +name: 'Submodule Release Updater' + +description: 'Checks if new releases are available for submodules and creates pull requests to update them.' + +inputs: + GH_PAT: + description: 'GitHub Personal Access Token (PAT) used to access repos and create pull requests.' + required: true + GH_USER: + description: 'GitHub username used to create pull requests.' + required: true + GIT_EMAIL: + description: 'Email address used for authoring Git commits.' + required: true + GIT_NAME: + description: 'Name used for authoring Git commits.' + required: true + +{% raw %} +outputs: + submodules-updated: + description: "Number of submodules updated." + value: ${{ steps.check-for-submodule-updates.outputs.submodule-update-count }} +{% endraw %} +runs: + using: "composite" + + steps: + - name: Set up Python Environment + uses: actions/setup-python@v5 + with: + python-version: {{ sync_version.python_version }} + +{% raw %} + - name: Install PIP Modules + shell: bash + run: | + python -m pip install --upgrade pip + pip install GitPython requests semantic-version + + - name: Check for Submodule Updates + id: check-for-submodule-updates + shell: python + env: + GITHUB_TOKEN: "${{ inputs.GH_PAT }}" + GITHUB_USER: "${{ inputs.GH_USER }}" + GIT_EMAIL_ADDRESS: "${{ inputs.GIT_EMAIL }}" + GIT_NAME: "${{ inputs.GIT_NAME }}" + PR_LABELS: "${{ inputs.PR_LABELS }}" + run: | + import git + import json + import os + import re + import requests + import semantic_version + import sys + from textwrap import dedent + from urllib.parse import urlparse + + # Ignore flake8 linter errors for lines that are too long (E501) + # flake8: noqa: E501 + + AUTHORIZED_ORGANIZATIONS = "microsoft" # Assume "microsoft" org right now + GH_REPO = os.environ["GITHUB_REPOSITORY"] + GH_TOKEN = os.environ["GITHUB_TOKEN"] + GH_USER = os.environ["GITHUB_USER"] + GIT_EMAIL_ADDRESS = os.environ["GIT_EMAIL_ADDRESS"] + GIT_NAME = os.environ["GIT_NAME"] + PR_LABELS = ['type:dependencies', 'type:submodules'] + WORKSPACE_DIR_NAME = "local_clone" + WORKSPACE_PATH = os.environ["GITHUB_WORKSPACE"] + + + def _ver_without_prefix(version: str) -> str: + if len(version) == 0: + return "" + + ver_prefix = version.strip().lower()[0] + if ver_prefix == "v": + return version[len("v"):] + + return version + + + # GitHub REST API request and response documentation is available here: + # https://docs.github.com/en/rest?apiVersion=2022-11-28 + + remote = f"https://{GH_USER}:{GH_TOKEN}@github.com/{GH_REPO}.git" + repo_owner, repo_name = GH_REPO.split('/') + + headers = { + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": "2022-11-28" + } + submodule_headers = headers + headers["Authorization"] = f"Bearer {GH_TOKEN}" + + # Clone the repo using local creds + workspace_abs_path = os.path.join(WORKSPACE_PATH, WORKSPACE_DIR_NAME) + repo = git.Repo.clone_from(remote, workspace_abs_path) + repo.config_writer().set_value('user', 'name', GIT_NAME).release() + repo.config_writer().set_value('user', 'email', GIT_EMAIL_ADDRESS).release() + + base_branch = None + submodules = repo.submodules + + submodule_update_count = 0 + for submodule in submodules: + # The initial "querying" 
part of this flow relies upon the GitHub REST + # API which is must faster to query than initializing submodules + # locally + parsed_url = urlparse(submodule.url) + + # Only support GitHub repos for now + if "github" not in parsed_url.hostname: + print("::notice title=GitHub Host Not Found!::This workflow only " + "supports GitHub hosted repos!") + continue + + path = parsed_url.path.strip('/') + submod_user, submod_repo = os.path.split(path) + + submod_abs_path = os.path.join(workspace_abs_path, submodule.path) + submod_repo = submod_repo[:-len(".git")] if submod_repo.endswith(".git") else submod_repo + + authorized_orgs = AUTHORIZED_ORGANIZATIONS.split(',') + authorized_orgs = [s.strip() for s in authorized_orgs] + if any(org == submod_user for org in authorized_orgs): + # Use an auth token if possible to increase the access rate limit + submodule_headers["Authorization"] = f"Bearer {GH_TOKEN}" + + # Get the latest release for the submodule + response = requests.get( + f"https://api.github.com/repos/" + f"{submod_user}/{submod_repo}/releases/latest", + headers=submodule_headers) + if response.status_code == 200: + tag = response.json()["tag_name"] + else: + print(f"::notice title=Submodule Release Not Found!::Failed to " + f"query releases for {submod_repo}. Skipping!") + continue + + actual_available_tag = tag + available_tag = _ver_without_prefix(tag) + + print(f"::notice title=Available Submodule Tag Found!::Found {actual_available_tag} " + f"as the latest release tag for {submod_repo}.") + + response = requests.get( + f"https://api.github.com/repos/" + f"{submod_user}/{submod_repo}/git/refs/tags/{actual_available_tag}", + headers=submodule_headers) + if response.status_code != 200: + print(f"::error title=Commit For Release Tag Not Found!::Skipping " + f"submodule {submod_repo}.") + continue + + available_tag_commit_hash = response.json()["object"]["sha"] + + print(f"::notice title=New Release Commit Found!::Found " + f"{available_tag_commit_hash} as the commit for {actual_available_tag}.") + + try: + available_sem_ver = semantic_version.Version(available_tag) + print("::notice title=Semantic Version Tag!::The available tag is " + "recognized as a semantic version.") + except ValueError: + # Only semantic versioned tags are currently supported + print("::notice title=Non-Semantic Version Tag!::Skipping tag not " + "recognized as a semantic version.") + continue + + # Get the current submodule commit hash + response = requests.get( + f"https://api.github.com/repos/" + f"{repo_owner}/{repo_name}/contents/{submodule.path}", + headers=headers) + if response.status_code != 200: + print(f"::error title=Submodule Info Not Found!::Failed to find " + f"submodule info for {submod_repo}!") + continue + + current_tag_commit_hash = response.json()["sha"] + + print(f"::notice title=Current Submodule Commit Found!::Found " + f"{current_tag_commit_hash} as the commit for {submod_repo}.") + + # Get all of the submodule tags + response = requests.get( + f"https://api.github.com/repos/" + f"{submod_user}/{submod_repo}/git/refs/tags", + headers=submodule_headers) + if response.status_code != 200: + print(f"::error title=Failed to Get Submodule Tags!::Failed to get " + f"tags for {submod_repo}!") + continue + + # Find the most recent tag that contains the current commit hash + print(f"::notice title=Initializing Submodule...::Initializing " + f"{submod_repo}.") + submodule.update(init=True, recursive=False) + submodule_repo = git.Repo(submod_abs_path) + print(f"::notice title=Initialization 
Complete!::Done initializing " + f"{submod_repo}.") + + print(f"::notice title=Searching for Latest Tag Used!::Finding most " + f"recent tag used in {submod_repo}...") + actual_current_tag = None + actual_current_tag_committed_datetime = None + for tag in submodule_repo.tags: + tag_commit = submodule_repo.commit(tag.commit) + if current_tag_commit_hash in \ + [commit.hexsha for commit in tag_commit.iter_items(repo=submodule_repo, rev=tag.name)]: + # Find the "nearest" tag that contains the commit + if not actual_current_tag_committed_datetime or \ + (tag.commit.committed_datetime < actual_current_tag_committed_datetime): + actual_current_tag = tag.name + actual_current_tag_committed_datetime = tag.commit.committed_datetime + print("::notice title=Searching for Latest Tag Used!::Done!") + + if not actual_current_tag: + print(f"::notice title=Tag Not Found For Submodule Commit!::Could " + f"not find tag for commit {current_tag_commit_hash}. Skipping submodule.") + continue + + current_tag = _ver_without_prefix(actual_current_tag) + + print(f"::notice title=Current Submodule Tag Found!::{submod_repo} is " + f"currently on tag ({current_tag}).") + + try: + current_sem_ver = semantic_version.Version(current_tag) + print(f"::notice title=Semantic Version Tag!::{current_tag} is " + f"recognized as a semantic version.") + except ValueError: + # Only semantic versioned tags are currently supported + print(f"::notice title=Non-Semantic Version Tag!::Skipping tag " + f"({current_tag}) since it is not recognized as a semantic version.") + continue + + if available_sem_ver > current_sem_ver: + print(f"::notice title=Version Update Ready!::{submod_repo} can be " + f"updated from {current_tag} to {available_tag}.") + + response = requests.get( + f"https://api.github.com/repos/" + f"{submod_user}/{submod_repo}/compare/{actual_current_tag}...{actual_available_tag}", + headers=submodule_headers) + if response.status_code == 200: + print("::notice title=Commit Info Found!::Found commit delta " + "for the tag update.") + else: + # Commits should be available for existing tags + print("::error title=Commit Info Not Found!::Could not find " + "commit delta for the tag update}!") + continue + + tag_comp_response = response.json() + + if "commits" not in tag_comp_response: + # Not necessarily an error but no need to gather commit info + # if there are no commits + print(f"::notice title=Commits Not Found!::No new commits " + f"found in the new tag {actual_available_tag}!") + continue + + commit_summary = dedent(f""" + Introduces {tag_comp_response["total_commits"]} new commits in [{submodule.name}]({submodule.url}). + +
+                            <details>
+                            <summary>Commits</summary>
+                            """)
+
+                        for commit in tag_comp_response["commits"]:
+                            commit_message = commit["commit"]["message"]
+                            commit_title = commit_message.split("\n")[0]
+
+                            # Since the PR is in a different repo, replace a potential
+                            # PR number in the commit title with an actual link to
+                            # the PR in that repo.
+                            pr_num_pattern = r"#(?P<pr_num>\d+)"
+                            pr_url_template = f"<a href=\"https://github.com/{submod_user}/{submod_repo}/pull/\\g<pr_num>\">#\\g<pr_num></a>"
+
+                            commit_title = re.sub(
+                                pr_num_pattern,
+                                pr_url_template,
+                                commit_title)
+
+                            commit_summary += f"&nbsp;&nbsp;• <a href=\"https://github.com/{submod_user}/{submod_repo}/commit/{commit['sha']}\">{commit['sha'][:6]}</a> {commit_title}<br>\n"
+
+                        commit_summary += dedent("""
+                            </details>
+                            <br>
+ """).strip() + + pr_body = dedent(f""" + Bumps {submodule.name} from `{current_tag}` to `{available_tag}` + + {commit_summary} + """).strip().strip("\n") + + pr_body += f"\n\nSigned-off-by: {GIT_NAME} <{GIT_EMAIL_ADDRESS}>" + + branch_name = f"projectmubot/submodules/{submod_repo}/{available_tag}" + + # Check if this update already exists on the remote + response = requests.get( + f"https://api.github.com/repos/" + f"{repo_owner}/{repo_name}/branches/{branch_name}", + headers=headers) + if response.status_code == 200: + print("::notice title=Update Already Exists!::This update " + "has already been pushed before. Skipping it.") + continue + + # Todo: Close PRs that already exist that update to an earlier + # version of a submodule release. + + # Get repo default branch + if not base_branch: + response = requests.get( + f"https://api.github.com/repos/" + f"{repo_owner}/{repo_name}", + headers=headers) + if response.status_code == 200: + base_branch = response.json()["default_branch"] + else: + # Commits should be available for existing tags + print(f"::error title=Default Branch Not Found!::Could " + f"not find the default branch for {repo_name}. Exiting.") + sys.exit(1) + + print(f"::notice title=Default Branch Found!::Default branch " + f"for {repo_name} is {base_branch}.") + + # Checkout the default branch + try: + repo.git.checkout(base_branch) + except git.exc.GitCommandError: + try: + repo.git.checkout('-b', base_branch) + except git.exc.GitCommandError: + print(f"::error title=Git Branch Checkout Failed!::" + f"Could not checkout {base_branch}. Exiting.") + sys.exit(1) + + # Create a local git branch from the default branch + try: + new_branch = repo.create_head(branch_name) + except OSError: + print(f"::error title=Failed to Create Branch!::Failed to " + f"create the branch needed to update PR. Skipping {submod_repo}.") + continue + repo.head.reference = new_branch + + # In the workflow, we assume the "origin" remote is available + origin = repo.remote(name="origin") + submodule_repo.remotes.origin.fetch() + + # Update the submodule to the release tag commit + # This has been shown to fail to apply on the first try so + # try up to 3 times + for i in range(3): + submodule_repo.git.reset('--hard', available_tag_commit_hash) + if submodule_repo.head.commit.hexsha == available_tag_commit_hash: + break + else: + print(f"::error title=Failed to Checkout New Commit!::Failed " + f"to checkout {available_tag_commit_hash}. Skipping.") + continue + + # Commit the change to the local branch + repo.git.add(submodule.path) + commit_message = pr_body + repo.index.commit(commit_message) + + # Push the branch + origin.push(new_branch) + + pr_payload = { + "title": f"Bump {submodule.name} from {current_tag} to {available_tag}", + "body": pr_body.replace("'", '"'), + "base": base_branch, + "head": branch_name, + } + + # Create the PR + response = requests.post( + f"https://api.github.com/repos/" + f"{repo_owner}/{repo_name}/pulls", + json=pr_payload, + headers=headers) + if response.status_code != 201: + print("::error title=Failed to Create PR!::Failed to " + "create the PR. 
Exiting.") + sys.exit(1) + + pr_number = response.json()["number"] + pr_url = response.json()["html_url"] + submodule_update_count += 1 + print(f"::notice title=PR Created!::{pr_url}") + + if PR_LABELS: + print(f"::notice title=Adding PR Labels::Adding labels to PR {pr_number}...") + + # Add labels to the PR + response = requests.post( + f"https://api.github.com/repos/" + f"{repo_owner}/{repo_name}/issues/{pr_number}/labels", + json=PR_LABELS, + headers=headers) + if response.status_code != 200: + print(f"::error title=Failed to Add Labels!::Could not " + f"add labels to PR {pr_number}.") + sys.exit(1) + + with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: + print(f'submodule-update-count={submodule_update_count}', file=fh) +{% endraw %} diff --git a/.sync/azure_pipelines/SetupPythonPreReqs.yml b/.sync/azure_pipelines/SetupPythonPreReqs.yml new file mode 100644 index 00000000..af963c5f --- /dev/null +++ b/.sync/azure_pipelines/SetupPythonPreReqs.yml @@ -0,0 +1,50 @@ +## @file +# Azure Pipelines step to setup Python pre-requisites. +# +# NOTE: This file is automatically synchronized from Mu DevOps. Update the original file there +# instead of the file in this repo. +# +# - Mu DevOps Repo: https://github.com/microsoft/mu_devops +# - File Sync Settings: https://github.com/microsoft/mu_devops/blob/main/.sync/Files.yml +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +{% import '../Version.njk' as sync_version -%} + +{% raw %} +# NOTE: Because this pipeline YAML file is a Nunjucks template, the pipeline syntax of `{{}}` will conflict with +# Nunjucks style. Surround pipeline YAML code that uses `{{}}` within `raw` and `endraw` tags +# to allow it to pass through Nunjucks processing. + +parameters: +- name: install_pip_modules + displayName: Install PIP Modules + type: boolean + default: true +- name: install_python + displayName: Install Python + type: boolean + default: true +- name: pip_requirement_files + displayName: Pip Requirement Files + type: string + default: -r pip-requirements.txt + +steps: + +- ${{ if eq(parameters.install_python, true) }}: + - task: UsePythonVersion@0 + inputs:{% endraw %} + versionSpec: {{ sync_version.python_version }} + architecture: x64 + +{% raw %}- ${{ if eq(parameters.install_pip_modules, true) }}: + - script: python -m pip install --upgrade pip setuptools wheel + displayName: Install Wheel and SetupTools + condition: succeeded() + + - script: pip install ${{ parameters.pip_requirement_files }} --upgrade + displayName: Install and Upgrade pip Modules + condition: succeeded(){% endraw %} diff --git a/.sync/containers/Ubuntu-22/Dockerfile b/.sync/containers/Ubuntu-22/Dockerfile new file mode 100644 index 00000000..1ac19bb2 --- /dev/null +++ b/.sync/containers/Ubuntu-22/Dockerfile @@ -0,0 +1,225 @@ +# +# Dockerfile for building/testing Project MU with GCC. +# +# NOTE: This file is automatically synchronized from Mu DevOps. Update the original file there +# instead of the file in this repo. +# +# - Mu DevOps Repo: https://github.com/microsoft/mu_devops +# - File Sync Settings: https://github.com/microsoft/mu_devops/blob/main/.sync/Files.yml +# +# Copyright (c) Microsoft Corporation +# SPDX-License-Identifier: BSD-2-Clause-Patent +# + +{% import '../../Version.njk' as sync_version -%} + +{% raw %} +# NOTE: Because this pipeline YAML file is a Nunjucks template, the pipeline syntax of `{{}}` will conflict with +# Nunjucks style. 
Surround pipeline YAML code that uses `{{}}` within `raw` and `endraw` tags +# to allow it to pass through Nunjucks processing. + +# +# The build container layer is intended for pipeline use and is the minimal set +# of tools required for building Project MU. +# +FROM ubuntu:22.04 AS build + +# Versions +ARG GCC_MAJOR_VERSION=12 +ARG NASM_VERSION=2.16.01 +ARG CSPELL_VERSION=5.20.0 +ARG MARKDOWNLINT_VERSION=0.32.2 + +# Visit this NuGet package version page to see the latest version available +# https://dev.azure.com/projectmu/acpica/_artifacts/feed/mu_iasl/NuGet/edk2-acpica-iasl/versions +ARG IASL_VERSION=20210105.0.6 + +# Set environment variable to avoid interaction. +ENV DEBIAN_FRONTEND=noninteractive +ENV TZ=UTC + +# Work-around for azure pipelines adding the vsts_azpcontainer user, but not adding +# the .local/bin directory to the path which will be used by pip. +ENV PATH $PATH:/home/vsts_azpcontainer/.local/bin + +RUN apt-get update && \ + apt-get install --yes --no-install-recommends \ + bison \ + ca-certificates \ + curl \ + flex \ + git \ + lcov \ + jq \ + m4 \ + make \ + mono-complete \ + nasm \ + npm \ + python3 \ + python3-pip \ + python3-venv \ + software-properties-common \ + sudo \ + unzip \ + uuid-dev \ + wget \ + && \ + apt-get install --yes --no-install-recommends \ + g++-${GCC_MAJOR_VERSION} gcc-${GCC_MAJOR_VERSION} \ + gcc-${GCC_MAJOR_VERSION}-x86-64-linux-gnux32 \ + gcc-${GCC_MAJOR_VERSION}-aarch64-linux-gnu \ + gcc-${GCC_MAJOR_VERSION}-riscv64-linux-gnu \ + gcc-${GCC_MAJOR_VERSION}-arm-linux-gnueabi \ + gcc-${GCC_MAJOR_VERSION}-arm-linux-gnueabihf \ + && \ + apt-get upgrade -y && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +ENV GCC5_AARCH64_PREFIX /usr/bin/aarch64-linux-gnu- +ENV GCC5_ARM_PREFIX /usr/bin/arm-linux-gnueabi- +ENV GCC5_RISCV64_PREFIX /usr/bin/riscv64-linux-gnu- + +RUN pip install pip lcov_cobertura --upgrade + +# RUN npm install -g npm +RUN npm install -g \ + cspell@${CSPELL_VERSION} \ + markdownlint-cli@${MARKDOWNLINT_VERSION} + +RUN wget -O - https://www.nasm.us/pub/nasm/releasebuilds/${NASM_VERSION}/nasm-${NASM_VERSION}.tar.gz | \ + tar xz && cd nasm-${NASM_VERSION} && \ + ./autogen.sh && ./configure && make -j $(nproc) && make install && \ + cd .. && rm -rf nasm-${NASM_VERSION} + +RUN mkdir -p iasl_temp && cd iasl_temp && \ + wget -O iasl_${IASL_VERSION}.nupkg "https://pkgs.dev.azure.com/projectmu/acpica/_apis/packaging/feeds/mu_iasl/nuget/packages/edk2-acpica-iasl/versions/${IASL_VERSION}/content?api-version=6.0-preview.1" && \ + unzip iasl_${IASL_VERSION}.nupkg -d /iasl_pkg_contents && \ + find /iasl_pkg_contents -type f -name "iasl" -exec cp {} /usr/bin \; && chmod +x /usr/bin/iasl && \ + cd .. 
&& rm -rf iasl_temp + +RUN wget -q "https://packages.microsoft.com/config/ubuntu/$(lsb_release -rs)/packages-microsoft-prod.deb" && \ + dpkg -i packages-microsoft-prod.deb && rm packages-microsoft-prod.deb && \ + apt-get update && apt-get install -y powershell && apt-get clean + +# +# Rust support +# + +ENV CARGO_HOME="$HOME/.cargo" +ENV RUSTUP_HOME="$HOME/.rustup" +ENV PATH="$CARGO_HOME/bin:$PATH" + +# Install Rust/Cargo and extras (rust-src, rust fmt, cargo-make, cargo-tarpaulin) +RUN VERSION_URL="https://raw.githubusercontent.com/microsoft/mu_devops/main/.sync/Version.njk" && \ + RUST_VERSION=$(curl -s ${VERSION_URL} | grep -oP '(?<=rust_toolchain = ").*(?=")') && \ + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain ${RUST_VERSION} --profile minimal && \ + rustup component add rustfmt rust-src --toolchain ${RUST_VERSION}-x86_64-unknown-linux-gnu + +RUN mkdir cargo_downloads && \ + cd cargo_downloads && \ + TAG_NAME=$(curl -s https://api.github.com/repos/sagiegurari/cargo-make/releases/latest | jq -r '.tag_name') && \ + DOWNLOAD_URL="https://github.com/sagiegurari/cargo-make/releases/download/$TAG_NAME/cargo-make-v$TAG_NAME-x86_64-unknown-linux-gnu.zip" && \ + curl -L -o cargo-make.zip "$DOWNLOAD_URL" && \ + unzip cargo-make.zip && \ + mv cargo-make-v$TAG_NAME-x86_64-unknown-linux-gnu/cargo-make $CARGO_HOME/bin/ && \ + TAG_NAME=$(curl -s https://api.github.com/repos/xd009642/tarpaulin/releases/latest | jq -r '.tag_name') && \ + DOWNLOAD_URL="https://github.com/xd009642/tarpaulin/releases/download/$TAG_NAME/cargo-tarpaulin-x86_64-unknown-linux-gnu.tar.gz" && \ + curl -L -o cargo-tarpaulin.tar.gz "$DOWNLOAD_URL" && \ + tar -xzvf cargo-tarpaulin.tar.gz && \ + mv cargo-tarpaulin $CARGO_HOME/bin/ && \ + cd .. 
&& \ + rm -r cargo_downloads + +# +# Alternatives +# +{% endraw %} + +RUN update-alternatives \ + --install /usr/bin/python python /usr/bin/python{{ sync_version.python_version }} 1 && \ + update-alternatives \ + --install /usr/bin/python3 python3 /usr/bin/python{{ sync_version.python_version }} 1 && \ + update-alternatives \ + --install /usr/bin/gcc gcc /usr/bin/gcc-${GCC_MAJOR_VERSION} 100 \ + --slave /usr/bin/g++ g++ /usr/bin/g++-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/gcov gcov /usr/bin/gcov-${GCC_MAJOR_VERSION} && \ + update-alternatives \ + --install /usr/bin/cpp cpp /usr/bin/cpp-${GCC_MAJOR_VERSION} 100 && \ + update-alternatives \ + --install /usr/bin/aarch64-linux-gnu-gcc aarch64-linux-gnu-gcc /usr/bin/aarch64-linux-gnu-gcc-${GCC_MAJOR_VERSION} 100 \ + --slave /usr/bin/aarch64-linux-gnu-cpp aarch64-linux-gnu-cpp /usr/bin/aarch64-linux-gnu-cpp-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/aarch64-linux-gnu-g++ aarch64-linux-gnu-g++ /usr/bin/aarch64-linux-gnu-g++-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/aarch64-linux-gnu-gcc-ar aarch64-linux-gnu-gcc-ar /usr/bin/aarch64-linux-gnu-gcc-ar-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/aarch64-linux-gnu-gcc-nm aarch64-linux-gnu-gcc-nm /usr/bin/aarch64-linux-gnu-gcc-nm-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/aarch64-linux-gnu-gcc-ranlib aarch64-linux-gnu-gcc-ranlib /usr/bin/aarch64-linux-gnu-gcc-ranlib-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/aarch64-linux-gnu-gcov aarch64-linux-gnu-gcov /usr/bin/aarch64-linux-gnu-gcov-${GCC_MAJOR_VERSION} && \ + update-alternatives \ + --install /usr/bin/arm-linux-gnueabi-gcc arm-linux-gnueabi-gcc /usr/bin/arm-linux-gnueabi-gcc-${GCC_MAJOR_VERSION} 100 \ + --slave /usr/bin/arm-linux-gnueabi-cpp arm-linux-gnueabi-cpp /usr/bin/arm-linux-gnueabi-cpp-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/arm-linux-gnueabi-g++ arm-linux-gnueabi-g++ /usr/bin/arm-linux-gnueabi-g++-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/arm-linux-gnueabi-gcc-ar arm-linux-gnueabi-gcc-ar /usr/bin/arm-linux-gnueabi-gcc-ar-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/arm-linux-gnueabi-gcc-nm arm-linux-gnueabi-gcc-nm /usr/bin/arm-linux-gnueabi-gcc-nm-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/arm-linux-gnueabi-gcc-ranlib arm-linux-gnueabi-gcc-ranlib /usr/bin/arm-linux-gnueabi-gcc-ranlib-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/arm-linux-gnueabi-gcov arm-linux-gnueabi-gcov /usr/bin/arm-linux-gnueabi-gcov-${GCC_MAJOR_VERSION} && \ + update-alternatives \ + --install /usr/bin/riscv64-linux-gnu-gcc riscv64-linux-gnu-gcc /usr/bin/riscv64-linux-gnu-gcc-${GCC_MAJOR_VERSION} 100 \ + --slave /usr/bin/riscv64-linux-gnu-cpp riscv64-linux-gnu-cpp /usr/bin/riscv64-linux-gnu-cpp-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/riscv64-linux-gnu-g++ riscv64-linux-gnu-g++ /usr/bin/riscv64-linux-gnu-g++-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/riscv64-linux-gnu-gcc-ar riscv64-linux-gnu-gcc-ar /usr/bin/riscv64-linux-gnu-gcc-ar-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/riscv64-linux-gnu-gcc-nm riscv64-linux-gnu-gcc-nm /usr/bin/riscv64-linux-gnu-gcc-nm-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/riscv64-linux-gnu-gcc-ranlib riscv64-linux-gnu-gcc-ranlib /usr/bin/riscv64-linux-gnu-gcc-ranlib-${GCC_MAJOR_VERSION} \ + --slave /usr/bin/riscv64-linux-gnu-gcov riscv64-linux-gnu-gcov /usr/bin/riscv64-linux-gnu-gcov-${GCC_MAJOR_VERSION} + +{% raw %} +# +# The 
test container layer is intended for pipeline use and virtual platform tools +# on top of the base build image. +# +FROM build AS test + +ARG QEMU_URL="https://gitlab.com/qemu-project/qemu.git" +ARG QEMU_BRANCH="v9.0.0" + +RUN apt-get update && apt-get install --yes --no-install-recommends \ + autoconf \ + automake \ + autotools-dev \ + bc \ + build-essential \ + dosfstools \ + gcc \ + libglib2.0-dev \ + libgtk-3-dev \ + libpixman-1-dev \ + libsdl2-dev \ + mtools \ + ninja-build \ + tar \ + && \ + git clone "${QEMU_URL}" --branch "${QEMU_BRANCH}" --depth 1 qemu && \ + cd qemu && \ + ./configure --target-list=x86_64-softmmu,aarch64-softmmu --enable-gtk && \ + make install -j $(nproc) && \ + cd .. && \ + rm -rf qemu && \ + apt remove --yes ninja-build && \ + apt-get clean + +# +# The dev container layer is intended for local use and has more robust tooling +# and environment configurations for developers. +# +FROM test AS dev + +RUN apt-get update && \ + apt-get install --yes --no-install-recommends \ + nano \ + less \ + && \ + apt-get clean +{% endraw %} diff --git a/.sync/workflows/leaf/codeql-platform.yml b/.sync/workflows/leaf/codeql-platform.yml index b509acc0..5604210e 100644 --- a/.sync/workflows/leaf/codeql-platform.yml +++ b/.sync/workflows/leaf/codeql-platform.yml @@ -54,13 +54,17 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 +{% endraw %} + - name: Install Python uses: actions/setup-python@v5 with: - python-version: '3.12' + python-version: '{{ sync_version.python_version }}' cache: 'pip' cache-dependency-path: 'pip-requirements.txt' +{% raw %} + - name: Install/Upgrade pip Modules run: pip install -r pip-requirements.txt --upgrade @@ -136,13 +140,17 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 +{% endraw %} + - name: Install Python uses: actions/setup-python@v5 with: - python-version: '3.12' + python-version: '{{ sync_version.python_version }}' cache: 'pip' cache-dependency-path: 'pip-requirements.txt' +{% raw %} + - name: Use Git Long Paths on Windows if: runner.os == 'Windows' shell: pwsh diff --git a/.sync/workflows/leaf/codeql.yml b/.sync/workflows/leaf/codeql.yml index c3fc9b1e..382984cf 100644 --- a/.sync/workflows/leaf/codeql.yml +++ b/.sync/workflows/leaf/codeql.yml @@ -53,10 +53,14 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 +{% endraw %} + - name: Install Python uses: actions/setup-python@v5 with: - python-version: '3.12' + python-version: '{{ sync_version.python_version }}' + +{% raw %} - name: Generate Package Matrix id: generate_matrix @@ -99,13 +103,17 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 +{% endraw %} + - name: Install Python uses: actions/setup-python@v5 with: - python-version: '3.12' + python-version: '{{ sync_version.python_version }}' cache: 'pip' cache-dependency-path: 'pip-requirements.txt' +{% raw %} + - name: Use Git Long Paths on Windows if: runner.os == 'Windows' shell: pwsh diff --git a/Containers/Ubuntu-22/Dockerfile b/Containers/Ubuntu-22/Dockerfile index 65c31df2..5a0de86d 100644 --- a/Containers/Ubuntu-22/Dockerfile +++ b/Containers/Ubuntu-22/Dockerfile @@ -1,6 +1,12 @@ # # Dockerfile for building/testing Project MU with GCC. # +# NOTE: This file is automatically synchronized from Mu DevOps. Update the original file there +# instead of the file in this repo. 
+# +# - Mu DevOps Repo: https://github.com/microsoft/mu_devops +# - File Sync Settings: https://github.com/microsoft/mu_devops/blob/main/.sync/Files.yml +# # Copyright (c) Microsoft Corporation # SPDX-License-Identifier: BSD-2-Clause-Patent #
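
For reference, below is a minimal sketch of how a single value such as `python_version` defined in `.sync/Version.njk` propagates into the synced files above. The real sync is driven by `.sync/Files.yml` and renders Nunjucks templates; the snippet uses jinja2 with inline stand-in templates only because its `{% set %}`/`{% import %}` syntax is close enough to illustrate the substitution, so the module, template names, and values here are illustrative assumptions rather than the actual tooling.

    # Sketch only: jinja2 stands in for the Nunjucks renderer used by the file sync.
    from jinja2 import DictLoader, Environment

    templates = {
        # Stand-in for .sync/Version.njk, which defines the version once.
        "Version.njk": '{% set python_version = "3.12" %}',
        # Stand-in for a synced template that consumes the shared value.
        "SetupPythonPreReqs.yml": (
            "{% import 'Version.njk' as sync_version %}"
            "versionSpec: {{ sync_version.python_version }}"
        ),
    }

    env = Environment(loader=DictLoader(templates))
    print(env.get_template("SetupPythonPreReqs.yml").render())
    # Prints: versionSpec: 3.12

Bumping Python later then means editing only the `python_version` value in `Version.njk`; every template that imports it picks up the new version on the next sync.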