# Scrape context (GitHub UI chrome, not part of the workflow file itself):
# PR: "Add TPM Replay FW CFG Input Channel library instance" (#255)
# This is the workflow file captured for that run.
# This workflow runs CodeQL against platform builds in the repository.
#
# Any platform that supports the `--codeql` parameter will be built and the
# results will be uploaded to GitHub Code Scanning.
#
# Note: This workflow only supports Windows as CodeQL CLI has confirmed issues running
#       against edk2-style codebases on Linux (only tested on Ubuntu). Therefore, this
#       workflow is written only for Windows but could easily be adapted to run on Linux
#       in the future if needed (e.g. swap out "windows" with agent OS var value, etc.)
#
# NOTE: This file is automatically synchronized from Mu DevOps. Update the original file there
#       instead of the file in this repo.
#
# - Mu DevOps Repo: https://github.com/microsoft/mu_devops
# - File Sync Settings: https://github.com/microsoft/mu_devops/blob/main/.sync/Files.yml
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent

name: "CodeQL - Platform"

on:
  push:
    branches:
      - main
      - release/*
  pull_request:
    branches:
      - main
      - release/*
    # NOTE(review): GitHub Actions does not support '!' negation inside
    # `paths-ignore` (negated patterns are only documented for `paths`).
    # If the intent was "run only when C sources change", this should be
    # `paths: ['**.c', '**.h']` instead — confirm against the upstream
    # Mu DevOps copy before changing trigger semantics.
    paths-ignore:
      - '!**.c'
      - '!**.h'
jobs:
  # Job 1: scan the repo for *Pkg directories whose PlatformBuild.py advertises
  # `--codeql` in its --help output, and publish that list as a JSON matrix.
  gather_build_files:
    name: Gather Platform Build Files
    runs-on: ubuntu-latest
    outputs:
      platform_build_files: ${{ steps.generate_matrix.outputs.platform_build_files }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.12'
          cache: 'pip'
          cache-dependency-path: 'pip-requirements.txt'

      - name: Install/Upgrade pip Modules
        run: pip install -r pip-requirements.txt --upgrade

      - name: Generate Package Matrix
        id: generate_matrix
        shell: python
        run: |
          import os
          import json
          import subprocess
          from pathlib import Path

          def supports_parameter(script_path: str, parameter: str) -> bool:
              """Return True if running `python script_path --help` mentions `parameter`."""
              try:
                  # Run the script with the --help parameter and capture the output
                  # Note: subprocess.run() failed in the GitHub workflow
                  process = subprocess.Popen(['python', script_path, '--help'],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                  output, error = process.communicate()
                  if process.returncode != 0:
                      print(f"::error title=CodeQL Check Failed!::Failed to determine if the platform is CodeQL compatible! Return code {process.returncode}. {error}")
                      return False
                  return parameter in output.decode('utf-8')
              except Exception as e:
                  print(f"::error title=CodeQL Check Exception!::Exception occurred while checking if the platform is CodeQL compatible! {e}")
                  return False

          root_dir = Path(os.environ['GITHUB_WORKSPACE'])
          # Any directory whose name ends in "pkg" (case-insensitive) is a candidate package.
          packages = [d for d in root_dir.rglob('*') if d.is_dir() and d.name.strip().lower().endswith('pkg')]

          platform_build_files = []

          # This can be made more robust than just checking if a PlatformBuild.py
          # file exists in the package directory, but the additional complexity is
          # not needed right now given current package construction conventions.
          for package in packages:
              platform_build_file = package / "PlatformBuild.py"
              if platform_build_file.exists() and platform_build_file.is_file() \
                  and any(file.endswith('.dsc') for file in os.listdir(package)) \
                  and supports_parameter(str(platform_build_file), "--codeql"):
                  platform_build_files.append(str(platform_build_file.relative_to(root_dir)))

          # Sort for a stable matrix ordering across runs.
          platform_build_files.sort()
          print(platform_build_files)

          with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
              print(f'platform_build_files={json.dumps(platform_build_files)}', file=fh)

  # Job 2: for each discovered platform build file, perform the stuart
  # setup/update/build flow with CodeQL enabled and upload the SARIF results.
  analyze:
    name: Analyze
    runs-on: windows-2022
    needs:
      - gather_build_files
    permissions:
      actions: read
      contents: read
      security-events: write
    strategy:
      fail-fast: false
      matrix:
        build_file: ${{ fromJson(needs.gather_build_files.outputs.platform_build_files) }}
        # Neither include entry shares a key with the matrix, so both values
        # are merged into every build_file combination.
        include:
          - archs: IA32,X64
          - tool_chain_tag: VS2022
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.12'
          cache: 'pip'
          cache-dependency-path: 'pip-requirements.txt'

      - name: Use Git Long Paths on Windows
        if: runner.os == 'Windows'
        shell: pwsh
        run: |
          git config --system core.longpaths true

      - name: Install/Upgrade pip Modules
        run: pip install -r pip-requirements.txt --upgrade requests

      - name: Get Cargo Tool Details
        id: get_cargo_tool_details
        shell: python
        run: |
          import os
          import requests

          GITHUB_REPO = "sagiegurari/cargo-make"
          API_URL = f"https://api.github.com/repos/{GITHUB_REPO}/releases/latest"

          # Default value in case getting latest fails, cache will fall
          # back on this version.
          latest_cargo_make_version = "0.36.13"

          response = requests.get(API_URL)
          if response.status_code == 200:
              latest_cargo_make_version = response.json()["tag_name"]
          else:
              print("::error title=GitHub Release Error!::Failed to get latest cargo-make version!")

          cache_key = f'cargo-make-{latest_cargo_make_version}'

          with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
              print(f'cargo_bin_path={os.path.join(os.environ["USERPROFILE"], ".cargo", "bin")}', file=fh)
              print(f'cargo_make_cache_key={cache_key}', file=fh)
              print(f'cargo_make_version={latest_cargo_make_version}', file=fh)

      - name: Attempt to Load cargo-make From Cache
        id: cargo_make_cache
        uses: actions/cache@v3
        with:
          path: ${{ steps.get_cargo_tool_details.outputs.cargo_bin_path }}
          key: ${{ steps.get_cargo_tool_details.outputs.cargo_make_cache_key }}

      - name: Download cargo-make
        if: steps.cargo_make_cache.outputs.cache-hit != 'true'
        # NOTE(review): the original action reference was mangled by email
        # obfuscation in the scraped page ("robinraju/[email protected]"). It is
        # reconstructed here as release-downloader; confirm the exact pinned
        # version against the upstream Mu DevOps file.
        uses: robinraju/release-downloader@v1.8
        with:
          repository: 'sagiegurari/cargo-make'
          tag: '${{ steps.get_cargo_tool_details.outputs.cargo_make_version }}'
          fileName: 'cargo-make-v${{ steps.get_cargo_tool_details.outputs.cargo_make_version }}-x86_64-pc-windows-msvc.zip'
          out-file-path: 'cargo-make-download'
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract cargo-make
        if: steps.cargo_make_cache.outputs.cache-hit != 'true'
        env:
          CARGO_MAKE_VERSION: ${{ steps.get_cargo_tool_details.outputs.cargo_make_version }}
          DEST_DIR: ${{ steps.get_cargo_tool_details.outputs.cargo_bin_path }}
        shell: python
        run: |
          import os
          import shutil
          import zipfile
          from pathlib import Path

          DOWNLOAD_DIR = Path(os.environ["GITHUB_WORKSPACE"], "cargo-make-download")
          ZIP_FILE_NAME = f"cargo-make-v{os.environ['CARGO_MAKE_VERSION']}-x86_64-pc-windows-msvc.zip"
          ZIP_FILE_PATH = Path(DOWNLOAD_DIR, ZIP_FILE_NAME)
          EXTRACT_DIR = Path(DOWNLOAD_DIR, "cargo-make-contents")

          with zipfile.ZipFile(ZIP_FILE_PATH, 'r') as zip_ref:
              zip_ref.extractall(EXTRACT_DIR)

          # Only the cargo-make executable is needed; copy it into the cargo
          # bin directory (which is what gets cached).
          for extracted_file in EXTRACT_DIR.iterdir():
              if extracted_file.name == "cargo-make.exe":
                  shutil.copy2(extracted_file, os.environ["DEST_DIR"])
                  break

      - name: Rust Prep
        run: rustup component add rust-src

      - name: Get Platform Information
        id: get_platform_info
        env:
          BUILD_FILE_PATH: ${{ matrix.build_file }}
        shell: python
        run: |
          # Fix: `import importlib` alone does not bind the `importlib.util`
          # submodule; import it explicitly so spec_from_file_location is
          # reliably available.
          import importlib.util
          import inspect
          import os
          import sys
          from pathlib import Path
          from edk2toolext.invocables.edk2_platform_build import BuildSettingsManager
          from edk2toolext.invocables.edk2_ci_setup import CiSetupSettingsManager
          from edk2toolext.invocables.edk2_setup import SetupSettingsManager

          platform_build_file = Path(os.environ['BUILD_FILE_PATH'])
          if not platform_build_file.is_file():
              print(f"::error title=Invalid Build File!::Failed to find {str(platform_build_file)}!")
              sys.exit(1)

          # Load the module
          module_name = 'platform_settings'
          spec = importlib.util.spec_from_file_location(module_name, platform_build_file)
          module = importlib.util.module_from_spec(spec)
          spec.loader.exec_module(module)

          # Get info from the platform build file
          pkg_name = "UnknownPkg"
          ci_setup_supported = False
          setup_supported = False
          for name, obj in inspect.getmembers(module):
              if inspect.isclass(obj):
                  if issubclass(obj, CiSetupSettingsManager):
                      ci_setup_supported = True
                  if issubclass(obj, SetupSettingsManager):
                      setup_supported = True
                  if issubclass(obj, BuildSettingsManager):
                      try:
                          pkg_name = obj().GetName()
                      except AttributeError:
                          print(f"::error title=Invalid Package name!::Failed to get package name in {str(platform_build_file)}!")
                          sys.exit(1)

          with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
              print(f'ci_setup_supported={str(ci_setup_supported).lower()}', file=fh)
              print(f'setup_supported={str(setup_supported).lower()}', file=fh)
              print(f'pkg_name={pkg_name}', file=fh)

      # Map the workspace to a short drive letter to dodge Windows MAX_PATH
      # issues during the build; removed again in the final step.
      - name: Assign Temp Drive Letter
        if: runner.os == 'Windows'
        shell: pwsh
        run: |
          subst Z: ${{ github.workspace }}

      - name: Setup
        if: steps.get_platform_info.outputs.setup_supported == 'true'
        shell: pwsh
        working-directory: "Z:"
        run: stuart_setup -c ${{ matrix.build_file }} -t DEBUG -a ${{ matrix.archs }} TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }}

      - name: Upload Setup Log As An Artifact
        uses: actions/upload-artifact@v3
        if: (success() || failure()) && steps.get_platform_info.outputs.setup_supported == 'true'
        with:
          name: ${{ steps.get_platform_info.outputs.pkg_name }}-Logs
          path: |
            **/SETUPLOG.txt
          retention-days: 7
          if-no-files-found: ignore

      - name: CI Setup
        if: steps.get_platform_info.outputs.ci_setup_supported == 'true'
        shell: pwsh
        working-directory: "Z:"
        run: stuart_ci_setup -c ${{ matrix.build_file }} -t DEBUG -a ${{ matrix.archs }} TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }}

      - name: Upload CI Setup Log As An Artifact
        uses: actions/upload-artifact@v3
        if: (success() || failure()) && steps.get_platform_info.outputs.ci_setup_supported == 'true'
        with:
          name: ${{ steps.get_platform_info.outputs.pkg_name }}-Logs
          path: |
            **/CISETUP.txt
          retention-days: 7
          if-no-files-found: ignore

      - name: Update
        shell: pwsh
        working-directory: "Z:"
        run: stuart_update -c ${{ matrix.build_file }} -t DEBUG -a ${{ matrix.archs }} TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }}

      - name: Upload Update Log As An Artifact
        uses: actions/upload-artifact@v3
        if: success() || failure()
        with:
          name: ${{ steps.get_platform_info.outputs.pkg_name }}-Logs
          path: |
            **/UPDATE_LOG.txt
          retention-days: 7
          if-no-files-found: ignore

      - name: Find CodeQL Plugin Directory
        id: find_dir
        shell: python
        run: |
          import os
          import sys
          from pathlib import Path

          # Find the plugin directory that contains the CodeQL plugin
          plugin_dir = list(Path(os.environ['GITHUB_WORKSPACE']).rglob('.pytool/Plugin/CodeQL'))

          # This should only be found once
          if len(plugin_dir) == 1:
              plugin_dir = str(plugin_dir[0])

              with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
                  print(f'codeql_plugin_dir={plugin_dir}', file=fh)
          else:
              print("::error title=Workspace Error!::Failed to find Mu Basecore plugin directory!")
              sys.exit(1)

      - name: Get CodeQL CLI Cache Data
        id: cache_key_gen
        env:
          CODEQL_PLUGIN_DIR: ${{ steps.find_dir.outputs.codeql_plugin_dir }}
        shell: python
        run: |
          import os
          import yaml

          # Derive the cache key (name + version) from the CodeQL CLI external
          # dependency descriptor shipped with the plugin.
          codeql_cli_ext_dep_name = 'codeqlcli_windows_ext_dep'
          codeql_plugin_file = os.path.join(os.environ['CODEQL_PLUGIN_DIR'], codeql_cli_ext_dep_name + '.yaml')

          with open (codeql_plugin_file) as pf:
              codeql_cli_ext_dep = yaml.safe_load(pf)

          cache_key_name = codeql_cli_ext_dep['name']
          cache_key_version = codeql_cli_ext_dep['version']
          cache_key = f'{cache_key_name}-{cache_key_version}'

          codeql_plugin_cli_ext_dep_dir = os.path.join(os.environ['CODEQL_PLUGIN_DIR'], codeql_cli_ext_dep['name'].strip() + '_extdep')

          with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
              print(f'codeql_cli_cache_key={cache_key}', file=fh)
              print(f'codeql_cli_ext_dep_dir={codeql_plugin_cli_ext_dep_dir}', file=fh)

      - name: Attempt to Load CodeQL CLI From Cache
        id: codeqlcli_cache
        uses: actions/cache@v3
        with:
          path: ${{ steps.cache_key_gen.outputs.codeql_cli_ext_dep_dir }}
          key: ${{ steps.cache_key_gen.outputs.codeql_cli_cache_key }}

      - name: Download CodeQL CLI
        if: steps.codeqlcli_cache.outputs.cache-hit != 'true'
        shell: pwsh
        working-directory: "Z:"
        run: stuart_update -c ${{ matrix.build_file }} -t DEBUG -a ${{ matrix.archs }} TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }} --codeql

      - name: Remove CI Plugins Irrelevant to CodeQL
        shell: python
        env:
          CODEQL_PLUGIN_DIR: ${{ steps.find_dir.outputs.codeql_plugin_dir }}
        run: |
          import os
          import shutil
          from pathlib import Path

          # Only these two plugins are needed for CodeQL
          plugins_to_keep = ['CodeQL', 'CompilerPlugin']

          plugin_dir = Path(os.environ['CODEQL_PLUGIN_DIR']).parent.absolute()
          if plugin_dir.is_dir():
              # `entry` rather than `dir` to avoid shadowing the builtin.
              for entry in plugin_dir.iterdir():
                  if str(entry.stem) not in plugins_to_keep:
                      shutil.rmtree(str(entry.absolute()), ignore_errors=True)

      - name: Platform Build
        shell: pwsh
        working-directory: "Z:"
        env:
          RUST_ENV_CHECK_TOOL_EXCLUSIONS: "cargo fmt, cargo tarpaulin"
          STUART_CODEQL_PATH: ${{ steps.cache_key_gen.outputs.codeql_cli_ext_dep_dir }}
        run: stuart_build -c ${{ matrix.build_file }} -t DEBUG -a ${{ matrix.archs }} TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }} --codeql

      - name: Build Cleanup
        id: build_cleanup
        shell: python
        run: |
          import os
          import shutil
          from pathlib import Path

          # Per-arch output directories are large and not needed for the
          # SARIF upload; prune them recursively before artifact collection.
          dirs_to_delete = ['ia32', 'x64', 'arm', 'aarch64']

          def delete_dirs(path: Path):
              if path.exists() and path.is_dir():
                  if path.name.lower() in dirs_to_delete:
                      print(f'Removed {str(path)}')
                      shutil.rmtree(path)
                      return

                  for child_dir in path.iterdir():
                      delete_dirs(child_dir)

          build_path = Path(os.environ['GITHUB_WORKSPACE'], 'Build')
          delete_dirs(build_path)

      - name: Upload Build Logs As An Artifact
        uses: actions/upload-artifact@v3
        if: success() || failure()
        with:
          name: ${{ steps.get_platform_info.outputs.pkg_name }}-Logs
          path: |
            **/BUILD_REPORT.TXT
            **/OVERRIDELOG.TXT
            **/BUILDLOG_*.md
            **/BUILDLOG_*.txt
            **/CI_*.md
            **/CI_*.txt
          retention-days: 7
          if-no-files-found: ignore

      - name: Prepare Env Data for CodeQL Upload
        id: env_data
        env:
          PACKAGE_NAME: ${{ steps.get_platform_info.outputs.pkg_name }}
        shell: python
        run: |
          import os

          # The CodeQL plugin writes the SARIF file to a well-known path
          # derived from the lower-cased package name and build target.
          package = os.environ['PACKAGE_NAME'].strip().lower()
          directory_name = 'codeql-analysis-' + package + '-debug'
          file_name = 'codeql-db-' + package + '-debug-0.sarif'
          sarif_path = os.path.join('Build', directory_name, file_name)

          with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
              print(f'sarif_file_path={sarif_path}', file=fh)

      - name: Upload CodeQL Results (SARIF) As An Artifact
        uses: actions/upload-artifact@v3
        with:
          name: ${{ steps.get_platform_info.outputs.pkg_name }}-CodeQL-SARIF
          path: ${{ steps.env_data.outputs.sarif_file_path }}
          retention-days: 14
          if-no-files-found: warn

      - name: Upload CodeQL Results (SARIF) To GitHub Code Scanning
        uses: github/codeql-action/upload-sarif@v2
        with:
          # Path to SARIF file relative to the root of the repository.
          sarif_file: ${{ steps.env_data.outputs.sarif_file_path }}
          # Optional category for the results. Used to differentiate multiple results for one commit.
          # Each package is a separate category.
          category: ${{ steps.get_platform_info.outputs.pkg_name }}

      - name: Remove Temp Drive Letter
        if: runner.os == 'Windows'
        shell: pwsh
        run: |
          subst Z: /D