From 6a0ba8bdd8a84f994df423a35c90641d9f626eda Mon Sep 17 00:00:00 2001 From: Sean Brogan Date: Mon, 5 Aug 2019 12:59:06 -0700 Subject: [PATCH] Initial content import(#1) * Initial port from Project Mu * Quote Python_home and Python_command if needed to avoid BaseTools change --- .coveragerc | 2 + .flake8 | 5 + .gitattributes | 59 ++ .gitignore | 14 + BasicDevTests.py | 108 ++++ ConfirmVersionAndTag.py | 27 + LICENSE | 52 ++ MANIFEST.in | 7 + azure-pipelines-pr-gate.yml | 83 +++ azure-pipelines-release.yml | 109 ++++ edk2toolext/__init__.py | 5 + edk2toolext/base_abstract_invocable.py | 140 +++++ edk2toolext/bin/__init__.py | 5 + edk2toolext/bin/nuget.py | 40 ++ edk2toolext/bin/readme.md | 1 + edk2toolext/config_validator.py | 292 ++++++++++ edk2toolext/edk2_git.py | 268 +++++++++ edk2toolext/edk2_invocable.py | 167 ++++++ edk2toolext/edk2_logging.py | 268 +++++++++ edk2toolext/environment/__init__.py | 5 + edk2toolext/environment/conf_mgmt.py | 234 ++++++++ .../environment_descriptor_files.py | 125 +++++ .../environment/extdeptypes/__init__.py | 5 + .../environment/extdeptypes/git_dependency.py | 106 ++++ .../extdeptypes/nuget_dependency.py | 178 ++++++ .../environment/extdeptypes/web_dependency.py | 152 +++++ .../environment/external_dependency.py | 165 ++++++ edk2toolext/environment/multiple_workspace.py | 157 ++++++ edk2toolext/environment/plugin_manager.py | 101 ++++ .../environment/plugintypes/__init__.py | 5 + .../plugintypes/ci_build_plugin.py | 125 +++++ .../plugintypes/dsc_processor_plugin.py | 32 ++ .../plugintypes/uefi_build_plugin.py | 33 ++ .../plugintypes/uefi_helper_plugin.py | 73 +++ edk2toolext/environment/repo_resolver.py | 238 ++++++++ .../self_describing_environment.py | 286 ++++++++++ edk2toolext/environment/shell_environment.py | 275 +++++++++ edk2toolext/environment/uefi_build.py | 531 ++++++++++++++++++ edk2toolext/environment/var_dict.py | 179 ++++++ edk2toolext/environment/version_aggregator.py | 79 +++ edk2toolext/invocables/__init__.py | 5 + edk2toolext/invocables/edk2_ci_build.py | 310 ++++++++++ edk2toolext/invocables/edk2_ci_setup.py | 109 ++++ edk2toolext/invocables/edk2_platform_build.py | 136 +++++ edk2toolext/invocables/edk2_setup.py | 179 ++++++ edk2toolext/invocables/edk2_update.py | 88 +++ edk2toolext/nuget_publishing.py | 491 ++++++++++++++++ edk2toolext/omnicache.py | 420 ++++++++++++++ edk2toolext/tests/__init__.py | 5 + edk2toolext/tests/test_config_validator.py | 204 +++++++ edk2toolext/tests/test_edk2_logging.py | 59 ++ edk2toolext/tests/test_git_dependency.py | 306 ++++++++++ edk2toolext/tests/test_omnicache.py | 141 +++++ edk2toolext/tests/test_repo_resolver.py | 268 +++++++++ .../tests/test_self_describing_environment.py | 102 ++++ edk2toolext/tests/test_shell_environment.py | 432 ++++++++++++++ edk2toolext/tests/test_web_dependency.py | 349 ++++++++++++ readme.md | 13 +- requirements.publisher.txt | 3 + requirements.txt | 4 + setup.py | 83 +++ 61 files changed, 8437 insertions(+), 6 deletions(-) create mode 100644 .coveragerc create mode 100644 .flake8 create mode 100644 .gitattributes create mode 100644 .gitignore create mode 100644 BasicDevTests.py create mode 100644 ConfirmVersionAndTag.py create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 azure-pipelines-pr-gate.yml create mode 100644 azure-pipelines-release.yml create mode 100644 edk2toolext/__init__.py create mode 100644 edk2toolext/base_abstract_invocable.py create mode 100644 edk2toolext/bin/__init__.py create mode 100644 edk2toolext/bin/nuget.py create mode 
100644 edk2toolext/bin/readme.md create mode 100644 edk2toolext/config_validator.py create mode 100644 edk2toolext/edk2_git.py create mode 100644 edk2toolext/edk2_invocable.py create mode 100644 edk2toolext/edk2_logging.py create mode 100644 edk2toolext/environment/__init__.py create mode 100644 edk2toolext/environment/conf_mgmt.py create mode 100644 edk2toolext/environment/environment_descriptor_files.py create mode 100644 edk2toolext/environment/extdeptypes/__init__.py create mode 100644 edk2toolext/environment/extdeptypes/git_dependency.py create mode 100644 edk2toolext/environment/extdeptypes/nuget_dependency.py create mode 100644 edk2toolext/environment/extdeptypes/web_dependency.py create mode 100644 edk2toolext/environment/external_dependency.py create mode 100644 edk2toolext/environment/multiple_workspace.py create mode 100644 edk2toolext/environment/plugin_manager.py create mode 100644 edk2toolext/environment/plugintypes/__init__.py create mode 100644 edk2toolext/environment/plugintypes/ci_build_plugin.py create mode 100644 edk2toolext/environment/plugintypes/dsc_processor_plugin.py create mode 100644 edk2toolext/environment/plugintypes/uefi_build_plugin.py create mode 100644 edk2toolext/environment/plugintypes/uefi_helper_plugin.py create mode 100644 edk2toolext/environment/repo_resolver.py create mode 100644 edk2toolext/environment/self_describing_environment.py create mode 100644 edk2toolext/environment/shell_environment.py create mode 100644 edk2toolext/environment/uefi_build.py create mode 100644 edk2toolext/environment/var_dict.py create mode 100644 edk2toolext/environment/version_aggregator.py create mode 100644 edk2toolext/invocables/__init__.py create mode 100644 edk2toolext/invocables/edk2_ci_build.py create mode 100644 edk2toolext/invocables/edk2_ci_setup.py create mode 100644 edk2toolext/invocables/edk2_platform_build.py create mode 100644 edk2toolext/invocables/edk2_setup.py create mode 100644 edk2toolext/invocables/edk2_update.py create mode 100644 edk2toolext/nuget_publishing.py create mode 100644 edk2toolext/omnicache.py create mode 100644 edk2toolext/tests/__init__.py create mode 100644 edk2toolext/tests/test_config_validator.py create mode 100644 edk2toolext/tests/test_edk2_logging.py create mode 100644 edk2toolext/tests/test_git_dependency.py create mode 100644 edk2toolext/tests/test_omnicache.py create mode 100644 edk2toolext/tests/test_repo_resolver.py create mode 100644 edk2toolext/tests/test_self_describing_environment.py create mode 100644 edk2toolext/tests/test_shell_environment.py create mode 100644 edk2toolext/tests/test_web_dependency.py create mode 100644 requirements.publisher.txt create mode 100644 requirements.txt create mode 100644 setup.py diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..3b5a1add --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = edk2toolext/tests/* \ No newline at end of file diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..3874717e --- /dev/null +++ b/.flake8 @@ -0,0 +1,5 @@ +[flake8] +#E266 too many leading '#' for block comment +#E722 do not use bare 'except' +ignore = E266,E722 +max_line_length = 120 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..ba67b6cd --- /dev/null +++ b/.gitattributes @@ -0,0 +1,59 @@ +* text=auto + +*.md text +*.txt text + +# Graphics +*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.tif binary +*.tiff binary +*.ico binary +# SVG treated as an asset (binary) by default. 
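+# The rule below keeps SVG as diffable text in this repo; switch the attribute to binary to treat SVGs as opaque assets instead.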
+*.svg text + +# Scripts +*.bash text eol=lf +*.sh text eol=lf +# These are explicitly windows files and should use crlf +*.bat text eol=crlf +*.cmd text eol=crlf +*.ps1 text eol=crlf + +# Serialisation +*.json text +*.toml text +*.xml text +*.yaml text +*.yml text + +# Archives +*.7z binary +*.gz binary +*.tar binary +*.zip binary +*.exe binary + +# +# Exclude files from exporting +# + +.gitattributes export-ignore +.gitignore export-ignore + + +# Basic .gitattributes for a python repo. + +# Source files +# ============ +*.pxd text diff=python +*.py text diff=python +*.py3 text diff=python +*.pyc text diff=python +*.pyd text diff=python +*.pyo text diff=python +*.pyw text diff=python +*.pyx text diff=python +*.pyz text diff=python \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..c8dfb423 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +*.exe +*.pyc +Lib +dist +*.egg-info +build. +/cov_html +/.pytest_cache +/pytest_report.html +/.coverage +/cov.xml +/test.junit.xml +flake8.err.log +/.eggs diff --git a/BasicDevTests.py b/BasicDevTests.py new file mode 100644 index 00000000..59d3286f --- /dev/null +++ b/BasicDevTests.py @@ -0,0 +1,108 @@ +## +# Quick script to check that python code in the package +# aligns with pep8 and file encoding. I have not found +# a way to enforce that with tools like flake8 +# +# There must be a better way. :) +# +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import glob +import os +import sys +import logging + + +def TestEncodingOk(apath, encodingValue): + try: + with open(apath, "rb") as fobj: + fobj.read().decode(encodingValue) + except Exception as exp: + logging.critical("Encoding failure: file: {0} type: {1}".format(apath, encodingValue)) + logging.error("EXCEPTION: while processing {1} - {0}".format(exp, apath)) + return False + return True + + +def TestLineEndingsOk(apath, Windows: bool): + WIN_EOL = b'\r\n' + UNIX_EOL = b'\n' + + with open(apath, "rb") as fobj: + content_uni = fobj.read() + + if(not Windows): + if(WIN_EOL in content_uni): + logging.critical("Windows EOL in use file: {0}".format(apath)) + return False + return True + + else: + # windows + # since UNIX EOL is substring of WIN EOL replace WIN with something + # else and then look for UNIX + content_no_nl = content_uni.replace(WIN_EOL, b" ") + if UNIX_EOL in content_no_nl: + logging.critical("UNIX EOL in use file: {0}".format(apath)) + return False + return True + + +def TestFilenameLowercase(apath): + if apath != apath.lower(): + logging.critical(f"Lowercase failure: file {apath} not lower case path") + logging.error(f"\n\tLOWERCASE: {apath.lower()}\n\tINPUTPATH: {apath}") + return False + return True + + +def TestNoSpaces(apath): + if " " in apath: + logging.critical(f"NoSpaces failure: file {apath} has spaces in path") + return False + return True + + +def TestRequiredLicense(apath): + lic = ["SPDX-License-Identifier: BSD-2-Clause-Patent"] + try: + with open(apath, "rb") as fobj: + contents = fobj.read().decode() + found = False + for l in lic: + if l in contents: + found = True + break + if not found: + logging.critical(f"License failure: file {apath} has incorrect, invalid, or unsupported license") + return False + except Exception as exp: + logging.critical(f"License failure: Exception trying to read file: {apath}") + logging.error("EXCEPTION: while processing {1} - {0}".format(exp, apath)) + return False + return True + + +p = os.path.join(os.getcwd(), "edk2toolext") +pyfiles = 
glob.glob(os.path.join(p, "**", "*.py"), recursive=True) +error = 0 +for a in pyfiles: + aRelativePath = os.path.relpath(a, os.getcwd()) + if(not TestEncodingOk(a, "ascii")): + error += 1 + if(not TestFilenameLowercase(aRelativePath)): + error += 1 + if(not TestNoSpaces(aRelativePath)): + error += 1 + if(not TestRequiredLicense(a)): + error += 1 + + # Don't check EOL. Use .gitattributes + # if(not TestLineEndingsOk(a, True)): + # error += 1 + +logging.critical(f"Found {error} error(s) in {len(pyfiles)} file(s)") +sys.exit(error) diff --git a/ConfirmVersionAndTag.py b/ConfirmVersionAndTag.py new file mode 100644 index 00000000..f712f93f --- /dev/null +++ b/ConfirmVersionAndTag.py @@ -0,0 +1,27 @@ +## @file +# Quick script to check that the wheel/package created is aligned on a git tag. +# Official releases should not be made from non-tagged code. +# +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import glob +import os +import sys + +p = os.path.join(os.getcwd(), "dist") +whlfile = glob.glob(os.path.join(p, "*.whl")) +if(len(whlfile) != 1): + for filename in whlfile: + print(filename) + raise Exception("Too many wheel files") +rfn = os.path.relpath(whlfile[0], os.getcwd()) +v = rfn.split("-")[1] +if v.count(".") != 2: + raise Exception("Version %s not in format major.minor.patch" % v) +if "dev" in v: + raise Exception("No Dev versions allowed to be published.") +print("version: " + str(v)) +sys.exit(0) diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..a78b25be --- /dev/null +++ b/LICENSE @@ -0,0 +1,52 @@ +Copyright (c) 2019, TianoCore and contributors. All rights reserved. +Copyright (c) Microsoft All rights reserved. + +SPDX-License-Identifier: BSD-2-Clause-Patent + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +Subject to the terms and conditions of this license, each copyright holder +and contributor hereby grants to those receiving rights under this license +a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except for failure to satisfy the conditions of this license) patent +license to make, have made, use, offer to sell, sell, import, and otherwise +transfer this software, where such license applies only to those patent +claims, already acquired or hereafter acquired, licensable by such copyright +holder or contributor that are necessarily infringed by: + +(a) their Contribution(s) (the licensed copyrights of copyright holders and + non-copyrightable additions of contributors, in source or binary form) + alone; or + +(b) combination of their Contribution(s) with the work of authorship to + which such Contribution(s) was added by such copyright holder or + contributor, if, at the time the Contribution is added, such addition + causes such combination to be necessarily infringed. The patent license + shall not apply to any other combinations which include the + Contribution. 
+ +Except as expressly stated above, no rights or licenses from any copyright +holder or contributor is granted under this license, whether expressly, by +implication, estoppel or otherwise. + +DISCLAIMER + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..336f1fd1 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,7 @@ +exclude *.yml +exclude *.md +exclude *.txt +exclude .flake8 +exclude .coveragerc +exclude .gitignore +include edk2toolext/bin/NuGet.exe \ No newline at end of file diff --git a/azure-pipelines-pr-gate.yml b/azure-pipelines-pr-gate.yml new file mode 100644 index 00000000..b042c97f --- /dev/null +++ b/azure-pipelines-pr-gate.yml @@ -0,0 +1,83 @@ +workspace: + clean: all + +steps: +- checkout: self + clean: true + +- task: UsePythonVersion@0 + inputs: + versionSpec: '3.7.x' + architecture: 'x64' + +- script: python -m pip install --upgrade pip + displayName: 'Install/Upgrade pip' + +- script: pip uninstall -y edk2_pytool_extensions + displayName: 'Remove existing version of self' + +- script: pip install --upgrade -r requirements.txt + displayName: 'Install requirements' + +- script: pip install -e . + displayName: 'Install from Source' + +- script: pytest -v --junitxml=test.junit.xml --html=pytest_report.html --self-contained-html --cov=edk2toolext --cov-report html:cov_html --cov-report xml:cov.xml --cov-config .coveragerc + displayName: 'Run UnitTests' + +# Publish Test Results to Azure Pipelines/TFS +- task: PublishTestResults@2 + displayName: 'Publish junit test results' + continueOnError: true + condition: succeededOrFailed() + inputs: + testResultsFormat: 'JUnit' # Options: JUnit, NUnit, VSTest, xUnit + testResultsFiles: 'test.junit.xml' + mergeTestResults: true # Optional + publishRunAttachments: true # Optional + +# Publish Build Artifacts +# Publish build artifacts to Azure Pipelines/TFS or a file share +- task: PublishBuildArtifacts@1 + inputs: + pathtoPublish: 'pytest_report.html' + artifactName: 'Edk2_pytool_extensions unit test report' + continueOnError: true + condition: succeededOrFailed() + +# Publish Code Coverage Results +# Publish Cobertura code coverage results +- task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: 'cobertura' # Options: cobertura, jaCoCo + summaryFileLocation: $(System.DefaultWorkingDirectory)/cov.xml + reportDirectory: $(System.DefaultWorkingDirectory)/cov_html + condition: succeededOrFailed() + +- script: flake8 . + displayName: 'Run flake8' + condition: succeededOrFailed() + +# Only capture and archive the lint log on failures. +- script: flake8 . 
> flake8.err.log + displayName: 'Capture flake8 failures' + condition: Failed() + +- task: PublishBuildArtifacts@1 + inputs: + pathtoPublish: 'flake8.err.log' + artifactName: 'Flake8 Error log file' + continueOnError: true + condition: Failed() + +- task: PythonScript@0 + inputs: + scriptSource: 'filePath' + scriptPath: 'BasicDevTests.py' + #script: # Required when scriptSource == Inline + #arguments: # Optional + #pythonInterpreter: # Optional + #workingDirectory: # Optional + #failOnStderr: false # Optional + displayName: 'Check basic file and folder tests' + condition: succeededOrFailed() diff --git a/azure-pipelines-release.yml b/azure-pipelines-release.yml new file mode 100644 index 00000000..1aa96122 --- /dev/null +++ b/azure-pipelines-release.yml @@ -0,0 +1,109 @@ +workspace: + clean: all + +trigger: none # will disable CI builds entirely + +steps: +- checkout: self + clean: true + +- task: UsePythonVersion@0 + inputs: + versionSpec: '3.7.x' + architecture: 'x64' + +- script: python -m pip install --upgrade pip + displayName: 'Install/Upgrade pip' + +- script: pip uninstall -y edk2_pytool_extensions + displayName: 'Remove existing version of self' + +- script: pip install --upgrade -r requirements.txt + displayName: 'Install requirements' + +- script: pip install -e . + displayName: 'Install from Source' + +- script: pytest -v --junitxml=test.junit.xml --html=pytest_report.html --self-contained-html --cov=edk2toolext --cov-report html:cov_html --cov-report xml:cov.xml --cov-config .coveragerc + displayName: 'Run UnitTests' + +# Publish Test Results to Azure Pipelines/TFS +- task: PublishTestResults@2 + displayName: 'Publish junit test results' + continueOnError: true + condition: succeededOrFailed() + inputs: + testResultsFormat: 'JUnit' # Options: JUnit, NUnit, VSTest, xUnit + testResultsFiles: 'test.junit.xml' + mergeTestResults: true # Optional + publishRunAttachments: true # Optional + +# Publish Build Artifacts +# Publish build artifacts to Azure Pipelines/TFS or a file share +- task: PublishBuildArtifacts@1 + inputs: + pathtoPublish: 'pytest_report.html' + artifactName: 'Edk2_pytool_extensions unit test report' + continueOnError: true + condition: succeededOrFailed() + +# Publish Code Coverage Results +# Publish Cobertura code coverage results +- task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: 'cobertura' # Options: cobertura, jaCoCo + summaryFileLocation: $(System.DefaultWorkingDirectory)/cov.xml + condition: succeededOrFailed() + +- script: flake8 . + displayName: 'Run flake8' + condition: succeededOrFailed() + +# Only capture and archive the lint log on failures. +- script: flake8 . > flake8.err.log + displayName: 'Capture flake8 failures' + condition: Failed() + +- task: PublishBuildArtifacts@1 + inputs: + pathtoPublish: 'flake8.err.log' + artifactName: 'Flake8 Error log file' + continueOnError: true + condition: Failed() + +- task: PythonScript@0 + inputs: + scriptSource: 'filePath' + scriptPath: 'BasicDevTests.py' + #script: # Required when scriptSource == Inline + #arguments: # Optional + #pythonInterpreter: # Optional + #workingDirectory: # Optional + #failOnStderr: false # Optional + displayName: 'Check basic file and folder tests' + condition: succeededOrFailed() + +- script: pip install --upgrade -r requirements.publisher.txt + displayName: 'Install PyPI publishing requirements' + +- script: python setup.py sdist bdist_wheel + displayName: 'Build a wheel' + +# Python Script +# Run a Python script. 
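+# ConfirmVersionAndTag.py is the release gate: it verifies that exactly one wheel was built and that its version is a plain major.minor.patch with no "dev" suffix (i.e., built from a tagged commit) before twine publishes to PyPI.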
+- task: PythonScript@0 + displayName: 'Confirm Version and Tag' + inputs: + scriptSource: 'filePath' # Options: filePath, inline + scriptPath: ConfirmVersionAndTag.py + #arguments: # Optional + #pythonInterpreter: # Optional + #workingDirectory: # Optional + failOnStderr: true # Optional + +- task: TwineAuthenticate@0 + inputs: + externalFeeds: 'Pypi-edk2-pytool-extensions' + +- script: 'twine upload -r Pypi-edk2-pytool-extensions --config-file $(PYPIRC_PATH) dist/*' + displayName: 'Publish to pypi' \ No newline at end of file diff --git a/edk2toolext/__init__.py b/edk2toolext/__init__.py new file mode 100644 index 00000000..0a69010b --- /dev/null +++ b/edk2toolext/__init__.py @@ -0,0 +1,5 @@ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## diff --git a/edk2toolext/base_abstract_invocable.py b/edk2toolext/base_abstract_invocable.py new file mode 100644 index 00000000..76ceff86 --- /dev/null +++ b/edk2toolext/base_abstract_invocable.py @@ -0,0 +1,140 @@ +# @file base_abstract_invocable +# Base class for an Invocable. Loads environment before calling subclass. +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import sys +import logging +from datetime import datetime +from edk2toolext import edk2_logging +from edk2toolext.environment import plugin_manager +from edk2toolext.environment.plugintypes.uefi_helper_plugin import HelperFunctions +from edk2toolext.environment import self_describing_environment + + +class BaseAbstractInvocable(object): + + def __init__(self): + return + + def ParseCommandLineOptions(self): + ''' parse arguments ''' + raise NotImplementedError() + + def GetWorkspaceRoot(self): + ''' Return the workspace root for initializing the SDE ''' + raise NotImplementedError() + + def GetActiveScopes(self): + '''Return the scopes for use in SDE ''' + raise NotImplementedError() + + def GetLoggingLevel(self, loggerType): + ''' Get the logging level for a given type (return Logging.Level) + base == lowest logging level supported + con == Screen logging + txt == plain text file logging + md == markdown file logging + ''' + raise NotImplementedError() + + def GetLoggingFolderRelativeToRoot(self): + ''' Return a path to folder for log files ''' + raise NotImplementedError() + + def GetVerifyCheckRequired(self): + ''' Will call self_describing_environment.VerifyEnvironment if this returns True ''' + return True + + def GetLoggingFileName(self, loggerType): + ''' Get the logging file name for the type. + Return None if the logger shouldn't be created + + base == lowest logging level supported + con == Screen logging + txt == plain text file logging + md == markdown file logging + ''' + raise NotImplementedError() + + def Go(self): + ''' Main function to run ''' + raise NotImplementedError() + + def ConfigureLogging(self): + ''' Set up the logging. This function only needs to be overridden if new behavior is needed''' + + logger = logging.getLogger('') + logger.setLevel(self.GetLoggingLevel("base")) + + # Adjust console mode depending on mode. 
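+        # Logging fans out to up to three handlers, each with its own level from
+        # GetLoggingLevel(): "con" (console), "txt" (plain text file) and "md"
+        # (markdown file); a None level for "txt" or "md" skips that file logger.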
+ edk2_logging.setup_section_level() + + edk2_logging.setup_console_logging(self.GetLoggingLevel("con")) + + log_directory = os.path.join(self.GetWorkspaceRoot(), self.GetLoggingFolderRelativeToRoot()) + + txtlogfile = self.GetLoggingLevel("txt") + if(txtlogfile is not None): + logfile, filelogger = edk2_logging.setup_txt_logger(log_directory, + self.GetLoggingFileName("txt"), + txtlogfile) + + mdlogfile = self.GetLoggingLevel("md") + if(mdlogfile is not None): + mdfile, mdlogger = edk2_logging.setup_markdown_logger(log_directory, + self.GetLoggingFileName("md"), + mdlogfile) + + logging.info("Log Started: " + datetime.strftime(datetime.now(), "%A, %B %d, %Y %I:%M%p")) + + return + + def Invoke(self): + ''' Main process function. Should not need to be overridden ''' + + self.ParseCommandLineOptions() + self.ConfigureLogging() + + logging.log(edk2_logging.SECTION, "Init SDE") + + # + # Next, get the environment set up. + # + (build_env, shell_env) = self_describing_environment.BootstrapEnvironment( + self.GetWorkspaceRoot(), self.GetActiveScopes()) + + # Make sure the environment verifies IF it is required for this invocation + if self.GetVerifyCheckRequired() and not self_describing_environment.VerifyEnvironment( + self.GetWorkspaceRoot(), self.GetActiveScopes()): + raise RuntimeError("SDE is not current. Please update your env before running this tool.") + + # Load plugins + logging.log(edk2_logging.SECTION, "Loading Plugins") + + self.plugin_manager = plugin_manager.PluginManager() + failedPlugins = self.plugin_manager.SetListOfEnvironmentDescriptors( + build_env.plugins) + if failedPlugins: + logging.critical("One or more plugins failed to load. Halting build.") + for a in failedPlugins: + logging.error("Failed Plugin: {0}".format(a["name"])) + raise Exception("One or more plugins failed to load.") + + self.helper = HelperFunctions() + if(self.helper.LoadFromPluginManager(self.plugin_manager) > 0): + raise Exception("One or more helper plugins failed to load.") + + logging.log(edk2_logging.SECTION, "Start Invocable Tool") + retcode = self.Go() + logging.log(edk2_logging.SECTION, "Summary") + if(retcode != 0): + edk2_logging.log_progress("Error") + else: + edk2_logging.log_progress("Success") + + logging.shutdown() + sys.exit(retcode) diff --git a/edk2toolext/bin/__init__.py b/edk2toolext/bin/__init__.py new file mode 100644 index 00000000..0a69010b --- /dev/null +++ b/edk2toolext/bin/__init__.py @@ -0,0 +1,5 @@ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## diff --git a/edk2toolext/bin/nuget.py b/edk2toolext/bin/nuget.py new file mode 100644 index 00000000..2fe70ceb --- /dev/null +++ b/edk2toolext/bin/nuget.py @@ -0,0 +1,40 @@ +# @file NuGet.py +# This module contains code that knows how to download NuGet +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import urllib.error +import urllib.request +import logging + +# Update this when you want a new version of NuGet +VERSION = "5.1.0" +URL = "https://dist.nuget.org/win-x86-commandline/v{}/nuget.exe".format(VERSION) +SHA256 = "0ace4f53493332c9a75291ee96acd76b371b4e687175e4852bf85948176d7152" + + +def DownloadNuget(unpack_folder=None): + if unpack_folder is None: + unpack_folder = os.path.dirname(__file__) + + out_file_name = os.path.join(unpack_folder, "NuGet.exe") + # check if we have the nuget file already downloaded + if not os.path.isfile(out_file_name): + try: + # Download the file and save it locally as out_file_name
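+            # The logic below is download-then-verify: fetch nuget.exe only if it is
+            # not already on disk, then hash the local copy and delete it on a SHA256
+            # mismatch so a corrupted or tampered download is never reused.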
+ with urllib.request.urlopen(URL) as response, open(out_file_name, 'wb') as out_file: + out_file.write(response.read()) + except urllib.error.HTTPError as e: + logging.error("We ran into an issue when getting NuGet") + raise e + + # do the hash to make sure the file is good + with open(out_file_name, "rb") as file: + import hashlib + temp_file_sha256 = hashlib.sha256(file.read()).hexdigest() + if temp_file_sha256 != SHA256: + os.remove(out_file_name) + raise RuntimeError(f"Nuget - sha256 does not match\n\tdownloaded:\t{temp_file_sha256}\n\texpected:\t{SHA256}") diff --git a/edk2toolext/bin/readme.md b/edk2toolext/bin/readme.md new file mode 100644 index 00000000..7b6a54e3 --- /dev/null +++ b/edk2toolext/bin/readme.md @@ -0,0 +1 @@ +## The binary files that will be included with this package diff --git a/edk2toolext/config_validator.py b/edk2toolext/config_validator.py new file mode 100644 index 00000000..281f836d --- /dev/null +++ b/edk2toolext/config_validator.py @@ -0,0 +1,292 @@ +# @file config_validator.py +# This module contains support for validating .mu.json config files +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import urllib.request as req + +''' Example_MU_CONFIG_FILE: Name: Project Mu BaseCore Repo CI Build GroupName: BaseCore RelativeWorkspaceRoot: "" Scopes: - basecore - corebuild Dependencies: - Silicon/Arm/Tiano Path: Silicon/Arm/Tiano Url: https://github.com/Microsoft/mu_silicon_arm_tiano.git Branch: release/20180529 ReferencePath: "../place" Full: true PackagesPath: ReferencePath: "../" # (omnicache) Packages: - MdeModulePkg - MdePkg - MsUnitTestPkg - NetworkPkg - PcAtChipsetPkg - PerformancePkg - SecurityPkg - UefiCpuPkg ArchSupported: - IA32 - X64 DependencyCheckPlugin: skip: true DscCheckPlugin: skip: true ''' + + +# Checks the top level MU Config +def check_mu_confg(config, edk2path, pluginList): + workspace = edk2path.WorkspacePath + + def _mu_error(message): + raise RuntimeError("Mu Config Error: {0}".format(message)) + + def _is_valid_dir(path, name): + path = os.path.join(workspace, path) + if not os.path.isdir(path): + _mu_error("{0} isn't a valid directory".format(path)) + + def _check_url(url): + request = req.Request(url) + try: + req.urlopen(request) + return True + except: + # The url wasn't valid + return False + + def _check_packages(packages, name): + for package in packages: + path = edk2path.GetAbsolutePathOnThisSytemFromEdk2RelativePath(package) + if path is None or not os.path.isdir(path): + _mu_error("{0} isn't a valid package to build".format(package)) + return True + + def _is_valid_arch(targets, name): + valid_targets = ["AARCH64", "IA32", "X64", "ARM"] + for target in targets: + if target not in valid_targets: + _mu_error("{0} is not a valid target".format(target)) + + def _check_dependencies(dependencies, name): + valid_attributes = ["Path", "Url", "Branch", "Commit", "ReferencePath", "Full"] + for dependency in dependencies: + # check to make sure we have a path + if "Path" not in dependency: + _mu_error("Path not found in dependency {0}".format(dependency)) + # check to be sure we have a valid url and we can reach it + if "Url" not in dependency: + _mu_error("Url not found in dependency {0}".format(dependency)) + if not _check_url(dependency["Url"]): + _mu_error("Invalid URL {0}".format(dependency["Url"])) + # make sure we have a valid branch or commit + if "Branch" not in dependency and "Commit" not in dependency: + _mu_error("You 
must have a commit or a branch in dependency {0}".format(dependency)) + if "Branch" in dependency and "Commit" in dependency: + _mu_error("You cannot have both a commit and a branch in dependency {0}".format(dependency)) + if "ReferencePath" in dependency: + if dependency["ReferencePath"] is not None and not os.path.isdir(dependency["ReferencePath"]): + _mu_error("This cache does not exist: {0}".format(dependency)) + # check to make sure we don't have something else in there + for attribute in dependency: + if attribute not in valid_attributes: + _mu_error("Unknown attribute {0} in dependency".format(attribute)) + + return True + + config_rules = { + "required": { + "Name": { + "type": "str" + }, + "GroupName": { + "type": "str" + }, + "Scopes": { + "type": "list", + "items": "str" + }, + "ArchSupported": { + "type": "list", + "validator": _is_valid_arch + }, + "RelativeWorkspaceRoot": { + "type": "str", + "validator": _is_valid_dir + }, + "Targets": { + "type": "list" + } + }, + "optional": { + "Packages": { + "type": "list", + "items": "str", + "validator": _check_packages + }, + "PackagesPath": { + "type": "list", + "items": "str" + }, + "Dependencies": { + "type": "list", + "validator": _check_dependencies + }, + "OmnicachePath": { + "type": "str", + "validator": _is_valid_dir + } + } + } + + for plugin in pluginList: + if "module" in plugin.descriptor: + plugin_name = plugin.descriptor["module"] + if "config_name" in plugin.descriptor: + plugin_name = plugin.descriptor["config_name"] + config_rules["optional"][plugin_name] = { + "validator": plugin.Obj.ValidateConfig + } + + # check that all the required attributes are satisfied + for rule in config_rules["required"]: + if rule not in config: + _mu_error("{0} is a required attribute in your MU Config".format(rule)) + + if "type" in config_rules["required"][rule]: + config_type = str(type(config[rule]).__name__) + wanted_type = config_rules["required"][rule]["type"] + if config_type != wanted_type: + _mu_error("{0} is a required attribute and is not the correct type. " + "We are expecting a {1} and got a {2}".format(rule, wanted_type, config_type)) + + if "validator" in config_rules["required"][rule]: + validator = config_rules["required"][rule]["validator"] + validator(config[rule], "Base mu.json") + + # check optional types + for rule in config_rules["optional"]: + if rule not in config: + continue + + if "type" in config_rules["optional"][rule]: + config_type = str(type(config[rule]).__name__) + wanted_type = config_rules["optional"][rule]["type"] + if config_type != wanted_type: + _mu_error("{0} is an optional attribute and is not the correct type. " "We are expecting a {1} and got a {2}".format(rule, wanted_type, config_type)) + + if "validator" in config_rules["optional"][rule]: + validator = config_rules["optional"][rule]["validator"] + validator(config[rule], "Base mu.json") + + # check to make sure we don't have any stray keys in there + for rule in config: + if rule not in config_rules["optional"] and rule not in config_rules["required"]: + _mu_error("Unknown parameter {0} is unexpected".format(rule)) + + return True + + +''' +{ + "Defines": { + "PLATFORM_NAME": "MdeModule", + "DSC_SPECIFICATION": "0x00010005", + "SUPPORTED_ARCHITECTURES": "IA32|X64|ARM|AARCH64", + "BUILD_TARGETS": "DEBUG|RELEASE" + }, + "CompilerPlugin": { + "skip":false, + "IgnoreInf": [] + }, + "DependencyCheckPlugin":{ + "AcceptableDependencies": [ + "MdePkg/MdePkg.dec", + "MdeModulePkg/MdeModulePkg.dec", + "MsUnitTestPkg/MsUnitTestPkg.dec" + ], + "IgnoreInf": { + + }, + "skip": false + } +} +''' +## +# Checks the package configuration for errors +## + + +def check_package_confg(name, config, pluginList): + def _mu_error(message): + raise RuntimeError("Package {0} Config Error: {1}".format(name, message)) + + config_rules = { + "required": { + }, + "optional": { + "Defines": { + "type": "dict", + "items": "str" + } + } + } + for plugin in pluginList: + if "module" in plugin.descriptor: + plugin_name = plugin.descriptor["module"] + if "config_name" in plugin.descriptor: + plugin_name = plugin.descriptor["config_name"] + # add the validator + config_rules["optional"][plugin_name] = { + "validator": plugin.Obj.ValidateConfig + } + + # check that all the required attributes are satisfied + for rule in config_rules["required"]: + if rule not in config: + _mu_error("{0} is a required attribute in your MU Config".format(rule)) + + if "type" in config_rules["required"][rule]: + config_type = str(type(config[rule]).__name__) + wanted_type = config_rules["required"][rule]["type"] + if config_type != wanted_type: + _mu_error("{0} is a required attribute and is not the correct type. " + "We are expecting a {1} and got a {2}".format(rule, wanted_type, config_type)) + + if "validator" in config_rules["required"][rule]: + validator = config_rules["required"][rule]["validator"] + validator(config[rule], name) + + # check optional types + for rule in config_rules["optional"]: + if rule not in config: + continue + + if "type" in config_rules["optional"][rule]: + config_type = str(type(config[rule]).__name__) + wanted_type = config_rules["optional"][rule]["type"] + if config_type != wanted_type: + _mu_error("{0} is an optional attribute and is not the correct type. " + "We are expecting a {1} and got a {2}".format(rule, wanted_type, config_type)) + + if "validator" in config_rules["optional"][rule]: + validator = config_rules["optional"][rule]["validator"] + validator(config[rule], name) + + # check to make sure we don't have any stray keys in there + # for rule in config: + # if rule not in config_rules["optional"] and rule not in config_rules["required"]: + # _mu_error("Unknown parameter {0} is unexpected".format(rule)) diff --git a/edk2toolext/edk2_git.py b/edk2toolext/edk2_git.py new file mode 100644 index 00000000..674b1d43 --- /dev/null +++ b/edk2toolext/edk2_git.py @@ -0,0 +1,268 @@ +# @file edk2_git.py +# This module contains code that supports simple git operations.
This should +# not be used as an extensive git lib but as what is needed for CI/CD builds + +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import logging +from io import StringIO +from edk2toollib.utility_functions import RunCmd + + +class ObjectDict(object): + def __init__(self): + self.__values = list() + + def __setattr__(self, key, value): + if not key.startswith("_"): + self.__values.append(key) + super().__setattr__(key, value) + + def __str__(self): + result = list() + result.append("ObjectDict:") + for value in self.__values: + result.append(value + ":" + str(getattr(self, value))) + return "\n".join(result) + + def set(self, key, value): + self.__setattr__(key, value) + + +class Repo(object): + + def __init__(self, path=None): + self._path = path # the path that the repo is pointed at + self.active_branch = None # the active branch or none if detached + self.bare = True # if the repo is bare + self.exists = False # if the .git folder exists + self.remotes = ObjectDict() + self.initalized = False # if there is a git repo at the directory + self.url = None # the origin remote + self.dirty = False # if there are changes + self.head = None # the head commit that this repo is at + self.submodules = None # List of submodule paths + self._logger = logging.getLogger("git.repo") # create the logger before reading git state so the error path below can use it + self._update_from_git() + + # Refresh the cached repo state from the underlying git repo + def _update_from_git(self): + + if os.path.isdir(self._path): + try: + self.exists = True + self.active_branch = self._get_branch() + self.remotes = self._get_remotes() + self.head = self._get_head() + self.dirty = self._get_dirty() + self.url = self._get_url() + self.bare = self._get_bare() + self.initalized = self._get_initalized() + self.submodules = self._get_submodule_list() + except Exception as e: + self._logger.error("GIT ERROR for {0}".format(self._path)) + self._logger.error(e) + raise e + return False + + def _get_submodule_list(self): + submodule_list = [] + return_buffer = StringIO() + params = "config --file .gitmodules --get-regexp path" + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + p1 = return_buffer.getvalue().strip() + return_buffer.close() + if (len(p1) > 0): + submodule_list = p1.split("\n") + for i in range(0, len(submodule_list)): + submodule_list[i] = submodule_list[i].split(' ')[1] + return submodule_list + + def _get_remotes(self): + return_buffer = StringIO() + params = "remote" + new_remotes = ObjectDict() + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + p1 = return_buffer.getvalue().strip() + return_buffer.close() + remote_list = p1.split("\n") + for remote in remote_list: + url = ObjectDict() + url.set("url", self._get_url(remote)) + setattr(new_remotes, remote, url) + + return new_remotes + + def _get_url(self, remote="origin"): + return_buffer = StringIO() + params = "config --get remote.{0}.url".format(remote) + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + return_buffer.close() + return p1 + + def _get_dirty(self): + return_buffer = StringIO() + params = "status --short" + + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + return_buffer.close() + + if len(p1) > 0: + return True + + return_buffer = StringIO() + params = "log --branches --not --remotes --decorate --oneline" + + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + + p1 = 
return_buffer.getvalue().strip() + return_buffer.close() + + if len(p1) > 0: + return True + + return False + + def _get_branch(self): + return_buffer = StringIO() + params = "rev-parse --abbrev-ref HEAD" + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + return_buffer.close() + return p1 + + def _get_head(self): + return_buffer = StringIO() + params = "rev-parse HEAD" + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + return_buffer.close() + + head = ObjectDict() + head.set("commit", p1) + + return head + + def _get_bare(self): + return_buffer = StringIO() + params = "rev-parse --is-bare-repository" + RunCmd("git", params, workingdir=self._path, outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + return_buffer.close() + if p1.lower() == "true": + return True + else: + return False + + def _get_initalized(self): + return os.path.isdir(os.path.join(self._path, ".git")) + + def submodule(self, command, *args): + self._logger.debug( + "Calling command on submodule {0} with {1}".format(command, args)) + return_buffer = StringIO() + flags = " ".join(args) + params = "submodule {0} {1}".format(command, flags) + + ret = RunCmd("git", params, workingdir=self._path, + outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + if ret != 0: + self._logger.error(p1) + return False + + return True + + def fetch(self): + return_buffer = StringIO() + + params = "fetch" + + ret = RunCmd("git", params, workingdir=self._path, + outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + if ret != 0: + self._logger.error(p1) + return False + + return True + + def pull(self): + return_buffer = StringIO() + + params = "pull" + + ret = RunCmd("git", params, workingdir=self._path, + outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + if ret != 0: + self._logger.error(p1) + return False + + return True + + def checkout(self, branch=None, commit=None): + return_buffer = StringIO() + if branch is not None: + params = "checkout %s" % branch + elif commit is not None: + params = "checkout %s" % commit + ret = RunCmd("git", params, workingdir=self._path, + outstream=return_buffer) + + p1 = return_buffer.getvalue().strip() + if ret != 0: + self._logger.debug(p1) + return False + + return True + + @classmethod + def clone_from(self, url, to_path, progress=None, env=None, shallow=False, reference=None, **kwargs): + _logger = logging.getLogger("git.repo") + _logger.debug("Cloning {0} into {1}".format(url, to_path)) + # make sure we get the commit if + # use run command from utilities + cmd = "git" + params = ["clone"] + if shallow: + params.append("--shallow-submodules") + if reference: + params.append("--reference %s" % reference) + else: + params.append("--recurse-submodules") # if we don't have a reference we can just recurse the submodules + params.append(url) + params.append(to_path) + + # Combine all the parameters together + param_string = " ".join(params) + + ret = RunCmd(cmd, param_string) + + if ret != 0: + logging.error("ERROR CLONING ") + return None + + # if we have a reference path we must init the submodules + if reference: + params = ["submodule", "update", "--init", "--recursive"] + params.append("--reference %s" % reference) + param_string = " ".join(params) + ret = RunCmd(cmd, param_string) + + return Repo(to_path) diff --git a/edk2toolext/edk2_invocable.py b/edk2toolext/edk2_invocable.py new file mode 100644 index 
00000000..a55740f8 --- /dev/null +++ b/edk2toolext/edk2_invocable.py @@ -0,0 +1,167 @@ +# @file edk2_invocable +# Middle layer providing Project Mu specific parsing to Invocable tools. +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import sys +import logging +import argparse +from edk2toolext.environment import shell_environment +from edk2toollib.utility_functions import GetHostInfo +from edk2toollib.utility_functions import locate_class_in_module +from edk2toollib.utility_functions import import_module_by_file_name +from edk2toolext.base_abstract_invocable import BaseAbstractInvocable + + +class Edk2Invocable(BaseAbstractInvocable): + + def GetWorkspaceRoot(self): + try: + return self.PlatformSettings.GetWorkspaceRoot() + except AttributeError: + raise RuntimeError("Can't call this before PlatformSettings has been set up!") + + def GetActiveScopes(self): + try: + scopes = self.PlatformSettings.GetActiveScopes() + except AttributeError: + raise RuntimeError("Can't call this before PlatformSettings has been set up!") + + # Add any OS-specific scope. + if GetHostInfo().os == "Windows": + scopes += ('global-win',) + elif GetHostInfo().os == "Linux": + scopes += ('global-nix',) + # Add the global scope. To be deprecated. + scopes += ('global',) + return scopes + + def GetLoggingLevel(self, loggerType): + ''' Get the logging level for a given type + base == lowest logging level supported + con == Screen logging + txt == plain text file logging + md == markdown file logging + ''' + if(loggerType == "con") and not self.Verbose: + return logging.INFO + return logging.DEBUG + + def AddCommandLineOptions(self, parserObj): + ''' Implement in subclass to add command line options to the argparser ''' + pass + + def RetrieveCommandLineOptions(self, args): + ''' Implement in subclass to retrieve command line options from the argparser ''' + pass + + def GetSettingsClass(self): + ''' Child class should provide the class that contains their required settings ''' + raise NotImplementedError() + + def GetLoggingFolderRelativeToRoot(self): + return "Build" + + def ParseCommandLineOptions(self): + ''' + Parses command line options. + Sets up argparser specifically to get PlatformSettingsManager instance. + Then sets up second argparser and passes it to child class and to PlatformSettingsManager. + Finally, parses all known args and then reads the unknown args in to build vars. + ''' + # first argparser will only get settings manager and help will be disabled + settingsParserObj = argparse.ArgumentParser(add_help=False) + # instantiate the second argparser that will get passed around + + epilog = ''' +<key>=<value> - Set an env variable for the pre/post build process +BLD_*_<key>=<value> - Set a build flag for all build types. +Key=value will get passed to build process +BLD_<TARGET>_<key>=<value> - Set a build flag for build type of <target> +Key=value will get passed to build process for given build type''' + + parserObj = argparse.ArgumentParser(epilog=epilog) + + settingsParserObj.add_argument('-c', '--platform_module', dest='platform_module', + default="PlatformBuild.py", type=str, + help='Provide the Platform Module relative to the current working directory. '
+ f'This should contain a {self.GetSettingsClass().__name__} instance.') + + # get the settings manager from the provided file and load an instance + settingsArg, unknown_args = settingsParserObj.parse_known_args() + try: + self.PlatformModule = import_module_by_file_name(os.path.abspath(settingsArg.platform_module)) + self.PlatformSettings = locate_class_in_module( + self.PlatformModule, self.GetSettingsClass())() + except (TypeError, FileNotFoundError) as e: + # Gracefully exit if setup doesn't go well. + try: + # If this works, we can provide help for whatever special functions + # the subclass is offering. + self.AddCommandLineOptions(settingsParserObj) + except: + # If it didn't work, oh well. + pass + print(e) + settingsParserObj.print_help() + sys.exit(0) + + # now to get the big arg parser going... + # first pass it to the subclass + self.AddCommandLineOptions(parserObj) + + # next pass it to the settings manager + self.PlatformSettings.AddCommandLineOptions(parserObj) + + default_build_config_path = os.path.join(self.GetWorkspaceRoot(), "BuildConfig.conf") + + # add the common stuff that everyone will need + parserObj.add_argument('--build-config', dest='build_config', default=default_build_config_path, type=str, + help='Provide shell variables in a file') + parserObj.add_argument('--verbose', '--VERBOSE', '-v', dest="verbose", action='store_true', default=False, + help='verbose') + + # setup sys.argv and argparse round 2 + sys.argv = [sys.argv[0]] + unknown_args + args, unknown_args = parserObj.parse_known_args() + self.Verbose = args.verbose + + # give the parsed args to the subclass + self.RetrieveCommandLineOptions(args) + + # give the parsed args to platform settings manager + self.PlatformSettings.RetrieveCommandLineOptions(args) + + # + # Look through unknown_args and BuildConfig for strings that are x=y, + # set env.SetValue(x, y), + # then remove this item from the list. + # + env = shell_environment.GetBuildVars() + BuildConfig = os.path.abspath(args.build_config) + + if os.path.isfile(BuildConfig): + with open(BuildConfig) as file: + for line in file: + stripped_line = line.strip() + if not stripped_line.startswith("#"): + unknown_args.append(line) + + i = 0 + while i < len(unknown_args): + unknown_arg = unknown_args[i] + print(f"unknown arg {unknown_arg}") + if(unknown_arg.count("=") == 1): + tokens = unknown_arg.strip().split("=") + env.SetValue(tokens[0].strip().upper(), tokens[1].strip(), "From CmdLine") + del unknown_args[i] + else: + i += 1 + + if len(unknown_args) > 0: + parserObj.print_help() + raise RuntimeError(f"Unknown variables passed in: {unknown_args}") diff --git a/edk2toolext/edk2_logging.py b/edk2toolext/edk2_logging.py new file mode 100644 index 00000000..513be678 --- /dev/null +++ b/edk2toolext/edk2_logging.py @@ -0,0 +1,268 @@ +# @file edk2_logging.py +# Handle basic logging config for builds; +# splits logs into a master log and per package. 
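+# In addition to the standard levels, this module registers three custom levels
+# (SECTION, SUB_SECTION, PROGRESS) so builds can emit section headers and
+# progress markers, for example: logging.log(edk2_logging.SECTION, "Start Invocable Tool")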
+## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import logging +import os +import shutil +import re + +try: + from edk2toollib.log import ansi_handler +except ImportError: + ansi_handler = None +try: + from edk2toollib.log import markdown_handler +except ImportError: + markdown_handler = None +try: + from edk2toollib.log import string_handler +except ImportError: + string_handler = None +try: + from edk2toollib.log import file_handler +except ImportError: + file_handler = logging + + +# These three are for emitting different events +# section is for marking different sections of the build process +# subsection is similar to sub section but denotes a subsection of the current section +# both of the section levels are high enough that they won't get filtered out +# progress is for marking things like a process completed. Similar to critical but doesn't mean the process is exiting +# progress is below critical so it can be turned off but still high enough that it doesn't get filtered out +SECTION = logging.CRITICAL + 2 # just above critical +SUB_SECTION = logging.CRITICAL + 1 # just above critical +PROGRESS = logging.CRITICAL - 1 # just below critical + + +# sub_directory is relative to ws argument +def clean_build_logs(ws, sub_directory=None): + # Make sure that we have a clean environment. + if sub_directory is None: + sub_directory = os.path.join("Build", "BuildLogs") + if os.path.isdir(os.path.join(ws, sub_directory)): + shutil.rmtree(os.path.join(ws, sub_directory)) + + +def get_section_level(): + return SECTION + + +def get_subsection_level(): + return SUB_SECTION + + +def get_progress_level(): + return PROGRESS + + +def get_mu_filter(verbose=False): + # gMuFilter = MuLogFilter.instance() + gMuFilter = MuLogFilter() + if verbose: + gMuFilter.setVerbose(verbose) + return gMuFilter + + +def log_progress(message): + logging.log(get_progress_level(), message) + + +def setup_section_level(): + # todo define section level + # add section as a level to the logger + section_level = get_section_level() + subsection_level = get_subsection_level() + progress_level = get_progress_level() + if logging.getLevelName(section_level) != "SECTION": + logging.addLevelName(section_level, "SECTION") + if logging.getLevelName(subsection_level) != "SUBSECTION": + logging.addLevelName(subsection_level, "SUBSECTION") + if logging.getLevelName(progress_level) != "PROGRESS": + logging.addLevelName(progress_level, "PROGRESS") + + +# creates the the plaintext logger +def setup_txt_logger(directory, filename="log", logging_level=logging.INFO, + formatter=None, logging_namespace='', isVerbose=False): + logger = logging.getLogger(logging_namespace) + log_formatter = formatter + if log_formatter is None: + log_formatter = logging.Formatter("%(levelname)s - %(message)s") + + if not os.path.isdir(directory): + os.makedirs(directory) + + # Create file logger + logfile_path = os.path.join(directory, filename + ".txt") + filelogger = file_handler.FileHandler(filename=(logfile_path), mode='w+') + filelogger.setLevel(logging_level) + filelogger.setFormatter(log_formatter) + logger.addHandler(filelogger) + + filelogger.addFilter(get_mu_filter(isVerbose)) + + return logfile_path, filelogger + + +# creates the markdown logger +def setup_markdown_logger(directory, filename="log", logging_level=logging.INFO, + formatter=None, logging_namespace='', isVerbose=False): + + logger = logging.getLogger(logging_namespace) + log_formatter = formatter + if log_formatter is None: + 
log_formatter = logging.Formatter("%(levelname)s - %(message)s") + + if not os.path.isdir(directory): + os.makedirs(directory) + + # add markdown handler + markdown_filename = filename + ".md" + markdown_path = os.path.join(directory, markdown_filename) + if markdown_handler: + markdownHandler = markdown_handler.MarkdownFileHandler(markdown_path, mode="w+") + else: + markdownHandler = logging.FileHandler(markdown_path, mode="w+") + markdownHandler.setFormatter(log_formatter) + + if logging_level <= logging.DEBUG: + logging_level = logging.INFO # we don't show debugging output in markdown since it gets too full + + markdownHandler.addFilter(get_mu_filter(isVerbose)) + + markdownHandler.setLevel(logging_level) + logger.addHandler(markdownHandler) + + return markdown_path, markdownHandler + + +# sets up a colored console logger +def setup_console_logging(logging_level=logging.INFO, formatter=None, logging_namespace='', + isVerbose=False, use_azure_colors=False, use_color=True): + + if formatter is None and isVerbose: + formatter_msg = "%(name)s: %(levelname)s - %(message)s" + elif formatter is None: + formatter_msg = "%(levelname)s - %(message)s" + else: + formatter_msg = formatter + + formatter = logging.Formatter(formatter_msg) + + # create a safe handler so that any logging emitted when creating the ansi logger is handled + safeHandler = logging.StreamHandler() + safeHandler.setLevel(logging_level) + safeHandler.addFilter(get_mu_filter(isVerbose)) + safeHandler.setFormatter(formatter) + logger = logging.getLogger(logging_namespace) + logger.addHandler(safeHandler) + + # create the ansi logger if needed + if use_azure_colors or use_color and ansi_handler: + formatter = ansi_handler.ColoredFormatter(formatter_msg, use_azure=use_azure_colors) + coloredHandler = ansi_handler.ColoredStreamHandler() + coloredHandler.setLevel(logging_level) + coloredHandler.addFilter(get_mu_filter(isVerbose)) + coloredHandler.setFormatter(formatter) + # make sure to remove the safe handler so we don't have two handlers + logger.removeHandler(safeHandler) + logger.addHandler(coloredHandler) + return coloredHandler + # return the safe handler if we didn't create a colored handler + return safeHandler + + +def stop_logging(loghandle, logging_namespace=''): + logger = logging.getLogger(logging_namespace) + if loghandle is None: + return + if isinstance(loghandle, list): + # if it's an array, process each element as a handle + for handle in loghandle: + handle.close() + logger.removeHandler(handle) + else: + loghandle.close() + logger.removeHandler(loghandle) + + +def create_output_stream(level=logging.INFO, logging_namespace=''): + # creates an output stream that is in memory + if string_handler: + handler = string_handler.StringStreamHandler() + else: + handler = logging.StreamHandler() + logger = logging.getLogger(logging_namespace) + handler.setLevel(level) + logger.addHandler(handler) + return handler + + +def remove_output_stream(handler, logging_namespace=''): + logger = logging.getLogger(logging_namespace) + if isinstance(handler, list): + for single_handler in handler: + logger.removeHandler(single_handler) + else: + logger.removeHandler(handler) + +# TODO: how to merge this into mu_build since this is copy and pasted + + +def scan_compiler_output(output_stream): + # seek to the start of the output stream + problems = [] + output_stream.seek(0, 0) + error_exp = re.compile(r"error C(\d+):") + edk2_error_exp = re.compile(r"error F(\d+):") + buildpy_error_exp = re.compile(r"error (\d+)E:") + linker_error_exp = 
re.compile(r"error LNK(\d+):") + warning_exp = re.compile(r"warning C(\d+):") + for raw_line in output_stream.readlines(): + line = raw_line.strip("\n").strip() + match = error_exp.search(line) + if match is not None: + problems.append((logging.ERROR, "Compile: Error: {0}".format(line))) + match = warning_exp.search(line) + if match is not None: + problems.append((logging.WARNING, "Compile: Warning: {0}".format(line))) + match = linker_error_exp.search(line) + if match is not None: + problems.append((logging.ERROR, "Linker: Error: {0}".format(line))) + match = edk2_error_exp.search(line) + if match is not None: + problems.append((logging.ERROR, "EDK2: Error: {0}".format(line))) + match = buildpy_error_exp.search(line) + if match is not None: + problems.append((logging.ERROR, "Build.py: Error: {0}".format(line))) + return problems + + +class MuLogFilter(logging.Filter): + _allowedLoggers = ["root"] + + def __init__(self): + logging.Filter.__init__(self) + self._verbose = False + self._currentSection = "root" + + def setVerbose(self, isVerbose=True): + self._verbose = isVerbose + + def addSection(self, section): + # TODO request the global singleton? + # how to make this class static + MuLogFilter._allowedLoggers.append(section) + + def filter(self, record): + # check to make sure we haven't already filtered this record + if record.name not in MuLogFilter._allowedLoggers and record.levelno < logging.CRITICAL and not self._verbose: + return False + + return True diff --git a/edk2toolext/environment/__init__.py b/edk2toolext/environment/__init__.py new file mode 100644 index 00000000..0a69010b --- /dev/null +++ b/edk2toolext/environment/__init__.py @@ -0,0 +1,5 @@ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## diff --git a/edk2toolext/environment/conf_mgmt.py b/edk2toolext/environment/conf_mgmt.py new file mode 100644 index 00000000..9b6df4c3 --- /dev/null +++ b/edk2toolext/environment/conf_mgmt.py @@ -0,0 +1,234 @@ +# @file conf_mgmt.py +# Handle Edk2 Conf management +# Customized for edk2-pytools-extensions based build and supports dynamic Visual studio support 2017++ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import logging +import shutil +import time +from edk2toolext.environment import shell_environment +from edk2toollib.windows.locate_tools import FindWithVsWhere +from edk2toolext.environment import version_aggregator + + +class ConfMgmt(): + + def __init__(self, OverrideConf, AdditionalTemplateConfDir): + self.Logger = logging.getLogger("ConfMgmt") + self.env = shell_environment.GetBuildVars() + if (self.env.GetValue("WORKSPACE") is None) or \ + (self.env.GetValue("EDK2_BASE_TOOLS_DIR") is None): + raise Exception("WORKSPACE and EDK2_BASE_TOOLS_DIR must be set prior to running ConfMgmt") + self.__PopulateConf(OverrideConf, AdditionalTemplateConfDir) + + # + # Get the version of a conf file + # + def __GetVersion(self, confFile): + version = "Unknown" + f = open(confFile, "r") + for l in f.readlines(): + if(l.startswith("#!VERSION=")): + version = str(float(l.split("=")[1].split()[0].strip())) + break + + f.close() + return version + + # + # Compare the version of the existing conf file to the template file + # + def __OlderVersion(self, confFile, confTemplateFile): + conf = 0 + template = 0 + + f = open(confFile, "r") + for l in f.readlines(): + if(l.startswith("#!VERSION=")): + conf = float(l.split("=")[1].split()[0].strip()) + logging.debug("Conf version: %s", 
str(conf))
+                break
+
+        f.close()
+        f = open(confTemplateFile, "r")
+        for l in f.readlines():
+            if(l.startswith("#!VERSION=")):
+                template = float(l.split("=")[1].split()[0].strip())
+                logging.debug("Template Version: %s", str(template))
+                break
+        f.close()
+
+        return (conf < template)
+
+    def __PopulateConf(self, OverrideConf, AdditionalTemplateConfDir):
+        ws = self.env.GetValue("WORKSPACE")
+        # Copy Conf template files to conf if not present
+        target = os.path.join(ws, "Conf", "target.txt")
+        buildrules = os.path.join(ws, "Conf", "build_rule.txt")
+        toolsdef = os.path.join(ws, "Conf", "tools_def.txt")
+
+        # BaseTools Template files
+        target_template = os.path.join("Conf", "target.template")
+        tools_def_template = os.path.join("Conf", "tools_def.template")
+        build_rules_template = os.path.join("Conf", "build_rule.template")
+
+        outfiles = [target, toolsdef, buildrules]
+        tfiles = [target_template, tools_def_template, build_rules_template]
+
+        # check if conf exists
+        if(not os.path.isdir(os.path.join(ws, "Conf"))):
+            os.mkdir(os.path.join(ws, "Conf"))
+
+        x = 0
+        while(x < len(outfiles)):
+            # check if the conf file already exists
+            # don't overwrite if it exists; warn the user if the version in Conf is older
+            TemplateFilePath = ""
+            Tag = self.env.GetValue("TOOL_CHAIN_TAG")
+
+            if Tag is None:
+                Tag = ""
+
+            #
+            # Get the Override template if it exists
+            #
+            if(AdditionalTemplateConfDir is not None):
+                fp = os.path.join(AdditionalTemplateConfDir, tfiles[x] + ".ms")
+                if os.path.isfile(fp):
+                    TemplateFilePath = fp
+
+            #
+            # If not found, try toolchain specific templates
+            #
+            if(TemplateFilePath == "" and Tag.upper().startswith("VS")):
+                fp = os.path.join(self.env.GetValue(
+                    "EDK2_BASE_TOOLS_DIR"), tfiles[x] + ".vs")
+                if os.path.isfile(fp):
+                    TemplateFilePath = fp
+
+            if(TemplateFilePath == "" and Tag.upper().startswith("GCC")):
+                fp = os.path.join(self.env.GetValue(
+                    "EDK2_BASE_TOOLS_DIR"), tfiles[x] + ".gcc")
+                if os.path.isfile(fp):
+                    TemplateFilePath = fp
+
+            #
+            # If not found above try MS templates
+            #
+            if(TemplateFilePath == ""):
+                fp = os.path.join(self.env.GetValue(
+                    "EDK2_BASE_TOOLS_DIR"), tfiles[x] + ".ms")
+                if os.path.isfile(fp):
+                    TemplateFilePath = fp
+
+            #
+            # If not found above try TianoCore Template
+            #
+            if(TemplateFilePath == ""):
+                fp = os.path.join(self.env.GetValue(
+                    "EDK2_BASE_TOOLS_DIR"), tfiles[x])
+                if os.path.isfile(fp):
+                    TemplateFilePath = fp
+
+            #
+            # Check to see if found yet -- No more options so now we are broken
+            #
+            if(TemplateFilePath == ""):
+                self.Logger.critical(
+                    "Failed to find Template file for %s" % outfiles[x])
+                raise Exception("Template File Missing", outfiles[x])
+            else:
+                self.Logger.debug("Conf file template: [%s]", TemplateFilePath)
+
+            # Check to see if we need the template
+            if(not os.path.isfile(outfiles[x])):
+                # file doesn't exist.  copy template
+                self.Logger.debug("%s file not found.  Creating from Template file %s" % (
+                    outfiles[x], TemplateFilePath))
+                shutil.copy2(TemplateFilePath, outfiles[x])
+
+            elif(OverrideConf):
+                self.Logger.debug(
+                    "%s file replaced as requested" % outfiles[x])
+                shutil.copy2(TemplateFilePath, outfiles[x])
+            else:
+                # Both files exist.  Do a quick version check
+                if(self.__OlderVersion(outfiles[x], TemplateFilePath)):
+                    # Conf dir is older.  Warn user.
+                    self.Logger.critical(
+                        "Conf file [%s] out-of-date.  Please update your conf files!  "
+                        "Sleeping 30 seconds to encourage update....", outfiles[x])
+                    time.sleep(30)
+                else:
+                    self.Logger.debug("Conf file [%s] up-to-date", outfiles[x])
+            version_aggregator.GetVersionAggregator().ReportVersion(outfiles[x], self.__GetVersion(outfiles[x]),
+                                                                    version_aggregator.VersionTypes.INFO)
+            x = x + 1
+        # end of while loop
+
+    def ToolsDefConfigure(self):
+        Tag = self.env.GetValue("TOOL_CHAIN_TAG")
+        version_aggregator.GetVersionAggregator().ReportVersion(
+            "TOOL_CHAIN_TAG", Tag, version_aggregator.VersionTypes.TOOL)
+        if (Tag is not None) and (Tag.upper().startswith("VS")):
+            if (not self.VisualStudioSpecificVersions(Tag)):
+                self.Logger.warning("Potential Toolchain issue.  VS specific operation failed.")
+        return 0
+
+    def VisualStudioSpecificVersions(self, ToolChainTag: str):
+        ''' Supports VS specific operations for dynamically setting
+            the vs tool paths and logging the critical version information.
+            returns True for success otherwise False
+        '''
+
+        # internal functions
+        def GetVsInstallPath(vsversion, varname):
+            # check if already specified
+            path = shell_environment.ShellEnvironment.get_shell_var(varname)
+            if(path is None):
+                # Not specified...find latest
+                (rc, path) = FindWithVsWhere(vs_version=vsversion)
+                if rc == 0 and path is not None:
+                    self.Logger.debug("Found VS instance for %s", vsversion)
+                    shell_environment.ShellEnvironment.set_shell_var(varname, path)
+                else:
+                    self.Logger.error("Failed to find VS instance with VsWhere (%d)" % rc)
+            return path
+
+        def GetVcVersion(path, varname):
+            # check if already specified
+            vc_ver = shell_environment.ShellEnvironment.get_shell_var(varname)
+            if(vc_ver is None):
+                # Not specified...find latest
+                p2 = os.path.join(path, "VC", "Tools", "MSVC")
+                if not os.path.isdir(p2):
+                    self.Logger.critical(
+                        "Failed to find VC tools.  Might need to check for VS install")
+                    return vc_ver
+                vc_ver = os.listdir(p2)[-1].strip()  # get last in list
+                self.Logger.debug("Found VC Tool version is %s" % vc_ver)
+                shell_environment.ShellEnvironment.set_shell_var(varname, vc_ver)
+
+            if(vc_ver):
+                version_aggregator.GetVersionAggregator().ReportVersion(
+                    "VC Version", vc_ver, version_aggregator.VersionTypes.TOOL)
+            return vc_ver
+
+        if ToolChainTag.lower() == "vs2019":
+            ipath = GetVsInstallPath(ToolChainTag.lower(), "VS160INSTALLPATH")
+            iver = GetVcVersion(ipath, "VS160TOOLVER")
+            return (ipath is not None) and (iver is not None)
+
+        elif ToolChainTag.lower() == "vs2017":
+            ipath = GetVsInstallPath(ToolChainTag.lower(), "VS150INSTALLPATH")
+            iver = GetVcVersion(ipath, "VS150TOOLVER")
+            return (ipath is not None) and (iver is not None)
+
+        else:
+            logging.warning("No dynamic support for this VS toolchain")
+            return False
diff --git a/edk2toolext/environment/environment_descriptor_files.py b/edk2toolext/environment/environment_descriptor_files.py
new file mode 100644
index 00000000..7aeffaac
--- /dev/null
+++ b/edk2toolext/environment/environment_descriptor_files.py
@@ -0,0 +1,125 @@
+# @file environment_descriptor_files.py
+# This module contains code for working with the JSON environment
+# descriptor files. It can parse the files, validate them, and return
+# objects representing their contents.
+#
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+import os
+import yaml
+
+
+class PathEnv(object):
+    def __init__(self, descriptor):
+        super(PathEnv, self).__init__()
+
+        #
+        # Set the data for this object.
+ # + self.scope = descriptor['scope'] + self.flags = descriptor['flags'] + self.var_name = descriptor.get('var_name', None) + + self.descriptor_location = os.path.dirname( + descriptor['descriptor_file']) + self.published_path = self.descriptor_location + + +class DescriptorFile(object): + def __init__(self, file_path): + super(DescriptorFile, self).__init__() + + self.file_path = file_path + self.descriptor_contents = None + + with open(file_path, 'r') as file: + try: + self.descriptor_contents = yaml.safe_load(file) + except: + pass # We'll pick up this error when looking at the data. + + # + # Make sure that we loaded the file successfully. + # + if self.descriptor_contents is None: + raise ValueError( + "Could not load contents of descriptor file '%s'!" % file_path) + + # The file path is an implicit descriptor field. + self.descriptor_contents['descriptor_file'] = self.file_path + + # All files require a scope. + if 'scope' not in self.descriptor_contents: + raise ValueError("File '%s' missing required field '%s'!" % + (self.file_path, 'scope')) + + # If a file has flags, make sure they're sane. + if 'flags' in self.descriptor_contents: + # If a flag requires a name, make sure a name is provided. + for name_required in ('set_shell_var', 'set_build_var'): + if name_required in self.descriptor_contents['flags']: + if 'var_name' not in self.descriptor_contents: + raise ValueError( + "File '%s' has a flag requesting a var, but does not provide 'var_name'!" % self.file_path) + + # clean up each string item for more reliable processing + for (k, v) in self.descriptor_contents.items(): + if(isinstance(v, str)): + self.descriptor_contents[k] = self.sanitize_string(v) + + # + # Clean up a string "value" in the descriptor file. + # + def sanitize_string(self, s): + # Perform any actions needed to clean the string. + return s.strip() + + +class PathEnvDescriptor(DescriptorFile): + def __init__(self, file_path): + super(PathEnvDescriptor, self).__init__(file_path) + + # + # Validate file contents. + # + # Make sure that the required fields are present. + for required_field in ('flags',): + if required_field not in self.descriptor_contents: + raise ValueError("File '%s' missing required field '%s'!" % ( + self.file_path, required_field)) + + +class ExternDepDescriptor(DescriptorFile): + def __init__(self, file_path): + super(ExternDepDescriptor, self).__init__(file_path) + + # + # Validate file contents. + # + # Make sure that the required fields are present. + for required_field in ('scope', 'type', 'name', 'source', 'version'): + if required_field not in self.descriptor_contents: + raise ValueError("File '%s' missing required field '%s'!" % ( + self.file_path, required_field)) + + +class PluginDescriptor(DescriptorFile): + def __init__(self, file_path): + super(PluginDescriptor, self).__init__(file_path) + + # + # Validate file contents. + # + # Make sure that the required fields are present. + for required_field in ('scope', 'name', 'module'): + if required_field not in self.descriptor_contents: + raise ValueError("File '%s' missing required field '%s'!" 
% ( + self.file_path, required_field)) + + # Make sure the module item doesn't have .py on the end + if(self.descriptor_contents["module"].lower().endswith(".py")): + # remove last 3 chars + self.descriptor_contents["module"] = self.descriptor_contents["module"][:-3] diff --git a/edk2toolext/environment/extdeptypes/__init__.py b/edk2toolext/environment/extdeptypes/__init__.py new file mode 100644 index 00000000..0a69010b --- /dev/null +++ b/edk2toolext/environment/extdeptypes/__init__.py @@ -0,0 +1,5 @@ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## diff --git a/edk2toolext/environment/extdeptypes/git_dependency.py b/edk2toolext/environment/extdeptypes/git_dependency.py new file mode 100644 index 00000000..62ad25f6 --- /dev/null +++ b/edk2toolext/environment/extdeptypes/git_dependency.py @@ -0,0 +1,106 @@ +# @file GitDependency.py +# This module implements ExternalDependency for a git repository +# This should only be used for read-only repositories. Any changes in +# these extdeps will be removed. +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import logging +from urllib.parse import urlsplit, urlunsplit +from edk2toolext.environment.external_dependency import ExternalDependency +from edk2toolext.environment import repo_resolver +from edk2toolext.edk2_git import Repo +from edk2toolext.environment import version_aggregator +from edk2toolext.environment import shell_environment + + +class GitDependency(ExternalDependency): + ''' + ext_dep fields: + - source: url for git clone + - version: commit from git repo + - url_creds_var: shell_var name for credential updating [optional] + ''' + + TypeString = "git" + + def __init__(self, descriptor): + super().__init__(descriptor) + + # Check to see whether this URL should be patched. + url_creds_var = descriptor.get('url_creds_var', None) + if url_creds_var is not None: + env = shell_environment.GetEnvironment() + url_creds = env.get_shell_var(url_creds_var) + if url_creds is not None: + # Break things up. + source_parts = urlsplit(self.source) + # Modify the URL host with the creds. + new_parts = (source_parts.scheme, + url_creds + '@' + source_parts.netloc, + source_parts.path, + source_parts.query, + source_parts.fragment) + # Put things back together. + self.source = urlunsplit(new_parts) + + self.repo_url = self.source + self.commit = self.version + self._local_repo_root_path = os.path.join(os.path.abspath(self.contents_dir), self.name) + self.logger = logging.getLogger("git-dependency") + + # valid_attributes = ["Path", "Url", "Branch", "Commit", "ReferencePath", "Full"] + self._repo_resolver_dep_obj = {"Path": self.name, "Url": self.repo_url, "Commit": self.commit} + + def fetch(self): + + # def resolve(file_system_path, dependency, force=False, ignore=False, update_ok=False): + repo_resolver.resolve(self._local_repo_root_path, self._repo_resolver_dep_obj, update_ok=True) + + # Add a file to track the state of the dependency. + self.update_state_file() + + def clean(self): + self.logger.debug("Cleaning git dependency directory for '%s'..." 
% self.name)
+
+        if os.path.isdir(self._local_repo_root_path):
+            # Clean up git dependency specific stuff
+            repo_resolver.clear_folder(self.contents_dir)
+
+        # Let super class clean up common dependency stuff
+        super().clean()
+
+    # override verify due to different scheme with git
+    def verify(self, logversion=True):
+        result = True
+
+        if not os.path.isdir(self._local_repo_root_path):
+            self.logger.error("no dir for Git Dependency")
+            result = False
+
+        if result and len(os.listdir(self._local_repo_root_path)) == 0:
+            self.logger.error("no files in Git Dependency")
+            result = False
+
+        if result:
+            # valid repo folder
+            r = Repo(self._local_repo_root_path)
+            if(not r.initalized):
+                self.logger.error("Git Dependency: Not Initialized")
+                result = False
+            elif(r.dirty):
+                self.logger.error("Git Dependency: dirty")
+                result = False
+
+            if(r.head.commit != self.version):
+                self.logger.error(f"Git Dependency: head is {r.head.commit} and version is {self.version}")
+                result = False
+
+        self.logger.debug("Verify '%s' returning '%s'." % (self.name, result))
+        if(logversion):
+            version_aggregator.GetVersionAggregator().ReportVersion(self.name, self.version,
+                                                                    version_aggregator.VersionTypes.INFO)
+        return result
diff --git a/edk2toolext/environment/extdeptypes/nuget_dependency.py b/edk2toolext/environment/extdeptypes/nuget_dependency.py
new file mode 100644
index 00000000..78357bdb
--- /dev/null
+++ b/edk2toolext/environment/extdeptypes/nuget_dependency.py
@@ -0,0 +1,178 @@
+# @file nuget_dependency.py
+# This module implements ExternalDependency for NuGet packages.
+#
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+import os
+import logging
+import shutil
+from io import StringIO
+from edk2toolext.environment.external_dependency import ExternalDependency
+from edk2toollib.utility_functions import RunCmd
+from edk2toollib.utility_functions import GetHostInfo
+import pkg_resources
+
+
+class NugetDependency(ExternalDependency):
+    TypeString = "nuget"
+    global_cache_path = None
+
+    ####
+    # Add mono to front of command and resolve full path of exe for mono,
+    # Used to add nuget support on posix platforms.
+    # https://docs.microsoft.com/en-us/nuget/install-nuget-client-tools
+    #
+    # @return list containing either ["nuget.exe"] or ["mono", "/PATH/TO/nuget.exe"]
+    ####
+    @staticmethod
+    def GetNugetCmd():
+        file = "NuGet.exe"
+        cmd = []
+        if GetHostInfo().os == "Linux":
+            cmd += ["mono"]
+        # TODO Find the Nuget from our bin file
+        requirement = pkg_resources.Requirement.parse("edk2-pytool-extensions")
+        nuget_file_path = os.path.join("edk2toolext", "bin", file)
+        nuget_path = pkg_resources.resource_filename(requirement, nuget_file_path)
+
+        # if we don't have it, look for nuget in the path
+        if not os.path.isfile(nuget_path):
+            for env_var in os.getenv("PATH").split(os.pathsep):
+                env_var = os.path.join(os.path.normpath(env_var), file)
+                if os.path.isfile(env_var):
+                    nuget_path = '"' + env_var + '"'
+                    break
+        # we've probably found something by now?
+        cmd += [nuget_path]
+        # if we're still hosed (strip the quotes added above before checking the file system)
+        if not os.path.isfile(nuget_path.strip('"')):
+            logging.error("We weren't able to find Nuget! Please reinstall your pip environment")
+            return None
+        return cmd
+
+    @staticmethod
+    def normalize_version(version):
+        version_parts = tuple(int(num) for num in version.split('.'))
+        if len(version_parts) > 4:
+            raise RuntimeError("Unparsable version '%s'!" % version)
+
+        # Remove extra trailing zeros (beyond 3 elements).
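+        # (worked example, illustrative only: "1.2.3.0" becomes (1, 2, 3, 0) and is
+        #  trimmed to "1.2.3", while a short "1.2" is padded below to "1.2.0")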
+        if len(version_parts) == 4 and version_parts[3] == 0:
+            version_parts = version_parts[0:3]
+
+        # Add missing trailing zeros (below 3 elements).
+        if len(version_parts) < 3:
+            version_parts = version_parts + (0,) * (3 - len(version_parts))
+
+        # Return reformed version.
+        return ".".join((str(num) for num in version_parts))
+
+    def _fetch_from_cache(self, package_name):
+        result = False
+
+        #
+        # We still need to use Nuget to figure out where the
+        # "global-packages" cache is on this machine.
+        #
+        if NugetDependency.global_cache_path is None:
+            cmd = NugetDependency.GetNugetCmd()
+            cmd += ["locals", "global-packages", "-list"]
+            return_buffer = StringIO()
+            if (RunCmd(cmd[0], " ".join(cmd[1:]), outstream=return_buffer) == 0):
+                # Seek to the beginning of the output buffer and capture the output.
+                return_buffer.seek(0)
+                return_string = return_buffer.read()
+                # str.strip() with an argument removes a *set* of characters,
+                # so slice the "global-packages: " prefix off instead.
+                NugetDependency.global_cache_path = return_string.strip()[len("global-packages: "):]
+
+        #
+        # If the path couldn't be found, we can't do anything else.
+        #
+        if NugetDependency.global_cache_path is None or not os.path.isdir(NugetDependency.global_cache_path):
+            logging.info(
+                "Could not determine Nuget global packages cache location.")
+            return False
+
+        #
+        # Now, try to locate our actual cache path
+        nuget_version = NugetDependency.normalize_version(self.version)
+        cache_search_path = os.path.join(
+            NugetDependency.global_cache_path, package_name.lower(), nuget_version, package_name)
+        if os.path.isdir(cache_search_path):
+            logging.info(
+                "Local Cache found for Nuget package '%s'. Skipping fetch.", package_name)
+            shutil.copytree(cache_search_path, self.contents_dir)
+            self.update_state_file()
+            result = True
+
+        return result
+
+    def fetch(self):
+        package_name = self.name
+        #
+        # Before trying anything with Nuget feeds,
+        # check to see whether the package is already in
+        # our local cache. If it is, we avoid a lot of
+        # time and network cost by copying it directly.
+        #
+        if self._fetch_from_cache(package_name):
+            # We successfully found the package in the cache.
+            # The published path may change now that the package has been unpacked.
+            # Bail.
+            self.published_path = self.compute_published_path()
+            return
+
+        #
+        # If we are still here, the package wasn't in the cache.
+        # We need to ask Nuget to find it.
+        #
+
+        #
+        # First, fetch the contents of the package.
+        #
+        temp_directory = self.get_temp_dir()
+        cmd = NugetDependency.GetNugetCmd()
+        cmd += ["install", package_name]
+        cmd += ["-Source", self.source]
+        cmd += ["-ExcludeVersion"]
+        cmd += ["-Version", self.version]
+        cmd += ["-Verbosity", "detailed"]
+        cmd += ["-OutputDirectory", '"' + temp_directory + '"']
+        RunCmd(cmd[0], " ".join(cmd[1:]))
+
+        #
+        # Next, copy the contents of the package to the
+        # final resting place.
+        #
+        # Depending on packaging, the package content will be in one of two
+        # possible locations:
+        # 1. temp_directory\package_name\package_name\
+        # 2. temp_directory\package_name\
+        #
+        source_dir = os.path.join(temp_directory, package_name, package_name)
+        if not os.path.isdir(source_dir):
+            source_dir = os.path.join(temp_directory, package_name)
+        shutil.move(source_dir, self.contents_dir)
+
+        #
+        # Add a file to track the state of the dependency.
+        #
+        self.update_state_file()
+
+        #
+        # Finally, delete the temp directory.
+        #
+        self._clean_directory(temp_directory)
+
+        # The published path may change now that the package has been unpacked.
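+        # (illustrative note: for a host_specific ext_dep this may now point at a
+        #  per-host subfolder; see compute_published_path in external_dependency.py)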
+ self.published_path = self.compute_published_path() + + def get_temp_dir(self): + return self.contents_dir + "_temp" + + def clean(self): + super(NugetDependency, self).clean() + if os.path.isdir(self.get_temp_dir()): + self._clean_directory(self.get_temp_dir()) diff --git a/edk2toolext/environment/extdeptypes/web_dependency.py b/edk2toolext/environment/extdeptypes/web_dependency.py new file mode 100644 index 00000000..fe6d7d73 --- /dev/null +++ b/edk2toolext/environment/extdeptypes/web_dependency.py @@ -0,0 +1,152 @@ +# @file web_dependency.py +# This module implements ExternalDependency for files that are available for download online. +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import logging +import shutil +import tarfile +import zipfile +import tempfile +import urllib.error +import urllib.request +from edk2toolext.environment.external_dependency import ExternalDependency + + +class WebDependency(ExternalDependency): + ''' + ext_dep fields: + - internal_path: Describes layout of what we're downloading. Include / at the beginning + if the ext_dep is a directory. Item located at internal_path will + unpacked into the ext_dep folder and this is what the path/shell vars + will point to when compute_published_path is run. + - compression_type: optional. supports zip and tar. If the file isn't compressed, do not include this field. + - sha256: optional. hash of downloaded file to be checked against. + ''' + + TypeString = "web" + + def __init__(self, descriptor): + super().__init__(descriptor) + self.internal_path = os.path.normpath(descriptor['internal_path']) + self.compression_type = descriptor.get('compression_type', None) + self.sha256 = descriptor.get('sha256', None) + + # If the internal path starts with a / that means we are downloading a directory + self.download_is_directory = self.internal_path.startswith(os.path.sep) + + # Now we can get rid of the leading / + self.internal_path = self.internal_path.strip(os.path.sep) + + def linuxize_path(path): + ''' + path: path that uses os.sep, to be replaced with / for compatibility with zipfile + ''' + return "/".join(path.split("\\")) + + def unpack(compressed_file_path, destination, internal_path, compression_type): + ''' + compressed_file_path: name of compressed file to unpack. + destination: directory you would like it unpacked into. + internal_path: internal structure of the compressed volume that you would like extracted. + compression_type: type of compression. tar and zip supported. + ''' + + # First, we will open the file depending on the type of compression we're dealing with. 
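+        # (illustrative call, hypothetical names: unpack("tmp/clang.zip", "tmp",
+        #  "clang" + os.sep + "bin", "zip") extracts only the members whose
+        #  names contain "clang/bin")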
+ + # tarfile and zipfile both use the Linux path seperator / instead of using os.sep + linux_internal_path = WebDependency.linuxize_path(internal_path) + + if compression_type == "zip": + logging.info(f"{compressed_file_path} is a zip file, trying to unpack it.") + _ref = zipfile.ZipFile(compressed_file_path, 'r') + files_in_volume = _ref.namelist() + + elif compression_type and "tar" in compression_type: + logging.info(f"{compressed_file_path} is a tar file, trying to unpack it.") + # r:* tells tarfile to look at the header and figure out how to extract it + _ref = tarfile.open(compressed_file_path, "r:*") + files_in_volume = _ref.getnames() + + else: + raise RuntimeError(f"{compressed_file_path} was labeled as {compression_type}, which is not supported.") + + # Filter the files inside to only the ones that are inside the important folder + files_to_extract = [name for name in files_in_volume if linux_internal_path in name] + + for file in files_to_extract: + _ref.extract(member=file, path=destination) + _ref.close() + + def get_internal_path_root(outer_dir, internal_path): + temp_path_root = internal_path.split(os.sep)[0] if os.sep in internal_path else internal_path + unzip_root = os.path.join(outer_dir, temp_path_root) + return unzip_root + + def fetch(self): + url = self.source + temp_folder = tempfile.mkdtemp() + temp_file_path = os.path.join(temp_folder, f"{self.name}_{self.version}") + + try: + # Download the file and save it locally under `temp_file_path` + with urllib.request.urlopen(url) as response, open(temp_file_path, 'wb') as out_file: + out_file.write(response.read()) + except urllib.error.HTTPError as e: + logging.error(f"ran into an issue when resolving ext_dep {self.name} at {self.source}") + raise e + + # check if file hash is as expected, if it was provided in the ext_dep.json + if self.sha256: + with open(temp_file_path, "rb") as file: + import hashlib + temp_file_sha256 = hashlib.sha256(file.read()).hexdigest() + if temp_file_sha256 != self.sha256: + raise RuntimeError(f"{self.name} - sha256 does not match\n\tdownloaded:" + f"\t{temp_file_sha256}\n\tin json:\t{self.sha256}") + + if os.path.isfile(temp_file_path) is False: + raise RuntimeError(f"{self.name} did not download") + + # Next, we will look at what's inside it and pull out the parts we need. + if self.compression_type: + WebDependency.unpack(temp_file_path, temp_folder, self.internal_path, self.compression_type) + + # internal_path points to the "important" part of the ext_dep we're unpacking + complete_internal_path = os.path.join(temp_folder, self.internal_path) + + # # If we're unpacking a directory, we can copy the important parts into + # # a directory named self.contents_dir + if self.download_is_directory: + # The root of the internal path is the folder we will see populated in descriptor_location + unzip_root = WebDependency.get_internal_path_root(self.descriptor_location, self.internal_path) + + logging.info(f"Copying directory from {complete_internal_path} to {self.contents_dir}") + if os.path.isdir(complete_internal_path) is False: + # internal_path was not accurate, exit + raise RuntimeError(f"{self.name} was expecting {complete_internal_path} to exist after unpacking") + + # Move the important folder out and rename it to contents_dir + shutil.move(complete_internal_path, self.contents_dir) + + # If the unzipped directory still exists, delete it. 
+ if os.path.isdir(unzip_root): + logging.debug(f"Cleaning up {unzip_root}") + shutil.rmtree(unzip_root) + + # If we just downloaded a file, we need to create a directory named self.contents_dir, + # copy the file inside, and name it self.internal_path + else: + logging.info(f"Copying file to {complete_internal_path}") + shutil.move(temp_file_path, complete_internal_path) + + # Add a file to track the state of the dependency. + self.update_state_file() + + # The published path may change now that the package has been unpacked. + self.published_path = self.compute_published_path() diff --git a/edk2toolext/environment/external_dependency.py b/edk2toolext/environment/external_dependency.py new file mode 100644 index 00000000..48dd6ad2 --- /dev/null +++ b/edk2toolext/environment/external_dependency.py @@ -0,0 +1,165 @@ +# @file external_dependencies.py +# This module contains helper objects that can manipulate, +# retrieve, validate, and clean external dependencies for the +# build environment. +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import logging +import shutil +import time +import yaml +from edk2toolext.environment import version_aggregator +from edk2toollib.utility_functions import GetHostInfo + + +class ExternalDependency(object): + ''' + ext_dep fields: + - scope: Determines if descriptor is included on a particular build. + - type: Type of ext_dep. + - name: Name of ext_dep, used to name the folder the ext_dep will be unpacked in to + - source: Source to query for ext_dep. + - version: Version string to keep track of what version is currently installed. + - flags: Flags dictating what actions should be taken once this dependency is resolved + More info: (docs/feature_extdep/) + - var_name: Used with set_*_var flag. Determines name of var to be set. + ''' + + def __init__(self, descriptor): + super(ExternalDependency, self).__init__() + + # + # Set the data for this object. + # + self.scope = descriptor['scope'] + self.type = descriptor['type'] + self.name = descriptor['name'] + self.source = descriptor['source'] + self.version = descriptor['version'] + self.flags = descriptor.get('flags', None) + self.var_name = descriptor.get('var_name', None) + + self.descriptor_location = os.path.dirname( + descriptor['descriptor_file']) + self.contents_dir = os.path.join( + self.descriptor_location, self.name + "_extdep") + self.state_file_path = os.path.join( + self.contents_dir, "extdep_state.json") + self.published_path = self.compute_published_path() + + def compute_published_path(self): + new_published_path = self.contents_dir + + if self.flags and "host_specific" in self.flags and self.verify(): + host = GetHostInfo() + + logging.info("Computing path for {0} located at {1} on {2}".format(self.name, + self.contents_dir, + str(host))) + + acceptable_names = [] + + # we want to list all the possible folders we would be comfortable using + # and then check if they are present. 
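+            # (e.g., assuming GetHostInfo() reports os "Windows", arch "x86", bit "64",
+            #  the candidates would be "Windows-x86-64", "Windows-x86", "Windows-64",
+            #  "x86-64", "Windows", "x86", "64", in that order)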
+ # The "ideal" directory name is OS-ARCH-BIT + acceptable_names.append("-".join((host.os, host.arch, host.bit))) + acceptable_names.append("-".join((host.os, host.arch))) + acceptable_names.append("-".join((host.os, host.bit))) + acceptable_names.append("-".join((host.arch, host.bit))) + acceptable_names.append(host.os) + acceptable_names.append(host.arch) + acceptable_names.append(host.bit) + + new_published_path = None + for name in acceptable_names: + dirname = os.path.join(self.contents_dir, name) + if os.path.isdir(dirname): + logging.info("{0} was found!".format(dirname)) + new_published_path = dirname + break + logging.debug("{0} does not exist".format(dirname)) + + if new_published_path is None: + logging.error("Could not find appropriate folder for {0}. {1}".format(self.name, str(host))) + new_published_path = self.contents_dir + + if self.flags and "include_separator" in self.flags: + new_published_path += os.path.sep + + return new_published_path + + def _clean_directory(self, dir_path): + retry = 1 + while True: + try: + shutil.rmtree(dir_path) + except OSError: + if not retry: + # If we're out of retries, bail. + raise + time.sleep(5) + retry -= 1 + continue + break + + def clean(self): + logging.debug("Cleaning dependency directory for '%s'..." % self.name) + if os.path.isdir(self.contents_dir): + self._clean_directory(self.contents_dir) + + def fetch(self): + # The base class does not implement a fetch. + logging.critical("Fetch() CALLED ON BASE EXTDEP CLASS!") + pass + + def verify(self): + result = True + state_data = None + + # See whether or not the state file exists. + if not os.path.isfile(self.state_file_path): + result = False + + # Attempt to load the state file. + if result: + with open(self.state_file_path, 'r') as file: + try: + state_data = yaml.safe_load(file) + except Exception: + pass + if state_data is None: + result = False + + # If loaded, check the version. + if result and state_data['version'] != self.version: + result = False + + logging.debug("Verify '%s' returning '%s'." % (self.name, result)) + version_aggregator.GetVersionAggregator().ReportVersion(self.name, self.version, + version_aggregator.VersionTypes.INFO) + return result + + def update_state_file(self): + with open(self.state_file_path, 'w+') as file: + yaml.dump({'version': self.version}, file) + + +def ExtDepFactory(descriptor): + # Add all supported external dependencies here to avoid import errors. + from edk2toolext.environment.extdeptypes.web_dependency import WebDependency + from edk2toolext.environment.extdeptypes.nuget_dependency import NugetDependency + from edk2toolext.environment.extdeptypes.git_dependency import GitDependency + if descriptor['type'] == NugetDependency.TypeString: + return NugetDependency(descriptor) + elif descriptor['type'] == WebDependency.TypeString: + return WebDependency(descriptor) + elif descriptor['type'] == GitDependency.TypeString: + return GitDependency(descriptor) + + raise ValueError("Unknown extdep type '%s' requested!" % descriptor['type']) diff --git a/edk2toolext/environment/multiple_workspace.py b/edk2toolext/environment/multiple_workspace.py new file mode 100644 index 00000000..42cef4de --- /dev/null +++ b/edk2toolext/environment/multiple_workspace.py @@ -0,0 +1,157 @@ +## @file +# manage multiple workspace file. +# +# This file is required to make Python interpreter treat the directory +# as containing package. 
+# +# file slightly modified from Edk2 BaseTools\Source\Python\Common\MultipleWorkspace.py +# +# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+import os
+
+# MultipleWorkspace
+#
+# This class manages multiple workspace behavior
+#
+# @param class:
+#
+# @var WORKSPACE:      defines the current WORKSPACE
+# @var PACKAGES_PATH:  defines the other WORKSPACEs; if the current WORKSPACE is invalid,
+#                      search for a valid WORKSPACE in PACKAGES_PATH
+#
+
+
+class MultipleWorkspace(object):
+    WORKSPACE = ''
+    PACKAGES_PATH = None
+
+    # convertPackagePath()
+    #
+    #  Convert path to match workspace.
+    #
+    #  @param cls       The class pointer
+    #  @param Ws        The current WORKSPACE
+    #  @param Path      Path to be converted to match workspace.
+    #
+    @classmethod
+    def convertPackagePath(cls, Ws, Path):
+        if str(os.path.normcase(Path)).startswith(Ws):
+            return os.path.join(Ws, Path[len(Ws) + 1:])
+        return Path
+
+    # setWs()
+    #
+    #  set WORKSPACE and PACKAGES_PATH environment
+    #
+    #  @param cls           The class pointer
+    #  @param Ws            initialize WORKSPACE variable
+    #  @param PackagesPath  initialize PackagesPath variable
+    #
+    @classmethod
+    def setWs(cls, Ws, PackagesPath=None):
+        cls.WORKSPACE = Ws
+        if PackagesPath:
+            cls.PACKAGES_PATH = [cls.convertPackagePath(Ws, os.path.normpath(
+                Path.strip())) for Path in PackagesPath.split(os.pathsep)]
+        else:
+            cls.PACKAGES_PATH = []
+
+    # join()
+    #
+    #  rewrite os.path.join function
+    #
+    #  @param cls       The class pointer
+    #  @param Ws        the current WORKSPACE
+    #  @param *p        path of the inf/dec/dsc/fdf/conf file
+    #  @retval Path     the absolute path of specified file
+    #
+    @classmethod
+    def join(cls, Ws, *p):
+        Path = os.path.join(Ws, *p)
+        if not os.path.exists(Path):
+            for Pkg in cls.PACKAGES_PATH:
+                Path = os.path.join(Pkg, *p)
+                if os.path.exists(Path):
+                    return Path
+            Path = os.path.join(Ws, *p)
+        return Path
+
+    # relpath()
+    #
+    #  rewrite os.path.relpath function
+    #
+    #  @param cls       The class pointer
+    #  @param Path      path of the inf/dec/dsc/fdf/conf file
+    #  @param Ws        the current WORKSPACE
+    #  @retval Path     the relative path of specified file
+    #
+    @classmethod
+    def relpath(cls, Path, Ws):
+        for Pkg in cls.PACKAGES_PATH:
+            if Path.lower().startswith(Pkg.lower()):
+                Path = os.path.relpath(Path, Pkg)
+                return Path
+        if Path.lower().startswith(Ws.lower()):
+            Path = os.path.relpath(Path, Ws)
+        return Path
+
+    # getWs()
+    #
+    #  get valid workspace for the path
+    #
+    #  @param cls       The class pointer
+    #  @param Ws        the current WORKSPACE
+    #  @param Path      path of the inf/dec/dsc/fdf/conf file
+    #  @retval Ws       the valid workspace relative to the specified file path
+    #
+    @classmethod
+    def getWs(cls, Ws, Path):
+        absPath = os.path.join(Ws, Path)
+        if not os.path.exists(absPath):
+            for Pkg in cls.PACKAGES_PATH:
+                absPath = os.path.join(Pkg, Path)
+                if os.path.exists(absPath):
+                    return Pkg
+        return Ws
+
+    # handleWsMacro()
+    #
+    #  handle the $(WORKSPACE) tag: if a path is invalid relative to the current
+    #  workspace, re-expand it against each PACKAGES_PATH entry instead.
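+    #  (illustrative: "$(WORKSPACE)\MdePkg\MdePkg.dec" is first expanded against
+    #   WORKSPACE and, if that path does not exist, against each PACKAGES_PATH
+    #   entry until an existing path is found)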
+ # + # @param cls The class pointer + # @retval PathStr Path string include the $(WORKSPACE) + # + @classmethod + def handleWsMacro(cls, PathStr): + TAB_WORKSPACE = '$(WORKSPACE)' + if TAB_WORKSPACE in PathStr: + PathList = PathStr.split() + if PathList: + for i, str in enumerate(PathList): + MacroStartPos = str.find(TAB_WORKSPACE) + if MacroStartPos != -1: + Substr = str[MacroStartPos:] + Path = Substr.replace(TAB_WORKSPACE, cls.WORKSPACE).strip() + if not os.path.exists(Path): + for Pkg in cls.PACKAGES_PATH: + Path = Substr.replace(TAB_WORKSPACE, Pkg).strip() + if os.path.exists(Path): + break + PathList[i] = str[0:MacroStartPos] + Path + PathStr = ' '.join(PathList) + return PathStr + + # getPkgPath() + # + # get all package pathes. + # + # @param cls The class pointer + # + @classmethod + def getPkgPath(cls): + return cls.PACKAGES_PATH diff --git a/edk2toolext/environment/plugin_manager.py b/edk2toolext/environment/plugin_manager.py new file mode 100644 index 00000000..18b129f9 --- /dev/null +++ b/edk2toolext/environment/plugin_manager.py @@ -0,0 +1,101 @@ +# @file plugin_manager.py +# This module contains code that supports Build Plugins +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import sys +import os +import imp +import logging +from edk2toolext.environment import shell_environment + + +class PluginDescriptor(object): + def __init__(self, t): + self.descriptor = t + self.Obj = None + self.Name = t["name"] + self.Module = t["module"] + + def __str__(self): + return "PLUGIN DESCRIPTOR:{0}".format(self.Name) + + +class PluginManager(object): + + def __init__(self): + self.Descriptors = [] + + # + # Pass tuple of Environment Descriptor dictionaries to be loaded as plugins + # + def SetListOfEnvironmentDescriptors(self, newlist): + env = shell_environment.GetBuildVars() + failed = [] + if newlist is None: + return [] + for a in newlist: + b = PluginDescriptor(a) + if(self._load(b) == 0): + val = env.GetValue(b.Module.upper()) + if val and val == "skip": + logging.info(f"{b.Module} turned off by environment variable") + continue + self.Descriptors.append(b) + else: + failed.append(a) + return failed + + # + # Return List of all plugins of a given class + # + def GetPluginsOfClass(self, classobj): + temp = [] + for a in self.Descriptors: + if(isinstance(a.Obj, classobj)): + temp.append(a) + return temp + + # + # Return List of all plugins + # + def GetAllPlugins(self): + return self.Descriptors + + # + # Load and Instantiate the plugin + # + def _load(self, PluginDescriptor): + PluginDescriptor.Obj = None + PythonFileName = PluginDescriptor.descriptor["module"] + ".py" + PyModulePath = os.path.join(os.path.dirname(os.path.abspath( + PluginDescriptor.descriptor["descriptor_file"])), PythonFileName) + logging.debug("Loading Plugin from %s", PyModulePath) + try: + with open(PyModulePath, "r") as plugin_file: + _module = imp.load_module( + "UefiBuild_Plugin_" + PluginDescriptor.descriptor["module"], + plugin_file, + PyModulePath, + ("py", "r", imp.PY_SOURCE)) + + except Exception: + exc_info = sys.exc_info() + logging.error("Failed to import plugin: %s", + PyModulePath, exc_info=exc_info) + return -1 + + # Instantiate the plugin + try: + obj = getattr(_module, PluginDescriptor.descriptor["module"]) + PluginDescriptor.Obj = obj() + except AttributeError: + exc_info = sys.exc_info() + logging.error("Failed to instantiate plugin: %s", + PyModulePath, exc_info=exc_info) + return -1 + + return 0 diff --git 
a/edk2toolext/environment/plugintypes/__init__.py b/edk2toolext/environment/plugintypes/__init__.py new file mode 100644 index 00000000..0a69010b --- /dev/null +++ b/edk2toolext/environment/plugintypes/__init__.py @@ -0,0 +1,5 @@ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## diff --git a/edk2toolext/environment/plugintypes/ci_build_plugin.py b/edk2toolext/environment/plugintypes/ci_build_plugin.py new file mode 100644 index 00000000..cacbc539 --- /dev/null +++ b/edk2toolext/environment/plugintypes/ci_build_plugin.py @@ -0,0 +1,125 @@ +# @file ci_build_plugin +# Plugin that supports adding tests or operations to the ci environment +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + + +import os +import logging + + +class ICiBuildPlugin(object): + + ## + # External function of plugin. This function is used to perform the task of the CiBuild Plugin + # + # - package is the edk2 path to package. This means workspace/packagepath relative. + # - edk2path object configured with workspace and packages path + # - PkgConfig Object (dict) for the pkg + # - EnvConfig Object + # - Plugin Manager Instance + # - Plugin Helper Obj Instance + # - Junit Logger + # - output_stream the StringIO output stream from this plugin via logging + def RunBuildPlugin(self, packagename, Edk2pathObj, pkgconfig, environment, PLM, PLMHelper, tc, output_stream): + pass + + ## + # Return tuple (string, string) that is the (test case name, test case base class name) + # + # + def GetTestName(self, packagename, environment): + pass + + ## + # Returns true or false if plugin would like to be called for each target + ## + def IsTargetDependent(self): + return False + + ## + # Validates a configurations package .mu.json + ## + + def ValidateConfig(self, config, name=""): + # rather than doing the validation in the plugin, perhaps the plugin + # can return their required list and their optional list + # raise an exception if error is found + pass + + # + # Walks a directory for all items ending in certain extension + # Default is to walk all of workspace + # + def WalkDirectoryForExtension(self, extensionlist, directory, ignorelist=None): + if not isinstance(extensionlist, list): + logging.critical("Expected list but got " + str(type(extensionlist))) + return -1 + + if directory is None: + logging.critical("No directory given") + return -2 + + if not os.path.isabs(directory): + logging.critical("Directory not abs path") + return -3 + + if not os.path.isdir(directory): + logging.critical("Invalid find directory to walk") + return -4 + + if ignorelist is not None: + ignorelist_lower = list() + for item in ignorelist: + ignorelist_lower.append(item.lower()) + + extensionlist_lower = list() + for item in extensionlist: + extensionlist_lower.append(item.lower()) + + returnlist = list() + for Root, Dirs, Files in os.walk(directory): + for File in Files: + for Extension in extensionlist_lower: + if File.lower().endswith(Extension): + ignoreIt = False + if(ignorelist is not None): + for c in ignorelist_lower: + if(File.lower().startswith(c)): + ignoreIt = True + break + if not ignoreIt: + logging.debug(os.path.join(Root, File)) + returnlist.append(os.path.join(Root, File)) + + return returnlist + + # Gets the first DSC it can find in a particular folder (currently doesn't handle .mu.dsc.yamls) + # returns None when none are found + def get_dsc_name_in_dir(self, folderpath): + dsc_list = self.get_dscs_in_dir(folderpath) + if len(dsc_list) == 
0:
+            return None
+        else:
+            return dsc_list[0]
+
+    # Gets the DSCs for a particular folder (currently doesn't handle .mu.dsc.yamls)
+    # returns an empty list when none are found
+    def get_dscs_in_dir(self, folderpath):
+        try:
+            directory = folderpath
+            allEntries = os.listdir(directory)
+            dscsFound = []
+            for entry in allEntries:
+                if entry.endswith(".dsc"):
+                    dscsFound.append(os.path.join(directory, entry))
+                if entry.endswith(".mu.dsc.yaml"):
+                    jsonFile = entry
+                    logging.info("We should create a DSC from the JSON file on the fly: {0}".format(jsonFile))
+            return dscsFound
+        except Exception:
+            logging.error("Unable to find DSC for package:{0}".format(folderpath))
+            return []
diff --git a/edk2toolext/environment/plugintypes/dsc_processor_plugin.py b/edk2toolext/environment/plugintypes/dsc_processor_plugin.py
new file mode 100644
index 00000000..c7146518
--- /dev/null
+++ b/edk2toolext/environment/plugintypes/dsc_processor_plugin.py
@@ -0,0 +1,32 @@
+# @file dsc_processor_plugin
+# Plugin for parsing DSCs
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+
+class IDscProcessorPlugin(object):
+
+    ##
+    # does the transform on the DSC
+    #
+    # @param dsc - the in-memory model of the DSC
+    # @param thebuilder - UefiBuild object to get env information
+    #
+    # @return 0 for success NonZero for error.
+    ##
+    def do_transform(self, dsc, thebuilder):
+        return 0
+
+    ##
+    # gets the level that this transform operates at
+    #
+    # @param thebuilder - UefiBuild object to get env information
+    #
+    # @return 0 for the most generic level
+    ##
+    def get_level(self, thebuilder):
+
+        return 0
diff --git a/edk2toolext/environment/plugintypes/uefi_build_plugin.py b/edk2toolext/environment/plugintypes/uefi_build_plugin.py
new file mode 100644
index 00000000..524fb50a
--- /dev/null
+++ b/edk2toolext/environment/plugintypes/uefi_build_plugin.py
@@ -0,0 +1,33 @@
+# @file UefiBuildPlugin
+# Plugin that supports Pre and Post Build steps
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+
+class IUefiBuildPlugin(object):
+
+    ##
+    # Run Post Build Operations
+    #
+    # @param thebuilder - UefiBuild object to get env information
+    #
+    # @return 0 for success NonZero for error.
+    ##
+    def do_post_build(self, thebuilder):
+        return 0
+
+    ##
+    # Run Pre Build Operations
+    #
+    # @param thebuilder - UefiBuild object to get env information
+    #
+    # @return 0 for success NonZero for error.
+    ##
+    def do_pre_build(self, thebuilder):
+        '''
+        Run Pre build Operation
+        '''
+        return 0
diff --git a/edk2toolext/environment/plugintypes/uefi_helper_plugin.py b/edk2toolext/environment/plugintypes/uefi_helper_plugin.py
new file mode 100644
index 00000000..b4ad02ea
--- /dev/null
+++ b/edk2toolext/environment/plugintypes/uefi_helper_plugin.py
@@ -0,0 +1,73 @@
+# @file uefi_helper_plugin
+# Plugin that supports adding Extension or helper methods
+# to the build environment
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+import logging
+
+
+class IUefiHelperPlugin(object):
+
+    ##
+    # Function that allows a plugin to register its functions with the
+    # obj.
+    # @param obj[in, out]: HelperFunctions object that allows functional
+    # registration.
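+    # (illustrative plugin, hypothetical names:
+    #    class MyHelpers(IUefiHelperPlugin):
+    #        def RegisterHelpers(self, obj):
+    #            obj.Register("MyHelperFunction", MyHelperFunction, __file__))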
+ # + def RegisterHelpers(self, obj): + pass + +# Supports IUefiHelperPlugin type + + +class HelperFunctions(object): + def __init__(self): + self.RegisteredFunctions = {} + + # + # Function to logging.debug all registered functions and their source path + # + def DebugLogRegisteredFunctions(self): + logging.debug("Logging all Registered Helper Functions:") + for name, file in self.RegisteredFunctions.items(): + logging.debug(" Function %s registered from file %s", name, file) + logging.debug("Finished logging %d functions", + len(self.RegisteredFunctions)) + + # + # Plugins that want to register a helper function should call + # this routine for each function + # + # @param name[in]: name of function + # @param function[in] function being registered + # @param filepath[in] filepath registering function. used for tracking and debug purposes + # + def Register(self, name, function, filepath): + if(name in self.RegisteredFunctions.keys()): + raise Exception("Function %s already registered from plugin file %s. Can't register again from %s" % ( + name, self.RegisteredFunctions[name], filepath)) + setattr(self, name, function) + self.RegisteredFunctions[name] = filepath + + def HasFunction(self, name): + if(name in self.RegisteredFunctions.keys()): + return True + else: + return False + + def LoadFromPluginManager(self, pm): + error = 0 + for Descriptor in pm.GetPluginsOfClass(IUefiHelperPlugin): + logging.info(Descriptor) + logging.debug("Helper Plugin Register: %s", Descriptor.Name) + try: + Descriptor.Obj.RegisterHelpers(self) + except: + logging.warning( + "Unable to register {0}".format(Descriptor.Name)) + error += 1 + return error diff --git a/edk2toolext/environment/repo_resolver.py b/edk2toolext/environment/repo_resolver.py new file mode 100644 index 00000000..9834aedb --- /dev/null +++ b/edk2toolext/environment/repo_resolver.py @@ -0,0 +1,238 @@ +# @file repo_resolver.py +# This module supports git operations (git repos). +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import logging +import shutil +import stat +from edk2toolext import edk2_logging +from edk2toolext.edk2_git import Repo + +# this follows a documented flow chart +# TODO: include link to flowchart? 
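+# (illustrative use, hypothetical values: resolve() below is normally driven by
+#  resolve_all() with dependency dicts such as
+#  {"Path": "Common/MU", "Url": "https://github.com/example/repo.git", "Commit": "<sha>"})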
+
+
+# checks out dependency at git_path
+def resolve(file_system_path, dependency, force=False, ignore=False, update_ok=False):
+    logger = logging.getLogger("git")
+    logger.info("Checking for dependency {0}".format(dependency["Path"]))
+    git_path = os.path.abspath(file_system_path)
+
+    # check if we have a path in our dependency
+    if "Path" in dependency and not git_path.endswith(os.path.relpath(dependency["Path"])):
+        # if the path from the dependency isn't already at the end of the path we were given, append it
+        git_path = os.path.join(git_path, dependency["Path"])
+
+    ##
+    # NOTE - this process is defined in the Readme.md including flow chart for this behavior
+    ##
+    if not os.path.isdir(git_path):
+        clone_repo(git_path, dependency)
+        r = Repo(git_path)
+        checkout(git_path, dependency, r, True, False)
+        return r
+
+    folder_empty = len(os.listdir(git_path)) == 0
+    if folder_empty:  # if the folder is empty, we can clone into it
+        clone_repo(git_path, dependency)
+        r = Repo(git_path)
+        checkout(git_path, dependency, r, True, False)
+        return r
+
+    repo = Repo(git_path)
+    if not repo.initalized:  # if there isn't a .git folder in there
+        if force:
+            clear_folder(git_path)
+            logger.warning(
+                "Folder {0} is not a git repo and is being overwritten!".format(git_path))
+            clone_repo(git_path, dependency)
+            checkout(git_path, dependency, repo, True, False)
+            return repo
+        else:
+            if(ignore):
+                logger.warning(
+                    "Folder {0} is not a git repo but Force parameter not used. "
+                    "Ignore State Allowed.".format(git_path))
+                return repo
+            else:
+                logger.critical(
+                    "Folder {0} is not a git repo and it is not empty.".format(git_path))
+                raise Exception(
+                    "Folder {0} is not a git repo and it is not empty".format(git_path))
+
+    if repo.dirty:
+        if force:
+            clear_folder(git_path)
+            logger.warning(
+                "Folder {0} is a git repo but is dirty and is being overwritten as requested!".format(git_path))
+            clone_repo(git_path, dependency)
+            checkout(git_path, dependency, repo, True, False)
+            return repo
+        else:
+            if(ignore):
+                logger.warning(
+                    "Folder {0} is a git repo but is dirty and Force parameter not used. "
+                    "Ignore State Allowed.".format(git_path))
+                return repo
+            else:
+                logger.critical(
+                    "Folder {0} is a git repo and is dirty.".format(git_path))
+                raise Exception(
+                    "Folder {0} is a git repo and is dirty.".format(git_path))
+
+    if repo.remotes.origin.url != dependency["Url"]:
+        if force:
+            clear_folder(git_path)
+            logger.warning(
+                "Folder {0} is a git repo but it is at a different repo and is "
+                "being overwritten as requested!".format(git_path))
+            clone_repo(git_path, dependency)
+            checkout(git_path, dependency, repo, True, False)
+        else:
+            if ignore:
+                logger.warning(
+                    "Folder {0} is a git repo pointed at a different remote. 
" + "Can't checkout or sync state".format(git_path)) + return + else: + logger.critical("The URL of the git Repo {2} in the folder {0} does not match {1}".format( + git_path, dependency["Url"], repo.remotes.origin.url)) + raise Exception("The URL of the git Repo {2} in the folder {0} does not match {1}".format( + git_path, dependency["Url"], repo.remotes.origin.url)) + + checkout(git_path, dependency, repo, update_ok, ignore, force) + return repo + +## +# dependencies is a list of objects - it has Path, Commit, Branch, + + +def resolve_all(WORKSPACE_PATH, dependencies, force=False, ignore=False, update_ok=False, omnicache_dir=None): + logger = logging.getLogger("git") + repos = [] + if force: + logger.info("Resolving dependencies by force") + if update_ok: + logger.info("Resolving dependencies with updates as needed") + for dependency in dependencies: + if "ReferencePath" not in dependency and omnicache_dir: + dependency["ReferencePath"] = omnicache_dir + if "ReferencePath" in dependency: # make sure that the omnicache dir is relative to the working directory + dependency["ReferencePath"] = os.path.join(WORKSPACE_PATH, dependency["ReferencePath"]) + git_path = os.path.join(WORKSPACE_PATH, dependency["Path"]) + repos.append(git_path) + resolve(git_path, dependency, force, ignore, update_ok) + + # print out the details- this is optional + for dependency in dependencies: + git_path = os.path.join(WORKSPACE_PATH, dependency["Path"]) + GitDetails = get_details(git_path) + # print out details + logger.info("{3} = Git Details: Url: {0} Branch {1} Commit {2}".format( + GitDetails["Url"], GitDetails["Branch"], GitDetails["Commit"], dependency["Path"])) + + return repos + + +# Gets the details of a particular repo +def get_details(abs_file_system_path): + repo = Repo(abs_file_system_path) + url = repo.remotes.origin.url + active_branch = repo.active_branch + head = repo.head.commit + return {"Url": url, "Branch": active_branch, "Commit": head} + + +def clear_folder(abs_file_system_path): + logger = logging.getLogger("git") + logger.warning("WARNING: Deleting contents of folder {0} to make way for Git repo".format( + abs_file_system_path)) + + def dorw(action, name, exc): + os.chmod(name, stat.S_IWRITE) + if(os.path.isdir(name)): + os.rmdir(name) + else: + os.remove(name) + + shutil.rmtree(abs_file_system_path, onerror=dorw) + +# Clones the repo in the folder we need using the dependency object from the json + + +def clone_repo(abs_file_system_path, DepObj): + logger = logging.getLogger("git") + logger.log(edk2_logging.get_progress_level(), "Cloning repo: {0}".format(DepObj["Url"])) + dest = abs_file_system_path + if not os.path.isdir(dest): + os.makedirs(dest, exist_ok=True) + shallow = False + if "Commit" in DepObj: + shallow = False + if "Full" in DepObj and DepObj["Full"] is True: + shallow = False + reference = None + if "ReferencePath" in DepObj and os.path.exists(DepObj["ReferencePath"]): + reference = os.path.abspath(DepObj["ReferencePath"]) + result = Repo.clone_from(DepObj["Url"], dest, shallow=shallow, reference=reference) + + if result is None: + if "ReferencePath" in DepObj: + # attempt a retry without the reference + logger.warning("Reattempting to clone without a reference. 
{0}".format(DepObj["Url"])) + result = Repo.clone_from(DepObj["Url"], dest, shallow=shallow) + if result is None: + return None + + return dest + + +def checkout(abs_file_system_path, dep, repo, update_ok=False, ignore_dep_state_mismatch=False, force=False): + logger = logging.getLogger("git") + if "Commit" in dep: + if update_ok or force: + repo.fetch() + repo.checkout(commit=dep["Commit"]) + repo.submodule("update", "--init", "--recursive") + else: + if repo.head.commit == dep["Commit"]: + logger.debug( + "Dependency {0} state ok without update".format(dep["Path"])) + return + elif ignore_dep_state_mismatch: + logger.warning( + "Dependency {0} is not in sync with requested commit. Ignore state allowed".format(dep["Path"])) + return + else: + logger.critical( + "Dependency {0} is not in sync with requested commit. Fail.".format(dep["Path"])) + raise Exception( + "Dependency {0} is not in sync with requested commit. Fail.".format(dep["Path"])) + + elif "Branch" in dep: + if update_ok or force: + repo.fetch() + repo.checkout(branch=dep["Branch"]) + repo.submodule("update", "--init", "--recursive") + else: + if repo.active_branch == dep["Branch"]: + logger.debug( + "Dependency {0} state ok without update".format(dep["Path"])) + return + elif ignore_dep_state_mismatch: + logger.warning( + "Dependency {0} is not in sync with requested branch. Ignore state allowed".format(dep["Path"])) + return + else: + error = "Dependency {0} is not in sync with requested branch. Expected: {1}. Got {2} Fail.".format( + dep["Path"], dep["Branch"], repo.active_branch) + logger.critical(error) + raise Exception(error) + else: + raise Exception( + "Branch or Commit must be specified for {0}".format(dep["Path"])) diff --git a/edk2toolext/environment/self_describing_environment.py b/edk2toolext/environment/self_describing_environment.py new file mode 100644 index 00000000..15ab1087 --- /dev/null +++ b/edk2toolext/environment/self_describing_environment.py @@ -0,0 +1,286 @@ +# @file self_describing_environment.py +# This module contains code that is capable of scanning the source tree for +# files that describe the source and dependencies and acting upon those files. +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import logging +from edk2toolext.environment import shell_environment +from edk2toolext.environment import environment_descriptor_files as EDF +from edk2toolext.environment import external_dependency + +ENVIRONMENT_BOOTSTRAP_COMPLETE = False +ENV_STATE = None + + +class self_describing_environment(object): + def __init__(self, workspace_path, scopes=()): + super(self_describing_environment, self).__init__() + + self.workspace = workspace_path + + # Determine the final set of scopes. + # Start with the provided set. + self.scopes = scopes + + # Validate that all scopes are unique. + if len(self.scopes) != len(set(self.scopes)): + raise ValueError(f"All scopes must be unique!\n\t{self.scopes}") + + self.paths = None + self.extdeps = None + self.plugins = None + + def _gather_env_files(self, ext_strings, base_path): + # Make sure that the search extension matches easily. + search_files = tuple(ext_string.lower() for ext_string in ext_strings) + + # Walk all of the directories under base_path and find all files + # matching the extension. + matches = {} + for root, dirs, files in os.walk(base_path, topdown=True): + # Check to see whether any of these directories should be skipped. + # TODO: Allow these to be passed in via arguments. 
+    def load_workspace(self):
+        logging.debug("--- self_describing_environment.load_workspace()")
+        logging.debug("Loading workspace: %s" % self.workspace)
+        logging.debug("  Including scopes: %s" % ', '.join(self.scopes))
+
+        #
+        # First, we need to get all of the files that describe our environment.
+        #
+        env_files = self._gather_env_files(
+            ('path_env', 'ext_dep', 'plug_in'), self.workspace)
+
+        #
+        # Now that the files have been found, load them, sort them, and filter them
+        # so they can be applied to the environment.
+        #
+        def _sort_and_filter_descriptors(class_type, file_list, scopes):
+            all_descriptors = tuple(class_type(
+                desc_file).descriptor_contents for desc_file in file_list)
+
+            known_ids = {}
+            active_overrides = {}
+            final_list = []
+
+            for scope in scopes:
+                for descriptor in all_descriptors:
+                    # If this descriptor isn't in the current scope, we can ignore it for now.
+                    if descriptor['scope'].lower() != scope.lower():
+                        continue
+
+                    cur_file = descriptor['descriptor_file']
+
+                    # If this descriptor has an ID, we need to check for overrides and collisions.
+                    if 'id' in descriptor:
+                        cur_id = descriptor['id'].lower()
+
+                        # First, check for overrides. There's no reason to process this file if it's being overridden.
+                        if cur_id in active_overrides:
+                            logging.debug("Descriptor '%s' is being overridden by descriptor '%s' based on ID '%s'." % (
+                                cur_file, active_overrides[cur_id], cur_id))
+                            continue
+
+                        # Next, check for ID collisions.
+                        if cur_id in known_ids:
+                            raise RuntimeError(
+                                "Descriptor '%s' shares the same ID '%s' with descriptor '%s'." % (
+                                    cur_file, cur_id, known_ids[cur_id])
+                            )
+
+                        # Finally, we can add this file to the known IDs list.
+                        known_ids[cur_id] = cur_file
+
+                    # If we're still processing, we can add this descriptor to the output.
+                    logging.debug("Adding descriptor '%s' to the environment with scope '%s'." % (
+                        cur_file, scope))
+                    final_list.append(descriptor)
+
+                    # Finally, check to see whether this descriptor overrides anything else.
+                    if 'override_id' in descriptor:
+                        cur_override_id = descriptor['override_id'].lower()
+                        # If we're attempting to override something that's already been processed,
+                        # we should spit out a warning of some sort.
+                        if cur_override_id in known_ids:
+                            logging.warning("Descriptor '%s' is trying to override ID '%s', "
+                                            "but it's already been processed." %
+                                            (cur_file, cur_override_id))
+                        active_overrides[cur_override_id] = descriptor['descriptor_file']
+
+            return tuple(final_list)
+
+        if 'path_env' in env_files:
+            self.paths = _sort_and_filter_descriptors(
+                EDF.PathEnvDescriptor, env_files['path_env'], self.scopes)
+
+        if 'ext_dep' in env_files:
+            self.extdeps = _sort_and_filter_descriptors(
+                EDF.ExternDepDescriptor, env_files['ext_dep'], self.scopes)
+
+        if 'plug_in' in env_files:
+            self.plugins = _sort_and_filter_descriptors(
+                EDF.PluginDescriptor, env_files['plug_in'], self.scopes)
+
+        return self
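+
+    # To illustrate the override mechanism above (the names here are
+    # hypothetical): if a descriptor processed earlier declares
+    #     {"scope": "platform", "override_id": "nasm", ...}
+    # and a later one declares
+    #     {"scope": "global", "id": "nasm", ...}
+    # then the later descriptor is skipped and only the overriding
+    # descriptor's file is applied to the environment.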
+
+    # This is a generator to reduce code duplication when wrapping the pathenv objects.
+    def _get_paths(self):
+        if self.paths is not None:
+            # Apply in reverse order to get the expected hierarchy.
+            for path_descriptor in reversed(self.paths):
+                # Use the helper factory to get an object
+                # capable of managing each dependency.
+                yield EDF.PathEnv(path_descriptor)
+
+    # This is a generator to reduce code duplication when wrapping the extdep objects.
+    def _get_extdeps(self):
+        if self.extdeps is not None:
+            # Apply in reverse order to get the expected hierarchy.
+            for extdep_descriptor in reversed(self.extdeps):
+                # Use the helper factory to get an object
+                # capable of managing each dependency.
+                yield external_dependency.ExtDepFactory(extdep_descriptor)
+
+    def _apply_descriptor_object_to_env(self, desc_object, env_object):
+        # Walk through each possible environment modification
+        # and apply to the environment as required.
+
+        if 'set_path' in desc_object.flags:
+            env_object.insert_path(desc_object.published_path)
+        if 'set_pypath' in desc_object.flags:
+            env_object.insert_pypath(desc_object.published_path)
+        if 'set_build_var' in desc_object.flags:
+            env_object.set_build_var(
+                desc_object.var_name, desc_object.published_path)
+        if 'set_shell_var' in desc_object.flags:
+            env_object.set_shell_var(
+                desc_object.var_name, desc_object.published_path)
+
+    def update_simple_paths(self, env_object):
+        logging.debug("--- self_describing_environment.update_simple_paths()")
+        for path in self._get_paths():
+            self._apply_descriptor_object_to_env(path, env_object)
+
+    def update_extdep_paths(self, env_object):
+        logging.debug("--- self_describing_environment.update_extdep_paths()")
+        for extdep in self._get_extdeps():
+            self._apply_descriptor_object_to_env(extdep, env_object)
+
+    def update_extdeps(self, env_object):
+        logging.debug("--- self_describing_environment.update_extdeps()")
+        for extdep in self._get_extdeps():
+            # Check to see whether it's necessary to fetch the files.
+            if not extdep.verify():
+                # Get rid of extdep's published path since it could get changed
+                # during the fetch routine.
+                if 'set_path' in extdep.flags:
+                    env_object.remove_path_element(extdep.published_path)
+                if 'set_pypath' in extdep.flags:
+                    env_object.remove_pypath_element(extdep.published_path)
+                extdep.clean()
+                extdep.fetch()
+                # Re-apply the extdep to environment
+                self._apply_descriptor_object_to_env(extdep, env_object)
+
+    def clean_extdeps(self, env_object):
+        for extdep in self._get_extdeps():
+            extdep.clean()
+            # TODO: Determine whether we want to update the env.
+
+    def verify_extdeps(self, env_object):
+        result = True
+        for extdep in self._get_extdeps():
+            if not extdep.verify():
+                result = False
+                logging.error("Dependency '%s' is not met!" % extdep.name)
+
+        return result
+
+
+def DestroyEnvironment():
+    ''' Destroys global environment state '''
+    global ENVIRONMENT_BOOTSTRAP_COMPLETE, ENV_STATE
+
+    ENVIRONMENT_BOOTSTRAP_COMPLETE = False
+    ENV_STATE = None
+
+
+def BootstrapEnvironment(workspace, scopes=()):
+    global ENVIRONMENT_BOOTSTRAP_COMPLETE, ENV_STATE
+
+    if not ENVIRONMENT_BOOTSTRAP_COMPLETE:
+        #
+        # ENVIRONMENT BOOTSTRAP STAGE 1
+        # Locate and load all environment description files.
+        #
+        build_env = self_describing_environment(
+            workspace, scopes).load_workspace()
+
+        #
+        # ENVIRONMENT BOOTSTRAP STAGE 2
+        # Parse all of the PATH-related descriptor files to make sure that
+        # any required tools or Python modules are now available.
+        #
+        shell_env = shell_environment.GetEnvironment()
+        build_env.update_simple_paths(shell_env)
+
+        #
+        # ENVIRONMENT BOOTSTRAP STAGE 3
+        # Now that the preliminary paths have been loaded,
+        # we can load the modules that had greater dependencies.
+        #
+        build_env.update_extdep_paths(shell_env)
+
+        # Debug the environment that was produced.
+        shell_env.log_environment()
+
+        ENVIRONMENT_BOOTSTRAP_COMPLETE = True
+        ENV_STATE = (build_env, shell_env)
+
+    # Return the environment as it's configured.
+    return ENV_STATE
+
+
+def CleanEnvironment(workspace, scopes=()):
+    # Bootstrap the environment.
+    (build_env, shell_env) = BootstrapEnvironment(workspace, scopes)
+
+    # Clean all the dependencies.
+    build_env.clean_extdeps(shell_env)
+
+
+def UpdateDependencies(workspace, scopes=()):
+    # Bootstrap the environment.
+    (build_env, shell_env) = BootstrapEnvironment(workspace, scopes)
+
+    # Update all the dependencies.
+    build_env.update_extdeps(shell_env)
+
+
+def VerifyEnvironment(workspace, scopes=()):
+    # Bootstrap the environment.
+    (build_env, shell_env) = BootstrapEnvironment(workspace, scopes)
+
+    # Verify all the dependencies.
+    return build_env.verify_extdeps(shell_env)
diff --git a/edk2toolext/environment/shell_environment.py b/edk2toolext/environment/shell_environment.py
new file mode 100644
index 00000000..2868a10c
--- /dev/null
+++ b/edk2toolext/environment/shell_environment.py
@@ -0,0 +1,275 @@
+# @file shell_environment.py
+# This module contains code that helps to manage the build environment
+# including PATH, PYTHONPATH, and ENV variables.
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+import os
+import sys
+import copy
+import logging
+from edk2toolext.environment import var_dict
+
+LOGGING_GROUP = "EnvDict"
+MY_LOGGER = logging.getLogger(LOGGING_GROUP)
+
+
+#
+# Copy the Singleton pattern from...
+# https://stackoverflow.com/a/6798042
+#
+class Singleton(type):
+    _instances = {}
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+        return cls._instances[cls]
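+
+
+# A quick sketch of what this metaclass provides: every construction returns
+# the same instance, so state set through one reference is visible through
+# any other.
+#
+#     env_a = ShellEnvironment()
+#     env_b = ShellEnvironment()
+#     assert env_a is env_b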
+
+
+class ShellEnvironment(metaclass=Singleton):
+    # Easy definition for the very first checkpoint
+    # when the environment is first created.
+    INITIAL_CHECKPOINT = 0
+
+    def __init__(self):
+        # Add all of our logging to the EnvDict group.
+        self.logger = logging.getLogger(LOGGING_GROUP)
+
+        # Initialize all other things.
+        self.active_environ = None
+        self.active_path = None
+        self.active_pypath = None
+        self.active_buildvars = var_dict.VarDict()
+        self.checkpoints = []
+
+        # Grab a copy of the environment as it exists.
+        self.import_environment()
+
+        # Create the initial checkpoint.
+        self.checkpoint()
+
+    #
+    # Management methods.
+    # These methods manage the singleton, the surrounding environment, and checkpoints.
+    #
+    def import_environment(self):
+        # Create a complete copy of os.environ
+        self.active_environ = dict()
+        for key, value in os.environ.items():
+            self.active_environ[key] = value
+
+        # Record the PATH elements of the current environment.
+        path = self.active_environ.get('PATH', "")
+
+        # Filter removes empty elements;
+        # list creates an actual list rather than a generator.
+        self.active_path = list(filter(None, path.split(os.pathsep)))
+
+        # Record the PYTHONPATH elements of the current environment.
+        # When reading PYTHONPATH, try reading the live path from sys.
+        self.active_pypath = sys.path
+
+        # Remove PATH and PYTHONPATH from the environ copy to force use of
+        # active_path and active_pypath.
+        self.active_environ.pop("PATH", None)
+        self.active_environ.pop("PYTHONPATH", None)
+
+    def export_environment(self):
+        # Purge all keys that aren't in the export.
+        # Iterate over a copy of the keys, since os.environ is mutated inside the loop.
+        for key in list(os.environ.keys()):
+            if key not in self.active_environ:
+                os.environ.pop(key)
+
+        # Export all internal keys.
+        for key, value in self.active_environ.items():
+            os.environ[key] = value
+
+        # Set the PATH and PYTHONPATH vars.
+        os.environ["PATH"] = os.pathsep.join(self.active_path)
+        os.environ["PYTHONPATH"] = os.pathsep.join(self.active_pypath)
+
+        sys.path = self.active_pypath
+
+    def log_environment(self):
+        self.logger.debug("FINAL PATH:")
+        self.logger.debug(", ".join(self.active_path))
+
+        self.logger.debug("FINAL PYTHONPATH:")
+        self.logger.debug(", ".join(self.active_pypath))
+
+        self.logger.debug("FINAL ENVIRON:")
+        environ_list = []
+        for key, value in self.active_environ.items():
+            environ_list.append("({0}:{1})".format(key, value))
+        self.logger.debug(", ".join(environ_list))
+
+    def checkpoint(self):
+        new_index = len(self.checkpoints)
+        self.checkpoints.append({
+            'environ': copy.copy(self.active_environ),
+            'path': self.active_path,
+            'pypath': self.active_pypath,
+            'buildvars': copy.copy(self.active_buildvars)
+        })
+
+        return new_index
+
+    def restore_checkpoint(self, index):
+        if index < len(self.checkpoints):
+            chkpt = self.checkpoints[index]
+            self.active_environ = copy.copy(chkpt['environ'])
+            self.active_path = chkpt['path']
+            self.active_pypath = chkpt['pypath']
+            self.active_buildvars = copy.copy(chkpt['buildvars'])
+
+            self.export_environment()
+
+        else:
+            raise IndexError("Checkpoint %s does not exist" % index)
+
+    def restore_initial_checkpoint(self):
+        self.restore_checkpoint(ShellEnvironment.INITIAL_CHECKPOINT)
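+
+    # Checkpoint usage, sketched (the path below is a hypothetical example):
+    #
+    #     env = ShellEnvironment()
+    #     chk = env.checkpoint()
+    #     env.insert_path("/opt/example/bin")
+    #     env.restore_checkpoint(chk)  # the PATH change is rolled back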
+
+    #
+    # Environment manipulation methods.
+    # These methods interact with the current environment.
+    #
+    def _internal_set_path(self, path_elements):
+        self.active_path = list(path_elements)
+        os.environ["PATH"] = os.pathsep.join(self.active_path)
+
+    def _internal_set_pypath(self, path_elements):
+        self.active_pypath = list(path_elements)
+        os.environ["PYTHONPATH"] = os.pathsep.join(self.active_pypath)
+        sys.path = self.active_pypath
+
+    def set_path(self, new_path):
+        self.logger.debug("Overriding PATH with new value.")
+        if type(new_path) is str:
+            new_path = list(new_path.split(os.pathsep))
+        self._internal_set_path(new_path)
+
+    def set_pypath(self, new_path):
+        self.logger.debug("Overriding PYTHONPATH with new value.")
+        if type(new_path) is str:
+            new_path = list(new_path.split(os.pathsep))
+        self._internal_set_pypath(new_path)
+
+    def append_path(self, path_element):
+        self.logger.debug("Appending PATH element '%s'." % path_element)
+        if path_element not in self.active_path:
+            self._internal_set_path(self.active_path + [path_element])
+
+    def insert_path(self, path_element):
+        self.logger.debug("Inserting PATH element '%s'." % path_element)
+        if path_element not in self.active_path:
+            self._internal_set_path([path_element] + self.active_path)
+
+    def append_pypath(self, path_element):
+        self.logger.debug("Appending PYTHONPATH element '%s'." % path_element)
+        if path_element not in self.active_pypath:
+            self._internal_set_pypath(self.active_pypath + [path_element])
+
+    def insert_pypath(self, path_element):
+        self.logger.debug("Inserting PYTHONPATH element '%s'." % path_element)
+        if path_element not in self.active_pypath:
+            self._internal_set_pypath([path_element] + self.active_pypath)
+
+    def replace_path_element(self, old_path_element, new_path_element):
+        # Generate a new PATH by iterating through the old PATH and replacing
+        # old_path_element with new_path_element where it is found.
+        self.logger.debug("Replacing PATH element {0} with {1}".format(old_path_element, new_path_element))
+        self._internal_set_path([x if x != old_path_element else new_path_element for x in self.active_path])
+
+    def replace_pypath_element(self, old_pypath_element, new_pypath_element):
+        # Generate a new PYPATH by iterating through the old PYPATH and replacing
+        # old_pypath_element with new_pypath_element where it is found.
+        self.logger.debug("Replacing PYPATH element {0} with {1}".format(old_pypath_element, new_pypath_element))
+        self._internal_set_pypath([x if x != old_pypath_element else new_pypath_element for x in self.active_pypath])
+
+    def remove_path_element(self, path_element):
+        # Generate a new PATH by iterating through the old PATH and removing
+        # path_element if it is found.
+        self.logger.debug("Removing PATH element {0}".format(path_element))
+        self._internal_set_path([x for x in self.active_path if x != path_element])
+
+    def remove_pypath_element(self, pypath_element):
+        # Generate a new PYPATH by iterating through the old PYPATH and removing
+        # pypath_element if it is found.
+        self.logger.debug("Removing PYPATH element {0}".format(pypath_element))
+        self._internal_set_pypath([x for x in self.active_pypath if x != pypath_element])
+
+    def get_build_var(self, var_name):
+        return self.active_buildvars.GetValue(var_name)
+
+    def set_build_var(self, var_name, var_data):
+        self.logger.debug(
+            "Updating BUILD VAR element '%s': '%s'." % (var_name, var_data))
+        self.active_buildvars.SetValue(var_name, var_data, '', overridable=True)
+
+    def get_shell_var(self, var_name):
+        return self.active_environ.get(var_name, None)
+
+    # TODO: Don't allow setting PATH or PYTHONPATH.
+    def set_shell_var(self, var_name, var_data):
+        # Check for the "special" shell vars.
+        if var_name == 'PATH':
+            self.set_path(var_data)
+        elif var_name == 'PYTHONPATH':
+            self.set_pypath(var_data)
+        else:
+            self.logger.debug(
+                "Updating SHELL VAR element '%s': '%s'." % (var_name, var_data))
+            self.active_environ[var_name] = var_data
+            os.environ[var_name] = var_data
+
+
+def GetEnvironment():
+    return ShellEnvironment()
+
+
+def GetBuildVars():
+    #
+    # Tricky!
+    # Define a wrapper class that always forwards commands to the
+    # BuildVars associated with the current environment.
+    #
+    # Will be deprecated.
+    #
+    class BuildVarsWrapper(object):
+        def __init__(self):
+            self.internal_shell_env = ShellEnvironment()
+
+        def __getattr__(self, attrname):
+            # Instead, invoke on the active BuildVars object.
+            return getattr(self.internal_shell_env.active_buildvars, attrname)
+
+    return BuildVarsWrapper()
+
+
+#
+# TODO: These are convenience methods that should be deprecated.
+#
+checkpoint_list = list()
+
+
+def CheckpointBuildVars():
+    global checkpoint_list
+    new_checkpoint = ShellEnvironment().checkpoint()
+    checkpoint_list.append(new_checkpoint)
+    MY_LOGGER.debug("Created checkpoint {0} for build vars".format(new_checkpoint))
+
+
+def RevertBuildVars():
+    global checkpoint_list
+    if len(checkpoint_list) > 0:
+        last_checkpoint = checkpoint_list.pop()
+        MY_LOGGER.debug("Reverting to checkpoint {0} for build vars".format(last_checkpoint))
+        ShellEnvironment().restore_checkpoint(last_checkpoint)
+    else:
+        MY_LOGGER.error("No more checkpoints!")
+        raise RuntimeError("No more checkpoints!")
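+
+
+# Typical consumption of this module, as a sketch (the variable names and
+# values are illustrative):
+#
+#     from edk2toolext.environment import shell_environment
+#     shell_environment.GetEnvironment().set_shell_var("EXAMPLE_VAR", "1")
+#     shell_environment.GetBuildVars().SetValue("TARGET", "DEBUG", "example comment", True)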
diff --git a/edk2toolext/environment/uefi_build.py b/edk2toolext/environment/uefi_build.py
new file mode 100644
index 00000000..0b794e83
--- /dev/null
+++ b/edk2toolext/environment/uefi_build.py
@@ -0,0 +1,531 @@
+# @file uefi_build.py
+# This module contains code that supports the Tianocore Edk2 build system.
+# This class is designed to be subclassed by a platform to allow
+# more extensive and custom behavior.
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+
+import os
+import logging
+from edk2toolext.environment.multiple_workspace import MultipleWorkspace
+from edk2toolext.environment import conf_mgmt
+import traceback
+import shutil
+import time
+from edk2toolext.environment import shell_environment
+from edk2toollib.uefi.edk2.parsers.targettxt_parser import TargetTxtParser
+from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser
+from edk2toollib.utility_functions import RunCmd
+from edk2toolext import edk2_logging
+from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
+import datetime
+
+
+class UefiBuilder(object):
+
+    def __init__(self):
+        self.SkipBuild = False
+        self.SkipPreBuild = False
+        self.SkipPostBuild = False
+        self.FlashImage = False
+        self.ShowHelpOnly = False
+        self.OutputBuildEnvBeforeBuildToFile = None
+        self.Clean = False
+        self.UpdateConf = False
+        self.OutputConfig = None
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' adds command line options to the argparser '''
+        parserObj.add_argument("--SKIPBUILD", "--skipbuild", "--SkipBuild", dest="SKIPBUILD",
+                               action='store_true', default=False, help="Skip the build process")
+        parserObj.add_argument("--SKIPPREBUILD", "--skipprebuild", "--SkipPrebuild", dest="SKIPPREBUILD",
+                               action='store_true', default=False, help="Skip prebuild process")
+        parserObj.add_argument("--SKIPPOSTBUILD", "--skippostbuild", "--SkipPostBuild", dest="SKIPPOSTBUILD",
+                               action='store_true', default=False, help="Skip postbuild process")
+        parserObj.add_argument("--FLASHONLY", "--flashonly", "--FlashOnly", dest="FLASHONLY",
+                               action='store_true', default=False, help="Flash rom after build.")
+        parserObj.add_argument("--FLASHROM", "--flashrom", "--FlashRom", dest="FLASHROM",
+                               action='store_true', default=False, help="Flash rom. Rom must be built previously.")
+        parserObj.add_argument("--UPDATECONF", "--updateconf", "--UpdateConf",
+                               dest="UPDATECONF", action='store_true', default=False,
+                               help="Update Conf. Builders Conf files will be replaced with latest template files")
+        parserObj.add_argument("--CLEAN", "--clean", "--Clean", dest="CLEAN",
+                               action='store_true', default=False,
+                               help="Clean. Remove all old build artifacts and intermediate files")
+        parserObj.add_argument("--CLEANONLY", "--cleanonly", "--CleanOnly", dest="CLEANONLY",
+                               action='store_true', default=False,
+                               help="Clean Only. Do the clean operation, don't build, just exit.")
+        parserObj.add_argument("--OUTPUTCONFIG", "--outputconfig", "--OutputConfig",
+                               dest='OutputConfig', required=False, type=str,
+                               help='Provide shell variables in a file')
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Retrieve command line options from the argparser '''
+        self.OutputConfig = os.path.abspath(args.OutputConfig) if args.OutputConfig else None
+
+        if(args.SKIPBUILD):
+            self.SkipBuild = True
+        elif(args.SKIPPREBUILD):
+            self.SkipPreBuild = True
+        elif(args.SKIPPOSTBUILD):
+            self.SkipPostBuild = True
+        elif(args.FLASHONLY):
+            self.SkipPostBuild = True
+            self.SkipBuild = True
+            self.SkipPreBuild = True
+            self.FlashImage = True
+        elif(args.FLASHROM):
+            self.FlashImage = True
+        elif(args.UPDATECONF):
+            self.UpdateConf = True
+        elif(args.CLEAN):
+            self.Clean = True
+        elif(args.CLEANONLY):
+            self.Clean = True
+            self.SkipBuild = True
+            self.SkipPreBuild = True
+            self.SkipPostBuild = True
+            self.FlashImage = False
+
+    def Go(self, WorkSpace, PackagesPath, PInHelper, PInManager):
+        self.env = shell_environment.GetBuildVars()
+        self.mws = MultipleWorkspace()
+        self.mws.setWs(WorkSpace, PackagesPath)
+        self.ws = WorkSpace
+        self.pp = PackagesPath  # string using os.pathsep
+        self.Helper = PInHelper
+        self.pm = PInManager
+
+        try:
+            edk2_logging.log_progress("Start time: {0}".format(datetime.datetime.now()))
+            start_time = time.perf_counter()
+
+            self.Helper.DebugLogRegisteredFunctions()
+
+            ret = self.SetEnv()
+            if(ret != 0):
+                logging.critical("SetEnv failed")
+                return ret
+
+            # clean
+            if(self.Clean):
+                edk2_logging.log_progress("Cleaning")
+                ret = self.CleanTree()
+                if(ret != 0):
+                    logging.critical("Clean failed")
+                    return ret
+
+            # prebuild
+            if(self.SkipPreBuild):
+                edk2_logging.log_progress("Skipping Pre Build")
+            else:
+                ret = self.PreBuild()
+                if(ret != 0):
+                    logging.critical("Pre Build failed")
+                    return ret
+
+            # Output Build Environment to File - this is mostly for debug of build
+            # issues or adding other build features using existing variables
+            if(self.OutputConfig is not None):
+                edk2_logging.log_progress("Writing Build Env Info out to File")
+                logging.debug("Found an Output Build Env File: " + self.OutputConfig)
+                self.env.PrintAll(self.OutputConfig)
+
+            if(self.env.GetValue("GATEDBUILD") is not None) and (self.env.GetValue("GATEDBUILD").upper() == "TRUE"):
+                ShouldGatedBuildRun = self.PlatformGatedBuildShouldHappen()
+                logging.debug("Platform Gated Build Should Run returned: %s" % str(
+                    ShouldGatedBuildRun))
+                if(not self.SkipBuild):
+                    self.SkipBuild = not ShouldGatedBuildRun
+                if(not self.SkipPostBuild):
+                    self.SkipPostBuild = not ShouldGatedBuildRun
+
+            # build
+            if(self.SkipBuild):
+                edk2_logging.log_progress("Skipping Build")
+            else:
+                ret = self.Build()
+
+                if(ret != 0):
+                    logging.critical("Build failed")
+                    return ret
+
+            # postbuild
+            if(self.SkipPostBuild):
+                edk2_logging.log_progress("Skipping Post Build")
+            else:
+                ret = self.PostBuild()
+                if(ret != 0):
+                    logging.critical("Post Build failed")
+                    return ret
+
+            # flash
+            if(self.FlashImage):
+                edk2_logging.log_progress("Flashing Image")
+                ret = self.FlashRomImage()
+                if(ret != 0):
+                    logging.critical("Flash Image failed")
+                    return ret
+
+        except Exception:
+            logging.critical("Build Process Exception")
+            logging.error(traceback.format_exc())
+            return -1
+        finally:
+            end_time = time.perf_counter()
+            elapsed_time_s = int((end_time - start_time))
+            edk2_logging.log_progress("End time: {0}\t Total time Elapsed: {1}".format(
+                datetime.datetime.now(), datetime.timedelta(seconds=elapsed_time_s)))
+
+        return 0
+
+    def CleanTree(self, RemoveConfTemplateFilesToo=False):
+        ret = 0
+        # loop through each build target set.
+        edk2_logging.log_progress("Cleaning All Output for Build")
+
+        d = self.env.GetValue("BUILD_OUTPUT_BASE")
+        if(os.path.isdir(d)):
+            logging.debug("Removing [%s]", d)
+            # if the folder is opened in Explorer do not fail the entire Rebuild
+            try:
+                shutil.rmtree(d)
+            except OSError as wex:
+                logging.debug(wex)
+
+        else:
+            logging.debug("Directory [%s] already clean" % d)
+
+        # delete the conf .dbcache
+        # this needs to be removed in case build flags changed
+        d = os.path.join(self.ws, "Conf", ".cache")
+        if(os.path.isdir(d)):
+            shutil.rmtree(d)
+            logging.debug("Removing [%s]" % d)
+
+        if(RemoveConfTemplateFilesToo):
+            for a in ["target.txt", "build_rule.txt", "tools_def.txt"]:
+                d = os.path.join(self.ws, "Conf", a)
+                if(os.path.isfile(d)):
+                    os.remove(d)
+                    logging.debug("Removing [%s]" % d)
+
+        return ret
+
+    #
+    # Build step
+    #
+
+    def Build(self):
+        BuildType = self.env.GetValue("TARGET")
+        edk2_logging.log_progress("Running Build %s" % BuildType)
+
+        # set target, arch, toolchain, threads, and platform
+        params = "-p " + self.env.GetValue("ACTIVE_PLATFORM")
+        params += " -b " + BuildType
+        params += " -t " + self.env.GetValue("TOOL_CHAIN_TAG")
+        params += " -n " + self.env.GetValue("MAX_CONCURRENT_THREAD_NUMBER")
+
+        # Set the arch flags. Multiple are split by space
+        rt = self.env.GetValue("TARGET_ARCH").split(" ")
+        for t in rt:
+            params += " -a " + t
+
+        # get the report options and setup the build command
+        if(self.env.GetValue("BUILDREPORTING") == "TRUE"):
+            params += " -y " + self.env.GetValue("BUILDREPORT_FILE")
+            rt = self.env.GetValue("BUILDREPORT_TYPES").split(" ")
+            for t in rt:
+                params += " -Y " + t
+
+        # add special processing to handle building a single module
+        mod = self.env.GetValue("BUILDMODULE")
+        if(mod is not None and len(mod.strip()) > 0):
+            params += " -m " + mod
+            edk2_logging.log_progress("Single Module Build: " + mod)
+            self.SkipPostBuild = True
+            self.FlashImage = False
+
+        # attach the generic build vars
+        buildvars = self.env.GetAllBuildKeyValues(BuildType)
+        for key, value in buildvars.items():
+            params += " -D " + key + "=" + value
+        output_stream = edk2_logging.create_output_stream()
+
+        env = shell_environment.ShellEnvironment()
+        # WORKAROUND - Pin the PYTHONHASHSEED so that TianoCore build tools
+        # have consistent ordering. Addresses incremental builds.
+        pre_build_env_chk = env.checkpoint()
+        env.set_shell_var('PYTHONHASHSEED', '0')
+        env.log_environment()
+        ret = RunCmd("build", params)
+        # WORKAROUND - Undo the workaround.
+        env.restore_checkpoint(pre_build_env_chk)
+
+        problems = edk2_logging.scan_compiler_output(output_stream)
+        edk2_logging.remove_output_stream(output_stream)
+        for level, problem in problems:
+            logging.log(level, problem)
+
+        if(ret != 0):
+            return ret
+
+        return 0
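+
+    # For a sense of what Build() assembles (the values here are hypothetical):
+    # with ACTIVE_PLATFORM=PlatformPkg/Platform.dsc, TARGET=DEBUG,
+    # TOOL_CHAIN_TAG=VS2017, MAX_CONCURRENT_THREAD_NUMBER=8 and
+    # TARGET_ARCH="IA32 X64", the resulting command is roughly:
+    #
+    #     build -p PlatformPkg/Platform.dsc -b DEBUG -t VS2017 -n 8 -a IA32 -a X64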
+
+    def PreBuild(self):
+        edk2_logging.log_progress("Running Pre Build")
+        #
+        # Run the platform pre-build steps.
+        #
+        ret = self.PlatformPreBuild()
+
+        if(ret != 0):
+            logging.critical("PlatformPreBuild failed %d" % ret)
+            return ret
+        #
+        # run all loaded UefiBuild Plugins
+        #
+        for Descriptor in self.pm.GetPluginsOfClass(IUefiBuildPlugin):
+            rc = Descriptor.Obj.do_pre_build(self)
+            if(rc != 0):
+                if(rc is None):
+                    logging.error(
+                        "Plugin Failed: %s returned NoneType" % Descriptor.Name)
+                    ret = -1
+                else:
+                    logging.error("Plugin Failed: %s returned %d" %
+                                  (Descriptor.Name, rc))
+                    ret = rc
+                break  # fail on plugin error
+            else:
+                logging.debug("Plugin Success: %s" % Descriptor.Name)
+        return ret
+
+    def PostBuild(self):
+        edk2_logging.log_progress("Running Post Build")
+        #
+        # Run the platform post-build steps.
+        #
+        ret = self.PlatformPostBuild()
+
+        if(ret != 0):
+            logging.critical("PlatformPostBuild failed %d" % ret)
+            return ret
+
+        #
+        # run all loaded UefiBuild Plugins
+        #
+        for Descriptor in self.pm.GetPluginsOfClass(IUefiBuildPlugin):
+            rc = Descriptor.Obj.do_post_build(self)
+            if(rc != 0):
+                if(rc is None):
+                    logging.error(
+                        "Plugin Failed: %s returned NoneType" % Descriptor.Name)
+                    ret = -1
+                else:
+                    logging.error("Plugin Failed: %s returned %d" %
+                                  (Descriptor.Name, rc))
+                    ret = rc
+                break  # fail on plugin error
+            else:
+                logging.debug("Plugin Success: %s" % Descriptor.Name)
+
+        return ret
+
+    def SetEnv(self):
+        edk2_logging.log_progress("Setting up the Environment")
+        shell_environment.GetEnvironment().set_shell_var("WORKSPACE", self.ws)
+        shell_environment.GetBuildVars().SetValue("WORKSPACE", self.ws, "Set in SetEnv")
+
+        if(self.pp is not None):
+            shell_environment.GetEnvironment().set_shell_var("PACKAGES_PATH", self.pp)
+            shell_environment.GetBuildVars().SetValue(
+                "PACKAGES_PATH", self.pp, "Set in SetEnv")
+
+        # process platform parameters defined in platform build file
+        ret = self.SetPlatformEnv()
+        if(ret != 0):
+            logging.critical("Set Platform Env failed")
+            return ret
+
+        # set some basic defaults
+        self.SetBasicDefaults()
+
+        # Handle all the template files for workspace/Conf. Allow override.
+        TemplatesForConf = self.env.GetValue("CONF_TEMPLATE_DIR")
+        if(TemplatesForConf is not None):
+            TemplatesForConf = self.mws.join(self.ws, TemplatesForConf)
+            logging.debug(
+                "Platform defined override for Template Conf Files: %s", TemplatesForConf)
+        e = conf_mgmt.ConfMgmt(self.UpdateConf, TemplatesForConf)
+
+        # parse target file
+        ret = self.ParseTargetFile()
+        if(ret != 0):
+            logging.critical("ParseTargetFile failed")
+            return ret
+
+        ret = e.ToolsDefConfigure()
+        if(ret != 0):
+            logging.critical("ToolsDefConfigure failed")
+            return ret
+
+        # parse DSC file
+        ret = self.ParseDscFile()
+        if(ret != 0):
+            logging.critical("ParseDscFile failed")
+            return ret
+
+        # parse FDF file
+        ret = self.ParseFdfFile()
+        if(ret != 0):
+            logging.critical("ParseFdfFile failed")
+            return ret
+
+        # set build output base envs for all builds
+        self.env.SetValue("BUILD_OUT_TEMP", os.path.join(
+            self.ws, self.env.GetValue("OUTPUT_DIRECTORY")), "Computed in SetEnv")
+
+        target = self.env.GetValue("TARGET")
+        self.env.SetValue("BUILD_OUTPUT_BASE", os.path.join(self.env.GetValue(
+            "BUILD_OUT_TEMP"), target + "_" + self.env.GetValue("TOOL_CHAIN_TAG")), "Computed in SetEnv")
+
+        # We have our build target now. Give platform build one more chance for target specific settings.
+        ret = self.SetPlatformEnvAfterTarget()
+        if(ret != 0):
+            logging.critical("SetPlatformEnvAfterTarget failed")
+            return ret
+
+        # set the build report file
+        self.env.SetValue("BUILDREPORT_FILE", os.path.join(
+            self.env.GetValue("BUILD_OUTPUT_BASE"), "BUILD_REPORT.TXT"), "Computed in SetEnv", True)
+
+        # set environment variables for the build process
+        os.environ["EFI_SOURCE"] = self.ws
+
+        return 0
+
+    def FlashRomImage(self):
+        return self.PlatformFlashImage()
+
+    # -----------------------------------------------------------------------
+    # Methods that will be overridden by child class
+    # -----------------------------------------------------------------------
+
+    def PlatformPreBuild(self):
+        return 0
+
+    def PlatformPostBuild(self):
+        return 0
+
+    def SetPlatformEnv(self):
+        return 0
+
+    def SetPlatformEnvAfterTarget(self):
+        return 0
+
+    def PlatformBuildRom(self):
+        return 0
+
+    def PlatformFlashImage(self):
+        return 0
+
+    def PlatformGatedBuildShouldHappen(self):
+        return True
+
+    # ------------------------------------------------------------------------
+    # HELPER FUNCTIONS
+    # ------------------------------------------------------------------------
+    #
+
+    #
+    # Parse the target.txt file and add the values as env settings.
+    # Set them so they can be overridden.
+    #
+    def ParseTargetFile(self):
+        if(os.path.isfile(self.mws.join(self.ws, "Conf", "target.txt"))):
+            # parse target.txt file
+            logging.debug("Parse Target.txt file")
+            ttp = TargetTxtParser()
+            ttp.ParseFile(self.mws.join(self.ws, "Conf", "target.txt"))
+            for key, value in ttp.Dict.items():
+                # set env as overrideable
+                self.env.SetValue(key, value, "From Target.txt", True)
+
+        else:
+            logging.error("Failed to find target.txt file")
+            return -1
+
+        return 0
+
+    #
+    # Parse the active platform DSC file. This will get lots of variable info to
+    # be used in the build. This makes it so we don't have to define things twice.
+    #
+    def ParseDscFile(self):
+        dsc_file_path = self.mws.join(
+            self.ws, self.env.GetValue("ACTIVE_PLATFORM"))
+        if(os.path.isfile(dsc_file_path)):
+            # parse DSC File
+            logging.debug(
+                "Parse Active Platform DSC file: {0}".format(dsc_file_path))
+            dscp = DscParser().SetBaseAbsPath(self.ws).SetPackagePaths(
+                self.pp.split(os.pathsep)).SetInputVars(self.env.GetAllBuildKeyValues())
+            dscp.ParseFile(dsc_file_path)
+            for key, value in dscp.LocalVars.items():
+                # set env as overrideable
+                self.env.SetValue(key, value, "From Platform DSC File", True)
+
+        else:
+            logging.error("Failed to find DSC file")
+            return -1
+
+        return 0
+ logging.debug("Parse Active Flash Definition (FDF) file") + fdfp = DscParser().SetBaseAbsPath(self.ws).SetPackagePaths( + self.pp.split(os.pathsep)).SetInputVars(self.env.GetAllBuildKeyValues()) + pa = self.mws.join(self.ws, self.env.GetValue("FLASH_DEFINITION")) + fdfp.ParseFile(pa) + for key, value in fdfp.LocalVars.items(): + self.env.SetValue(key, value, "From Platform FDF File", True) + + else: + logging.error("Failed to find FDF file") + return -2 + + return 0 + + # + # Function used to set default values for numerous build + # flow control variables + # + def SetBasicDefaults(self): + self.env.SetValue("WORKSPACE", self.ws, "DEFAULT") + if(self.pp is not None): + self.env.SetValue("PACKAGES_PATH", self.pp, "DEFAULT") + return 0 diff --git a/edk2toolext/environment/var_dict.py b/edk2toolext/environment/var_dict.py new file mode 100644 index 00000000..9d2066e0 --- /dev/null +++ b/edk2toolext/environment/var_dict.py @@ -0,0 +1,179 @@ +# @file var_dict.py +# This module contains code for a special overridable dictionary. +# This stores most of the build configuration data and allows +# extensive config sharing for the build process, pre-build, and +# post-build. +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import logging + + +class EnvEntry(object): + def __init__(self, value, comment, overridable=False): + self.Value = value + self.Comment = comment + self.Overrideable = overridable + + def PrintEntry(self, f=None): + print("Value: %s" % self.Value, file=f) + print("Comment: %s" % self.Comment, file=f) + if(self.Overrideable): + print("Value overridable", file=f) + print("**********************", file=f) + # + # Function used to override the value if option allows it + # + + def SetValue(self, value, comment, overridable=False): + if (value == self.Value): + return True + + if(not self.Overrideable): + logging.debug("Can't set value [%s] as it isn't overrideable. Previous comment %s" % ( + value, self.Comment)) + return False + + self.Value = value + self.Comment = comment + self.Overrideable = overridable + return True + + def GetValue(self): + return self.Value + + +class VarDict(object): + def __init__(self): + self.Logger = logging.getLogger("EnvDict") + self.Dstore = {} # a set of envs + + def GetEntry(self, key): + return self.Dstore.get(key.upper()) + + def __copy__(self): + new_copy = VarDict() + new_copy.Logger = self.Logger + + new_copy.Dstore = {} + for key in self.Dstore: + entry = self.GetEntry(key) + value = entry.Value + comment = entry.Comment + override = entry.Overrideable + new_copy.SetValue(key, value, comment, override) + return new_copy + + def GetValue(self, k, default=None): + key = k.upper() + en = self.GetEntry(key) + if(en is not None): + self.Logger.debug("Key %s found. 
Value %s" % (key, en.GetValue())) + return en.GetValue() + else: + self.Logger.debug("Key %s not found" % key) + return default + + def SetValue(self, k, v, comment, overridable=False): + key = k.upper() + en = self.GetEntry(key) + value = str(v) + self.Logger.debug("Trying to set key %s to value %s" % (k, v)) + if(en is None): + # new entry + en = EnvEntry(value, comment, overridable) + self.Dstore[key] = en + return True + + return en.SetValue(value, comment, overridable) + + # + # function used to get a build var value for given key and buildtype + # + # if BuildType is None + # Build vars are defined by vars that start with BLD_ + # BLD_*_ means all build types + # BLD_DEBUG_ means build of debug type + # BLD_RELEASE_ means build of release type + # etc + # + + def GetBuildValue(self, key, BuildType=None): + rv = None + + if(BuildType is None): + BuildType = self.GetValue("TARGET") + + if(BuildType is None): + logging.debug( + "GetBuildValue - Invalid Parameter BuildType is None and Target Not set. Key is: " + key) + return None + + if(key is None): + logging.debug( + "GetBuildValue - Invalid Parameter key is None. BuildType is: " + BuildType) + return None + + ty = BuildType.upper().strip() + tk = key.upper().strip() + # see if specific + k = "BLD_" + ty + "_" + tk + rv = self.GetValue(k) + if(rv is None): + # didn't fine build type specific so check for generic + k = "BLD_*_" + tk + rv = self.GetValue(k) + + # return value...if not found should return None + return rv + + # + # function used to get a dictionary for all build vars + # + # Build vars are defined by vars that start with BLD_ + # BLD_*_ means all build types + # BLD_DEBUG_ means build of debug type + # BLD_RELEASE_ means build of release type + # etc + # + def GetAllBuildKeyValues(self, BuildType=None): + returndict = {} + if(BuildType is None): + BuildType = self.GetValue("TARGET") + + if(BuildType is None): + logging.debug( + "GetAllBuildKeyValues - Invalid Parameter BuildType is None and Target Not Set.") + return returndict + + ty = BuildType.upper().strip() + logging.debug("Getting all build keys for build type " + ty) + + # get all the generic build options + for key, value in self.Dstore.items(): + if(key.startswith("BLD_*_")): + k = key[6:] + returndict[k] = value.GetValue() + + # will override with specific for this build type + # figure out offset part of key name to strip + ks = len(ty) + 5 + for key, value in self.Dstore.items(): + if(key.startswith("BLD_" + ty + "_")): + k = key[ks:] + returndict[k] = value.GetValue() + + return returndict + + def PrintAll(self, fp=None): + f = None + if(fp is not None): + f = open(fp, 'a+') + for key, value in self.Dstore.items(): + print("Key = %s" % key, file=f) + value.PrintEntry(f) + if(f): + f.close() diff --git a/edk2toolext/environment/version_aggregator.py b/edk2toolext/environment/version_aggregator.py new file mode 100644 index 00000000..31821730 --- /dev/null +++ b/edk2toolext/environment/version_aggregator.py @@ -0,0 +1,79 @@ +# @file version_aggregator facilitates the collection of information +# regarding the tools, binaries, submodule configuration used in a build +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import copy +import logging +from enum import Enum + +VERSION_AGGREGATOR = None + + +class version_aggregator(object): + def __init__(self): + super(version_aggregator, self).__init__() + self.Versions = {} + self._logger = logging.getLogger("version_aggregator") + + def ReportVersion(self, key, 
diff --git a/edk2toolext/environment/version_aggregator.py b/edk2toolext/environment/version_aggregator.py
new file mode 100644
index 00000000..31821730
--- /dev/null
+++ b/edk2toolext/environment/version_aggregator.py
@@ -0,0 +1,79 @@
+# @file version_aggregator.py
+# This module facilitates the collection of information regarding the tools,
+# binaries, and submodule configuration used in a build.
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+import copy
+import logging
+from enum import Enum
+
+VERSION_AGGREGATOR = None
+
+
+class version_aggregator(object):
+    def __init__(self):
+        super(version_aggregator, self).__init__()
+        self.Versions = {}
+        self._logger = logging.getLogger("version_aggregator")
+
+    def ReportVersion(self, key, value, versionType):
+        """
+        Report the version of something.
+
+        key -- The name of what you are reporting.
+        value -- The value of what you are reporting.
+        versionType -- The method of categorizing what is being reported. See VersionTypes for details.
+        """
+        if key in self.Versions:
+            if self.Versions[key]["version"] == value:
+                self._logger.warning("version_aggregator: This {0}:{1} key/value pair "
+                                     "was already registered".format(key, value))
+            else:
+                error = "version_aggregator: {0} key registered with a different value\n\t" \
+                        "Old:{1}\n\tNew:{2}".format(key, self.Versions[key]["version"], value)
+                self._logger.error(error)
+                raise Exception(error)
+            return
+
+        self.Versions[key] = {
+            "name": key,
+            "version": value,
+            "type": versionType.name
+        }
+        self._logger.debug("version_aggregator logging version: {0}".format(str(self.Versions[key])))
+
+    def GetAggregatedVersionInformation(self):
+        """
+        Returns a copy of the aggregated information.
+        """
+        return copy.deepcopy(self.Versions)
+
+
+class VersionTypes(Enum):
+    """
+    COMMIT is for the commit hash of a repository.
+    BINARY is for a pre-packaged binary that is distributed with a version number.
+    TOOL is for recording the version number of a tool that was used during the build process.
+    INFO is for recording miscellaneous information.
+    """
+    TOOL = 1
+    COMMIT = 2
+    BINARY = 3
+    INFO = 4
+
+
+def GetVersionAggregator():
+    """
+    Returns a singleton instance of this class for global use.
+    """
+    global VERSION_AGGREGATOR
+
+    if VERSION_AGGREGATOR is None:
+        logging.debug("Setting up version aggregator")
+        VERSION_AGGREGATOR = version_aggregator()
+
+    return VERSION_AGGREGATOR
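+
+
+# Sketch of how a build step might record a tool version with this module
+# (the tool name and version are illustrative):
+#
+#     from edk2toolext.environment import version_aggregator
+#     va = version_aggregator.GetVersionAggregator()
+#     va.ReportVersion("nasm", "2.14.02", version_aggregator.VersionTypes.TOOL)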
diff --git a/edk2toolext/invocables/__init__.py b/edk2toolext/invocables/__init__.py
new file mode 100644
index 00000000..0a69010b
--- /dev/null
+++ b/edk2toolext/invocables/__init__.py
@@ -0,0 +1,5 @@
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
diff --git a/edk2toolext/invocables/edk2_ci_build.py b/edk2toolext/invocables/edk2_ci_build.py
new file mode 100644
index 00000000..dc86ee0c
--- /dev/null
+++ b/edk2toolext/invocables/edk2_ci_build.py
@@ -0,0 +1,310 @@
+# @file edk2_ci_build.py
+# This module contains code that supports CI/CD.
+# This is the main entry for the build and test process
+# of non-product builds.
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+import os
+import sys
+import logging
+import yaml
+import traceback
+from edk2toollib.uefi.edk2.path_utilities import Edk2Path
+from edk2toollib.log.junit_report_format import JunitTestReport
+from edk2toolext.edk2_invocable import Edk2Invocable
+from edk2toolext.environment import self_describing_environment
+from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin
+# from edk2toolext.environment import plugin_manager
+from edk2toolext.environment import shell_environment
+from edk2toolext import edk2_logging
+from edk2toolext import config_validator
+# import pkg_resources
+
+
+class CiBuildSettingsManager():
+    ''' Platform settings will be accessed through this implementation. '''
+
+    def GetActiveScopes(self):
+        ''' get scope '''
+        raise NotImplementedError()
+
+    def GetDependencies(self):
+        pass
+
+    def GetPackages(self):
+        pass
+
+    def GetPackagesPath(self):
+        pass
+
+    def GetArchSupported(self):
+        raise NotImplementedError()
+
+    def GetTargetsSupported(self):
+        raise NotImplementedError()
+
+    def GetWorkspaceRoot(self):
+        ''' get WorkspacePath '''
+        raise NotImplementedError()
+
+    def GetGroupName(self):
+        raise NotImplementedError()
+
+    def GetName(self):
+        raise NotImplementedError()
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' Implement in subclass to add command line options to the argparser '''
+        pass
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Implement in subclass to retrieve command line options from the argparser '''
+        pass
+
+    def GetPluginSettings(self):
+        ''' Implement in subclass to pass dictionary of settings for individual plugins '''
+        return {}
+
+
+# Merge the repo-level and package-level settings for a single plugin.
+# Package-level values win because they are applied last.
+def merge_config(global_config, pkg_config, descriptor={}):
+    plugin_name = ""
+    config = dict()
+    if "module" in descriptor:
+        plugin_name = descriptor["module"]
+    if "config_name" in descriptor:
+        plugin_name = descriptor["config_name"]
+
+    if plugin_name == "":
+        return config
+
+    if plugin_name in global_config:
+        config.update(global_config[plugin_name])
+
+    if plugin_name in pkg_config:
+        config.update(pkg_config[plugin_name])
+
+    return config
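+
+
+# merge_config by example (the plugin name and settings below are
+# hypothetical) - package values win because they are applied last:
+#
+#     repo_cfg = {"ExamplePlugin": {"skip": True}}
+#     pkg_cfg = {"ExamplePlugin": {"skip": False}}
+#     merge_config(repo_cfg, pkg_cfg, {"module": "ExamplePlugin"})
+#     # -> {"skip": False}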
+
+
+class Edk2CiBuild(Edk2Invocable):
+    def GetLoggingLevel(self, loggerType):
+        ''' Get the logging level for a given type
+        base == lowest logging level supported
+        con == Screen logging
+        txt == plain text file logging
+        md == markdown file logging
+        '''
+        if(loggerType == "con") and not self.Verbose:
+            return logging.WARNING
+        return logging.DEBUG
+
+    def AddCommandLineOptions(self, parser):
+
+        parser.add_argument('-p', '--pkg', '--pkg-dir', dest='packageList', nargs="+", type=str,
+                            help='A package or folder you want to test (abs path or cwd relative). '
+                                 'Can list multiple by doing -p ', default=[])
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Retrieve command line options from the argparser '''
+        self.packageList = args.packageList
+
+    def GetSettingsClass(self):
+        return CiBuildSettingsManager
+
+    def GetLoggingFileName(self, loggerType):
+        return "CI_BUILDLOG"
+
+    def Go(self):
+        log_directory = os.path.join(self.GetWorkspaceRoot(), self.GetLoggingFolderRelativeToRoot())
+
+        # SET PACKAGE PATH
+        #
+        # Get Package Path from config file
+        pplist = self.PlatformSettings.GetPackagesPath() if self.PlatformSettings.GetPackagesPath() else []
+
+        # Check Dependencies for Repo
+        for dependency in self.PlatformSettings.GetDependencies():
+            pplist.append(dependency["Path"])
+
+        # make Edk2Path object to handle all path operations
+        try:
+            edk2path = Edk2Path(self.GetWorkspaceRoot(), pplist)
+        except Exception as e:
+            logging.error("You need to run edk2_ci_setup to resolve all repos.")
+            raise e
+
+        logging.info(f"Running CI Build: {self.PlatformSettings.GetName()}")
+        logging.info(f"WorkSpace: {self.GetWorkspaceRoot()}")
+        logging.info(f"Package Path: {self.PlatformSettings.GetPackagesPath()}")
+        # logging.info("mu_build version: {0}".format(pkg_resources.get_distribution("mu_build").version))
+        # logging.info("mu_python_library version: " + pkg_resources.get_distribution("mu_python_library").version)
+        # logging.info("mu_environment version: " + pkg_resources.get_distribution("mu_environment").version)
+
+        # Bring up the common minimum environment.
+        logging.log(edk2_logging.SECTION, "Getting Environment")
+        (build_env, shell_env) = self_describing_environment.BootstrapEnvironment(
+            self.GetWorkspaceRoot(), self.GetActiveScopes())
+        env = shell_environment.GetBuildVars()
+
+        # Bind our current execution environment into the shell vars.
+        ph = os.path.dirname(sys.executable)
+        if " " in ph:
+            ph = '"' + ph + '"'
+        shell_env.set_shell_var("PYTHON_HOME", ph)
+        # PYTHON_COMMAND is required to be set for using edk2 python builds.
+        # todo: work with edk2 to remove the bat file and move to native python calls
+        pc = sys.executable
+        if " " in pc:
+            pc = '"' + pc + '"'
+        shell_env.set_shell_var("PYTHON_COMMAND", pc)
+
+        archSupported = " ".join(self.PlatformSettings.GetArchSupported())
+        env.SetValue("TARGET_ARCH", archSupported, "from PlatformSettings.GetArchSupported()")
+
+        _targets = " ".join(self.PlatformSettings.GetTargetsSupported())
+
+        # Generate consumable XML object - junit format
+        JunitReport = JunitTestReport()
+
+        # Keep track of failures
+        failure_num = 0
+        total_num = 0
+
+        # Load plugins
+        logging.log(edk2_logging.SECTION, "Loading plugins")
+
+        pluginList = self.plugin_manager.GetPluginsOfClass(ICiBuildPlugin)
+        if len(self.packageList) == 0:
+            self.packageList.extend(self.PlatformSettings.GetPackages())
+
+        for pkgToRunOn in self.packageList:
+            #
+            # run all loaded Edk2CiBuild Plugins/Tests
+            #
+            logging.log(edk2_logging.SECTION, f"Building {pkgToRunOn} Package")
+            logging.info(f"Running on Package: {pkgToRunOn}")
+            ts = JunitReport.create_new_testsuite(pkgToRunOn,
+                                                  f"Edk2CiBuild.{self.PlatformSettings.GetGroupName()}.{pkgToRunOn}")
+            packagebuildlog_path = os.path.join(log_directory, pkgToRunOn)
+            _, txthandle = edk2_logging.setup_txt_logger(
+                packagebuildlog_path, f"BUILDLOG_{pkgToRunOn}", logging_level=logging.DEBUG, isVerbose=True)
+            _, mdhandle = edk2_logging.setup_markdown_logger(
+                packagebuildlog_path, f"BUILDLOG_{pkgToRunOn}", logging_level=logging.DEBUG, isVerbose=True)
+            loghandle = [txthandle, mdhandle]
+            shell_environment.CheckpointBuildVars()
+            env = shell_environment.GetBuildVars()
+
+            # load the package level .mu.yaml config file, if present
+            pkg_config_file = edk2path.GetAbsolutePathOnThisSytemFromEdk2RelativePath(
+                os.path.join(pkgToRunOn, pkgToRunOn + ".mu.yaml"))
+            if(pkg_config_file):
+                with open(pkg_config_file, 'r') as f:
+                    pkg_config = yaml.safe_load(f)
+            else:
+                logging.info(f"No Pkg Config file for {pkgToRunOn}")
+                pkg_config = dict()
+
+            # check the resulting configuration
+            config_validator.check_package_confg(pkgToRunOn, pkg_config, pluginList)
+
+            # get all the defines from the package configuration
+            if "Defines" in pkg_config:
+                for definition_key in pkg_config["Defines"]:
+                    definition = pkg_config["Defines"][definition_key]
+                    env.SetValue(definition_key, definition, "Edk2CiBuild.py from PkgConfig yaml", False)
+
+            for Descriptor in pluginList:
+                # Get our targets
+                targets = ["DEBUG"]
+                if Descriptor.Obj.IsTargetDependent() and _targets:
+                    targets = self.PlatformSettings.GetTargetsSupported()
+
+                for target in targets:
+                    edk2_logging.log_progress(f"--Running {pkgToRunOn}: {Descriptor.Name} {target} --")
+                    total_num += 1
+                    shell_environment.CheckpointBuildVars()
+                    env = shell_environment.GetBuildVars()
+
+                    env.SetValue("TARGET", target, "Edk2CiBuild.py before RunBuildPlugin")
+                    (testcasename, testclassname) = Descriptor.Obj.GetTestName(pkgToRunOn, env)
+                    tc = ts.create_new_testcase(testcasename, testclassname)
+
+                    # create the stream for the build log
+                    plugin_output_stream = edk2_logging.create_output_stream()
+
+                    # merge the repo level and package level for this specific plugin
+                    pkg_plugin_configuration = merge_config(self.PlatformSettings.GetPluginSettings(),
+                                                            pkg_config, Descriptor.descriptor)
+
+                    # perhaps we should ask the validator to run on the package for this target
+
+                    # Still need to see if the package decided this should be skipped
+                    if pkg_plugin_configuration is None or \
+                            ("skip" in pkg_plugin_configuration and pkg_plugin_configuration["skip"]):
+                        tc.SetSkipped()
+                        edk2_logging.log_progress("--->Test Skipped by package! %s" % Descriptor.Name)
+
+                    else:
+                        try:
+                            # - package is the edk2 path to package. This means workspace/packagepath relative.
+                            # - edk2path object configured with workspace and packages path
+                            # - any additional command line args
+                            # - RepoConfig Object (dict) for the build
+                            # - PkgConfig Object (dict)
+                            # - EnvConfig Object
+                            # - Plugin Manager Instance
+                            # - Plugin Helper Obj Instance
+                            # - testcase Object used for outputting junit results
+                            # - output_stream the StringIO output stream from this plugin
+                            rc = Descriptor.Obj.RunBuildPlugin(pkgToRunOn, edk2path, pkg_plugin_configuration,
+                                                               env, self.plugin_manager, self.helper,
+                                                               tc, plugin_output_stream)
+                        except Exception as exp:
+                            exc_type, exc_value, exc_traceback = sys.exc_info()
+                            logging.critical("EXCEPTION: {0}".format(exp))
+                            exceptionPrint = traceback.format_exception(type(exp), exp, exc_traceback)
+                            logging.critical(" ".join(exceptionPrint))
+                            tc.SetError("Exception: {0}".format(
+                                exp), "UNEXPECTED EXCEPTION")
+                            rc = 1
+
+                        if(rc != 0):
+                            failure_num += 1
+                            if(rc is None):
+                                logging.error(
+                                    "--->Test Failed: %s returned NoneType" % Descriptor.Name)
+                            else:
+                                logging.error(
+                                    "--->Test Failed: %s returned %d" % (Descriptor.Name, rc))
+                        else:
+                            edk2_logging.log_progress(f"--->Test Success {Descriptor.Name} {target}")
+
+                    # revert to the checkpoint we created previously
+                    shell_environment.RevertBuildVars()
+                    # remove the logger
+                    edk2_logging.remove_output_stream(plugin_output_stream)
+                # finished target loop
+            # Finished plugin loop
+
+            edk2_logging.stop_logging(loghandle)  # stop the logging for this particular buildfile
+            shell_environment.RevertBuildVars()
+        # Finished buildable file loop
+
+        JunitReport.Output(os.path.join(self.GetWorkspaceRoot(), "Build", "TestSuites.xml"))
+
+        # Print Overall Success
+        if(failure_num != 0):
+            logging.error("Overall Build Status: Error")
+            edk2_logging.log_progress(f"There were {failure_num} failures out of {total_num} attempts")
+        else:
+            edk2_logging.log_progress("Overall Build Status: Success")
+
+        return failure_num
+
+
+def main():
+    Edk2CiBuild().Invoke()
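+
+
+# What a platform provides to drive this invocable, sketched with
+# hypothetical values:
+#
+#     class Settings(CiBuildSettingsManager):
+#         def GetWorkspaceRoot(self):
+#             return os.path.dirname(os.path.abspath(__file__))
+#
+#         def GetActiveScopes(self):
+#             return ("cibuild",)
+#
+#         def GetName(self):
+#             return "ExampleRepo"
+#
+#         def GetGroupName(self):
+#             return "Example"
+#
+#         def GetPackages(self):
+#             return ["ExamplePkg"]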
diff --git a/edk2toolext/invocables/edk2_ci_setup.py b/edk2toolext/invocables/edk2_ci_setup.py
new file mode 100644
index 00000000..7ced2934
--- /dev/null
+++ b/edk2toolext/invocables/edk2_ci_setup.py
@@ -0,0 +1,109 @@
+# @file edk2_ci_setup.py
+# Resolves all dependent repos for a CI environment.
+#
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+import os
+import logging
+
+from edk2toolext.edk2_invocable import Edk2Invocable
+from edk2toolext.environment import repo_resolver
+
+
+class CiSetupSettingsManager():
+    ''' Platform settings will be accessed through this implementation. '''
+
+    def GetDependencies(self):
+        raise NotImplementedError()
+
+    def GetWorkspaceRoot(self):
+        ''' get WorkspacePath '''
+        raise NotImplementedError()
+
+    def GetOmnicachePath(self):
+        ''' Optionally point to omnicache path '''
+        pass
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' Implement in subclass to add command line options to the argparser '''
+        pass
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Implement in subclass to retrieve command line options from the argparser '''
+        pass
+
+
+def merge_config(mu_config, pkg_config, descriptor={}):
+    plugin_name = ""
+    config = dict()
+    if "module" in descriptor:
+        plugin_name = descriptor["module"]
+    if "config_name" in descriptor:
+        plugin_name = descriptor["config_name"]
+
+    if plugin_name == "":
+        return config
+
+    if plugin_name in mu_config:
+        config.update(mu_config[plugin_name])
+
+    if plugin_name in pkg_config:
+        config.update(pkg_config[plugin_name])
+
+    return config
+
+
+class Edk2CiBuildSetup(Edk2Invocable):
+
+    def AddCommandLineOptions(self, parser):
+        parser.add_argument('-ignore', '--ignore-git', dest="git_ignore", action="store_true",
+                            help="Whether to ignore errors in the git cloning process", default=False)
+        parser.add_argument('--omnicache', '--reference', dest='omnicache_path',
+                            default=os.environ.get('OMNICACHE_PATH'))
+        parser.add_argument('-force', '--force-git', dest="git_force", action="store_true",
+                            help="Whether to force git repos to clone in the git cloning process", default=False)
+        parser.add_argument('-update-git', '--update-git', dest="git_update", action="store_true",
+                            help="Whether to update git repos as needed in the git cloning process", default=False)
+
+    def GetVerifyCheckRequired(self):
+        ''' Will not verify the environment '''
+        return False
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Retrieve command line options from the argparser '''
+        self.git_ignore = args.git_ignore
+        self.omnicache_path = args.omnicache_path
+        self.git_force = args.git_force
+        self.git_update = args.git_update
+
+    def GetSettingsClass(self):
+        return CiSetupSettingsManager
+
+    def GetLoggingFileName(self, loggerType):
+        return "CISETUP"
+
+    def Go(self):
+        # Prefer the platform-provided omnicache path if there is one;
+        # otherwise keep the command line / environment value.
+        omnicache_path = self.omnicache_path
+        try:
+            platform_path = self.PlatformSettings.GetOmnicachePath()
+            if platform_path is not None:
+                omnicache_path = platform_path
+        except Exception:
+            pass
+
+        ret = repo_resolver.resolve_all(self.GetWorkspaceRoot(),
+                                        self.PlatformSettings.GetDependencies(),
+                                        ignore=self.git_ignore, force=self.git_force,
+                                        update_ok=self.git_update, omnicache_dir=omnicache_path)
+
+        logging.info(f"Repo resolver resolved {ret}")
+
+        return 0
+
+
+def main():
+    Edk2CiBuildSetup().Invoke()
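+
+
+# Sketched settings for this invocable (the URL and path are hypothetical):
+#
+#     class Settings(CiSetupSettingsManager):
+#         def GetWorkspaceRoot(self):
+#             return os.path.dirname(os.path.abspath(__file__))
+#
+#         def GetDependencies(self):
+#             return [{"Path": "Common/EXAMPLE",
+#                      "Url": "https://github.com/example/example.git",
+#                      "Branch": "master"}]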
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+import os
+import sys
+import logging
+import pkg_resources
+from edk2toolext import edk2_logging
+from edk2toolext.environment import plugin_manager
+from edk2toolext.environment.plugintypes.uefi_helper_plugin import HelperFunctions
+from edk2toolext.environment import version_aggregator
+from edk2toolext.environment import self_describing_environment
+from edk2toolext.environment.uefi_build import UefiBuilder
+from edk2toolext.edk2_invocable import Edk2Invocable
+from edk2toollib.utility_functions import locate_class_in_module
+
+PIP_PACKAGES_LIST = ["edk2-pytool-library", "edk2-pytool-extensions", "PyYaml"]
+
+
+class BuildSettingsManager():
+    ''' Platform settings will be accessed through this implementation. '''
+
+    def GetActiveScopes(self):
+        ''' get scope '''
+        raise NotImplementedError()
+
+    def GetWorkspaceRoot(self):
+        ''' get WorkspacePath '''
+        raise NotImplementedError()
+
+    def GetModulePkgsPath(self):
+        raise NotImplementedError()
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' Implement in subclass to add command line options to the argparser '''
+        pass
+
+    def RetrieveCommandLineOptions(self):
+        ''' Implement in subclass to retrieve command line options from the argparser '''
+        pass
+
+
+#
+# Pass in a list of pip package names and they will be printed as well as
+# reported to the global version_aggregator
+def display_pip_package_info(package_list):
+    for package in package_list:
+        version = pkg_resources.get_distribution(package).version
+        logging.info("{0} version: {1}".format(package, version))
+        version_aggregator.GetVersionAggregator().ReportVersion(package, version, version_aggregator.VersionTypes.TOOL)
+
+
+class Edk2PlatformBuild(Edk2Invocable):
+    ''' Imports UefiBuilder and calls go '''
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' adds command line options to the argparser '''
+
+        # PlatformSettings could also be a subclass of UefiBuilder, who knows!
+        if isinstance(self.PlatformSettings, UefiBuilder):
+            self.PlatformBuilder = self.PlatformSettings
+        else:
+            try:
+                # if it's not, we will try to find it in the module that was originally provided.
+                self.PlatformBuilder = locate_class_in_module(self.PlatformModule, UefiBuilder)()
+            except TypeError:
+                raise RuntimeError(f"UefiBuild not found in module:\n{dir(self.PlatformModule)}")
+
+        # If PlatformBuilder and PlatformSettings are separate, give CommandLineOptions to PlatformBuilder
+        if self.PlatformBuilder is not self.PlatformSettings:
+            self.PlatformBuilder.AddCommandLineOptions(parserObj)
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Retrieve command line options from the argparser '''
+
+        # If PlatformBuilder and PlatformSettings are separate, give args to PlatformBuilder
+        if self.PlatformBuilder is not self.PlatformSettings:
+            self.PlatformBuilder.RetrieveCommandLineOptions(args)
+
+    def GetSettingsClass(self):
+        ''' Providing BuildSettingsManager '''
+        return BuildSettingsManager
+
+    def GetLoggingFileName(self, loggerType):
+        return "BUILDLOG"
+
+    def Go(self):
+        logging.info("Running Python version: " + str(sys.version_info))
+
+        display_pip_package_info(PIP_PACKAGES_LIST)
+
+        (build_env, shell_env) = self_describing_environment.BootstrapEnvironment(
+            self.GetWorkspaceRoot(), self.GetActiveScopes())
+
+        # Bind our current execution environment into the shell vars.
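+        # The interpreter paths are quoted below when they contain spaces so
+        # that downstream build scripts treat each value as a single token.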
+ ph = os.path.dirname(sys.executable) + if " " in ph: + ph = '"' + ph + '"' + shell_env.set_shell_var("PYTHON_HOME", ph) + # PYTHON_COMMAND is required to be set for using edk2 python builds. + # todo: work with edk2 to remove the bat file and move to native python calls + pc = sys.executable + if " " in pc: + pc = '"' + pc + '"' + shell_env.set_shell_var("PYTHON_COMMAND", pc) + + # Load plugins + logging.log(edk2_logging.SECTION, "Loading Plugins") + pm = plugin_manager.PluginManager() + failedPlugins = pm.SetListOfEnvironmentDescriptors( + build_env.plugins) + if failedPlugins: + logging.critical("One or more plugins failed to load. Halting build.") + for a in failedPlugins: + logging.error("Failed Plugin: {0}".format(a["name"])) + raise Exception("One or more plugins failed to load.") + + helper = HelperFunctions() + if(helper.LoadFromPluginManager(pm) > 0): + raise Exception("One or more helper plugins failed to load.") + # + # Now we can actually kick off a build. + # + logging.log(edk2_logging.SECTION, "Kicking off build") + return self.PlatformBuilder.Go(self.GetWorkspaceRoot(), + self.PlatformSettings.GetModulePkgsPath(), + helper, pm) + + +def main(): + Edk2PlatformBuild().Invoke() diff --git a/edk2toolext/invocables/edk2_setup.py b/edk2toolext/invocables/edk2_setup.py new file mode 100644 index 00000000..582b60c6 --- /dev/null +++ b/edk2toolext/invocables/edk2_setup.py @@ -0,0 +1,179 @@ +# @file edk2_setup +# updates submodules listed as REQUIRED_REPOS in Config file. +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import logging +from io import StringIO +from edk2toolext import edk2_logging +from edk2toolext.environment import version_aggregator +from edk2toolext.edk2_invocable import Edk2Invocable +from edk2toollib.utility_functions import RunCmd +from edk2toollib.utility_functions import version_compare + + +class SetupSettingsManager(): + ''' Platform settings will be accessed through this implementation. 
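+
+        A hypothetical minimal subclass (names are illustrative only):
+
+            class MySetupSettings(SetupSettingsManager):
+                def GetWorkspaceRoot(self):
+                    return os.path.dirname(os.path.abspath(__file__))
+
+                def GetActiveScopes(self):
+                    return ["corebuild"]
+
+                def GetRequiredRepos(self):
+                    # workspace-relative submodule paths
+                    return ["MU_BASECORE"]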
'''
+
+    def GetActiveScopes(self):
+        ''' get scope '''
+        raise NotImplementedError()
+
+    def GetWorkspaceRoot(self):
+        ''' get WorkspacePath '''
+        raise NotImplementedError()
+
+    def GetRequiredRepos(self):
+        ''' get required repos '''
+        raise NotImplementedError()
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' Implement in subclass to add command line options to the argparser '''
+        pass
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Implement in subclass to retrieve command line options from the argparser '''
+        pass
+
+
+class Edk2PlatformSetup(Edk2Invocable):
+    ''' Updates git submodules listed in required_repos '''
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' adds command line options to the argparser '''
+        parserObj.add_argument('--force', '--FORCE', '--Force', dest="force", action='store_true', default=False)
+        parserObj.add_argument('--omnicache', '--OMNICACHE', '--Omnicache', dest='omnicache_path',
+                               default=os.environ.get('OMNICACHE_PATH'))
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Retrieve command line options from the argparser '''
+        self.force_it = args.force
+        self.omnicache_path = args.omnicache_path
+
+    def GetVerifyCheckRequired(self):
+        ''' Will not call self_describing_environment.VerifyEnvironment because it hasn't been set up yet '''
+        return False
+
+    def GetSettingsClass(self):
+        ''' Providing SetupSettingsManager '''
+        return SetupSettingsManager
+
+    def GetLoggingFileName(self, loggerType):
+        return "SETUPLOG"
+
+    def Go(self):
+        required_repos = self.PlatformSettings.GetRequiredRepos()
+        workspace_path = self.GetWorkspaceRoot()
+        # Make sure git is installed
+        return_buffer = StringIO()
+        RunCmd("git", "--version", outstream=return_buffer, raise_exception_on_nonzero=True)
+        git_version = return_buffer.getvalue().strip()
+        return_buffer.close()
+        version_aggregator.GetVersionAggregator().ReportVersion("Git",
+                                                                git_version,
+                                                                version_aggregator.VersionTypes.TOOL)
+        min_git = "2.11.0"
+        # This code is highly specific to the return value of "git version"...
+        cur_git = ".".join(git_version.split(' ')[2].split(".")[:3])
+        if version_compare(min_git, cur_git) > 0:
+            raise RuntimeError("Please upgrade Git! Current version is %s. Minimum is %s." % (cur_git, min_git))
+
+        # Pre-setup cleaning if "--force" is specified.
+        if self.force_it:
+            try:
+                # Clean and reset the main repo.
+                edk2_logging.log_progress("## Cleaning the root repo...")
+                RunCmd("git", "reset --hard", workingdir=workspace_path,
+                       logging_level=logging.DEBUG, raise_exception_on_nonzero=True)
+                RunCmd("git", "clean -xffd", workingdir=workspace_path,
+                       logging_level=logging.DEBUG, raise_exception_on_nonzero=True)
+                edk2_logging.log_progress("Done.\n")
+
+                # Clean any submodule repos.
+                if required_repos:
+                    for required_repo in required_repos:
+                        edk2_logging.log_progress("## Cleaning Git repository: %s..." % required_repo)
+                        required_repo_path = os.path.normpath(os.path.join(workspace_path, required_repo))
+                        # Run the clean from inside the submodule, not the root repo.
+                        RunCmd("git", "reset --hard", workingdir=required_repo_path,
+                               logging_level=logging.DEBUG, raise_exception_on_nonzero=True)
+                        RunCmd("git", "clean -xffd", workingdir=required_repo_path,
+                               logging_level=logging.DEBUG, raise_exception_on_nonzero=True)
+
+                        edk2_logging.log_progress("Done.\n")
+
+            except RuntimeError as e:
+                logging.error("FAILED!\n")
+                logging.error("Error while trying to clean the environment!")
+                logging.error(str(e))
+                return
+
+        # Grab the remaining Git repos.
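+        # Two passes follow: first sync the submodule remotes, then check
+        # each repo and fetch it when it is clean (or when --force is set).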
+        if required_repos:
+            # Git Repos: STEP 1 --------------------------------------
+            # Make sure that the repos are all synced.
+            try:
+                edk2_logging.log_progress("## Syncing Git repositories: %s..." % ", ".join(required_repos))
+                RunCmd("git", 'submodule sync -- ' + " ".join(required_repos),
+                       workingdir=workspace_path, logging_level=logging.DEBUG, raise_exception_on_nonzero=True)
+
+                edk2_logging.log_progress("Done.\n")
+            except RuntimeError as e:
+                logging.error("FAILED!\n")
+                logging.error("Error while trying to synchronize the environment!")
+                logging.error(str(e))
+                return
+
+            # Git Repos: STEP 2 --------------------------------------
+            # Iterate through all repos and see whether they should be fetched.
+            for required_repo in required_repos:
+                try:
+                    edk2_logging.log_progress("## Checking Git repository: %s..." % required_repo)
+
+                    # Git Repos: STEP 2a ---------------------------------
+                    # Need to determine whether to skip this repo.
+                    required_repo_path = os.path.normpath(os.path.join(workspace_path, required_repo))
+                    skip_repo = False
+                    # If the repo exists (and we're not forcing things) make
+                    # sure that it's not in a "dirty" state.
+                    if os.path.exists(required_repo_path) and not self.force_it:
+                        return_buffer = StringIO()
+                        RunCmd("git", 'diff ' + required_repo, outstream=return_buffer, workingdir=workspace_path,
+                               logging_level=logging.DEBUG, raise_exception_on_nonzero=True)
+                        git_data = return_buffer.getvalue().strip()
+                        return_buffer.close()
+                        # If anything was returned, we should skip processing the repo.
+                        # It is either on a different commit or it has local changes.
+                        if git_data != "":
+                            logging.info("-- NOTE: Repo currently exists and appears to have local changes!")
+                            logging.info("-- Skipping fetch!")
+                            skip_repo = True
+
+                    # Git Repos: STEP 2b ---------------------------------
+                    # If we're not skipping, grab it.
+                    if not skip_repo or self.force_it:
+                        logging.info("## Fetching repo.")
+                        cmd_string = "submodule update --init --recursive --progress"
+                        if self.omnicache_path is not None:
+                            cmd_string += " --reference " + self.omnicache_path
+                        cmd_string += " " + required_repo
+                        RunCmd('git', cmd_string, workingdir=workspace_path,
+                               logging_level=logging.DEBUG, raise_exception_on_nonzero=True)
+
+                    edk2_logging.log_progress("Done.\n")
+
+                except RuntimeError as e:
+                    logging.error("FAILED!\n")
+                    logging.error("Failed to fetch required repository!\n")
+                    logging.error(str(e))
+
+        return 0
+
+        # TODO: Install any certs or other things that might be required.
+
+
+def main():
+    Edk2PlatformSetup().Invoke()
diff --git a/edk2toolext/invocables/edk2_update.py b/edk2toolext/invocables/edk2_update.py
new file mode 100644
index 00000000..269f4bd4
--- /dev/null
+++ b/edk2toolext/invocables/edk2_update.py
@@ -0,0 +1,88 @@
+# @file Edk2Update
+# Updates external dependencies for project_scope in workspace_path
+# as listed in Platform Config file.
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+import logging
+from edk2toolext import edk2_logging
+from edk2toolext.environment import self_describing_environment
+from edk2toolext.edk2_invocable import Edk2Invocable
+
+
+class UpdateSettingsManager():
+    ''' Platform settings will be accessed through this implementation.
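+
+        For update, a subclass only needs to provide the workspace root and
+        the active scopes; the scopes choose which environment descriptors
+        (paths, ext_deps, and plugins) get updated.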
'''
+
+    def GetActiveScopes(self):
+        ''' get scope '''
+        raise NotImplementedError()
+
+    def GetWorkspaceRoot(self):
+        ''' get WorkspacePath '''
+        raise NotImplementedError()
+
+    def AddCommandLineOptions(self, parserObj):
+        ''' Implement in subclass to add command line options to the argparser '''
+        pass
+
+    def RetrieveCommandLineOptions(self, args):
+        ''' Implement in subclass to retrieve command line options from the argparser '''
+        pass
+
+
+def build_env_changed(build_env, build_env_2):
+    ''' return True if build_env has changed '''
+
+    return (build_env.paths != build_env_2.paths) or \
+        (build_env.extdeps != build_env_2.extdeps) or \
+        (build_env.plugins != build_env_2.plugins)
+
+
+class Edk2Update(Edk2Invocable):
+    ''' Updates dependencies in workspace for active scopes '''
+
+    def PerformUpdate(self):
+        (build_env, shell_env) = self_describing_environment.BootstrapEnvironment(
+            self.GetWorkspaceRoot(), self.GetActiveScopes())
+        self_describing_environment.UpdateDependencies(self.GetWorkspaceRoot(), self.GetActiveScopes())
+        return (build_env, shell_env)
+
+    def GetVerifyCheckRequired(self):
+        ''' Will not call self_describing_environment.VerifyEnvironment because ext_deps haven't been unpacked yet '''
+        return False
+
+    def GetSettingsClass(self):
+        ''' Providing UpdateSettingsManager '''
+        return UpdateSettingsManager
+
+    def GetLoggingFileName(self, loggerType):
+        return "UPDATE_LOG"
+
+    def Go(self):
+        # Get the environment set up.
+        RetryCount = 0
+        logging.log(edk2_logging.SECTION, "First Update")
+
+        (build_env_old, shell_env_old) = self.PerformUpdate()
+        self_describing_environment.DestroyEnvironment()
+
+        while True:
+            RetryCount += 1
+            logging.log(edk2_logging.SECTION, f"Retry Count: {RetryCount}")
+
+            (build_env, shell_env) = self.PerformUpdate()
+
+            if not build_env_changed(build_env, build_env_old):
+                break
+
+            build_env_old = build_env
+
+            self_describing_environment.DestroyEnvironment()
+
+        return 0
+
+
+def main():
+    Edk2Update().Invoke()
diff --git a/edk2toolext/nuget_publishing.py b/edk2toolext/nuget_publishing.py
new file mode 100644
index 00000000..8481c693
--- /dev/null
+++ b/edk2toolext/nuget_publishing.py
@@ -0,0 +1,491 @@
+# @file NugetPublishing.py
+# This tool allows a user to create a configuration for nuget as well as
+# pack and push (publishing) a release to a feed.
+##
+# Copyright (c) Microsoft Corporation
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+import os
+import sys
+import argparse
+import logging
+import yaml
+import xml.etree.ElementTree as etree
+import shutil
+import datetime
+from io import StringIO
+from edk2toolext.environment.extdeptypes.nuget_dependency import NugetDependency
+from edk2toollib.utility_functions import RunCmd
+
+OPEN_SOURCE_INITIATIVE_URL = "https://opensource.org/licenses/"
+LICENSE_TYPE_SUPPORTED = {
+    "BSD2": OPEN_SOURCE_INITIATIVE_URL + "BSD-2-Clause",
+    "BSD3": OPEN_SOURCE_INITIATIVE_URL + "BSD-3-Clause",
+    "APACHE2": OPEN_SOURCE_INITIATIVE_URL + "Apache-2.0",
+    "MSPL": OPEN_SOURCE_INITIATIVE_URL + "MS-PL",  # Microsoft Public License
+    "MIT": OPEN_SOURCE_INITIATIVE_URL + "MIT",
+    "BSDpP": OPEN_SOURCE_INITIATIVE_URL + "BSDplusPatent",  # BSD + Patent
+}
+
+
+class NugetSupport(object):
+    # NOTE: This *should* have a namespace (http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd)
+    # but ElementTree is incredibly stupid with namespaces.
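+    # (ElementTree can address namespaced tags with a prefix map, e.g.
+    # package.find("ns:metadata", {"ns": "...nuspec.xsd"}), but a
+    # namespace-free template keeps every find() call below simple.)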
+    NUSPEC_TEMPLATE_XML = r'''<?xml version="1.0" encoding="utf-8"?>
+<package>
+    <metadata>
+        <id></id>
+        <version></version>
+        <authors></authors>
+        <licenseUrl></licenseUrl>
+        <projectUrl></projectUrl>
+        <description></description>
+        <releaseNotes></releaseNotes>
+        <copyright></copyright>
+    </metadata>
+    <files>
+        <file src="" target="" />
+    </files>
+</package>
+'''
+
+    RELEASE_NOTE_SHORT_STRING_MAX_LENGTH = 500
+
+    #
+    # constructor that creates the NugetSupport object
+    # for new instances without existing config provide the Name parameter.
+    # for creating an instance based on a config file provide the path in ConfigFile
+    #
+    def __init__(self, Name=None, ConfigFile=None):
+        self.Name = Name
+        self.TempFileToDelete = []  # every time a temp file is created, add it here for cleanup
+        self.NewVersion = None
+        self.ConfigChanged = False
+
+        if(ConfigFile is not None):
+            self.FromConfigfile(ConfigFile)
+            self.Name = self.ConfigData["name"]
+        else:
+            if(Name is None):
+                raise Exception("Can't construct object with both Name and ConfigFile as None")
+            self.ConfigData = {"name": Name}
+            self.Config = None
+
+    def CleanUp(self):
+        logging.debug("CleanUp Called.  Deleting all Temp Files")
+        for a in self.TempFileToDelete:
+            os.remove(a)
+
+    #
+    # Save the object config contents to file
+    #
+    def ToConfigFile(self, filepath=None):
+        if(not self.ConfigChanged):
+            logging.debug("No Config Changes.  Skip Writing config file")
+            return 0
+
+        if(filepath is None and self.Config is None):
+            logging.error("No Config File to save to.")
+            return -1
+
+        if(filepath is not None):
+            self.Config = filepath
+
+        with open(self.Config, "w") as c:
+            yaml.dump(self.ConfigData, c, indent=4)
+        logging.debug("Wrote config file to: %s" % self.Config)
+        self.ConfigChanged = False
+        return 0
+
+    def FromConfigfile(self, filepath):
+        self.Config = filepath
+        with open(self.Config, "r") as c:
+            self.ConfigData = yaml.safe_load(c)
+
+    def SetBasicData(self, authors, license, project, description, server, copyright):
+        self.ConfigData["author_string"] = authors
+        self.ConfigData["license_url"] = license
+        self.ConfigData["project_url"] = project
+        self.ConfigData["description_string"] = description
+        self.ConfigData["server_url"] = server
+
+        if not copyright:
+            copyright = "Copyright %d" % datetime.date.today().year
+        self.ConfigData["copyright_string"] = copyright
+
+        self.ConfigChanged = True
+
+    def UpdateCopyright(self, copyright):
+        self.ConfigData["copyright_string"] = copyright
+        self.ConfigChanged = True
+
+    #
+    # Print info about this object
+    #
+    def Print(self):
+        print("=======================================")
+        print(" Name: " + self.Name)
+        if(self.Config):
+            print(" ConfigFile: " + self.Config)
+        else:
+            print(" ConfigFile: NOT SET")
+
+        for k, v in self.ConfigData.items():
+            print(" %s: %s" % (k, v))
+
+        print("----------------------------------------")
+        print(" Temp Files List: ")
+        for a in self.TempFileToDelete:
+            print("    " + a)
+        print("-----------------------------------------")
+        print("=======================================")
+
+    def LogObject(self):
+        logging.debug("=======================================")
+        logging.debug(" Name: " + self.Name)
+        if(self.Config):
+            logging.debug(" ConfigFile: " + self.Config)
+        else:
+            logging.debug(" ConfigFile: NOT SET")
+
+        for k, v in self.ConfigData.items():
+            logging.debug(" %s: %s" % (k, v))
+
+        logging.debug("----------------------------------------")
+        logging.debug(" Temp Files List: ")
+        for a in self.TempFileToDelete:
+            logging.debug("    " + a)
+        logging.debug("-----------------------------------------")
+        logging.debug("=======================================")
+
+    #
+    # given NugetSupport object
+    # create a nuspec file for packing
+    #
+    def _MakeNuspecXml(self, ContentDir, ReleaseNotesText=None):
+        package = etree.fromstring(NugetSupport.NUSPEC_TEMPLATE_XML)
+        meta = package.find("./metadata")
+        meta.find("id").text = self.Name
+        meta.find("version").text = self.NewVersion
+        meta.find("authors").text = self.ConfigData["author_string"]
+        meta.find("licenseUrl").text = self.ConfigData["license_url"]
+        meta.find("projectUrl").text = self.ConfigData["project_url"]
+        meta.find("description").text = self.ConfigData["description_string"]
+        meta.find("copyright").text = self.ConfigData["copyright_string"]
+        files = package.find("files")
+        f = files.find("file")
+        f.set("target", self.Name)
+        f.set("src", ContentDir + "\\**\\*")
+
+        if(ReleaseNotesText is not None):
+            logging.debug("Make Nuspec Xml - ReleaseNotesText is not none.")
+            #
+            # Make sure it doesn't exceed reasonable length of string
+            #
+            if(len(ReleaseNotesText) > NugetSupport.RELEASE_NOTE_SHORT_STRING_MAX_LENGTH):
+                logging.info("Make Nuspec Xml - ReleaseNotesText too long.  Length is (%d)" % len(ReleaseNotesText))
+                logging.debug("Original ReleaseNotesText is: %s" % ReleaseNotesText)
+                # cut it off at max length
+                ReleaseNotesText = ReleaseNotesText[:NugetSupport.RELEASE_NOTE_SHORT_STRING_MAX_LENGTH]
+                # walk back to trim at last end of sentence
+                ReleaseNotesText = ReleaseNotesText.rpartition(".")[0].strip()
+                logging.debug("New ReleaseNotesText is: %s" % ReleaseNotesText)
+
+            meta.find("releaseNotes").text = ReleaseNotesText
+        else:
+            logging.debug("Make Nuspec Xml - ReleaseNotesText None.  Removing element from nuspec.")
+            meta.remove(meta.find("releaseNotes"))
+
+        return etree.tostring(package)
+
+    def _GetNuPkgFileName(self, version):
+        # Nuget removes leading zeros so to match we must do the same
+        s = self.Name + "."
+        parts = version.split(".")
+        for a in parts:
+            s += str(int(a)) + "."
+
+        # nuget must have at least x.y.z and will make zero any element undefined
+        for a in range(len(parts), 3):
+            s += "0."
+
+        s += "nupkg"
+        return s
+
+    ##
+    # Pack the current contents into
+    # Nupkg
+    #
+    def Pack(self, version, OutputDirectory, ContentDir, RelNotesText=None):
+        self.NewVersion = version
+
+        # content must be absolute path in nuspec otherwise it is assumed
+        # relative to nuspec file.
+        cdir = os.path.abspath(ContentDir)
+
+        # make nuspec file
+        xmlstring = self._MakeNuspecXml(cdir, RelNotesText)
+        nuspec = os.path.join(OutputDirectory, self.Name + ".nuspec")
+        self.TempFileToDelete.append(nuspec)
+        f = open(nuspec, "wb")
+        f.write(xmlstring)
+        f.close()
+
+        # run nuget
+        cmd = NugetDependency.GetNugetCmd()
+        cmd += ["pack", nuspec]
+        cmd += ["-OutputDirectory", '"' + OutputDirectory + '"']
+        cmd += ["-Verbosity", "detailed"]
+        # cmd += ["-NonInteractive"]
+        ret = RunCmd(cmd[0], " ".join(cmd[1:]))
+
+        if(ret != 0):
+            logging.error("Failed on nuget command.  RC = 0x%x" % ret)
+            return ret
+
+        self.NuPackageFile = os.path.join(OutputDirectory, self._GetNuPkgFileName(self.NewVersion))
+        self.TempFileToDelete.append(self.NuPackageFile)
+        return ret
+
+    def Push(self, nuPackage, apikey):
+        if(not os.path.isfile(nuPackage)):
+            raise Exception("Invalid file path for NuPkg file")
+        logging.debug("Pushing %s file to server %s" % (nuPackage, self.ConfigData["server_url"]))
+
+        cmd = NugetDependency.GetNugetCmd()
+        cmd += ["push", nuPackage]
+        cmd += ["-Verbosity", "detailed"]
+        # cmd += ["-NonInteractive"]
+        cmd += ["-Source", self.ConfigData["server_url"]]
+        cmd += ["-ApiKey", apikey]
+        output_buffer = StringIO()
+        ret = RunCmd(cmd[0], " ".join(cmd[1:]), outstream=output_buffer)
+
+        if(ret != 0):
+            # Rewind the buffer and capture the contents.
+            output_buffer.seek(0)
+            output_contents = output_buffer.read()
+
+            # Check for the API message.
+            if "API key is invalid".lower() in output_contents.lower():
+                logging.critical("API key is invalid. Please use --ApiKey to provide a valid key.")
+
+            # Generic error.
+            logging.error("Failed on nuget command.  RC = 0x%x" % ret)
+
+        return ret
+
+
+def GatherArguments():
+    tempparser = argparse.ArgumentParser(
+        description='Nuget Helper Script for creating, packing, and pushing packages', add_help=False)
+    tempparser.add_argument('--Operation', dest="op", choices=["New", "Pack", "Push", "PackAndPush"], required=True)
+
+    # Get the operation the user wants to do
+    (args, rest) = tempparser.parse_known_args()
+
+    # now build up the real parser with required parameters
+    parser = argparse.ArgumentParser(description='Nuget Helper Script for creating, packing, and pushing packages')
+    parser.add_argument("--Dirty", dest="Dirty", action="store_true", help="Keep all temp files", default=False)
+    parser.add_argument('--Operation', dest="Operation", choices=["New", "Pack", "Push", "PackAndPush"], required=True)
+    parser.add_argument("--OutputLog", dest="OutputLog", help="Create an output log file")
+
+    if(args.op.lower() == "new"):
+        parser.add_argument("--ConfigFileFolderPath", dest="ConfigFileFolderPath",
+                            help="Path to folder to save new config file to", required=True)
+        parser.add_argument('--Name',
+                            dest='Name',
+                            help='<Required> The unique id/name of the package.  This is a string naming the package',
+                            required=True)
+        parser.add_argument('--Author', dest="Author", help="<Required> Author string for publishing", required=True)
+        parser.add_argument("--ProjectUrl", dest="Project", help="<Required> Project Url", required=True)
+        g = parser.add_mutually_exclusive_group(required=True)
+        g.add_argument('--CustomLicenseUrl', dest="LicenseUrl",
+                       help="<Optional> http url for custom license file.  Can use LicenseType for standard licenses")
+        g.add_argument('--LicenseType', dest="LicenseType",
+                       choices=LICENSE_TYPE_SUPPORTED.keys(), help="Standard Licenses")
+        parser.add_argument('--Description', dest="Description",
+                            help="<Required> Description of package.", required=True)
+        parser.add_argument("--FeedUrl", dest="FeedUrl",
+                            help="Feed Url of the nuget server feed", required=True)
+        parser.add_argument('--Copyright', dest="Copyright", help="Copyright string", required=False)
+
+    elif(args.op.lower() == "pack" or args.op.lower() == "packandpush"):
+        parser.add_argument("--ConfigFilePath", dest="ConfigFilePath",
+                            help="Path to config file", required=True)
+        parser.add_argument('--Version', dest="Version", help="<Required> Version to publish", required=True)
+        parser.add_argument('--ReleaseNotesText', dest="ReleaseNotes",
+                            help="Release Notes String", required=False)
+        parser.add_argument('--InputFolderPath', dest="InputFolderPath",
+                            help="Relative/Absolute Path to folder containing content to pack.",
+                            required=True)
+        parser.add_argument('--Copyright', dest="Copyright", help="Change the Copyright string")
+        parser.add_argument('--ApiKey', dest="ApiKey",
+                            help="Api key to use. Default is 'VSTS' which will invoke interactive login",
+                            default="VSTS")
+
+    elif(args.op.lower() == "push"):
+        parser.add_argument("--ConfigFilePath", dest="ConfigFilePath",
+                            help="Path to config file",
+                            required=True)
+        parser.add_argument('--PackageFile', dest="PackageFile", help="Path To Package File", required=True)
+        parser.add_argument('--ApiKey', dest="ApiKey",
+                            help="Api key to use. Default is 'VSTS' which will invoke interactive login",
+                            default="VSTS")
+
+    if(args.op.lower() == "pack"):
+        parser.add_argument('--OutputFolderPath',
+                            dest="OutputFolderPath",
+                            help="Output folder where nupkg will be saved.  Default is cwd",
+                            default=os.getcwd())
+
+    return parser.parse_args()
+
+
+def main():
+    args = GatherArguments()
+    ret = 0
+
+    # setup file based logging if OutputLog specified
+    if(args.OutputLog):
+        if(len(args.OutputLog) < 2):
+            logging.critical("the output log file parameter is invalid")
+            return -2
+
+        # setup file based logging
+        filelogger = logging.FileHandler(filename=args.OutputLog, mode='w')
+        filelogger.setLevel(logging.DEBUG)
+        logging.getLogger('').addHandler(filelogger)
+
+    logging.info("Log Started: " + datetime.datetime.strftime(datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p"))
+
+    TempOutDir = None
+    NuPkgFilePath = None
+
+    if(args.Operation.lower() == "new"):
+        logging.critical("Generating new nuget configuration...")
+        logging.debug("Checking input parameters for new")
+        ConfigFilePath = os.path.join(args.ConfigFileFolderPath, args.Name.strip() + ".config.yaml")
+
+        if(not os.path.isdir(args.ConfigFileFolderPath)):
+            logging.critical("Config File Folder Path doesn't exist.  %s" % args.ConfigFileFolderPath)
+            raise Exception("Invalid Config File Folder.  Doesn't exist")
+
+        if(os.path.isfile(ConfigFilePath)):
+            logging.critical("Config File already exists at that path.
%s" % ConfigFilePath) + raise Exception("Can't Create New Config file when file already exists") + + nu = NugetSupport(Name=args.Name) + + # license + lurl = args.LicenseUrl + if(args.LicenseType is not None): + lurl = LICENSE_TYPE_SUPPORTED[args.LicenseType] + nu.SetBasicData(args.Author, lurl, args.Project, args.Description, args.FeedUrl, args.Copyright) + nu.LogObject() + ret = nu.ToConfigFile(ConfigFilePath) + return ret + + elif(args.Operation.lower() == "pack" or args.Operation.lower() == "packandpush"): + logging.critical("Creating nuget package") + logging.debug("Checking input parameters for packing") + # check args + if(not os.path.isfile(args.ConfigFilePath)): + logging.critical("Invalid Config File (%s). File doesn't exist" % args.ConfigFilePath) + raise Exception("Invalid Config File. File doesn't exist") + if(not os.path.isdir(args.InputFolderPath)): + logging.critical("Invalid Input folder (%s). Folder doesn't exist" % args.InputFolderPath) + raise Exception("Invalid Input folder. folder doesn't exist") + contents = os.listdir(args.InputFolderPath) + logging.debug("Input Folder contains %d files" % len(contents)) + if(len(contents) == 0): + logging.critical("No binary contents to pack in %s" % args.InputFolderPath) + raise Exception("No binary contents to package") + + # make a temp dir for the pack operation which actually creates files + TempOutDir = os.path.join(os.getcwd(), "_TEMP_" + str(datetime.datetime.now().time()).replace(":", "_")) + os.mkdir(TempOutDir) + + nu = NugetSupport(ConfigFile=args.ConfigFilePath) + if(args.Copyright is not None): + nu.UpdateCopyright(args.Copyright) + ret = nu.ToConfigFile() + if (ret != 0): + logging.error("Failed to save config file. Return Code 0x%x" % ret) + return ret + + ret = nu.Pack(args.Version, TempOutDir, args.InputFolderPath, args.ReleaseNotes) + if (ret != 0): + logging.error("Failed to pack. Return Code 0x%x" % ret) + return ret + + NuPkgFilePath = nu.NuPackageFile + + if(args.Operation.lower() == "pack"): + if(not os.path.isdir(args.OutputFolderPath)): + logging.critical("Invalid Pack Output Folder (%s). Folder doesn't exist" % args.OutputFolderPath) + raise Exception("Invalid Output folder. folder doesn't exist") + # since it is pack only lets copy nupkg file to output + shutil.copyfile(NuPkgFilePath, os.path.join(args.OutputFolderPath, os.path.basename(NuPkgFilePath))) + NuPkgFilePath = os.path.join(args.OutputFolderPath, os.path.basename(NuPkgFilePath)) + + if(args.Operation.lower() == "push"): + # set the parameters for push + logging.debug("Checking input parameters for push") + # check args + if(not os.path.isfile(args.ConfigFilePath)): + logging.critical("Invalid Config File (%s). File doesn't exist" % args.ConfigFilePath) + raise Exception("Invalid Config File. File doesn't exist") + NuPkgFilePath = args.PackageFile + nu = NugetSupport(ConfigFile=args.ConfigFilePath) + + if(args.Operation.lower() == "push" or args.Operation.lower() == "packandpush"): + # do the pushing + logging.critical("Pushing the package") + logging.debug("NuPkgFilePath is %s" % NuPkgFilePath) + # check args + if(not os.path.isfile(NuPkgFilePath)): + logging.critical("NuPkgFilePath is not valid file. %s" % NuPkgFilePath) + raise Exception("Invalid Pkg File. 
File doesn't exist") + ret = nu.Push(NuPkgFilePath, args.ApiKey) + + nu.LogObject() + nu.ToConfigFile(args.ConfigFilePath) # save any changes + if(not args.Dirty): + nu.CleanUp() + if(TempOutDir is not None): + os.removedirs(TempOutDir) + return ret + + +# the main method +def go(): + # setup main console as logger + logger = logging.getLogger('') + logger.setLevel(logging.DEBUG) + formatter = logging.Formatter("%(levelname)s - %(message)s") + console = logging.StreamHandler() + console.setLevel(logging.CRITICAL) + console.setFormatter(formatter) + logger.addHandler(console) + + # call main worker function + retcode = main() + + if retcode != 0: + logging.critical("Failed. Return Code: %d" % retcode) + else: + logging.critical("Success!") + # end logging + logging.shutdown() + sys.exit(retcode) + + +if __name__ == '__main__': + go() diff --git a/edk2toolext/omnicache.py b/edk2toolext/omnicache.py new file mode 100644 index 00000000..4fe5609e --- /dev/null +++ b/edk2toolext/omnicache.py @@ -0,0 +1,420 @@ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import sys +import logging +import argparse +import datetime +import yaml +from io import StringIO + +from edk2toolext import edk2_logging +from edk2toollib import utility_functions +from edk2toolext.edk2_git import Repo + + +class OmniCacheConfig(): + ''' + class to manage the Internal Omnicache config file. + Load, Save, Version check, etc. + ''' + + CONFIG_VERSION = 1 + + def __init__(self, absfilepath): + self.version = OmniCacheConfig.CONFIG_VERSION + self.filepath = absfilepath + self.last_change = datetime.datetime.strftime(datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p") + if os.path.isfile(self.filepath): + self._Load() + else: + self.remotes = {} + + def _Load(self): + with open(self.filepath) as ymlfile: + content = yaml.safe_load(ymlfile) + + if "version" not in content: + raise Exception("Unsupported Config Version (None)") + elif content["version"] == self.version: + # parse yml into config data + self.remotes = {x["name"]: x for x in content["remotes"]} + self.last_change = content["last_change"] + else: + self._Transition(content) + + def Save(self): + data = {"version": self.version, "remotes": list(self.remotes.values()), + "last_change": datetime.datetime.strftime(datetime.datetime.now(), + "%A, %B %d, %Y %I:%M%p")} + with open(self.filepath, 'w') as outfile: + yaml.dump(data, outfile, default_flow_style=False) + + def _Transition(self, data): + # Add code here to move old config data to new format + raise Exception("Unsupported config data") + + def Log(self, level=logging.DEBUG): + logging.log(level, "OmniCache Config") + logging.log(level, " Filepath: {0}".format(self.filepath)) + logging.log(level, " Version: {%d}", self.version) + logging.log(level, " Remotes({%d})", len(self.remotes)) + for remote in self.remotes.values(): + rstring = "Name: {0} Url: {1} TagSync: {2}".format(remote["name"], remote["url"], ("tag" in remote)) + logging.log(level, " " + rstring) + + def Add(self, name, url, tags=False): + # check if this already exists + if self.Contains_url(url): + logging.warning("Skipping add this entry %s %s" % (name, url)) + return + # if the name already exists, we overwrite it + remote = {"name": name, "url": url} + if tags: + remote["tag"] = True + self.remotes[name] = remote + + def Contains_url(self, url): + for x in self.remotes.values(): + if x["url"] == url: + return True + return False + + def Contains_name(self, name): + for x in 
self.remotes.values():
+            if x["name"] == name:
+                return True
+        return False
+
+    def Remove(self, del_name):
+        del self.remotes[del_name]
+
+    def Contains(self, name):
+        return name in self.remotes
+
+
+OMNICACHE_VERSION = "0.9"
+OMNICACHE_FILENAME = "omnicache.yaml"
+
+
+def CommonFilePathHandler(path):
+    '''
+    function to check for absolute path and if not
+    concat with current dir and return absolute real path
+    '''
+    if not os.path.isabs(path):
+        path = os.path.join(os.getcwd(), path)
+    path = os.path.realpath(path)
+    return path
+
+
+def AddEntriesFromConfig(config, input_config_file):
+    '''
+    Add config entries found in the config file
+    to the omnicache.  Entries already in omnicache
+    with the same name will be updated.
+
+    return
+        the number of entries added to cache
+    '''
+
+    count = 0
+    with open(input_config_file) as ymlfile:
+        content = yaml.safe_load(ymlfile)
+        if "remotes" in content:
+            for remote in content["remotes"]:
+                if config.Contains_url(remote["url"]):
+                    logging.debug("remote with name: {0} already in cache".format(remote["name"]))
+                    continue
+                if "tag" in remote:
+                    AddEntry(config, remote["name"], remote["url"], bool(remote["tag"]))
+                else:
+                    AddEntry(config, remote["name"], remote["url"])
+                count += 1
+    return (count, content["remotes"])
+
+
+def InitOmnicache(path):
+    logging.critical("Initialize Omnicache to {0}".format(path))
+    os.makedirs(path)
+    return utility_functions.RunCmd("git", "--bare init", workingdir=path)
+
+
+def AddEntry(config, name, url, tags=False):
+    if config.Contains(name):
+        logging.info("Updating remote ({0} : {1}) in Omnicache".format(name, url))
+        param = "remote set-url {0} {1}".format(name, url)
+    else:
+        logging.info("Adding remote ({0} : {1}) to Omnicache".format(name, url))
+        param = "remote add {0} {1}".format(name, url)
+
+    if(utility_functions.RunCmd("git", param) == 0):
+        config.Add(name, url, tags)
+    else:
+        logging.error("Failed to add remote for {0}".format(name))
+
+
+def RemoveEntry(config, name):
+    logging.info("Removing remote named {0}".format(name))
+    param = "remote remove {0}".format(name)
+    if utility_functions.RunCmd("git", param) == 0:
+        config.Remove(name)
+    else:
+        logging.error("Failed to remove remote for {0}".format(name))
+
+
+def ConsistencyCheckCacheConfig(config):
+    '''
+    Check the git remote list vs what is in the config file
+    Add remote to git for anything only in config
+    Add git remote from git into the config file (tags will be false)
+
+    return
+        0: success
+        non-zero: indicates an error
+    '''
+
+    logging.debug("start consistency check between git and omnicache config")
+    out = StringIO()
+    param = "remote -v"
+    gitnames = []  # list of git remote names as found in git repo
+    gitret = utility_functions.RunCmd("git", param, outstream=out)
+
+    if gitret != 0:
+        logging.critical("Could not list git remotes")
+        return gitret
+
+    lines = out.getvalue().split('\n')
+    out.close()
+    for line in lines:
+        line = line.strip()
+        if len(line) == 0:
+            # empty line
+            continue
+        git = line.split()
+        gitnames.append(git[0])  # save for later
+        if(not config.Contains(git[0])):
+            logging.warning("Found entry in git not in config.  Name: {0} Url: {1}".format(git[0], git[1]))
+            config.Add(git[0], git[1])
+            config.Save()
+
+    gitnames = set(gitnames)
+    for remote in config.remotes.values():
+        if(remote["name"] not in gitnames):
+            logging.warning("Found entry in config not in git.  Name: {0} Url: {1}".format(remote["name"],
+                                                                                           remote["url"]))
+            param = "remote add {0} {1}".format(remote["name"], remote["url"])
+            utility_functions.RunCmd("git", param)
+
+    return 0
+
+
+def FetchEntry(name, tags=False):
+    '''
+    do git operation to fetch a single entry
+
+    return
+        0: success
+        non-zero: git command line error
+    '''
+
+    param = "fetch {0}".format(name)
+    if not tags:
+        param += " --no-tags"
+    else:
+        param += " --tags"
+        # might want to look at something more complex to avoid tag conflicts
+        # https://stackoverflow.com/questions/22108391/git-checkout-a-remote-tag-when-two-remotes-have-the-same-tag-name
+        # param += "+refs/heads/*:refs/remotes/{0}/* +refs/tags/*:refs/rtags/{0}/*".format(name)
+    return utility_functions.RunCmd("git", param)
+
+
+def get_cli_options():
+    parser = argparse.ArgumentParser(description='Tool to provide an easy method to create and manage the OMNICACHE')
+    parser.add_argument(dest="cache_dir", help="path to an existing or desired OMNICACHE directory")
+    parser.add_argument("--scan", dest="scan", default=None,
+                        help="Scans the path provided for top-level folders with repos to add to the OMNICACHE")
+    parser.add_argument("--new", dest="new", help="Initialize the OMNICACHE.  MUST NOT EXIST",
+                        action="store_true", default=False)
+    parser.add_argument("--init", dest="init", help="Initialize the OMNICACHE if it doesn't already exist",
+                        action="store_true", default=False)
+    parser.add_argument("-l", "--list", dest="list", default=False, action="store_true",
+                        help="List config of OMNICACHE")
+    parser.add_argument("-a", "--add", dest="add", nargs='*', action="append",
+                        help="Add config entry to OMNICACHE <name> <url> <optional tags>",
+                        default=[])
+    parser.add_argument("-c", "--configfile", dest="input_config_file", default=None,
+                        help="Add new entries from config file to OMNICACHE")
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument("-u", "--update", "--fetch", dest="fetch", action="store_true",
+                       help="Update the Omnicache.  All cache changes also cause a fetch", default=False)
+    group.add_argument("--no-fetch", dest="no_fetch", action="store_true",
+                       help="Prevent auto-fetch if implied by other arguments.", default=False)
+    parser.add_argument("-r", "--remove", dest="remove", nargs="?", action="append",
+                        help="remove config entry from OMNICACHE <name>", default=[])
+    parser.add_argument('--version', action='version', version='%(prog)s ' + OMNICACHE_VERSION)
+    parser.add_argument("--debug", dest="debug", help="Output all debug messages to console",
+                        action="store_true", default=False)
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    # setup main console as logger
+    logger = logging.getLogger('')
+    logger.setLevel(logging.NOTSET)
+    console = edk2_logging.setup_console_logging(False)
+    logger.addHandler(console)
+
+    ErrorCode = 0
+    auto_fetch = False
+    input_config_remotes = None
+
+    # arg parse
+    args = get_cli_options()
+
+    if args.debug:
+        console.setLevel(logging.DEBUG)
+
+    logging.info("Log Started: " + datetime.datetime.strftime(
+        datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p"))
+
+    args.cache_dir = CommonFilePathHandler(args.cache_dir)
+    logging.debug("OMNICACHE dir: {0}".format(args.cache_dir))
+
+    # input config file for adding new entries
+    if args.input_config_file is not None:
+        args.input_config_file = CommonFilePathHandler(args.input_config_file)
+        if not os.path.isfile(args.input_config_file):
+            logging.critical("Invalid -c argument given.  File ({0}) isn't valid".format(args.input_config_file))
+            return -4
+
+    logging.debug("Args: " + str(args))
+
+    omnicache_config = None  # config object
+    omnicache_config_file = os.path.join(args.cache_dir, OMNICACHE_FILENAME)
+
+    if args.new:
+        if os.path.isdir(args.cache_dir):
+            logging.critical("--new argument given but OMNICACHE path already exists!")
+            return -1
+        InitOmnicache(args.cache_dir)
+        auto_fetch = True
+
+    if args.init:
+        if os.path.isdir(args.cache_dir):
+            if os.path.isfile(omnicache_config_file):
+                logging.debug("OMNICACHE already exists.  No need to initialize")
+        else:
+            InitOmnicache(args.cache_dir)
+        auto_fetch = True
+
+    # Check to see if exists
+    if not os.path.isdir(args.cache_dir):
+        logging.critical("OMNICACHE path invalid.")
+        return -2
+
+    # load config
+    omnicache_config = OmniCacheConfig(omnicache_config_file)
+
+    os.chdir(args.cache_dir)
+
+    if(len(args.add) > 0):
+        auto_fetch = True
+        for inputdata in args.add:
+            if len(inputdata) == 2:
+                AddEntry(omnicache_config, inputdata[0], inputdata[1])
+            elif len(inputdata) == 3:
+                AddEntry(omnicache_config, inputdata[0], inputdata[1], bool(inputdata[2]))
+            else:
+                logging.critical("Invalid Add Entry.  Should be <name> <url> <optional tags>")
+                return -3
+
+    if(args.input_config_file is not None):
+        (count, input_config_remotes) = AddEntriesFromConfig(omnicache_config, args.input_config_file)
+        if(count > 0):
+            auto_fetch = True
+
+    if len(args.remove) > 0:
+        for inputdata in args.remove:
+            RemoveEntry(omnicache_config, inputdata)
+
+    # if we need to scan
+    if args.scan is not None:
+        logging.critical("OMNICACHE is scanning the folder %s." % args.scan)
+        if not os.path.isdir(args.scan):
+            logging.error("Invalid scan directory")
+            return -4
+        reposFound = dict()
+        # iterate through top level directories
+        dirs = os.listdir(args.scan)
+        while len(dirs) > 0:
+            item = dirs.pop()
+            itemDir = os.path.join(args.scan, item)
+            if os.path.isfile(itemDir):
+                continue
+            logging.info("Scanning %s for a git repo" % item)
+            gitDir = os.path.join(itemDir, ".git")
+            # Check if it's a directory or a file (submodules usually have a file instead of a folder)
+            if os.path.isdir(gitDir) or os.path.isfile(gitDir):
+                repo = Repo(itemDir)
+                if repo.url:
+                    if repo.url not in reposFound:
+                        reposFound[repo.url] = item
+                    else:
+                        logging.warning("Skipping previously found repo at %s with url %s" % (item, repo.url))
+                else:  # if repo.url is none
+                    logging.error("Url not found for git repo at: %s" % itemDir)
+                # check for submodules
+                if repo.submodules:
+                    for submodule in repo.submodules:
+                        dirs.append(os.path.join(item, submodule))
+            else:
+                logging.error("Git repo not found at %s" % itemDir)
+        # go through all the URLs I found
+        for url in reposFound:
+            omnicache_config.Add(reposFound[url], url)
+
+    omnicache_config.Save()
+
+    if(args.fetch or (auto_fetch and not args.no_fetch)):
+        logging.critical("Updating OMNICACHE")
+        # as an optimization, if input config file provided, only fetch remotes specified in input config
+        # otherwise, fetch all remotes in the OmniCache
+        if (input_config_remotes is not None):
+            remotes = (x["name"] for x in input_config_remotes)
+        else:
+            remotes = omnicache_config.remotes.keys()
+        for remote in remotes:
+            ret = FetchEntry(omnicache_config.remotes[remote]["name"], ("tag" in omnicache_config.remotes[remote]))
+            if(ret != 0) and (ErrorCode == 0):
+                ErrorCode = ret
+
+    if args.list:
+        ret = ConsistencyCheckCacheConfig(omnicache_config)
+        if (ret != 0) and (ErrorCode == 0):
+            ErrorCode = ret
+        print("List OMNICACHE content\n")
+        if
len(omnicache_config.remotes) == 0: + logging.warning("No Remotes to show") + + for remote in omnicache_config.remotes.values(): + rstring = "Name: {0}\n Url: {1}\n Sync Tags: {2}".format(remote["name"], remote["url"], ("tag" in remote)) + print(" " + rstring + "\n\n") + + print("To use your OMNICACHE with Project Mu builds set the env variable:") + print("set OMNICACHE_PATH=" + args.cache_dir) + + return ErrorCode + + +if __name__ == '__main__': + retcode = main() + logging.shutdown() + sys.exit(retcode) diff --git a/edk2toolext/tests/__init__.py b/edk2toolext/tests/__init__.py new file mode 100644 index 00000000..0a69010b --- /dev/null +++ b/edk2toolext/tests/__init__.py @@ -0,0 +1,5 @@ +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## diff --git a/edk2toolext/tests/test_config_validator.py b/edk2toolext/tests/test_config_validator.py new file mode 100644 index 00000000..d8b1facd --- /dev/null +++ b/edk2toolext/tests/test_config_validator.py @@ -0,0 +1,204 @@ +## @file test_config_validator.py +# This contains unit tests for config validator +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import tempfile +import logging +import os +import unittest +from io import StringIO +from edk2toolext import config_validator +import yaml + + +test_dir = None +plugin_list = [] + + +class Edk2Path_Injected(object): + def __init__(self): + self.WorkspacePath = None + + def GetAbsolutePathOnThisSytemFromEdk2RelativePath(self, package): + return os.path.abspath(self.WorkspacePath) + + +class PluginList_Injected(object): + def __init__(self, name): + self.descriptor = dict() + self.Obj = None + self.Name = name + + +class Testconfig_validator(unittest.TestCase): + + @classmethod + def setUpClass(cls): + global test_dir + global plugin_list + + logger = logging.getLogger('') + logger.addHandler(logging.NullHandler()) + unittest.installHandler() + # get a temporary directory that we can create the right folders + test_dir = Edk2Path_Injected() + test_dir.WorkspacePath = tempfile.mkdtemp() + + def test_valid_config(self): + global test_dir + global plugin_list + yaml_string = StringIO(""" + { + "Name": "Project Mu Plus Repo CI Build", + "GroupName": "MuPlus", + + # Workspace path relative to this file + "RelativeWorkspaceRoot": "", + "Scopes": [ "corebuild" ], + + # Other Repos that are dependencies + "Dependencies": [ + # FileSystem Path relative to workspace + # Url + # Branch + # Commit + { + "Path": "MU_BASECORE", + "Url": "https://github.com/Microsoft/mu_basecore.git", + "Branch": "release/201808" + }, + ], + + # Edk2 style PackagesPath for resolving dependencies. 
+ # Only needed if it isn't this package and isn't a dependency + "PackagesPath": [], + + # Packages in this repo + "Packages": [ + "UefiTestingPkg" + ], + "ArchSupported": [ + "IA32", + "X64", + "AARCH64" + ], + "Targets": [ + "DEBUG", + "RELEASE" + ] + } + """) + + valid_config = yaml.safe_load(yaml_string) + # make sure the valid configuration is read just fine + try: + config_validator.check_mu_confg(valid_config, test_dir, plugin_list) + except Exception as e: + self.fail("We shouldn't throw an exception", e) + + def test_invalid_configs(self): + global test_dir + global plugin_list + bad_yaml_string = StringIO(""" + { + "Name": "Project Mu Plus Repo CI Build", + "GroupName": "MuPlus", + + # Workspace path relative to this file + "RelativeWorkspaceRoot": "", + "InvalidAttribute": "this will throw an error", + "Scopes": [ "corebuild" ], + + # Other Repos that are dependencies + "Dependencies": [ + # FileSystem Path relative to workspace + # Url + # Branch + # Commit + { + "Path": "MU_BASECORE", + "Url": "https://github.com/Microsoft/mu_basecore.git", + "Branch": "release/201808" + }, + ], + + # Edk2 style PackagesPath for resolving dependencies. + # Only needed if it isn't this package and isn't a dependency + "PackagesPath": [], + + # Packages in this repo + "Packages": [ + "UefiTestingPkg" + ], + "ArchSupported": [ + "IA32", + "X64", + "AARCH64" + ], + "Targets": [ + "DEBUG", + "RELEASE" + ] + } + """) + invalid_config = yaml.safe_load(bad_yaml_string) + with self.assertRaises(Exception): + config_validator.check_mu_confg(invalid_config, test_dir, plugin_list) + + def test_invalid_url_config(self): + global test_dir + global plugin_list + + bad_url_yaml_string = StringIO(""" + { + "Name": "Project Mu Plus Repo CI Build", + "GroupName": "MuPlus", + + # Workspace path relative to this file + "RelativeWorkspaceRoot": "", + "Scopes": [ "corebuild" ], + + # Other Repos that are dependencies + "Dependencies": [ + # FileSystem Path relative to workspace + # Url + # Branch + # Commit + { + "Path": "MU_BASECORE", + "Url": "https://github.com/InvalidRepo", + "Branch": "release/201808" + }, + ], + + # Edk2 style PackagesPath for resolving dependencies. 
+ # Only needed if it isn't this package and isn't a dependency + "PackagesPath": [], + + # Packages in this repo + "Packages": [ + "UefiTestingPkg" + ], + "ArchSupported": [ + "IA32", + "X64", + "AARCH64" + ], + "Targets": [ + "DEBUG", + "RELEASE" + ] + } + """) + + invalid_config = yaml.safe_load(bad_url_yaml_string) + with self.assertRaises(Exception): + config_validator.check_mu_confg(invalid_config, test_dir, plugin_list) + + +if __name__ == '__main__': + unittest.main() diff --git a/edk2toolext/tests/test_edk2_logging.py b/edk2toolext/tests/test_edk2_logging.py new file mode 100644 index 00000000..2ee343be --- /dev/null +++ b/edk2toolext/tests/test_edk2_logging.py @@ -0,0 +1,59 @@ +## @file test_edk2_logging.py +# This contains unit tests for the edk2_logging +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import tempfile +import unittest +import logging +from edk2toolext import edk2_logging + + +class Test_edk2_logging(unittest.TestCase): + + def test_can_create_console_logger(self): + console_logger = edk2_logging.setup_console_logging(False, False) + self.assertIsNot(console_logger, None, "We created a console logger") + edk2_logging.stop_logging(console_logger) + + def test_can_create_txt_logger(self): + test_dir = tempfile.mkdtemp() + location, txt_logger = edk2_logging.setup_txt_logger(test_dir, "test_txt") + logging.info("Testing") + self.assertTrue(os.path.isfile(location), "We should have created a file") + self.assertIsNot(txt_logger, None, "We created a txt logger") + edk2_logging.stop_logging(txt_logger) + + def test_can_create_md_logger(self): + test_dir = tempfile.mkdtemp() + location, txt_logger = edk2_logging.setup_markdown_logger(test_dir, "test_md") + logging.info("Testing") + self.assertTrue(os.path.isfile(location), "We should have created a file") + self.assertIsNot(txt_logger, None, "We created a txt logger") + edk2_logging.stop_logging(txt_logger) + + def test_none_to_close(self): + edk2_logging.stop_logging(None) + + def test_can_close_logger(self): + test_dir = tempfile.mkdtemp() + location, txt_logger = edk2_logging.setup_txt_logger(test_dir, "test_close") + logging.critical("Testing") + self.assertTrue(os.path.isfile(location), "We should have created a file") + file = open(location, "r") + num_lines = len(file.readlines()) + file.close() + self.assertEqual(num_lines, 1, "We should only have one line") + edk2_logging.stop_logging(txt_logger) + logging.critical("Test 2") + file = open(location, "r") + num_lines2 = len(file.readlines()) + file.close() + self.assertEqual(num_lines, num_lines2, "We should only have one line") + + +if __name__ == '__main__': + unittest.main() diff --git a/edk2toolext/tests/test_git_dependency.py b/edk2toolext/tests/test_git_dependency.py new file mode 100644 index 00000000..884b5437 --- /dev/null +++ b/edk2toolext/tests/test_git_dependency.py @@ -0,0 +1,306 @@ +## @file test_git_dependency.py +# Unit test suite for the GitDependency class. 
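+# NOTE: these tests clone https://github.com/octocat/Hello-World.git, so
+# network access is required; the first two hashes below are real commits
+# from that repository while invalid_version is intentionally bogus.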
+# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import unittest +import logging +import shutil +import stat +import tempfile +import copy +from edk2toolext.environment import environment_descriptor_files as EDF +from edk2toolext.environment.extdeptypes.git_dependency import GitDependency +from edk2toolext.environment import shell_environment + +test_dir = None +uptodate_version = "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d" +behind_one_version = "762941318ee16e59dabbacb1b4049eec22f0d303" +invalid_version = "762941318ee16e59d123456789049eec22f0d303" + +hw_json_template = ''' +{ + "scope": "global", + "type": "git", + "name": "HelloWorld", + "source": "https://github.com/octocat/Hello-World.git", + "version": "%s", + "flags": [] +} +''' + + +def prep_workspace(): + global test_dir + # if test temp dir doesn't exist + if test_dir is None or not os.path.isdir(test_dir): + test_dir = tempfile.mkdtemp() + logging.debug("temp dir is: %s" % test_dir) + else: + clean_workspace() + test_dir = tempfile.mkdtemp() + + +def clean_workspace(): + global test_dir + if test_dir is None: + return + + if os.path.isdir(test_dir): + + def dorw(action, name, exc): + os.chmod(name, stat.S_IWRITE) + if(os.path.isdir(name)): + os.rmdir(name) + else: + os.remove(name) + + shutil.rmtree(test_dir, onerror=dorw) + test_dir = None + + +class TestGitDependency(unittest.TestCase): + def setUp(self): + prep_workspace() + + @classmethod + def setUpClass(cls): + logger = logging.getLogger('') + logger.addHandler(logging.NullHandler()) + unittest.installHandler() + + @classmethod + def tearDownClass(cls): + clean_workspace() + + # good case + def test_fetch_verify_good_repo_at_top_of_tree(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % uptodate_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + self.assertTrue(ext_dep.verify(logversion=False)) + self.assertEqual(ext_dep.version, uptodate_version) + + def test_fetch_verify_good_repo_at_not_top_of_tree(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % behind_one_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + self.assertTrue(ext_dep.verify(logversion=False)) + self.assertEqual(ext_dep.version, behind_one_version) + + def test_fetch_verify_non_existant_repo_commit_hash(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % invalid_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + self.assertEqual(ext_dep.version, invalid_version) + self.assertFalse(ext_dep.verify(logversion=False), "Should not verify") + + def test_verify_no_directory(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % invalid_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + 
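        # verify() should fail here: fetch() was never called, so the
+        # local repository directory does not exist.
+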
self.assertFalse(ext_dep.verify(logversion=False)) + + def test_verify_empty_repo_dir(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % invalid_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + os.makedirs(ext_dep._local_repo_root_path, exist_ok=True) + self.assertFalse(ext_dep.verify(logversion=False)) + + def test_verify_invalid_git_repo(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % invalid_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + os.makedirs(ext_dep._local_repo_root_path, exist_ok=True) + with open(os.path.join(ext_dep._local_repo_root_path, "testfile.txt"), 'a') as myfile: + myfile.write("Test code\n") + self.assertFalse(ext_dep.verify(logversion=False)) + + def test_verify_dirty_git_repo(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % uptodate_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + # now write a new file + with open(os.path.join(ext_dep._local_repo_root_path, "testfile.txt"), 'a') as myfile: + myfile.write("Test code to make repo dirty\n") + self.assertFalse(ext_dep.verify(logversion=False)) + + def test_verify_up_to_date(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % uptodate_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + self.assertTrue(ext_dep.verify(logversion=False)) + + def test_verify_down_level_repo(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % behind_one_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + self.assertTrue(ext_dep.verify(logversion=False), "Confirm valid ext_dep at one commit behind") + + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % uptodate_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + self.assertFalse(ext_dep.verify(logversion=False), "Confirm downlevel repo fails to verify") + ext_dep.fetch() + self.assertTrue(ext_dep.verify(logversion=False), "Confirm repo can be updated") + + # CLEAN TESTS + + def test_clean_no_directory(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % uptodate_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + self.assertFalse(os.path.isdir(ext_dep.contents_dir), "Confirm not ext dep directory before cleaning") + ext_dep.clean() + 
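        # clean() should remove the entire contents directory.
+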
self.assertFalse(os.path.isdir(ext_dep.contents_dir)) + + def test_clean_dir_but_not_git_repo(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % invalid_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + os.makedirs(ext_dep._local_repo_root_path, exist_ok=True) + with open(os.path.join(ext_dep._local_repo_root_path, "testfile.txt"), 'a') as myfile: + myfile.write("Test code\n") + ext_dep.clean() + self.assertFalse(os.path.isdir(ext_dep.contents_dir)) + + def test_clean_dirty_git_repo(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % uptodate_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + self.assertTrue(ext_dep.verify(), "Confirm repo is valid") + # now write a new file + with open(os.path.join(ext_dep._local_repo_root_path, "testfile.txt"), 'a') as myfile: + myfile.write("Test code to make repo dirty\n") + self.assertFalse(ext_dep.verify(), "Confirm repo is dirty") + ext_dep.clean() + self.assertFalse(os.path.isdir(ext_dep.contents_dir)) + + def test_clean_clean_repo(self): + ext_dep_file_path = os.path.join(test_dir, "hw_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(hw_json_template % uptodate_version) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = GitDependency(ext_dep_descriptor) + ext_dep.fetch() + self.assertTrue(ext_dep.verify(), "Confirm repo is valid and clean") + ext_dep.clean() + self.assertFalse(os.path.isdir(ext_dep.contents_dir)) + + +class TestGitDependencyUrlPatching(unittest.TestCase): + TEST_DESCRIPTOR = { + "descriptor_file": os.path.abspath(__file__), + "scope": "global", + "type": "git", + "name": "HelloWorld", + "source": "https://github.com/octocat/Hello-World.git", + "version": "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", + "flags": [] + } + + def tearDown(self): + env = shell_environment.GetEnvironment() + env.restore_checkpoint(TestGitDependencyUrlPatching.env_checkpoint) + + @classmethod + def setUpClass(cls): + env = shell_environment.GetEnvironment() + cls.env_checkpoint = env.checkpoint() + + # + # URL FORMATTING TESTS + # + def test_url_should_not_be_modified_without_env(self): + my_test_descriptor = copy.copy(TestGitDependencyUrlPatching.TEST_DESCRIPTOR) + # Add the indicator for patching. + my_test_descriptor['url_creds_var'] = 'test_creds_var' + + # Initialize the GitDependency object. + gdep = GitDependency(my_test_descriptor) + + # Assert that the URL is identical. + self.assertEqual(gdep.source, my_test_descriptor['source']) + + def test_url_should_not_be_modified_without_descriptor_field(self): + my_test_descriptor = copy.copy(TestGitDependencyUrlPatching.TEST_DESCRIPTOR) + + env = shell_environment.GetEnvironment() + # Add the var to the environment. + env.set_shell_var('test_creds_var', 'my_stuff') + + # Initialize the GitDependency object. + gdep = GitDependency(my_test_descriptor) + + # Assert that the URL is identical. 
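The URL-patching cases around this point pin down a three-part contract: the source URL is rewritten only when the descriptor names a `url_creds_var` *and* that shell variable is set. A standalone sketch of the rewrite being asserted, under the assumption that the credentials value is simply injected as the userinfo portion of the URL (consistent with the expected string below):

```python
# Sketch of the URL credential patching contract these tests pin down.
# Assumption: the creds value becomes the userinfo part of the URL.
from urllib.parse import urlsplit, urlunsplit


def patch_url(source: str, creds: str) -> str:
    parts = urlsplit(source)
    netloc = "{0}@{1}".format(creds, parts.netloc)
    return urlunsplit((parts.scheme, netloc, parts.path, parts.query, parts.fragment))


assert patch_url("https://github.com/octocat/Hello-World.git", "my_stuff") == \
    "https://my_stuff@github.com/octocat/Hello-World.git"
```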
+ self.assertEqual(gdep.source, my_test_descriptor['source']) + + def test_url_should_be_modified_if_creds_are_indicated_and_supplied(self): + my_test_descriptor = copy.copy(TestGitDependencyUrlPatching.TEST_DESCRIPTOR) + # Add the indicator for patching. + my_test_descriptor['url_creds_var'] = 'test_creds_var' + + env = shell_environment.GetEnvironment() + # Add the var to the environment. + env.set_shell_var('test_creds_var', 'my_stuff') + + # Initialize the GitDependency object. + gdep = GitDependency(my_test_descriptor) + + # Assert that the URL is identical. + self.assertEqual(gdep.source, "https://my_stuff@github.com/octocat/Hello-World.git") + + +if __name__ == '__main__': + unittest.main() diff --git a/edk2toolext/tests/test_omnicache.py b/edk2toolext/tests/test_omnicache.py new file mode 100644 index 00000000..3175a574 --- /dev/null +++ b/edk2toolext/tests/test_omnicache.py @@ -0,0 +1,141 @@ +## @file test_omnicache.py +# This contains unit tests for omnicache +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import unittest +import logging +import tempfile +import shutil +from io import StringIO +from edk2toolext import omnicache +from edk2toollib import utility_functions + + +test_dir = None +current_dir = None + + +def prep_workspace(): + global test_dir, current_dir + # if test temp dir doesn't exist + if test_dir is None or not os.path.isdir(test_dir): + test_dir = tempfile.mkdtemp() + logging.debug("temp dir is: %s" % test_dir) + else: + shutil.rmtree(test_dir) + test_dir = tempfile.mkdtemp() + current_dir = os.path.abspath(os.getcwd()) + + +def clean_workspace(): + global test_dir, current_dir + os.chdir(current_dir) + if test_dir is None: + return + + if os.path.isdir(test_dir): + shutil.rmtree(test_dir) + test_dir = None + + +class TestOmniCache(unittest.TestCase): + def setUp(self): + prep_workspace() + + @classmethod + def setUpClass(cls): + logger = logging.getLogger('') + logger.addHandler(logging.NullHandler()) + unittest.installHandler() + + @classmethod + def tearDownClass(cls): + clean_workspace() + + def test_basic_init(self): + valueabs = os.path.join(os.path.abspath(os.getcwd()), "test", "test2") + result = omnicache.CommonFilePathHandler(valueabs) + assert(result == valueabs) + + def test_commonfilepathhandler_real(self): + valueabs = os.path.join(os.path.abspath(os.getcwd()), "test", "test2") + result = omnicache.CommonFilePathHandler(os.path.join(valueabs, "..", "test2")) + assert(result == valueabs) + + def test_commonfilepathhandler_relative(self): + valueabs = os.path.join(os.path.abspath(os.getcwd()), "test", "test2") + result = omnicache.CommonFilePathHandler(os.path.join("test", "test2")) + assert(result == valueabs) + + def test_omnicache_init(self): + testcache = os.path.join(os.path.abspath(os.getcwd()), test_dir, "testcache") + testconfigs = [ + { + "cfgfile": os.path.join(os.path.abspath(os.getcwd()), test_dir, "testcfg.yaml"), + "name": "openssl", + "url": "https://github.com/openssl/openssl.git", + "tag": "true" + }, + { + "cfgfile": os.path.join(os.path.abspath(os.getcwd()), test_dir, "testcfg2.yaml"), + "name": "openssl", + "url": "https://foobar.com/openssl/openssl.git", + "tag": "true" + } + ] + + for testconfig in testconfigs: + currentdir = os.path.abspath(os.getcwd()) + with open(testconfig["cfgfile"], "w") as configyaml: + configyaml.write("remotes:\n") + configyaml.write("- name: {0}\n".format(testconfig["name"])) + configyaml.write(" url: {0}\n".format(testconfig["url"])) + 
configyaml.write(" tag: {0}\n".format(testconfig["tag"])) + + omnicache_config_file = os.path.join(testcache, omnicache.OMNICACHE_FILENAME) + if(os.path.isdir(testcache)): + if(os.path.isfile(omnicache_config_file)): + logging.debug("OMNICACHE already exists. No need to initialize") + else: + omnicache.InitOmnicache(testcache) + + omnicache_config = omnicache.OmniCacheConfig(omnicache_config_file) + os.chdir(testcache) + + (count, input_config_remotes) = omnicache.AddEntriesFromConfig(omnicache_config, testconfig["cfgfile"]) + + assert(count == 1) + assert(input_config_remotes is not None) + assert(input_config_remotes[0]["name"] == testconfig["name"]) + assert(input_config_remotes[0]["url"] == testconfig["url"]) + + omnicache_config.Save() + + # check that cache properly initialized/updated + out = StringIO() + param = "remote -v" + gitret = utility_functions.RunCmd("git", param, outstream=out) + assert(gitret == 0) + + lines = out.getvalue().split('\n') + out.close() + assert (len(lines) > 0) + for line in lines: + line = line.strip() + if(len(line) == 0): + # empty line + continue + git = line.split() + assert(git[0] == input_config_remotes[0]["name"]) + assert(git[1] == input_config_remotes[0]["url"]) + + os.chdir(currentdir) + + +if __name__ == '__main__': + unittest.main() diff --git a/edk2toolext/tests/test_repo_resolver.py b/edk2toolext/tests/test_repo_resolver.py new file mode 100644 index 00000000..5ab70c9c --- /dev/null +++ b/edk2toolext/tests/test_repo_resolver.py @@ -0,0 +1,268 @@ +## @file test_repo_resolver.py +# This contains unit tests for repo resolver +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import logging +import os +import unittest +from edk2toolext.environment import repo_resolver +import tempfile + + +branch_dependency = { + "Url": "https://github.com/microsoft/mu", + "Path": "test_repo", + "Branch": "master" +} + +sub_branch_dependency = { + "Url": "https://github.com/microsoft/mu", + "Path": "test_repo", + "Branch": "gh-pages" +} + +commit_dependency = { + "Url": "https://github.com/microsoft/mu", + "Path": "test_repo", + "Commit": "b1e35a5d2bf05fb7f58f5b641a702c70d6b32a98" +} +commit_later_dependency = { + "Url": "https://github.com/microsoft/mu", + "Path": "test_repo", + "Commit": "e28910950c52256eb620e35d111945cdf5d002d1" +} + +microsoft_commit_dependency = { + "Url": "https://github.com/Microsoft/microsoft.github.io", + "Path": "test_repo", + "Commit": "e9153e69c82068b45609359f86554a93569d76f1" +} +microsoft_branch_dependency = { + "Url": "https://github.com/Microsoft/microsoft.github.io", + "Path": "test_repo", + "Commit": "e9153e69c82068b45609359f86554a93569d76f1" +} + +test_dir = None + + +def prep_workspace(): + global test_dir + # if test temp dir doesn't exist + if test_dir is None or not os.path.isdir(test_dir): + test_dir = tempfile.mkdtemp() + logging.debug("temp dir is: %s" % test_dir) + else: + repo_resolver.clear_folder(test_dir) + test_dir = tempfile.mkdtemp() + + +def clean_workspace(): + global test_dir + if test_dir is None: + return + + if os.path.isdir(test_dir): + repo_resolver.clear_folder(test_dir) + test_dir = None + + +def get_first_file(folder): + folder_list = os.listdir(folder) + for file_path in folder_list: + path = os.path.join(folder, file_path) + if os.path.isfile(path): + return path + return None + + +class Testrepo_resolver(unittest.TestCase): + def setUp(self): + prep_workspace() + + @classmethod + def setUpClass(cls): + logger = logging.getLogger('') + 
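The dependency dictionaries defined at the top of this test module drive everything in the suite: each one names a `Url`, a workspace-relative `Path` to clone into, and either a `Branch` or a pinned `Commit`. A minimal sketch of how the tests consume them (the `update_ok`, `force`, and `ignore` flags appear in the individual cases):

```python
# Minimal sketch of resolving one dependency descriptor, as the tests do.
import os
import tempfile

from edk2toolext.environment import repo_resolver

dep = {
    "Url": "https://github.com/microsoft/mu",
    "Path": "test_repo",
    "Branch": "master"   # or pin an exact revision with "Commit": "<sha>"
}

workspace = tempfile.mkdtemp()
repo_resolver.resolve(workspace, dep)   # clones into <workspace>/test_repo
details = repo_resolver.get_details(os.path.join(workspace, dep["Path"]))
print(details['Url'], details.get('Branch'))
```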
logger.addHandler(logging.NullHandler())
+        unittest.installHandler()
+
+    @classmethod
+    def tearDownClass(cls):
+        clean_workspace()
+
+    # check to make sure that we can clone a branch correctly
+    def test_clone_branch_repo(self):
+        # create an empty directory - and set that as the workspace
+        repo_resolver.resolve(test_dir, branch_dependency)
+        folder_path = os.path.join(test_dir, branch_dependency["Path"])
+        details = repo_resolver.get_details(folder_path)
+        self.assertEqual(details['Url'], branch_dependency['Url'])
+        self.assertEqual(details['Branch'], branch_dependency['Branch'])
+
+    # don't create a git repo; create the folder, add a file, and try to clone into the folder - it should throw an exception
+    def test_wont_delete_files(self):
+        folder_path = os.path.join(test_dir, commit_dependency["Path"])
+        os.makedirs(folder_path)
+        file_path = os.path.join(folder_path, "test.txt")
+        out_file = open(file_path, "w+")
+        out_file.write("Make sure we don't delete this")
+        out_file.close()
+        self.assertTrue(os.path.isfile(file_path))
+        with self.assertRaises(Exception):
+            repo_resolver.resolve(test_dir, branch_dependency)
+        self.assertTrue(os.path.isfile(file_path))
+
+    # don't create a git repo; create the folder, add a file, and try to clone into the folder - force will make it happen
+    def test_will_delete_files(self):
+        folder_path = os.path.join(test_dir, commit_dependency["Path"])
+        os.makedirs(folder_path)
+        file_path = os.path.join(folder_path, "test.txt")
+        out_file = open(file_path, "w+")
+        out_file.write("This file will be deleted by the forced resolve")
+        out_file.close()
+        self.assertTrue(os.path.exists(file_path))
+        try:
+            repo_resolver.resolve(test_dir, commit_dependency, force=True)
+        except Exception:
+            self.fail("We shouldn't fail when we are forcing")
+        details = repo_resolver.get_details(folder_path)
+        self.assertEqual(details['Url'], commit_dependency['Url'])
+
+    def test_wont_delete_dirty_repo(self):
+        repo_resolver.resolve(test_dir, commit_dependency)
+
+        folder_path = os.path.join(test_dir, commit_dependency["Path"])
+        file_path = get_first_file(folder_path)
+        # make sure the file already exists
+        self.assertTrue(os.path.isfile(file_path))
+        out_file = open(file_path, "a+")
+        out_file.write("Make sure we don't delete this")
+        out_file.close()
+        self.assertTrue(os.path.exists(file_path))
+
+        with self.assertRaises(Exception):
+            repo_resolver.resolve(test_dir, commit_dependency, update_ok=True)
+
+    def test_will_delete_dirty_repo(self):
+        repo_resolver.resolve(test_dir, commit_dependency)
+        folder_path = os.path.join(test_dir, commit_dependency["Path"])
+        file_path = get_first_file(folder_path)
+        # make sure the file already exists
+        self.assertTrue(os.path.isfile(file_path))
+        out_file = open(file_path, "a+")
+        out_file.write("Dirty the repo so the forced resolve has to discard this change")
+        out_file.close()
+        self.assertTrue(os.path.exists(file_path))
+
+        try:
+            repo_resolver.resolve(test_dir, commit_later_dependency, force=True)
+        except Exception:
+            self.fail("We shouldn't fail when we are forcing")
+
+    # check to make sure we can clone a commit correctly
+    def test_clone_commit_repo(self):
+        # create an empty directory - and set that as the workspace
+        repo_resolver.resolve(test_dir, commit_dependency)
+        folder_path = os.path.join(test_dir, commit_dependency["Path"])
+        details = repo_resolver.get_details(folder_path)
+
+        self.assertEqual(details['Url'], commit_dependency['Url'])
+        self.assertEqual(details['Commit'],
commit_dependency['Commit']) + + # check to make sure we can clone a commit correctly + def test_fail_update(self): + # create an empty directory- and set that as the workspace + repo_resolver.resolve(test_dir, commit_dependency) + folder_path = os.path.join(test_dir, commit_dependency["Path"]) + details = repo_resolver.get_details(folder_path) + + self.assertEqual(details['Url'], commit_dependency['Url']) + self.assertEqual(details['Commit'], commit_dependency['Commit']) + # first we checkout + with self.assertRaises(Exception): + repo_resolver.resolve(test_dir, commit_later_dependency) + + details = repo_resolver.get_details(folder_path) + self.assertEqual(details['Url'], commit_dependency['Url']) + self.assertEqual(details['Commit'], commit_dependency['Commit']) + + def test_does_update(self): + # create an empty directory- and set that as the workspace + repo_resolver.resolve(test_dir, commit_dependency) + folder_path = os.path.join(test_dir, commit_dependency["Path"]) + details = repo_resolver.get_details(folder_path) + + self.assertEqual(details['Url'], commit_dependency['Url']) + self.assertEqual(details['Commit'], commit_dependency['Commit']) + # first we checkout + try: + repo_resolver.resolve( + test_dir, commit_later_dependency, update_ok=True) + except: + self.fail("We are not supposed to throw an exception") + details = repo_resolver.get_details(folder_path) + + self.assertEqual(details['Url'], commit_later_dependency['Url']) + self.assertEqual(details['Commit'], commit_later_dependency['Commit']) + + def test_cant_switch_urls(self): + # create an empty directory- and set that as the workspace + repo_resolver.resolve(test_dir, branch_dependency) + folder_path = os.path.join(test_dir, branch_dependency["Path"]) + + details = repo_resolver.get_details(folder_path) + + self.assertEqual(details['Url'], branch_dependency['Url']) + # first we checkout + with self.assertRaises(Exception): + repo_resolver.resolve(test_dir, microsoft_branch_dependency) + + details = repo_resolver.get_details(folder_path) + self.assertEqual(details['Url'], branch_dependency['Url']) + + def test_ignore(self): + # create an empty directory- and set that as the workspace + repo_resolver.resolve(test_dir, branch_dependency) + folder_path = os.path.join(test_dir, branch_dependency["Path"]) + + details = repo_resolver.get_details(folder_path) + + self.assertEqual(details['Url'], branch_dependency['Url']) + # first we checkout + + repo_resolver.resolve( + test_dir, microsoft_branch_dependency, ignore=True) + + details = repo_resolver.get_details(folder_path) + self.assertEqual(details['Url'], branch_dependency['Url']) + + def test_will_switch_urls(self): + # create an empty directory- and set that as the workspace + repo_resolver.resolve(test_dir, branch_dependency) + + folder_path = os.path.join(test_dir, branch_dependency["Path"]) + + details = repo_resolver.get_details(folder_path) + + self.assertEqual(details['Url'], branch_dependency['Url']) + # first we checkout + try: + repo_resolver.resolve( + test_dir, microsoft_branch_dependency, force=True) + except: + self.fail("We shouldn't fail when we are forcing") + + details = repo_resolver.get_details(folder_path) + self.assertEqual(details['Url'], microsoft_branch_dependency['Url']) + + +if __name__ == '__main__': + unittest.main() diff --git a/edk2toolext/tests/test_self_describing_environment.py b/edk2toolext/tests/test_self_describing_environment.py new file mode 100644 index 00000000..6b600a71 --- /dev/null +++ 
b/edk2toolext/tests/test_self_describing_environment.py @@ -0,0 +1,102 @@ +## @file test_self_describing_environment.py +# This contains unit tests for the SDE +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import os +import stat +import shutil +import logging +import unittest +import tempfile +from edk2toolext.invocables.edk2_update import build_env_changed +from edk2toolext.environment import repo_resolver +from edk2toolext.environment import self_describing_environment + +mu_basecore_dependency = { + "Url": "https://github.com/microsoft/mu_basecore", + "Path": None, + "Branch": "master" +} + +test_dir = None + + +def prep_workspace(): + global test_dir + # if test temp dir doesn't exist + if test_dir is None or not os.path.isdir(test_dir): + test_dir = tempfile.mkdtemp() + logging.debug("temp dir is: %s" % test_dir) + else: + clean_workspace() + test_dir = tempfile.mkdtemp() + + +def clean_workspace(): + global test_dir + if test_dir is None: + return + + if os.path.isdir(test_dir): + + def dorw(action, name, exc): + os.chmod(name, stat.S_IWRITE) + if(os.path.isdir(name)): + os.rmdir(name) + else: + os.remove(name) + + shutil.rmtree(test_dir, onerror=dorw) + test_dir = None + + +def do_update(directory, scopes): + (build_env, shell_env) = self_describing_environment.BootstrapEnvironment( + directory, scopes) + self_describing_environment.UpdateDependencies(directory, scopes) + return (build_env, shell_env) + + +class Testself_describing_environment(unittest.TestCase): + def setUp(self): + prep_workspace() + + @classmethod + def setUpClass(cls): + logger = logging.getLogger('') + logger.addHandler(logging.NullHandler()) + unittest.installHandler() + + @classmethod + def tearDownClass(cls): + clean_workspace() + + # Test the assertion that two identical code trees should generate + # the same self_describing_environment. + def test_identical_environments(self): + scopes = ("corebuild", "project_mu") + + mu_basecore_dependency_1 = mu_basecore_dependency.copy() + mu_basecore_dependency_2 = mu_basecore_dependency.copy() + + basecore_1_dir = "basecore_1" + basecore_2_dir = "basecore_2" + + mu_basecore_dependency_1["Path"] = basecore_1_dir + mu_basecore_dependency_2["Path"] = basecore_2_dir + + repo_resolver.resolve(test_dir, mu_basecore_dependency_1) + repo_resolver.resolve(test_dir, mu_basecore_dependency_2) + + (build_env_1, shell_env_1) = do_update(os.path.normpath(basecore_1_dir), scopes) + (build_env_2, shell_env_2) = do_update(os.path.normpath(basecore_2_dir), scopes) + + self.assertFalse(build_env_changed(build_env_1, build_env_2)) + + +if __name__ == '__main__': + unittest.main() diff --git a/edk2toolext/tests/test_shell_environment.py b/edk2toolext/tests/test_shell_environment.py new file mode 100644 index 00000000..19b71b68 --- /dev/null +++ b/edk2toolext/tests/test_shell_environment.py @@ -0,0 +1,432 @@ +## @file test_shell_environment.py +# Unit test suite for the ShellEnvironment class. 
+# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import sys +import unittest +import edk2toolext.environment.shell_environment as SE + + +class TestShellEnvironmentAssumptions(unittest.TestCase): + + def test_shell_should_be_a_singleton(self): + shell_a = SE.ShellEnvironment() + shell_b = SE.ShellEnvironment() + self.assertIs(shell_a, shell_b, "two instances of ShellEnvironment should be identical") + + def test_shell_tests_need_to_be_able_to_clear_singleton(self): + # This is not currently achievable, and may never be achievable. + pass + + def test_shell_should_always_have_an_initial_checkpoint(self): + shell_env = SE.ShellEnvironment() + self.assertTrue((len(shell_env.checkpoints) > 0), + "a new instance of ShellEnvironment should have at least one checkpoint") + + +class TestBasicEnvironmentManipulation(unittest.TestCase): + + def test_can_set_os_vars(self): + shell_env = SE.ShellEnvironment() + # Remove the test var, if it exists. + os.environ.pop("SE-TEST-VAR-1", None) + # Set a new value and get it directly from the environment. + new_value = 'Dummy' + shell_env.set_shell_var('SE-TEST-VAR-1', new_value) + self.assertEqual(os.environ['SE-TEST-VAR-1'], new_value) + + def test_can_get_os_vars(self): + shell_env = SE.ShellEnvironment() + new_value = 'Dummy2' + shell_env.set_shell_var('SE-TEST-VAR-2', new_value) + self.assertEqual(shell_env.get_shell_var('SE-TEST-VAR-2'), new_value) + + def test_set_path_string(self): + shell_env = SE.ShellEnvironment() + + # Test pass 1. + testpath_elems = ['MYPATH'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_path(testpath_string) + self.assertEqual(os.environ['PATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_path, "the active path should contain all elements") + + # Test pass 2. + testpath_elems = ['/bin/bash', 'new_path', '/root'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_path(testpath_string) + self.assertEqual(os.environ['PATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_path, "the active path should contain all elements") + + def test_set_path_elements(self): + shell_env = SE.ShellEnvironment() + + # Test pass 1. + testpath_elems = ['MYPATH'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_path(testpath_elems) + self.assertEqual(os.environ['PATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_path, "the active path should contain all elements") + + # Test pass 2. + testpath_elems = ['/bin/bash', 'new_path', '/root'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_path(testpath_elems) + self.assertEqual(os.environ['PATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_path, "the active path should contain all elements") + + def test_set_pypath_string(self): + shell_env = SE.ShellEnvironment() + + # Test pass 1. 
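Stepping back from the individual passes: the cases in this class establish that `ShellEnvironment` is a process-wide singleton whose setters write straight through to `os.environ`. A condensed sketch of the behavior being verified (note that this clobbers `PATH` for the running process):

```python
# Condensed sketch of the singleton/write-through behavior verified above.
import os

import edk2toolext.environment.shell_environment as SE

shell_env = SE.ShellEnvironment()
assert shell_env is SE.ShellEnvironment()        # a singleton

shell_env.set_shell_var('SE-TEST-VAR-1', 'Dummy')
assert os.environ['SE-TEST-VAR-1'] == 'Dummy'    # mirrored into os.environ

shell_env.set_path(['MYPATH'])                   # list or os.pathsep-joined string
assert os.environ['PATH'] == 'MYPATH'
assert 'MYPATH' in shell_env.active_path
```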
+ testpath_elems = ['MYPATH'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_pypath(testpath_string) + self.assertEqual(os.environ['PYTHONPATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_pypath, "the active path should contain all elements") + self.assertIn(elem, sys.path, "the sys path should contain all elements") + + # Test pass 2. + testpath_elems = ['/bin/bash', 'new_path', '/root'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_pypath(testpath_string) + self.assertEqual(os.environ['PYTHONPATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_pypath, "the active path should contain all elements") + self.assertIn(elem, sys.path, "the sys path should contain all elements") + + def test_set_pypath_elements(self): + shell_env = SE.ShellEnvironment() + + # Test pass 1. + testpath_elems = ['MYPATH'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_pypath(testpath_elems) + self.assertEqual(os.environ['PYTHONPATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_pypath, "the active path should contain all elements") + self.assertIn(elem, sys.path, "the sys path should contain all elements") + + # Test pass 2. + testpath_elems = ['/bin/bash', 'new_path', '/root'] + testpath_string = os.pathsep.join(testpath_elems) + shell_env.set_pypath(testpath_elems) + self.assertEqual(os.environ['PYTHONPATH'], testpath_string, "the final string should be correct") + for elem in testpath_elems: + self.assertIn(elem, shell_env.active_pypath, "the active path should contain all elements") + self.assertIn(elem, sys.path, "the sys path should contain all elements") + + def test_insert_append_remove_replace_path(self): + shell_env = SE.ShellEnvironment() + + # Start with a known PATH + mid_elem = 'MIDDLEPATH' + shell_env.set_path(mid_elem) + self.assertEqual(1, len(shell_env.active_path)) + self.assertIn(mid_elem, shell_env.active_path) + # Add an element to the end. + end_elem = 'ENDPATH' + shell_env.append_path(end_elem) + # Add an element to the beginning. + start_elem = 'STARTPATH' + shell_env.insert_path(start_elem) + + # Test for the realities. + self.assertEqual(3, len(shell_env.active_path)) + self.assertEqual(shell_env.active_path[0], start_elem) + self.assertEqual(shell_env.active_path[1], mid_elem) + self.assertEqual(shell_env.active_path[2], end_elem) + for elem in (start_elem, mid_elem, end_elem): + self.assertIn(elem, os.environ["PATH"]) + + # Test replacing an element on the path + new_mid_elem = 'NEWMIDDLEPATH' + shell_env.replace_path_element(mid_elem, new_mid_elem) + self.assertEqual(shell_env.active_path[1], new_mid_elem) + + # Test replacing an element that doesn't exist + old_path = shell_env.active_path + shell_env.replace_path_element("PATH1", "PATH2") + self.assertEqual(old_path, shell_env.active_path) + + # Test that removing an element works as expected + shell_env.remove_path_element(new_mid_elem) + self.assertNotIn(new_mid_elem, shell_env.active_path) + + def test_insert_append_remove_replace_pypath(self): + shell_env = SE.ShellEnvironment() + + # Start with a known PATH + mid_elem = 'MIDDLEPATH' + shell_env.set_pypath(mid_elem) + self.assertEqual(1, len(shell_env.active_pypath)) + self.assertIn(mid_elem, shell_env.active_pypath) + # Add an element to the end. 
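The pypath variants differ from the `PATH` variants in one important way exercised here: every mutation is mirrored into both `os.environ['PYTHONPATH']` and the live `sys.path`. A short sketch using the same API as these tests:

```python
# Sketch: pypath edits are mirrored into os.environ and the live sys.path.
import os
import sys

import edk2toolext.environment.shell_environment as SE

shell_env = SE.ShellEnvironment()
shell_env.set_pypath(['MYPATH'])
shell_env.append_pypath('ENDPATH')
shell_env.insert_pypath('STARTPATH')

assert shell_env.active_pypath[0] == 'STARTPATH'
assert shell_env.active_pypath[1] == 'MYPATH'
assert shell_env.active_pypath[2] == 'ENDPATH'
for elem in shell_env.active_pypath:
    assert elem in os.environ['PYTHONPATH']
    assert elem in sys.path
```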
+        end_elem = 'ENDPATH'
+        shell_env.append_pypath(end_elem)
+        # Add an element to the beginning.
+        start_elem = 'STARTPATH'
+        shell_env.insert_pypath(start_elem)
+
+        # Test for the realities.
+        self.assertEqual(3, len(shell_env.active_pypath))
+        self.assertEqual(shell_env.active_pypath[0], start_elem)
+        self.assertEqual(shell_env.active_pypath[1], mid_elem)
+        self.assertEqual(shell_env.active_pypath[2], end_elem)
+        for elem in (start_elem, mid_elem, end_elem):
+            self.assertIn(elem, os.environ["PYTHONPATH"])
+            self.assertIn(elem, sys.path)
+
+        # Test replacing an element on the pypath
+        new_mid_elem = 'NEWMIDDLEPATH'
+        shell_env.replace_pypath_element(mid_elem, new_mid_elem)
+        self.assertEqual(shell_env.active_pypath[1], new_mid_elem)
+
+        # Test replacing an element that doesn't exist
+        old_pypath = shell_env.active_pypath
+        shell_env.replace_pypath_element("PATH1", "PATH2")
+        self.assertEqual(old_pypath, shell_env.active_pypath)
+
+        # Test that removing an element works as expected
+        shell_env.remove_pypath_element(new_mid_elem)
+        self.assertNotIn(new_mid_elem, shell_env.active_pypath)
+
+    def test_can_set_and_get_build_vars(self):
+        shell_env = SE.ShellEnvironment()
+
+        var_name = 'SE-TEST-VAR-3'
+        var_data = 'Dummy3'
+        # Make sure it doesn't exist beforehand.
+        self.assertIs(shell_env.get_build_var(var_name), None, "test var should not exist before creation")
+        shell_env.set_build_var(var_name, var_data)
+        self.assertEqual(shell_env.get_build_var(var_name), var_data, "get var data should match set var data")
+
+    def test_set_build_vars_should_default_overrideable(self):
+        shell_env = SE.ShellEnvironment()
+
+        var_name = 'SE_TEST_VAR_4'
+        var_data = 'NewData1'
+        var_data2 = 'NewerData1'
+
+        self.assertIs(shell_env.get_build_var(var_name), None, "test var should not exist before creation")
+        shell_env.set_build_var(var_name, var_data)
+        shell_env.set_build_var(var_name, var_data2)
+
+        self.assertEqual(shell_env.get_build_var(var_name), var_data2)
+
+
+class TestShellEnvironmentCheckpoints(unittest.TestCase):
+
+    def setUp(self):
+        # Grab the singleton and restore the initial checkpoint.
+        shell_env = SE.ShellEnvironment()
+        shell_env.restore_initial_checkpoint()
+        # For testing, purge all checkpoints each time.
+        shell_env.checkpoints = [shell_env.checkpoints[SE.ShellEnvironment.INITIAL_CHECKPOINT]]
+
+    def test_restore_initial_checkpoint_should_erase_changes(self):
+        shell_env = SE.ShellEnvironment()
+
+        # Check to make sure the change doesn't exist.
+        test_path_change = '/SE/TEST/PATH/1'
+        self.assertNotIn(test_path_change, shell_env.active_path, "starting condition should not have the test change")
+
+        # Make the change and verify.
+        shell_env.append_path(test_path_change)
+        self.assertIn(test_path_change, shell_env.active_path)
+
+        # Add a shell_var while we're at it.
+        self.assertEqual(shell_env.get_shell_var('i_should_not_exist'), None)
+        shell_env.set_shell_var('i_should_not_exist', 'a_value')
+        self.assertEqual(shell_env.get_shell_var('i_should_not_exist'), 'a_value')
+
+        # Restore initial checkpoint and verify change is gone.
+        shell_env.restore_initial_checkpoint()
+        self.assertNotIn(test_path_change, shell_env.active_path, "restoring checkpoint should remove test change")
+        self.assertEqual(shell_env.get_shell_var('i_should_not_exist'), None)
+
+    def test_checkpoint_indices_should_be_unique(self):
+        shell_env = SE.ShellEnvironment()
+        shell_env.append_path('/SE/TEST/PATH/1')
+        chkpt1 = shell_env.checkpoint()
+        shell_env.append_path('/SE/TEST/PATH/2')
+        chkpt2 = shell_env.checkpoint()
+
+        self.assertNotEqual(chkpt1, SE.ShellEnvironment.INITIAL_CHECKPOINT)
+        self.assertNotEqual(chkpt2, SE.ShellEnvironment.INITIAL_CHECKPOINT)
+        self.assertNotEqual(chkpt1, chkpt2)
+
+    def test_restore_new_checkpoint_should_contain_new_changes(self):
+        shell_env = SE.ShellEnvironment()
+
+        # Check to make sure the change doesn't exist.
+        test_path_change = '/SE/TEST/PATH/3'
+        self.assertNotIn(test_path_change, shell_env.active_path, "starting condition should not have the test change")
+
+        # Make the change and checkpoint.
+        shell_env.append_path(test_path_change)
+        self.assertIn(test_path_change, shell_env.active_path)
+        chkpt1 = shell_env.checkpoint()
+
+        # Restore initial checkpoint and verify change is gone.
+        shell_env.restore_initial_checkpoint()
+        self.assertNotIn(test_path_change, shell_env.active_path,
+                         "restoring initial checkpoint should remove test change")
+
+        # Restore new checkpoint and verify change is back.
+        shell_env.restore_checkpoint(chkpt1)
+        self.assertIn(test_path_change, shell_env.active_path, "restoring new checkpoint should restore test change")
+
+    def test_checkpointed_objects_should_behave_correctly(self):
+        shell_env = SE.ShellEnvironment()
+
+        # This test is to make sure that pass-by-reference elements don't persist unexpectedly.
+
+        test_var1_name = 'SE_TEST_VAR_3'
+        test_var1_data = 'MyData1'
+        test_var1_data2 = 'RevisedData1'
+        test_var1_data3 = 'MoreRevisedData1'
+
+        test_var2_name = 'SE_TEST_VAR_4'
+        test_var2_data = 'MyData2'
+
+        # Set the first data and make a checkpoint.
+        shell_env.set_build_var(test_var1_name, test_var1_data)
+        chkpt1 = shell_env.checkpoint()
+
+        # Update previous value and set second data. Then checkpoint.
+        shell_env.set_build_var(test_var1_name, test_var1_data2)
+        shell_env.set_build_var(test_var2_name, test_var2_data)
+        chkpt2 = shell_env.checkpoint()
+
+        # Restore the first checkpoint and verify values.
+        shell_env.restore_checkpoint(chkpt1)
+        self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data)
+        self.assertIs(shell_env.get_build_var(test_var2_name), None)
+
+        # Make a change to be tested later.
+        shell_env.set_build_var(test_var1_name, test_var1_data3)
+
+        # Restore the second checkpoint and verify values.
+        shell_env.restore_checkpoint(chkpt2)
+        self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data2)
+        self.assertEqual(shell_env.get_build_var(test_var2_name), test_var2_data)
+
+        # Restore the first checkpoint again and make sure the original value still stands.
+        shell_env.restore_checkpoint(chkpt1)
+        self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data)
+
+
+class TestShellEnvironmentSpecialBuildVars(unittest.TestCase):
+
+    def setUp(self):
+        # Grab the singleton and restore the initial checkpoint.
+        shell_env = SE.ShellEnvironment()
+        shell_env.restore_initial_checkpoint()
+        # For testing, purge all checkpoints each time.
+        shell_env.checkpoints = [shell_env.checkpoints[SE.ShellEnvironment.INITIAL_CHECKPOINT]]
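Before the special-variable cases below, it helps to distill the checkpoint contract the previous class establishes: `checkpoint()` returns an index, and `restore_checkpoint()` rolls every path, shell var, and build var back to that snapshot. A compact sketch, using the same API the tests use:

```python
# Compact sketch of the checkpoint/restore contract from the class above.
import edk2toolext.environment.shell_environment as SE

shell_env = SE.ShellEnvironment()
shell_env.set_build_var('DEMO_VAR', 'before')
chkpt = shell_env.checkpoint()                 # snapshot the current state

shell_env.set_build_var('DEMO_VAR', 'after')   # plain build vars default to overridable
assert shell_env.get_build_var('DEMO_VAR') == 'after'

shell_env.restore_checkpoint(chkpt)            # roll back to the snapshot
assert shell_env.get_build_var('DEMO_VAR') == 'before'
```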
+
+    def test_get_build_vars_should_update_vars(self):
+        shell_env = SE.ShellEnvironment()
+        build_vars = SE.GetBuildVars()
+
+        test_var_name = 'SE_TEST_VAR_4'
+        test_var_data = 'NewData1'
+
+        build_vars.SetValue(test_var_name, test_var_data, 'random set')
+
+        self.assertEqual(shell_env.get_build_var(test_var_name), test_var_data)
+
+    def test_special_build_vars_should_default_non_overrideable(self):
+        shell_env = SE.ShellEnvironment()
+        build_vars = SE.GetBuildVars()
+
+        test_var_name = 'SE_TEST_VAR_4'
+        test_var_data = 'NewData1'
+        test_var_data2 = 'NewerData1'
+
+        build_vars.SetValue(test_var_name, test_var_data, 'random set')
+        build_vars.SetValue(test_var_name, test_var_data2, 'another random set')
+
+        self.assertEqual(shell_env.get_build_var(test_var_name), test_var_data)
+
+    def test_special_build_vars_should_always_update_current(self):
+        shell_env = SE.ShellEnvironment()
+        build_vars = SE.GetBuildVars()
+
+        test_var1_name = 'SE_TEST_VAR_update_current1'
+        test_var1_data = 'NewData1'
+        test_var1_data2 = 'NewerData1'
+
+        test_var2_name = 'SE_TEST_VAR_update_current2'
+        test_var2_data = 'NewData2'
+
+        # Make a change and checkpoint.
+        build_vars.SetValue(test_var1_name, test_var1_data, 'var1 set', overridable=True)
+        shell_env.checkpoint()
+
+        # Make a couple more changes.
+        build_vars.SetValue(test_var1_name, test_var1_data2, 'var1 set', overridable=True)
+        build_vars.SetValue(test_var2_name, test_var2_data, 'var2 set', overridable=True)
+
+        # Make sure that the newer changes are valid.
+        self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data2)
+        self.assertEqual(shell_env.get_build_var(test_var2_name), test_var2_data)
+
+    def test_special_build_vars_should_be_checkpointable(self):
+        shell_env = SE.ShellEnvironment()
+        build_vars = SE.GetBuildVars()
+
+        # This test is basically a rehash of the object checkpointing test,
+        # but this time with the special vars.
+
+        test_var1_name = 'SE_TEST_VAR_3'
+        test_var1_data = 'MyData1'
+        test_var1_data2 = 'RevisedData1'
+        test_var1_data3 = 'MoreRevisedData1'
+
+        test_var2_name = 'SE_TEST_VAR_4'
+        test_var2_data = 'MyData2'
+
+        # Set the first data and make a checkpoint.
+        build_vars.SetValue(test_var1_name, test_var1_data, 'var1 set', overridable=True)
+        chkpt1 = shell_env.checkpoint()
+
+        # Update previous value and set second data. Then checkpoint.
+        build_vars.SetValue(test_var1_name, test_var1_data2, 'var1 set', overridable=True)
+        build_vars.SetValue(test_var2_name, test_var2_data, 'var2 set', overridable=True)
+        chkpt2 = shell_env.checkpoint()
+
+        # Restore the first checkpoint and verify values.
+        shell_env.restore_checkpoint(chkpt1)
+        self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data)
+        self.assertIs(shell_env.get_build_var(test_var2_name), None)
+
+        # Make a change to be tested later.
+        build_vars.SetValue(test_var1_name, test_var1_data3, 'var1 set', overridable=True)
+        self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data3,
+                         'even after restore, special build vars should always update current')
+
+        # Restore the second checkpoint and verify values.
+        shell_env.restore_checkpoint(chkpt2)
+        self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data2)
+        self.assertEqual(shell_env.get_build_var(test_var2_name), test_var2_data)
+
+        # Restore the first checkpoint again and make sure the original value still stands.
+ shell_env.restore_checkpoint(chkpt1) + self.assertEqual(shell_env.get_build_var(test_var1_name), test_var1_data) + + +if __name__ == '__main__': + unittest.main() diff --git a/edk2toolext/tests/test_web_dependency.py b/edk2toolext/tests/test_web_dependency.py new file mode 100644 index 00000000..37981616 --- /dev/null +++ b/edk2toolext/tests/test_web_dependency.py @@ -0,0 +1,349 @@ +## @file test_web_dependency.py +# Unit test suite for the WebDependency class. +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import unittest +import logging +import shutil +import tarfile +import zipfile +import tempfile +import urllib.request +from edk2toolext.environment import environment_descriptor_files as EDF +from edk2toolext.environment.extdeptypes.web_dependency import WebDependency + +test_dir = None +bad_json_file = ''' +{ + "scope": "global", + "type": "web", + "name": "mu-pip", + "source": "https://github.com/microsoft/mu_pip_environment/archive/0.tar.gz", + "version": "7.2.1", + "flags": ["set_path"], + "internal_path": "/mu_pip_environment-0.3.7", + "compression_type":"tar", + "sha256":"68f2335344c3f7689f8d69125d182404a3515b8daa53a9c330f115739889f998" +} +''' + + +def prep_workspace(): + global test_dir + # if test temp dir doesn't exist + if test_dir is None or not os.path.isdir(test_dir): + test_dir = tempfile.mkdtemp() + logging.debug("temp dir is: %s" % test_dir) + else: + shutil.rmtree(test_dir) + test_dir = tempfile.mkdtemp() + + +def clean_workspace(): + global test_dir + if test_dir is None: + return + + if os.path.isdir(test_dir): + shutil.rmtree(test_dir) + test_dir = None + + +class TestWebDependency(unittest.TestCase): + def setUp(self): + prep_workspace() + + @classmethod + def setUpClass(cls): + logger = logging.getLogger('') + logger.addHandler(logging.NullHandler()) + unittest.installHandler() + + @classmethod + def tearDownClass(cls): + clean_workspace() + + # throw in a bad url and test that it throws an exception. + def test_fail_with_bad_url(self): + ext_dep_file_path = os.path.join(test_dir, "bad_ext_dep.json") + with open(ext_dep_file_path, "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + ext_dep_descriptor = EDF.ExternDepDescriptor(ext_dep_file_path).descriptor_contents + ext_dep = WebDependency(ext_dep_descriptor) + with self.assertRaises(urllib.error.HTTPError): + ext_dep.fetch() + self.fail("should have thrown an Exception") + + # Test that get_internal_path_root works the way we expect with a flat directory structure. + # test_dir\inner_dir - test_dir\inner_dir should be the root. + def test_get_internal_path_root_flat(self): + outer_dir = test_dir + inner_dir_name = "inner_dir" + inner_dir_path = os.path.join(outer_dir, inner_dir_name) + self.assertEqual(WebDependency.get_internal_path_root(outer_dir, inner_dir_name), inner_dir_path) + + # Test that get_internal_path_root works the way we expect with a flat directory structure + # test_dir\first_dir\second_dir - test_dir\first_dir should be the root + def test_get_internal_path_root_with_subfolders(self): + outer_dir = test_dir + first_level_dir_name = "first_dir" + second_level_dir_name = "second_dir" + inner_dir_path = os.path.join(outer_dir, first_level_dir_name) + self.assertEqual(WebDependency.get_internal_path_root(outer_dir, + os.path.join(first_level_dir_name, second_level_dir_name)), inner_dir_path) + + # Test that a single file zipped is able to be processed by unpack. 
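The `bad_json_file` template above doubles as documentation of a web ext_dep descriptor: `source` is the archive URL, `internal_path` selects which subtree of the archive to keep, `compression_type` picks the unpacker, and `sha256` guards integrity. A hedged sketch of driving the `unpack` helper the way the following tests do, with a locally built archive standing in for a download (all names here are illustrative):

```python
# Illustrative driver for WebDependency.unpack, mirroring the tests below.
import os
import tarfile
import tempfile

from edk2toolext.environment.extdeptypes.web_dependency import WebDependency

scratch = tempfile.mkdtemp()
payload = os.path.join(scratch, "payload.json")
with open(payload, "w") as f:
    f.write("{}")

# Build a small tarball, then delete the original file.
archive = os.path.join(scratch, "payload.tar.gz")
with tarfile.open(archive, "w:gz") as tar:
    tar.add(payload, arcname="payload.json")
os.remove(payload)

# unpack(compressed_file_path, destination, internal_path, compression_type)
WebDependency.unpack(archive, scratch, "payload.json", "tar")
assert os.path.isfile(payload)   # the file is back in place
```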
+ def test_unpack_zip_file(self): + compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.zip") + destination = test_dir + internal_path = "bad_ext_dep.json" + compression_type = "zip" + + file_path = os.path.join(test_dir, internal_path) + + with open(file_path, "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + with zipfile.ZipFile(compressed_file_path, 'w') as _zip: + _zip.write(file_path, arcname=os.path.basename(file_path)) + + os.remove(file_path) + self.assertFalse(os.path.isfile(file_path)) + + WebDependency.unpack(compressed_file_path, destination, internal_path, compression_type) + self.assertTrue(os.path.isfile(file_path)) + + # Test that a single file tar volume is able to be processed by unpack. + def test_unpack_tar_file(self): + compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.tar.gz") + destination = test_dir + internal_path = "bad_ext_dep.json" + compression_type = "tar" + + file_path = os.path.join(test_dir, internal_path) + + with open(file_path, "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + with tarfile.open(compressed_file_path, "w:gz") as _tar: + _tar.add(file_path, arcname=os.path.basename(file_path)) + + os.remove(file_path) + self.assertFalse(os.path.isfile(file_path)) + + WebDependency.unpack(compressed_file_path, destination, internal_path, compression_type) + self.assertTrue(os.path.isfile(file_path)) + + # Test that a zipped directory is processed correctly by unpack. + # If internal_path is first_dir\second_dir... + # Files in test_dir\first_dir\second_dir should be located. + # Files in test_dir\first_dir should not be unpacked. + def test_unpack_zip_directory(self): + + first_level_dir_name = "first_dir" + second_level_dir_name = "second_dir" + first_level_path = os.path.join(test_dir, first_level_dir_name) + second_level_path = os.path.join(first_level_path, second_level_dir_name) + os.makedirs(second_level_path) + + compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.zip") + destination = test_dir + internal_path = os.path.join(first_level_dir_name, second_level_dir_name) + compression_type = "zip" + + # only files inside internal_path should be there after unpack + # (file path, is this file expected to be unpacked?) + test_files = [(os.path.join(test_dir, internal_path, "bad_json_file.json"), True), + (os.path.join(test_dir, first_level_dir_name, "json_file.json"), False)] + + for test_file in test_files: + with open(test_file[0], "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + with zipfile.ZipFile(compressed_file_path, 'w') as _zip: + for test_file in test_files: + _zip.write(test_file[0], arcname=test_file[0].split(test_dir)[1]) + + shutil.rmtree(first_level_path) + self.assertFalse(os.path.isdir(first_level_path)) + + WebDependency.unpack(compressed_file_path, destination, internal_path, compression_type) + + for test_file in test_files: + if test_file[1]: + self.assertTrue(os.path.isfile(test_file[0])) + else: + self.assertFalse(os.path.isfile(test_file[0])) + + # Test that a tar directory is processed correctly by unpack. + # If internal_path is first_dir\second_dir... + # Files in test_dir\first_dir\second_dir should be located. + # Files in test_dir\first_dir should not be unpacked. 
+ def test_unpack_tar_directory(self): + first_level_dir_name = "first_dir" + second_level_dir_name = "second_dir" + first_level_path = os.path.join(test_dir, first_level_dir_name) + second_level_path = os.path.join(first_level_path, second_level_dir_name) + os.makedirs(second_level_path) + + compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.zip") + destination = test_dir + internal_path = os.path.join(first_level_dir_name, second_level_dir_name) + compression_type = "tar" + + # only files inside internal_path should be there after unpack + # (file path, is this file expected to be unpacked?) + test_files = [(os.path.join(test_dir, internal_path, "bad_json_file.json"), True), + (os.path.join(test_dir, first_level_dir_name, "json_file.json"), False)] + + for test_file in test_files: + with open(test_file[0], "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + with tarfile.open(compressed_file_path, "w:gz") as _tar: + for test_file in test_files: + _tar.add(test_file[0], arcname=test_file[0].split(test_dir)[1]) + + shutil.rmtree(first_level_path) + self.assertFalse(os.path.isdir(first_level_path)) + + WebDependency.unpack(compressed_file_path, destination, internal_path, compression_type) + + for test_file in test_files: + if test_file[1]: + self.assertTrue(os.path.isfile(test_file[0])) + else: + self.assertFalse(os.path.isfile(test_file[0])) + + # Test that three levels of internal path all work properly + def test_multi_level_directory(self): + global test_dir + number_of_layers = 5 + directory_name = "test" + file_name = "file" + compression_type = "tar" + internal_paths = [""] + + # Set up internal_paths list.... + # It will look like: + # ["test", "test/testtest", "test/testtest/testtesttest"] + # To describe the file structure: + # test_dir/ + # > test/ + # >> testtest/ + # >>> testtesttest/ + # >>>> testtesttesttest/ + for i in range(1, number_of_layers): + internal_path = (directory_name * i) + if i - 1 > 0: + internal_path = os.path.join(internal_paths[i - 1], internal_path) + internal_paths.insert(i, internal_path) + + # We will pick internal_path each iteration and make sure + # only the files INSIDE the internal_path were unpacked. + # If the second level directory is the internal_path, the first level + # file SHOULD NOT be unpacked because it is out of scope. 
+ for internal_path_level in range(1, number_of_layers): + destination = test_dir + compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.tar") + os.makedirs(os.path.join(test_dir, internal_paths[-1])) + + # create files in each folder + files = [""] + for file_list_counter in range(1, number_of_layers): + files.insert(file_list_counter, + os.path.join(test_dir, internal_paths[file_list_counter], file_name * file_list_counter)) + with open(files[file_list_counter], "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + # zip up the whole thing + with tarfile.open(compressed_file_path, "w:gz") as _tar: + for file in files[1:]: + _tar.add(file, arcname=file.split(test_dir)[1]) + + shutil.rmtree(os.path.join(test_dir, directory_name)) + self.assertFalse(os.path.isdir(os.path.join(test_dir, directory_name))) + + # The internal path moves down the directory structure each iteration + internal_path = internal_paths[internal_path_level] + + WebDependency.unpack(compressed_file_path, destination, internal_path, compression_type) + + # the file should be unpacked if file_list_counter >= internal_path_level + for file_list_counter in range(1, number_of_layers): + if internal_path_level <= file_list_counter: + self.assertTrue(os.path.isfile(files[file_list_counter])) + else: + self.assertFalse(os.path.isfile(files[file_list_counter])) + + clean_workspace() + prep_workspace() + + # Test that zipfile uses / internally and not os.sep. + # This is not exactly a test of WebDependency, more an assertion of an assumption + # the code is making concerning the functionality of zipfile. + def test_zip_uses_linux_path_sep(self): + first_level_dir_name = "first_dir" + second_level_dir_name = "second_dir" + first_level_path = os.path.join(test_dir, first_level_dir_name) + second_level_path = os.path.join(first_level_path, second_level_dir_name) + os.makedirs(second_level_path) + + compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.zip") + internal_path = os.path.join(first_level_dir_name, second_level_dir_name) + internal_path_win = "\\".join((first_level_dir_name, second_level_dir_name)) + + test_file = os.path.join(test_dir, internal_path, "bad_json_file.json") + + with open(test_file, "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + with zipfile.ZipFile(compressed_file_path, 'w') as _zip: + _zip.write(test_file, arcname=test_file.split(test_dir)[1]) + + with zipfile.ZipFile(compressed_file_path, 'r') as _zip: + namelist = _zip.namelist() + + self.assertTrue(len(namelist) == 1) + self.assertFalse(internal_path_win in namelist[0]) + self.assertTrue(WebDependency.linuxize_path(internal_path_win) in namelist[0]) + + # Test that tarfile uses / internally and not os.sep. + # This is not exactly a test of WebDependency, more an assertion of an assumption + # the code is making concerning the functionality of tarfile. 
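The point both separator tests make can be shown with the standard library alone: archive member names use forward slashes regardless of `os.sep`, which is why `WebDependency.linuxize_path` converts Windows-style paths before comparing against member names. A standalone sketch:

```python
# Standalone sketch: zip member names use '/', never the platform os.sep.
import os
import tempfile
import zipfile

scratch = tempfile.mkdtemp()
nested = os.path.join(scratch, "first_dir", "second_dir")
os.makedirs(nested)
member = os.path.join(nested, "file.txt")
with open(member, "w") as f:
    f.write("payload")

archive = os.path.join(scratch, "demo.zip")
with zipfile.ZipFile(archive, "w") as z:
    z.write(member, arcname=os.path.relpath(member, scratch))

with zipfile.ZipFile(archive) as z:
    name = z.namelist()[0]
assert name == "first_dir/second_dir/file.txt"   # '/' even on Windows
```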
+ def test_tar_uses_linux_path_sep(self): + first_level_dir_name = "first_dir" + second_level_dir_name = "second_dir" + first_level_path = os.path.join(test_dir, first_level_dir_name) + second_level_path = os.path.join(first_level_path, second_level_dir_name) + os.makedirs(second_level_path) + + compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.zip") + internal_path = os.path.join(first_level_dir_name, second_level_dir_name) + internal_path_win = "\\".join((first_level_dir_name, second_level_dir_name)) + + test_file = os.path.join(test_dir, internal_path, "bad_json_file.json") + + with open(test_file, "w+") as ext_dep_file: + ext_dep_file.write(bad_json_file) + + with tarfile.open(compressed_file_path, "w:gz") as _tar: + _tar.add(test_file, arcname=test_file.split(test_dir)[1]) + + with tarfile.open(compressed_file_path, "r:*") as _tar: + namelist = _tar.getnames() + + self.assertTrue(len(namelist) == 1) + self.assertFalse(internal_path_win in namelist[0]) + self.assertTrue(WebDependency.linuxize_path(internal_path_win) in namelist[0]) + + +if __name__ == '__main__': + unittest.main() diff --git a/readme.md b/readme.md index a2029a46..f85ae299 100644 --- a/readme.md +++ b/readme.md @@ -2,17 +2,17 @@ This is a Tianocore maintained project consisting of command line and other python tools and extensions for building and maintaining an Edk2 based UEFI firmware code tree. This package's intent is to provide tools, scripts, and a plugin based environment for use within the tools and scripts. This environment has command line interfaces to support building a product, building CI, running tests, and downloading dependencies. This environment also provides the building blocks for developers to write their own tools to launch in the environment and leverage the capabilities provided by the environment. The unique capabilities provided help support building products with multiple repositories and having each repository contribute/plugin to the build process in a scalable way. The environment will scan the files in the code tree (multiple repos) and discover plugins, dependencies, path adjustments, environment variable settings, etc. This provides easy methods for common repositories to share build tools/steps. -Inclusion of this package is best managed using Pip/Pypi. This package makes use of edk2-pytool-library. +Inclusion of this package is best managed using Pip/Pypi. This package makes use of edk2-pytool-library. This is a supplemental package and is not required to be used for edk2 builds. ## Content -The package contains cli tools and a basic common environment for running tools. This common environment supports extensions, subclassing, and plugin to allow great flexibility for building and maintaining a code tree. +The package contains cli tools and a basic common environment for running tools. This common environment supports extensions, subclassing, and plugin to allow great flexibility for building and maintaining a code tree. Examples: -* CI build support with plugin +* CI build support with plugin * Binary dependency resolution (nuget, urls, git repos) * Loggers (markdown, file, memory, and colored console) * Plugins (pre/post build, function injection) @@ -33,6 +33,7 @@ NOTE: It is strongly recommended that you use python virtual environments. Virt * To install run `pip install --upgrade edk2-pytool-extensions` * To use in your python code + ```python from edk2toolext. import ``` @@ -43,7 +44,7 @@ NOTE: It is strongly recommended that you use python virtual environments. 
Virt Initial release ported from Project Mu. For history and documentation prior to this see the original Project Mu project -https://github.com/microsoft/mu_pip_python_environment and https://github.com/microsoft/mu_pip_build + and ## Current Status @@ -54,7 +55,7 @@ https://github.com/microsoft/mu_pip_python_environment and https://github.com/mi ## Contribution Process -This project welcomes all types of contributions. +This project welcomes all types of contributions. For issues, bugs, and questions it is best to open a [github issue](https://github.com/tianocore/edk2-pytool-extensions/issues). ### Code Contributions @@ -70,7 +71,7 @@ For best success please follow the below process. 6. Contributor submits PR to master branch of tianocore/edk2-pytool-extensions 1. PR reviewers will provide feedback on change. If any modifications are required, contributor will make changes and push updates. 2. PR automation will run and validate tests pass - 3. If all comments resolved, maintainers approved, and tests pass the PR will be squash merged and closed by the maintainers. + 3. If all comments resolved, maintainers approved, and tests pass the PR will be squash merged and closed by the maintainers. ## Maintainers diff --git a/requirements.publisher.txt b/requirements.publisher.txt new file mode 100644 index 00000000..8f3da39f --- /dev/null +++ b/requirements.publisher.txt @@ -0,0 +1,3 @@ +setuptools +wheel +twine diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..5ca1f856 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +pytest +pytest-html +pytest-cov +flake8 diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..cc06503c --- /dev/null +++ b/setup.py @@ -0,0 +1,83 @@ +## @file setup.py +# This contains setup info for edk2-pytool-extensions pip module +# +## +# Copyright (c) Microsoft Corporation +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +import setuptools +from setuptools.command.sdist import sdist +from setuptools.command.install import install +from setuptools.command.develop import develop +from edk2toolext.bin.nuget import DownloadNuget + +with open("readme.md", "r") as fh: + long_description = fh.read() + + +class PostSdistCommand(sdist): + """Post-sdist.""" + def run(self): + # we need to download nuget so throw the exception if we don't get it + DownloadNuget() + sdist.run(self) + + +class PostInstallCommand(install): + """Post-install.""" + def run(self): + try: + DownloadNuget() + except: + pass + install.run(self) + + +class PostDevCommand(develop): + """Post-develop.""" + def run(self): + try: + DownloadNuget() + except: + pass + develop.run(self) + + +setuptools.setup( + name="edk2-pytool-extensions", + author="Tianocore Edk2-PyTool-Extensions team", + author_email="sean.brogan@microsoft.com", + description="Python tools supporting UEFI EDK2 firmware development", + long_description=long_description, + url="https://github.com/tianocore/edk2-pytool-extensions", + license='BSD-2-Clause-Patent', + packages=setuptools.find_packages(), + use_scm_version=True, + setup_requires=['setuptools_scm'], + cmdclass={ + 'sdist': PostSdistCommand, + 'install': PostInstallCommand, + 'develop': PostDevCommand, + }, + include_package_data=True, + entry_points={ + 'console_scripts': ['stuart_setup=edk2toolext.invocables.edk2_setup:main', + 'stuart_update=edk2toolext.invocables.edk2_update:main', + 'stuart_build=edk2toolext.invocables.edk2_platform_build:main', + 'stuart_ci_build=edk2toolext.invocables.edk2_ci_build:main', + 
'stuart_ci_setup=edk2toolext.invocables.edk2_ci_setup:main', + 'omnicache=edk2toolext.omnicache:main'] + }, + install_requires=[ + 'pyyaml', + 'edk2-pytool-library>=0.9.1' + ], + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Development Status :: 4 - Beta", + "Intended Audience :: Developers" + ] +)
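A note on the entry points: each `console_scripts` line maps a `stuart_*` command to a module-level `main` function, so the same invocables are reachable from Python as well as the shell. For example, assuming the package is installed:

```python
# The console_scripts above map shell commands to module-level main functions,
# so running `stuart_update` from the shell is equivalent to this call.
from edk2toolext.invocables import edk2_update

edk2_update.main()
```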